-rw-r--r--build.xml90
-rw-r--r--classpath.SAMPLE12
-rw-r--r--lib/ant/ant-contrib.jar.desired.sha12
-rw-r--r--lib/ant/ant-dotnet-1.0.jar.desired.sha12
-rw-r--r--lib/ant/ant.jar.desired.sha12
-rw-r--r--lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha12
-rw-r--r--lib/ant/vizant.jar.desired.sha12
-rw-r--r--lib/fjbg.jar.desired.sha12
-rw-r--r--lib/forkjoin.jar.desired.sha12
-rw-r--r--lib/jline.jar.desired.sha12
-rw-r--r--lib/msil.jar.desired.sha12
-rw-r--r--lib/scala-compiler-src.jar.desired.sha11
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--lib/scala-reflect-src.jar.desired.sha11
-rw-r--r--lib/scala-reflect.jar.desired.sha12
-rw-r--r--src/compiler/scala/reflect/makro/runtime/ExprUtils.scala2
-rw-r--r--src/compiler/scala/reflect/makro/runtime/Mirrors.scala9
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala29
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala6
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala10
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala15
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala2
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala9
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ScalaDoc.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala17
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala152
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala165
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala668
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala487
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala10
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala82
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala16
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala34
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/Page.scala34
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala12
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala169
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala53
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala66
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala499
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala227
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.pngbin0 -> 3910 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css135
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js324
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.pngbin0 -> 3903 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js10
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css18
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js18
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.pngbin0 -> 3882 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala131
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala692
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala311
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/TreeFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/Body.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala6
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala146
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala144
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala262
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala258
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala8
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala14
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala9
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala9
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala1
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala23
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala80
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/PostErasure.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala25
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala96
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Modes.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala116
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala780
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala182
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala81
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala6
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala147
-rw-r--r--src/compiler/scala/tools/reflect/ReflectGlobal.scala (renamed from src/compiler/scala/tools/nsc/ReflectGlobal.scala)9
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala (renamed from src/compiler/scala/tools/nsc/ReflectMain.scala)12
-rw-r--r--src/compiler/scala/tools/reflect/ReflectSetup.scala (renamed from src/compiler/scala/tools/nsc/ReflectSetup.scala)7
-rw-r--r--src/compiler/scala/tools/reflect/StdTags.scala9
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala3
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala15
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala40
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala15
-rw-r--r--src/eclipse/README.md52
-rw-r--r--src/eclipse/asm/.classpath6
-rw-r--r--src/eclipse/asm/.project29
-rw-r--r--src/eclipse/fjbg/.classpath7
-rw-r--r--src/eclipse/fjbg/.project (renamed from project.SAMPLE)14
-rw-r--r--src/eclipse/reflect/.classpath8
-rw-r--r--src/eclipse/reflect/.project30
-rw-r--r--src/eclipse/scala-compiler/.classpath14
-rw-r--r--src/eclipse/scala-compiler/.project35
-rw-r--r--src/eclipse/scala-library/.classpath7
-rw-r--r--src/eclipse/scala-library/.project30
-rw-r--r--src/library/scala/Cloneable.scala14
-rw-r--r--src/library/scala/Predef.scala8
-rw-r--r--src/library/scala/StringContext.scala38
-rw-r--r--src/library/scala/annotation/cloneable.scala (renamed from src/library/scala/cloneable.scala)5
-rw-r--r--src/library/scala/collection/DefaultMap.scala4
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala9
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala4
-rw-r--r--src/library/scala/collection/IterableViewLike.scala2
-rw-r--r--src/library/scala/collection/Map.scala1
-rw-r--r--src/library/scala/collection/MapLike.scala10
-rw-r--r--src/library/scala/collection/SeqViewLike.scala2
-rw-r--r--src/library/scala/collection/SortedMap.scala19
-rw-r--r--src/library/scala/collection/SortedMapLike.scala23
-rw-r--r--src/library/scala/collection/TraversableLike.scala2
-rw-r--r--src/library/scala/collection/TraversableOnce.scala12
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala2
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala3
-rw-r--r--src/library/scala/collection/generic/IsTraversableLike.scala (renamed from src/library/scala/collection/generic/FromRepr.scala)36
-rw-r--r--src/library/scala/collection/generic/IsTraversableOnce.scala62
-rw-r--r--src/library/scala/collection/generic/package.scala8
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala22
-rw-r--r--src/library/scala/collection/immutable/RedBlackTree.scala33
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala31
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala4
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala4
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala2
-rw-r--r--src/library/scala/collection/mutable/Buffer.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala2
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala5
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/Queue.scala1
-rw-r--r--src/library/scala/collection/mutable/Stack.scala1
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala39
-rw-r--r--src/library/scala/collection/parallel/TaskSupport.scala63
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala1
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala3
-rw-r--r--src/library/scala/concurrent/BlockContext.scala81
-rw-r--r--src/library/scala/concurrent/ConcurrentPackageObject.scala34
-rw-r--r--src/library/scala/concurrent/DelayedLazyVal.scala5
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala74
-rw-r--r--src/library/scala/concurrent/Future.scala32
-rw-r--r--src/library/scala/concurrent/Promise.scala9
-rw-r--r--src/library/scala/concurrent/SyncVar.scala20
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala62
-rw-r--r--src/library/scala/concurrent/impl/Future.scala103
-rw-r--r--src/library/scala/concurrent/impl/NonFatal.scala37
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala34
-rw-r--r--src/library/scala/math/BigInt.scala11
-rw-r--r--src/library/scala/package.scala13
-rw-r--r--src/library/scala/reflect/ClassManifest.scala91
-rw-r--r--src/library/scala/reflect/ClassTag.scala17
-rw-r--r--src/library/scala/reflect/Manifest.scala42
-rw-r--r--src/library/scala/reflect/NoManifest.scala2
-rw-r--r--src/library/scala/reflect/OptManifest.scala2
-rw-r--r--src/library/scala/reflect/base/Base.scala20
-rw-r--r--src/library/scala/reflect/base/StandardDefinitions.scala1
-rw-r--r--src/library/scala/reflect/base/Symbols.scala8
-rw-r--r--src/library/scala/reflect/base/TagInterop.scala11
-rw-r--r--src/library/scala/reflect/base/TypeTags.scala12
-rw-r--r--src/library/scala/reflect/base/Types.scala5
-rw-r--r--src/library/scala/reflect/compat.scala33
-rw-r--r--src/library/scala/reflect/makro/internal/package.scala2
-rw-r--r--src/library/scala/reflect/package.scala43
-rw-r--r--src/library/scala/util/Either.scala (renamed from src/library/scala/Either.scala)2
-rw-r--r--src/library/scala/util/Try.scala202
-rw-r--r--src/library/scala/util/control/NonFatal.scala45
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala1
-rw-r--r--src/partest/scala/tools/partest/ScaladocModelTest.scala4
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala39
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala26
-rw-r--r--src/reflect/scala/reflect/api/Types.scala4
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala24
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala82
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala93
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala1
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala75
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala17
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala5
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala14
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala430
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala11
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala240
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala13
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala51
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala95
-rw-r--r--src/reflect/scala/reflect/makro/Universe.scala24
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala22
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala15
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala2
-rw-r--r--test/disabled/run/syncchannel.check (renamed from test/files/run/syncchannel.check)0
-rw-r--r--test/disabled/run/syncchannel.scala (renamed from test/files/run/syncchannel.scala)0
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.check2
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.check4
-rw-r--r--test/files/buildmanager/t2657/t2657.check4
-rw-r--r--test/files/buildmanager/t2789/t2789.check4
-rw-r--r--test/files/codelib/code.jar.desired.sha12
-rw-r--r--test/files/continuations-neg/ts-1681-nontail-return.check4
-rw-r--r--test/files/continuations-neg/ts-1681-nontail-return.scala18
-rw-r--r--test/files/continuations-run/ts-1681-2.check5
-rw-r--r--test/files/continuations-run/ts-1681-2.scala44
-rw-r--r--test/files/continuations-run/ts-1681-3.check4
-rw-r--r--test/files/continuations-run/ts-1681-3.scala27
-rw-r--r--test/files/continuations-run/ts-1681.check3
-rw-r--r--test/files/continuations-run/ts-1681.scala29
-rw-r--r--test/files/jvm/actmig-loop-react.scala188
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala124
-rw-r--r--test/files/jvm/future-spec/PromiseTests.scala15
-rw-r--r--test/files/jvm/manifests-new.check116
-rw-r--r--test/files/jvm/non-fatal-tests.scala47
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala315
-rw-r--r--test/files/jvm/try-type-tests.scala250
-rw-r--r--test/files/lib/annotations.jar.desired.sha12
-rw-r--r--test/files/lib/enums.jar.desired.sha12
-rw-r--r--test/files/lib/genericNest.jar.desired.sha12
-rw-r--r--test/files/lib/methvsfield.jar.desired.sha12
-rw-r--r--test/files/lib/nest.jar.desired.sha12
-rw-r--r--test/files/lib/scalacheck.jar.desired.sha12
-rw-r--r--test/files/neg/catch-all.check12
-rw-r--r--test/files/neg/catch-all.scala30
-rw-r--r--test/files/neg/classmanifests_new_deprecations.check61
-rw-r--r--test/files/neg/classmanifests_new_deprecations.flags1
-rw-r--r--test/files/neg/classmanifests_new_deprecations.scala37
-rw-r--r--test/files/neg/exhausting.check2
-rw-r--r--test/files/neg/interop_abstypetags_arenot_classmanifests.check2
-rw-r--r--test/files/neg/interop_typetags_arenot_classmanifests.check2
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree.check4
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala2
-rw-r--r--test/files/neg/stringinterpolation_macro-neg.check70
-rw-r--r--test/files/neg/stringinterpolation_macro-neg.scala31
-rw-r--r--test/files/neg/switch.check5
-rw-r--r--test/files/neg/switch.scala4
-rw-r--r--test/files/neg/t2442.check9
-rw-r--r--test/files/neg/t2442.flags1
-rw-r--r--test/files/neg/t2442/MyEnum.java3
-rw-r--r--test/files/neg/t2442/MySecondEnum.java6
-rw-r--r--test/files/neg/t2442/t2442.scala15
-rw-r--r--test/files/neg/t2796.check4
-rw-r--r--test/files/neg/t2796.flags1
-rw-r--r--test/files/neg/t2796.scala28
-rw-r--r--test/files/neg/t3692-old.check11
-rw-r--r--test/files/neg/t3836.check13
-rw-r--r--test/files/neg/t3836.scala28
-rw-r--r--test/files/neg/t4541.check4
-rw-r--r--test/files/neg/t4541b.check4
-rw-r--r--test/files/neg/t4691_exhaust_extractor.check13
-rw-r--r--test/files/neg/t4691_exhaust_extractor.flags1
-rw-r--r--test/files/neg/t4691_exhaust_extractor.scala33
-rw-r--r--test/files/neg/t4842.check7
-rw-r--r--test/files/neg/t4842.scala (renamed from test/files/neg/t4842b.scala)4
-rw-r--r--test/files/neg/t4842a.check4
-rw-r--r--test/files/neg/t4842a.scala3
-rw-r--r--test/files/neg/t4842b.check4
-rw-r--r--test/files/neg/t5148.check2
-rw-r--r--test/files/neg/t5148.scala4
-rw-r--r--test/files/neg/t5504.check4
-rw-r--r--test/files/neg/t5589neg.check4
-rw-r--r--test/files/neg/t5761.check16
-rw-r--r--test/files/neg/t5761.scala16
-rw-r--r--test/files/neg/t5830.check4
-rw-r--r--test/files/neg/t5830.flags1
-rw-r--r--test/files/neg/t5830.scala9
-rw-r--r--test/files/neg/t5839.check6
-rw-r--r--test/files/neg/t5839.scala7
-rw-r--r--test/files/neg/t5969.check7
-rw-r--r--test/files/neg/t5969.scala11
-rw-r--r--test/files/neg/t6013.check7
-rw-r--r--test/files/neg/t6013/Abstract.java7
-rw-r--r--test/files/neg/t6013/Base.java10
-rw-r--r--test/files/neg/t6013/DerivedScala.scala7
-rw-r--r--test/files/neg/t6042.check4
-rw-r--r--test/files/neg/t6042.scala8
-rw-r--r--test/files/neg/t649.check2
-rw-r--r--test/files/pos/exponential-spec.scala47
-rw-r--r--test/files/pos/rangepos-anonapply.flags1
-rw-r--r--test/files/pos/rangepos-anonapply.scala9
-rw-r--r--test/files/pos/t3836.scala14
-rw-r--r--test/files/pos/t4176b.scala5
-rw-r--r--test/files/pos/t5504/s_1.scala (renamed from test/files/neg/t5504/s_1.scala)0
-rw-r--r--test/files/pos/t5504/s_2.scala (renamed from test/files/neg/t5504/s_2.scala)0
-rw-r--r--test/files/pos/t5846.scala10
-rw-r--r--test/files/pos/t5899.flags1
-rw-r--r--test/files/pos/t5899.scala19
-rw-r--r--test/files/pos/t5932.flags1
-rw-r--r--test/files/pos/t5932.scala15
-rw-r--r--test/files/pos/t5953.scala2
-rw-r--r--test/files/pos/t5967.scala6
-rw-r--r--test/files/pos/t5968.flags1
-rw-r--r--test/files/pos/t5968.scala8
-rw-r--r--test/files/pos/t6008.flags1
-rw-r--r--test/files/pos/t6008.scala12
-rw-r--r--test/files/pos/t6022.flags1
-rw-r--r--test/files/pos/t6022.scala7
-rw-r--r--test/files/pos/t6033.scala5
-rw-r--r--test/files/presentation/hyperlinks.flags2
-rw-r--r--test/files/presentation/hyperlinks/Runner.scala4
-rw-r--r--test/files/presentation/ide-bug-1000469/Runner.scala4
-rw-r--r--test/files/presentation/ide-bug-1000531.check2
-rw-r--r--test/files/presentation/ide-bug-1000531.flags18
-rw-r--r--test/files/presentation/ide-t1000976.check1
-rw-r--r--test/files/presentation/ide-t1000976.flags1
-rw-r--r--test/files/presentation/ide-t1000976/Test.scala30
-rw-r--r--test/files/presentation/ide-t1000976/src/a/A.scala7
-rw-r--r--test/files/presentation/ide-t1000976/src/b/B.scala7
-rw-r--r--test/files/presentation/ide-t1000976/src/c/C.scala3
-rw-r--r--test/files/presentation/ide-t1000976/src/d/D.scala7
-rw-r--r--test/files/presentation/memory-leaks/MemoryLeaksTest.scala5
-rw-r--r--test/files/presentation/t5708/Test.scala4
-rw-r--r--test/files/presentation/visibility/Test.scala4
-rw-r--r--test/files/res/t5489.check4
-rw-r--r--test/files/res/t5489.res2
-rw-r--r--test/files/res/t5489/t5489.scala14
-rw-r--r--test/files/run/abstypetags_core.check58
-rw-r--r--test/files/run/abstypetags_core.scala6
-rw-r--r--test/files/run/classmanifests_new_alias.check2
-rw-r--r--test/files/run/classmanifests_new_alias.scala5
-rw-r--r--test/files/run/classmanifests_new_core.check2
-rw-r--r--test/files/run/classmanifests_new_core.scala4
-rw-r--r--test/files/run/classtags_core.check6
-rw-r--r--test/files/run/classtags_core.scala6
-rw-r--r--test/files/run/collection-conversions.check22
-rw-r--r--test/files/run/collection-conversions.scala20
-rw-r--r--test/files/run/enrich-gentraversable.check4
-rw-r--r--test/files/run/enrich-gentraversable.scala81
-rw-r--r--test/files/run/inline-ex-handlers.check58
-rw-r--r--test/files/run/interop_classtags_are_classmanifests.check6
-rw-r--r--test/files/run/macro-reify-splice-splice/Macros_1.scala1
-rw-r--r--test/files/run/newTags.check8
-rw-r--r--test/files/run/patmat-finally.scala25
-rw-r--r--test/files/run/reflect-resolveoverload-bynameparam.scala32
-rw-r--r--test/files/run/reflect-resolveoverload-expected.scala43
-rw-r--r--test/files/run/reflect-resolveoverload-invalid.scala43
-rw-r--r--test/files/run/reflect-resolveoverload-named.scala26
-rw-r--r--test/files/run/reflect-resolveoverload-targs.scala29
-rw-r--r--test/files/run/reflect-resolveoverload-tparm-substitute.scala77
-rw-r--r--test/files/run/reflect-resolveoverload-variadic.scala27
-rw-r--r--test/files/run/reflect-resolveoverload1.scala (renamed from test/files/run/reflect-overload.scala)4
-rw-r--r--test/files/run/reflect-resolveoverload2.scala40
-rw-r--r--test/files/run/reflection-equality.check53
-rw-r--r--test/files/run/reflection-equality.scala22
-rw-r--r--test/files/run/reflection-magicsymbols.check22
-rw-r--r--test/files/run/reflection-magicsymbols.scala11
-rw-r--r--test/files/run/reflection-repl.check2
-rw-r--r--test/files/run/reify-aliases.check1
-rw-r--r--test/files/run/reify-aliases.scala5
-rw-r--r--test/files/run/reify_ann1a.check14
-rw-r--r--test/files/run/reify_ann1b.check4
-rw-r--r--test/files/run/reify_ann2a.check18
-rw-r--r--test/files/run/reify_ann3.check4
-rw-r--r--test/files/run/reify_ann4.check4
-rw-r--r--test/files/run/reify_ann5.check4
-rw-r--r--test/files/run/reify_classfileann_a.check4
-rw-r--r--test/files/run/reify_classfileann_b.check4
-rw-r--r--test/files/run/reify_magicsymbols.check13
-rw-r--r--test/files/run/reify_magicsymbols.scala17
-rw-r--r--test/files/run/reify_newimpl_09.check1
-rw-r--r--test/files/run/reify_newimpl_10.check1
-rw-r--r--test/files/run/reify_newimpl_16.check1
-rw-r--r--test/files/run/reify_newimpl_17.check2
-rw-r--r--test/files/run/reify_newimpl_26.check2
-rw-r--r--test/files/run/reify_newimpl_28.check1
-rw-r--r--test/files/run/reify_newimpl_32.check1
-rw-r--r--test/files/run/reify_newimpl_34.check1
-rw-r--r--test/files/run/reify_printf.scala4
-rw-r--r--test/files/run/showraw_mods.check2
-rw-r--r--test/files/run/showraw_tree.check4
-rw-r--r--test/files/run/showraw_tree_ids.check4
-rw-r--r--test/files/run/showraw_tree_ids.scala5
-rw-r--r--test/files/run/showraw_tree_kinds.check4
-rw-r--r--test/files/run/showraw_tree_types_ids.check20
-rw-r--r--test/files/run/showraw_tree_types_ids.scala5
-rw-r--r--test/files/run/showraw_tree_types_typed.check16
-rw-r--r--test/files/run/showraw_tree_types_untyped.check4
-rw-r--r--test/files/run/showraw_tree_ultimate.check20
-rw-r--r--test/files/run/showraw_tree_ultimate.scala5
-rw-r--r--test/files/run/stringinterpolation_macro-run.check62
-rw-r--r--test/files/run/stringinterpolation_macro-run.scala103
-rw-r--r--test/files/run/t3326.check8
-rw-r--r--test/files/run/t3326.scala74
-rw-r--r--test/files/run/t3613.scala2
-rw-r--r--test/files/run/t4027.check12
-rw-r--r--test/files/run/t4027.scala27
-rw-r--r--test/files/run/t4935.check1
-rw-r--r--test/files/run/t4935.flags1
-rw-r--r--test/files/run/t4935.scala9
-rw-r--r--test/files/run/t5009.check1
-rw-r--r--test/files/run/t5009.scala7
-rw-r--r--test/files/run/t5224.check2
-rw-r--r--test/files/run/t5271_3.check2
-rw-r--r--test/files/run/t5704.check2
-rw-r--r--test/files/run/t5830.check6
-rw-r--r--test/files/run/t5830.flags1
-rw-r--r--test/files/run/t5830.scala56
-rw-r--r--test/files/run/t5907.check31
-rw-r--r--test/files/run/t5907.scala118
-rw-r--r--test/files/run/t5914.check1
-rw-r--r--test/files/run/t5914.scala23
-rw-r--r--test/files/run/t5966.check3
-rw-r--r--test/files/run/t5966.scala9
-rw-r--r--test/files/run/t5971.check4
-rw-r--r--test/files/run/t5971.scala23
-rw-r--r--test/files/run/t5974.check1
-rw-r--r--test/files/run/t5974.scala10
-rw-r--r--test/files/run/t5986.check15
-rw-r--r--test/files/run/t5986.scala36
-rw-r--r--test/files/run/test-cpp.check138
-rw-r--r--test/files/run/typetags_core.check (renamed from test/files/run/concretetypetags_core.check)6
-rw-r--r--test/files/run/typetags_core.scala (renamed from test/files/run/concretetypetags_core.scala)6
-rw-r--r--test/files/run/typetags_multi.check (renamed from test/files/run/concretetypetags_multi.check)0
-rw-r--r--test/files/run/typetags_multi.scala (renamed from test/files/run/concretetypetags_multi.scala)0
-rw-r--r--test/files/scalacheck/redblacktree.scala4
-rw-r--r--test/files/speclib/instrumented.jar.desired.sha12
-rw-r--r--test/pending/run/reify_implicits-new.check (renamed from test/files/run/reify_implicits-new.check)0
-rw-r--r--test/pending/run/reify_implicits-new.scala (renamed from test/files/run/reify_implicits-new.scala)0
-rw-r--r--test/pending/run/reify_implicits-old.check (renamed from test/files/run/reify_implicits-old.check)0
-rw-r--r--test/pending/run/reify_implicits-old.scala (renamed from test/files/run/reify_implicits-old.scala)0
-rw-r--r--test/pending/run/reify_newimpl_09.scala (renamed from test/files/run/reify_newimpl_09.scala)0
-rw-r--r--test/pending/run/reify_newimpl_09a.scala13
-rw-r--r--test/pending/run/reify_newimpl_09b.scala14
-rw-r--r--test/pending/run/reify_newimpl_09c.scala20
-rw-r--r--test/pending/run/reify_newimpl_10.scala (renamed from test/files/run/reify_newimpl_10.scala)0
-rw-r--r--test/pending/run/reify_newimpl_16.scala (renamed from test/files/run/reify_newimpl_16.scala)0
-rw-r--r--test/pending/run/reify_newimpl_17.scala (renamed from test/files/run/reify_newimpl_17.scala)0
-rw-r--r--test/pending/run/reify_newimpl_28.scala (renamed from test/files/run/reify_newimpl_28.scala)0
-rw-r--r--test/pending/run/reify_newimpl_32.scala (renamed from test/files/run/reify_newimpl_32.scala)0
-rw-r--r--test/pending/run/reify_newimpl_34.scala (renamed from test/files/run/reify_newimpl_34.scala)0
-rw-r--r--test/scaladoc/resources/doc-root/Any.scala114
-rw-r--r--test/scaladoc/resources/doc-root/AnyRef.scala131
-rw-r--r--test/scaladoc/resources/doc-root/Nothing.scala23
-rw-r--r--test/scaladoc/resources/doc-root/Null.scala17
-rw-r--r--test/scaladoc/resources/implicits-ambiguating-res.scala72
-rw-r--r--test/scaladoc/resources/implicits-base-res.scala16
-rw-r--r--test/scaladoc/resources/implicits-elimination-res.scala6
-rw-r--r--test/scaladoc/run/SI-5373.scala4
-rw-r--r--test/scaladoc/run/SI-5780.check (renamed from test/scaladoc/run/implicits-elimination.check)0
-rw-r--r--test/scaladoc/run/SI-5780.scala25
-rw-r--r--test/scaladoc/run/diagrams-base.check1
-rw-r--r--test/scaladoc/run/diagrams-base.scala73
-rw-r--r--test/scaladoc/run/diagrams-determinism.check1
-rw-r--r--test/scaladoc/run/diagrams-determinism.scala67
-rw-r--r--test/scaladoc/run/diagrams-filtering.check1
-rw-r--r--test/scaladoc/run/diagrams-filtering.scala93
-rw-r--r--test/scaladoc/run/diagrams-inherited-nodes.check1
-rw-r--r--test/scaladoc/run/diagrams-inherited-nodes.scala69
-rw-r--r--test/scaladoc/run/implicits-ambiguating.check1
-rw-r--r--test/scaladoc/run/implicits-ambiguating.scala114
-rw-r--r--test/scaladoc/run/implicits-base.scala39
-rw-r--r--test/scaladoc/run/implicits-elimination.scala23
-rw-r--r--test/scaladoc/run/implicits-shadowing.scala35
-rw-r--r--test/scaladoc/run/implicits-var-exp.check1
-rw-r--r--test/scaladoc/run/implicits-var-exp.scala43
-rw-r--r--test/scaladoc/run/package-object.check3
-rw-r--r--test/scaladoc/run/package-object.scala3
-rw-r--r--test/scaladoc/scalacheck/CommentFactoryTest.scala6
-rwxr-xr-xtools/binary-repo-lib.sh20
-rwxr-xr-xtools/new-starr6
-rwxr-xr-xtools/scaladoc-compare50
474 files changed, 11185 insertions, 4058 deletions
diff --git a/build.xml b/build.xml
index 33ebbfe377..0e0bda934a 100644
--- a/build.xml
+++ b/build.xml
@@ -116,6 +116,12 @@ END-USER TARGETS
<antcall target="test.done"/>
</target>
+ <target name="replacestarrwin-opt"
+ description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
+ <antcall target="replacestarrwin">
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
<target name="replacelocker"
description="Replaces the Locker compiler and library by fresh ones built from current sources.">
<antcall target="palo.clean"/>
@@ -1144,7 +1150,7 @@ QUICK BUILD (QUICK)
</javac>
<scalacfork
destdir="${build-quick.dir}/classes/compiler"
- compilerpathref="starr.classpath"
+ compilerpathref="locker.classpath"
params="${scalac.args.all}"
srcdir="${src.dir}/msil"
jvmargs="${scalacfork.jvmargs}">
@@ -1401,6 +1407,7 @@ QUICK BUILD (QUICK)
<path refid="forkjoin.classpath"/>
<path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
+ <path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</path>
<taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
@@ -1816,7 +1823,7 @@ BOOTSTRAPPING BUILD (STRAP)
</javac>
<scalacfork
destdir="${build-strap.dir}/classes/compiler"
- compilerpathref="starr.classpath"
+ compilerpathref="pack.classpath"
params="${scalac.args.all}"
srcdir="${src.dir}/msil"
jvmargs="${scalacfork.jvmargs}">
@@ -2064,6 +2071,9 @@ DOCUMENTATION
<!-- Compute the URL and show it -->
<property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
<echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
+
+ <!-- Unless set with -Dscaladoc.raw.output, it won't be activated -->
+ <property name="scaladoc.raw.output" value="no"/>
</target>
<target name="docs.pre-lib" depends="docs.start">
@@ -2091,7 +2101,7 @@ DOCUMENTATION
classpathref="pack.classpath"
addparams="${scalac.args.all}"
docRootContent="${src.dir}/library/rootdoc.txt"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<src>
<files includes="${src.dir}/actors-migration"/>
<files includes="${src.dir}/actors"/>
@@ -2175,7 +2185,7 @@ DOCUMENTATION
srcdir="${src.dir}/compiler"
docRootContent="${src.dir}/compiler/rootdoc.txt"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/compiler.complete" verbose="no"/>
@@ -2197,7 +2207,7 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/jline/src/main/java"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
<include name="**/*.java"/>
</scaladoc>
@@ -2221,7 +2231,7 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/scalap"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/scalap.complete" verbose="no"/>
@@ -2243,7 +2253,7 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/partest"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/partest.complete" verbose="no"/>
@@ -2265,7 +2275,7 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/continuations/plugin"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/continuations-plugin.complete" verbose="no"/>
@@ -2287,7 +2297,7 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/actors-migration"
addparams="${scalac.args.all}"
- implicits="on" diagrams="on">
+ implicits="on" diagrams="on" rawOutput="${scaladoc.raw.output}">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/actors-migration.complete" verbose="no"/>
@@ -2561,53 +2571,39 @@ STABLE REFERENCE (STARR)
<delete file="${basedir}/lib/scala-reflect.jar"/>
<delete file="${basedir}/lib/scala-compiler.jar"/>
<delete file="${basedir}/lib/scala-library-src.jar"/>
+ <delete file="${basedir}/lib/scala-reflect-src.jar"/>
+ <delete file="${basedir}/lib/scala-compiler-src.jar"/>
</target>
- <target name="starr.lib" depends="starr.start">
- <jar destfile="${basedir}/lib/scala-library.jar">
- <fileset dir="${basedir}/build/quick/classes/library"/>
- </jar>
- </target>
-
- <target name="starr.reflect" depends="starr.lib">
- <jar destfile="${basedir}/lib/scala-reflect.jar">
- <fileset dir="${basedir}/build/quick/classes/reflect"/>
- </jar>
- </target>
-
- <target name="starr.comp" depends="starr.reflect">
- <jar destfile="${basedir}/lib/scala-compiler.jar">
- <fileset dir="${basedir}/build/quick/classes/compiler"/>
- </jar>
+ <target name="starr.jars" depends="starr.start">
+ <copy toDir="${basedir}/lib/" overwrite="yes">
+ <fileset dir="${build-pack.dir}/lib">
+ <include name="scala-library.jar"/>
+ <include name="scala-reflect.jar"/>
+ <include name="scala-compiler.jar"/>
+ </fileset>
+ </copy>
</target>
- <target name="starr.src" depends="starr.comp">
+ <target name="starr.src" depends="starr.jars">
<jar destfile="${basedir}/lib/scala-library-src.jar">
<fileset dir="${basedir}/src/library"/>
<fileset dir="${basedir}/src/swing"/>
<fileset dir="${basedir}/src/actors"/>
+ <fileset dir="${basedir}/src/forkjoin"/>
+ </jar>
+ <jar destfile="${basedir}/lib/scala-reflect-src.jar">
+ <fileset dir="${basedir}/src/reflect"/>
+ </jar>
+ <jar destfile="${basedir}/lib/scala-compiler-src.jar">
+ <fileset dir="${basedir}/src/compiler"/>
+ <fileset dir="${basedir}/src/asm"/>
+ <fileset dir="${basedir}/src/fjbg"/>
+ <fileset dir="${basedir}/src/msil"/>
</jar>
</target>
- <target name="starr.libs" depends="starr.src, forkjoin.done, fjbg.done">
- <!-- TODO - Do we even *need* this in starr? -->
- <copy toDir="${lib.dir}" overwrite="yes">
- <fileset dir="${build-libs.dir}">
- <include name="fjbg.jar"/>
- <include name="forkjoin.jar"/>
- </fileset>
- </copy>
- <!-- remove SHA1 files for no starr, so we don't loose artifacts. -->
- <delete>
- <fileset dir="${lib.dir}">
- <include name="fjbg.jar.desired.sha1"/>
- <include name="msil.jar.desired.sha1"/>
- <include name="forkjoin.jar.desired.sha1"/>
- </fileset>
- </delete>
- </target>
-
- <target name="starr.removesha1" depends="starr.libs">
+ <target name="starr.removesha1" depends="starr.src">
<!-- remove SHA1 files for no starr, so we don't loose artifacts. -->
<delete>
<fileset dir="${lib.dir}">
@@ -2615,11 +2611,13 @@ STABLE REFERENCE (STARR)
<include name="scala-reflect.jar.desired.sha1"/>
<include name="scala-library.jar.desired.sha1"/>
<include name="scala-library-src.jar.desired.sha1"/>
+ <include name="scala-reflect-src.jar.desired.sha1"/>
+ <include name="scala-compiler-src.jar.desired.sha1"/>
</fileset>
</delete>
</target>
- <target name="starr.done" depends="starr.libs, starr.removesha1"/>
+ <target name="starr.done" depends="starr.jars, starr.removesha1"/>
<!-- ===========================================================================
FORWARDED TARGETS FOR PACKAGING
diff --git a/classpath.SAMPLE b/classpath.SAMPLE
deleted file mode 100644
index 9e607a41d9..0000000000
--- a/classpath.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src/compiler"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/fjbg.jar"/>
- <classpathentry kind="lib" path="lib/forkjoin.jar"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="output" path="build/quick/classes/compiler"/>
-</classpath>
diff --git a/lib/ant/ant-contrib.jar.desired.sha1 b/lib/ant/ant-contrib.jar.desired.sha1
index 56745657c5..65bcd122bf 100644
--- a/lib/ant/ant-contrib.jar.desired.sha1
+++ b/lib/ant/ant-contrib.jar.desired.sha1
@@ -1 +1 @@
-943cd5c8802b2a3a64a010efb86ec19bac142e40 ?ant-contrib.jar
+943cd5c8802b2a3a64a010efb86ec19bac142e40 *ant-contrib.jar
diff --git a/lib/ant/ant-dotnet-1.0.jar.desired.sha1 b/lib/ant/ant-dotnet-1.0.jar.desired.sha1
index 1f1dc9b8c1..d8b6a1ca85 100644
--- a/lib/ant/ant-dotnet-1.0.jar.desired.sha1
+++ b/lib/ant/ant-dotnet-1.0.jar.desired.sha1
@@ -1 +1 @@
-3fc1e35ca8c991fc3488548f7a276bd9053c179d ?ant-dotnet-1.0.jar
+3fc1e35ca8c991fc3488548f7a276bd9053c179d *ant-dotnet-1.0.jar
diff --git a/lib/ant/ant.jar.desired.sha1 b/lib/ant/ant.jar.desired.sha1
index 852b3397cb..bcb610d6de 100644
--- a/lib/ant/ant.jar.desired.sha1
+++ b/lib/ant/ant.jar.desired.sha1
@@ -1 +1 @@
-7b456ca6b93900f96e58cc8371f03d90a9c1c8d1 ?ant.jar
+7b456ca6b93900f96e58cc8371f03d90a9c1c8d1 *ant.jar
diff --git a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
index 06dcb1e312..53f87c3461 100644
--- a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
+++ b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
@@ -1 +1 @@
-7e50e3e227d834695f1e0bf018a7326e06ee4c86 ?maven-ant-tasks-2.1.1.jar
+7e50e3e227d834695f1e0bf018a7326e06ee4c86 *maven-ant-tasks-2.1.1.jar
diff --git a/lib/ant/vizant.jar.desired.sha1 b/lib/ant/vizant.jar.desired.sha1
index 00ff17d159..998da4643a 100644
--- a/lib/ant/vizant.jar.desired.sha1
+++ b/lib/ant/vizant.jar.desired.sha1
@@ -1 +1 @@
-2c61d6e9a912b3253194d5d6d3e1db7e2545ac4b ?vizant.jar
+2c61d6e9a912b3253194d5d6d3e1db7e2545ac4b *vizant.jar
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
index d24a5d01fc..6f3ccc77bd 100644
--- a/lib/fjbg.jar.desired.sha1
+++ b/lib/fjbg.jar.desired.sha1
@@ -1 +1 @@
-c3f9b576c91cb9761932ad936ccc4a71f33d2ef2 ?fjbg.jar
+8acc87f222210b4a5eb2675477602fc1759e7684 *fjbg.jar
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
index d31539daf4..8b24962e2c 100644
--- a/lib/forkjoin.jar.desired.sha1
+++ b/lib/forkjoin.jar.desired.sha1
@@ -1 +1 @@
-b5baf94dff8d3ca991d44a59add7bcbf6640379b ?forkjoin.jar
+f93a2525b5616d3a4bee7848fabbb2856b56f653 *forkjoin.jar
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
index 8acb0c9b14..b0426130ac 100644
--- a/lib/jline.jar.desired.sha1
+++ b/lib/jline.jar.desired.sha1
@@ -1 +1 @@
-a5261e70728c1847639e2b47d953441d0b217bcb ?jline.jar
+a5261e70728c1847639e2b47d953441d0b217bcb *jline.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 2c2fe79dda..9396b273ab 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-d48cb950ceded82a5e0ffae8ef2c68d0923ed00c ?msil.jar
+d48cb950ceded82a5e0ffae8ef2c68d0923ed00c *msil.jar
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
new file mode 100644
index 0000000000..48b3d3284e
--- /dev/null
+++ b/lib/scala-compiler-src.jar.desired.sha1
@@ -0,0 +1 @@
+259fd9f0a50ed6003248a01a366a97a5549aa386 ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index edd4647c4c..a8dbdb0a38 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-a6cc9171d15cc36297bf2a93d8bd0bcc7c601eaa ?scala-compiler.jar
+42f7367cc6ac59022d098e6091e5425390b9c925 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index 1ec21bc6ee..3379287733 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-7db4c7523f0e268ce58de2ab4ae6a3dd0e903f43 ?scala-library-src.jar
+e31e38414fd19c10add3e65bf77c2fd7c6c26f7d ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 8c5de75eeb..bef528ce26 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-81f424c3b2d8ba2c061d65003b90096e83efddf4 ?scala-library.jar
+2418c95bf7db34f87ebda4a5eaa918fe85047afb ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
new file mode 100644
index 0000000000..b3a5f03efe
--- /dev/null
+++ b/lib/scala-reflect-src.jar.desired.sha1
@@ -0,0 +1 @@
+51c64d77ad4c4233a06cea7ea80b0fb77e9867c4 ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
index fd3cf2aec2..4d913d73ab 100644
--- a/lib/scala-reflect.jar.desired.sha1
+++ b/lib/scala-reflect.jar.desired.sha1
@@ -1 +1 @@
-3749c7d17e1f85b27d962dac524db57584e78c45 ?scala-reflect.jar
+5656bf2f17bb9f22b3ba61a83393a9794eaa5429 ?scala-reflect.jar
diff --git a/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala b/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
index 4775138e5a..e301dfc2a4 100644
--- a/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/makro/runtime/ExprUtils.scala
@@ -29,7 +29,7 @@ trait ExprUtils {
def literal(x: Double) = Expr[Double](Literal(Constant(x)))(TypeTag.Double)
- def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag.String)
+ def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag[String](definitions.StringClass.asTypeConstructor))
def literal(x: Char) = Expr[Char](Literal(Constant(x)))(TypeTag.Char)
}
diff --git a/src/compiler/scala/reflect/makro/runtime/Mirrors.scala b/src/compiler/scala/reflect/makro/runtime/Mirrors.scala
index 79fa07fdb4..ec970ee696 100644
--- a/src/compiler/scala/reflect/makro/runtime/Mirrors.scala
+++ b/src/compiler/scala/reflect/makro/runtime/Mirrors.scala
@@ -24,11 +24,14 @@ trait Mirrors {
else NoSymbol
}
+ private lazy val libraryClasspathLoader: ClassLoader = {
+ val classpath = platform.classPath.asURLs
+ ScalaClassLoader.fromURLs(classpath)
+ }
+
private def isJavaClass(path: String): Boolean =
try {
- val classpath = platform.classPath.asURLs
- var classLoader = ScalaClassLoader.fromURLs(classpath)
- Class.forName(path, true, classLoader)
+ Class.forName(path, true, libraryClasspathLoader)
true
} catch {
case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index b70c3f44a3..e09f13a052 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -11,20 +11,21 @@ abstract class Taggers {
import treeBuild._
val coreTags = Map(
- ByteClass.asType -> nme.Byte,
- ShortClass.asType -> nme.Short,
- CharClass.asType -> nme.Char,
- IntClass.asType -> nme.Int,
- LongClass.asType -> nme.Long,
- FloatClass.asType -> nme.Float,
- DoubleClass.asType -> nme.Double,
- BooleanClass.asType -> nme.Boolean,
- UnitClass.asType -> nme.Unit,
- AnyClass.asType -> nme.Any,
- ObjectClass.asType -> nme.Object,
- NothingClass.asType -> nme.Nothing,
- NullClass.asType -> nme.Null,
- StringClass.asType -> nme.String)
+ ByteTpe -> nme.Byte,
+ ShortTpe -> nme.Short,
+ CharTpe -> nme.Char,
+ IntTpe -> nme.Int,
+ LongTpe -> nme.Long,
+ FloatTpe -> nme.Float,
+ DoubleTpe -> nme.Double,
+ BooleanTpe -> nme.Boolean,
+ UnitTpe -> nme.Unit,
+ AnyTpe -> nme.Any,
+ AnyValTpe -> nme.AnyVal,
+ AnyRefTpe -> nme.AnyRef,
+ ObjectTpe -> nme.Object,
+ NothingTpe -> nme.Nothing,
+ NullTpe -> nme.Null)
def materializeClassTag(prefix: Tree, tpe: Type): Tree = {
val tagModule = ClassTagModule
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 3a98d308a7..9b0777580b 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -23,10 +23,8 @@ trait GenSymbols {
def symtab: SymbolTable = state.symtab
/** Reify a reference to a symbol */
- def reifySymRef(sym0: Symbol): Tree = {
- assert(sym0 != null, "sym is null")
- val sym = sym0.dealias
-
+ def reifySymRef(sym: Symbol): Tree = {
+ assert(sym != null, "sym is null")
if (sym == NoSymbol)
mirrorSelect(nme.NoSymbol)
else if (sym.isRootPackage)
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index b97bf6b0cd..f48df8df65 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -161,11 +161,9 @@ trait GenTrees {
if (tree.symbol.isLocalToReifee || tree.tpe.isLocalToReifee)
reifyProduct(tree)
else {
- val sym0 = tree.symbol
- val sym = sym0.dealias
- val tpe0 = tree.tpe
- val tpe = tpe0.dealias
- if (reifyDebug) println("reifying bound type %s (underlying type is %s, dealiased is %s)".format(sym0, tpe0, tpe))
+ val sym = tree.symbol
+ val tpe = tree.tpe
+ if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe))
if (tpe.isSpliceable) {
val spliced = spliceType(tpe)
@@ -187,7 +185,7 @@ trait GenTrees {
if (reifyDebug) println("tpe is locatable: reify as Ident(%s)".format(sym))
mirrorBuildCall(nme.Ident, reify(sym))
} else {
- if (reifyDebug) println("tpe is an alias, but not a locatable: reify as TypeTree(%s)".format(tpe))
+ if (reifyDebug) println("tpe is not locatable: reify as TypeTree(%s)".format(tpe))
mirrorBuildCall(nme.TypeTree, reify(tpe))
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index f4e2200edc..82951a2434 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -11,9 +11,8 @@ trait GenTypes {
* Reify a type.
* For internal use only, use ``reified'' instead.
*/
- def reifyType(tpe0: Type): Tree = {
- assert(tpe0 != null, "tpe is null")
- val tpe = tpe0.dealias
+ def reifyType(tpe: Type): Tree = {
+ assert(tpe != null, "tpe is null")
if (tpe.isErroneous)
CannotReifyErroneousReifee(tpe)
@@ -29,9 +28,9 @@ trait GenTypes {
if (spliced != EmptyTree)
return spliced
- val tsym = tpe.typeSymbol
+ val tsym = tpe.typeSymbolDirect
if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
- Select(Select(reify(tpe.typeSymbol), nme.asTypeSymbol), nme.asTypeConstructor)
+ Select(Select(reify(tsym), nme.asTypeSymbol), nme.asTypeConstructor)
else tpe match {
case tpe @ NoType =>
reifyMirrorObject(tpe)
@@ -107,13 +106,11 @@ trait GenTypes {
}
private def spliceAsManifest(tpe: Type): Tree = {
- val ManifestClass = rootMirror.staticClass("scala.reflect.Manifest")
- val ManifestModule = rootMirror.staticModule("scala.reflect.Manifest")
- def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == ManifestModule || sub.symbol.owner == ManifestModule))
+ def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == FullManifestModule || sub.symbol.owner == FullManifestModule))
def searchForManifest(typer: analyzer.Typer): Tree =
analyzer.inferImplicit(
EmptyTree,
- appliedType(ManifestClass.asTypeConstructor, List(tpe)),
+ appliedType(FullManifestClass.asTypeConstructor, List(tpe)),
reportAmbiguous = false,
isView = false,
context = typer.context,
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index a34692f5e0..e70716885e 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -99,7 +99,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "msil")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "msil")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index daa08ef8a7..2cada92c1e 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -150,6 +150,9 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the scaladoc tool to use the binary given to create diagrams */
private var docDiagramsDotPath: Option[String] = None
+ /** Instruct scaladoc to produce textual output from html pages, for easy diff-ing */
+ private var docRawOutput: Boolean = false
+
/*============================================================================*\
** Properties setters **
@@ -419,6 +422,11 @@ class Scaladoc extends ScalaMatchingTask {
def setDiagramsDotPath(input: String) =
docDiagramsDotPath = Some(input)
+ /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file
+ * @param input One of the flags `yes/no` or `on/off`. Default is no/off. */
+ def setRawOutput(input: String) =
+ docRawOutput = Flag.getBooleanValue(input, "rawOutput")
+
/*============================================================================*\
** Properties getters **
\*============================================================================*/
@@ -616,6 +624,7 @@ class Scaladoc extends ScalaMatchingTask {
docSettings.docImplicitsShowAll.value = docImplicitsShowAll
docSettings.docDiagrams.value = docDiagrams
docSettings.docDiagramsDebug.value = docDiagramsDebug
+ docSettings.docRawOutput.value = docRawOutput
if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 35bf2dd288..e378d71944 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -319,7 +319,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def ccon = Class.forName(name).getConstructor(classOf[CharsetDecoder], classOf[Reporter])
try Some(ccon.newInstance(charset.newDecoder(), reporter).asInstanceOf[SourceReader])
- catch { case x =>
+ catch { case ex: Throwable =>
globalError("exception while trying to instantiate source reader '" + name + "'")
None
}
@@ -1546,7 +1546,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
- catch { case ex =>
+ catch { case ex: Throwable =>
val shown = if (settings.verbose.value) {
val pw = new java.io.PrintWriter(new java.io.StringWriter)
ex.printStackTrace(pw)
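The two catch clauses above are tightened from a bare `case ex =>` to an explicit `case ex: Throwable =>`, making the intent to trap every exception visible. A minimal sketch of the related idiom, assuming a hypothetical helper method and using scala.util.control.NonFatal (introduced elsewhere in this changeset), which matches everything except fatal errors such as OutOfMemoryError:

import scala.util.control.NonFatal

// Hypothetical example, not part of this commit: run a task and report
// any non-fatal failure, while letting fatal JVM errors propagate.
def runReporting(task: () => Unit): Unit =
  try task()
  catch {
    case NonFatal(ex) => Console.err.println("task failed: " + ex.getMessage)
  }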
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index 5a4b4172c6..c6fdb4b891 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -20,7 +20,8 @@ class ScalaDoc {
def process(args: Array[String]): Boolean = {
var reporter: ConsoleReporter = null
- val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"))
+ val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
+ msg => reporter.printMessage(msg))
reporter = new ConsoleReporter(docSettings) {
// need to do this so that the Global instance doesn't trash all the
// symbols just because there was an error
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 3a527676b4..be5909a67f 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -213,17 +213,14 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
- def paramToArg(vparam: Symbol) = {
- val arg = Ident(vparam)
- if (isRepeatedParamType(vparam.tpe)) wildcardStar(arg)
- else arg
- }
+ def paramToArg(vparam: Symbol): Tree =
+ paramToArg(Ident(vparam), isRepeatedParamType(vparam.tpe))
- def paramToArg(vparam: ValDef) = {
- val arg = Ident(vparam.name)
- if (treeInfo.isRepeatedParamType(vparam.tpt)) wildcardStar(arg)
- else arg
- }
+ def paramToArg(vparam: ValDef): Tree =
+ paramToArg(Ident(vparam.name), treeInfo.isRepeatedParamType(vparam.tpt))
+
+ def paramToArg(arg: Ident, isRepeatedParam: Boolean): Tree =
+ if (isRepeatedParam) wildcardStar(arg) else arg
/** Make forwarder to method `target`, passing all parameters in `params` */
def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) =
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 62885cc73d..80b0f640a4 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -44,7 +44,7 @@ trait JavaPlatform extends Platform {
else List(dependencyAnalysis)
private def classEmitPhase =
- if (settings.target.value == "jvm-1.5") genJVM
+ if (settings.target.value == "jvm-1.5-fjbg") genJVM
else genASM
def platformPhases = List(
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index f7541a4739..b638745327 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -860,7 +860,7 @@ abstract class GenICode extends SubComponent {
if (sym.isLabel) { // jump to a label
val label = ctx.labels.getOrElse(sym, {
// it is a forward jump, scan for labels
- scanForLabels(ctx.defdef, ctx)
+ resolveForwardLabel(ctx.defdef, ctx, sym)
ctx.labels.get(sym) match {
case Some(l) =>
log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
@@ -1406,21 +1406,17 @@ abstract class GenICode extends SubComponent {
def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
/**
- * Traverse the tree and store label stubs in the context. This is
- * necessary to handle forward jumps, because at a label application
- * with arguments, the symbols of the corresponding LabelDef parameters
- * are not yet known.
+ * Find the label denoted by `lsym` and enter it in context `ctx`.
*
- * Since it is expensive to traverse each method twice, this method is called
- * only when forward jumps really happen, and then it re-traverses the whole
- * method, scanning for LabelDefs.
+ * We only enter one symbol at a time, even though we might traverse the same
+ * tree more than once per method. That's because we cannot enter labels that
+ * might be duplicated (for instance, inside finally blocks).
*
* TODO: restrict the scanning to smaller subtrees than the whole method.
* It is sufficient to scan the trees of the innermost enclosing block.
*/
- //
- private def scanForLabels(tree: Tree, ctx: Context): Unit = tree foreachPartial {
- case t @ LabelDef(_, params, rhs) =>
+ private def resolveForwardLabel(tree: Tree, ctx: Context, lsym: Symbol): Unit = tree foreachPartial {
+ case t @ LabelDef(_, params, rhs) if t.symbol == lsym =>
ctx.labels.getOrElseUpdate(t.symbol, {
val locals = params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false))
ctx.method addLocals locals
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 5f495c8456..13457bfe58 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -15,37 +15,48 @@ import scala.reflect.internal.util.{Position,NoPosition}
/*
A pattern match
- case THIS(clasz) =>
- case STORE_THIS(kind) =>
- case CONSTANT(const) =>
- case LOAD_ARRAY_ITEM(kind) =>
- case LOAD_LOCAL(local) =>
- case LOAD_FIELD(field, isStatic) =>
- case LOAD_MODULE(module) =>
- case STORE_ARRAY_ITEM(kind) =>
- case STORE_LOCAL(local) =>
- case STORE_FIELD(field, isStatic) =>
- case CALL_PRIMITIVE(primitive) =>
- case CALL_METHOD(method, style) =>
- case NEW(kind) =>
- case CREATE_ARRAY(elem, dims) =>
- case IS_INSTANCE(tpe) =>
- case CHECK_CAST(tpe) =>
- case SWITCH(tags, labels) =>
- case JUMP(whereto) =>
- case CJUMP(success, failure, cond, kind) =>
- case CZJUMP(success, failure, cond, kind) =>
- case RETURN(kind) =>
- case THROW(clasz) =>
- case DROP(kind) =>
- case DUP(kind) =>
- case MONITOR_ENTER() =>
- case MONITOR_EXIT() =>
- case BOX(boxType) =>
- case UNBOX(tpe) =>
- case SCOPE_ENTER(lv) =>
- case SCOPE_EXIT(lv) =>
- case LOAD_EXCEPTION(clasz) =>
+ // locals
+ case THIS(clasz) =>
+ case STORE_THIS(kind) =>
+ case LOAD_LOCAL(local) =>
+ case STORE_LOCAL(local) =>
+ case SCOPE_ENTER(lv) =>
+ case SCOPE_EXIT(lv) =>
+ // stack
+ case LOAD_MODULE(module) =>
+ case LOAD_EXCEPTION(clasz) =>
+ case DROP(kind) =>
+ case DUP(kind) =>
+ // constants
+ case CONSTANT(const) =>
+ // arithlogic
+ case CALL_PRIMITIVE(primitive) =>
+ // casts
+ case IS_INSTANCE(tpe) =>
+ case CHECK_CAST(tpe) =>
+ // objs
+ case NEW(kind) =>
+ case MONITOR_ENTER() =>
+ case MONITOR_EXIT() =>
+ case BOX(boxType) =>
+ case UNBOX(tpe) =>
+ // flds
+ case LOAD_FIELD(field, isStatic) =>
+ case STORE_FIELD(field, isStatic) =>
+ // mthds
+ case CALL_METHOD(method, style) =>
+ // arrays
+ case LOAD_ARRAY_ITEM(kind) =>
+ case STORE_ARRAY_ITEM(kind) =>
+ case CREATE_ARRAY(elem, dims) =>
+ // jumps
+ case SWITCH(tags, labels) =>
+ case JUMP(whereto) =>
+ case CJUMP(success, failure, cond, kind) =>
+ case CZJUMP(success, failure, cond, kind) =>
+ // ret
+ case RETURN(kind) =>
+ case THROW(clasz) =>
*/
@@ -58,11 +69,26 @@ import scala.reflect.internal.util.{Position,NoPosition}
trait Opcodes { self: ICodes =>
import global.{Symbol, NoSymbol, Type, Name, Constant};
+ // categories of ICode instructions
+ final val localsCat = 1
+ final val stackCat = 2
+ final val constCat = 3
+ final val arilogCat = 4
+ final val castsCat = 5
+ final val objsCat = 6
+ final val fldsCat = 7
+ final val mthdsCat = 8
+ final val arraysCat = 9
+ final val jumpsCat = 10
+ final val retCat = 11
+
/** This class represents an instruction of the intermediate code.
* Each case subclass will represent a specific operation.
*/
abstract class Instruction extends Cloneable {
+ def category: Int = 0 // undefined
+
/** This abstract method returns the number of used elements on the stack */
def consumed : Int = 0
@@ -118,6 +144,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(REFERENCE(clasz))
+
+ override def category = localsCat
}
/** Loads a constant on the stack.
@@ -130,6 +158,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(toTypeKind(constant.tpe))
+
+ override def category = constCat
}
/** Loads an element of an array. The array and the index should
@@ -143,6 +173,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = List(ARRAY(kind), INT)
override def producedTypes = List(kind)
+
+ override def category = arraysCat
}
/** Load a local variable on the stack. It can be a method argument.
@@ -154,6 +186,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(local.kind)
+
+ override def category = localsCat
}
/** Load a field on the stack. The object to which it refers should be
@@ -176,6 +210,8 @@ trait Opcodes { self: ICodes =>
// see #4283
var hostClass: Symbol = field.owner
def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
+
+ override def category = fldsCat
}
case class LOAD_MODULE(module: Symbol) extends Instruction {
@@ -187,6 +223,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(REFERENCE(module))
+
+ override def category = stackCat
}
/** Store a value into an array at a specified index.
@@ -198,6 +236,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override def consumedTypes = List(ARRAY(kind), INT, kind)
+
+ override def category = arraysCat
}
/** Store a value into a local variable. It can be an argument.
@@ -209,6 +249,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override def consumedTypes = List(local.kind)
+
+ override def category = localsCat
}
/** Store a value into a field.
@@ -228,6 +270,8 @@ trait Opcodes { self: ICodes =>
List(toTypeKind(field.tpe))
else
List(REFERENCE(field.owner), toTypeKind(field.tpe));
+
+ override def category = fldsCat
}
/** Store a value into the 'this' pointer.
@@ -238,6 +282,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
override def consumedTypes = List(kind)
+ override def category = localsCat
}
/** Call a primitive function.
@@ -292,6 +337,8 @@ trait Opcodes { self: ICodes =>
case StartConcat => List(ConcatClass)
case EndConcat => List(REFERENCE(global.definitions.StringClass))
}
+
+ override def category = arilogCat
}
/** This class represents a CALL_METHOD instruction
@@ -347,6 +394,8 @@ trait Opcodes { self: ICodes =>
* being able to store such instructions into maps, when more
* than one CALL_METHOD to the same method might exist.
*/
+
+ override def category = mthdsCat
}
case class BOX(boxType: TypeKind) extends Instruction {
@@ -355,6 +404,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = boxType :: Nil
override def produced = 1
+ override def category = objsCat
}
case class UNBOX(boxType: TypeKind) extends Instruction {
@@ -363,6 +413,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+ override def category = objsCat
}
/** Create a new instance of a class through the specified constructor
@@ -378,6 +429,8 @@ trait Opcodes { self: ICodes =>
/** The corresponding constructor call. */
var init: CALL_METHOD = _
+
+ override def category = objsCat
}
@@ -392,6 +445,8 @@ trait Opcodes { self: ICodes =>
override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
override def produced = 1;
+
+ override def category = arraysCat
}
/** This class represents a IS_INSTANCE instruction
@@ -405,6 +460,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+
+ override def category = castsCat
}
/** This class represents a CHECK_CAST instruction
@@ -419,6 +476,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override val consumedTypes = List(ObjectReference)
override def producedTypes = List(typ)
+
+ override def category = castsCat
}
/** This class represents a SWITCH instruction
@@ -439,6 +498,8 @@ trait Opcodes { self: ICodes =>
override val consumedTypes = List(INT)
def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
+
+ override def category = jumpsCat
}
/** This class represents a JUMP instruction
@@ -451,6 +512,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 0
+
+ override def category = jumpsCat
}
/** This class represents a CJUMP instruction
@@ -474,6 +537,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override val consumedTypes = List(kind, kind)
+
+ override def category = jumpsCat
}
/** This class represents a CZJUMP instruction
@@ -495,6 +560,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
override val consumedTypes = List(kind)
+
+ override def category = jumpsCat
}
@@ -507,6 +574,8 @@ trait Opcodes { self: ICodes =>
override def produced = 0
// TODO override val consumedTypes = List(kind)
+
+ override def category = retCat
}
/** This class represents a THROW instruction
@@ -522,6 +591,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = retCat
}
/** This class represents a DROP instruction
@@ -534,6 +605,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = stackCat
}
/** This class represents a DUP instruction
@@ -543,6 +616,7 @@ trait Opcodes { self: ICodes =>
case class DUP (typ: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 2
+ override def category = stackCat
}
/** This class represents a MONITOR_ENTER instruction
@@ -555,6 +629,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def category = objsCat
}
/** This class represents a MONITOR_EXIT instruction
@@ -567,6 +643,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1;
override def produced = 0;
+
+ override def category = objsCat
}
/** A local variable becomes visible at this point in code.
@@ -577,6 +655,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_ENTER " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** A local variable leaves its scope at this point in code.
@@ -587,6 +666,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_EXIT " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** Fake instruction. It designates the VM who pushes an exception
@@ -598,6 +678,7 @@ trait Opcodes { self: ICodes =>
override def consumed = sys.error("LOAD_EXCEPTION does clean the whole stack, no idea how many things it consumes!")
override def produced = 1
override def producedTypes = REFERENCE(clasz) :: Nil
+ override def category = stackCat
}
/** This class represents a method invocation style. */
@@ -658,6 +739,8 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def producedTypes = List(msil_mgdptr(local.kind))
+
+ override def category = localsCat
}
case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
@@ -670,6 +753,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
override def producedTypes = List(msil_mgdptr(REFERENCE(field.owner)));
+
+ override def category = fldsCat
}
case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
@@ -681,6 +766,8 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = List(ARRAY(kind), INT)
override def producedTypes = List(msil_mgdptr(kind))
+
+ override def category = arraysCat
}
case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
@@ -689,6 +776,7 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = ObjectReference :: Nil // actually consumes a 'boxed valueType'
override def produced = 1
override def producedTypes = List(msil_mgdptr(valueType))
+ override def category = objsCat
}
case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
@@ -696,6 +784,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = ObjectReference :: Nil // actually consumes a managed pointer
override def produced = 0
+ override def category = objsCat
}
case class CIL_NEWOBJ(method: Symbol) extends Instruction {
@@ -705,6 +794,7 @@ trait Opcodes { self: ICodes =>
override def consumedTypes = method.tpe.paramTypes map toTypeKind
override def produced = 1
override def producedTypes = List(toTypeKind(method.tpe.resultType))
+ override def category = objsCat
}
}
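
For illustration only, a minimal standalone sketch of how an integer `category` on each instruction enables the dense `@switch`-style dispatch that GenASM uses further down; the `Cat`, `Instr` and `describe` names are stand-ins, not compiler code:

    object CategorySwitchSketch {
      object Cat {
        final val locals = 1
        final val stack  = 2
        final val jumps  = 3
      }

      sealed abstract class Instr { def category: Int }
      case class LoadLocal(idx: Int) extends Instr { def category = Cat.locals }
      case object Dup                extends Instr { def category = Cat.stack  }
      case class Jump(target: Int)   extends Instr { def category = Cat.jumps  }

      // mirrors `(instr.category: @scala.annotation.switch) match { case icodes.localsCat => ... }`
      def describe(i: Instr): String = (i.category: @scala.annotation.switch) match {
        case Cat.locals => "touches a local variable slot"
        case Cat.stack  => "pure operand-stack manipulation"
        case Cat.jumps  => "control transfer"
        case _          => "uncategorized"
      }

      def main(args: Array[String]): Unit =
        List(LoadLocal(0), Dup, Jump(7)) foreach (i => println(describe(i)))
    }
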
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index 290979d205..663b626bef 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -38,19 +38,21 @@ trait Repository {
}
/** Load bytecode for given symbol. */
- def load(sym: Symbol) {
+ def load(sym: Symbol): Boolean = {
try {
val (c1, c2) = icodeReader.readClass(sym)
- assert(c1.symbol == sym || c2.symbol == sym,
- "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
+ assert(c1.symbol == sym || c2.symbol == sym, "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
loaded += (c1.symbol -> c1)
loaded += (c2.symbol -> c2)
+
+ true
} catch {
case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug.value)
- e.printStackTrace
+ if (settings.debug.value) { e.printStackTrace }
+
+ false
}
}
}
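
A hedged sketch of the pattern behind the signature change above: `load` now reports success so that callers (e.g. the inliner's `isAvailable = icodes.load(...)` below) can react to a failed load instead of assuming it worked. `Class.forName` merely stands in for `icodeReader.readClass`:

    object LoadReportsSuccess {
      def load(className: String): Boolean =
        try {
          Class.forName(className) // stand-in for icodeReader.readClass(sym)
          true
        } catch {
          case e: Throwable =>
            println("Failed to load %s. [%s]".format(className, e.getMessage))
            false
        }

      def main(args: Array[String]): Unit = {
        println(load("java.lang.String")) // true
        println(load("no.such.Clazz"))    // logs the failure and returns false
      }
    }
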
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index d31eafff48..7a5615ac26 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -174,11 +174,8 @@ abstract class TypeFlowAnalysis {
}
i match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
+ case THIS(clasz) => stack push toTypeKind(clasz.tpe)
+ case CONSTANT(const) => stack push toTypeKind(const.tpe)
case LOAD_ARRAY_ITEM(kind) =>
stack.pop2 match {
@@ -194,139 +191,73 @@ abstract class TypeFlowAnalysis {
stack push (if (t == typeLattice.bottom) local.kind else t)
case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
+ if (!isStatic) { stack.pop }
stack push toTypeKind(field.tpe)
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
-
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
+ case LOAD_MODULE(module) => stack push toTypeKind(module.tpe)
+ case STORE_ARRAY_ITEM(kind) => stack.pop3
+ case STORE_LOCAL(local) => val t = stack.pop; bindings += (local -> t)
+ case STORE_THIS(_) => stack.pop
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
+ case STORE_FIELD(field, isStatic) => if (isStatic) stack.pop else stack.pop2
case CALL_PRIMITIVE(primitive) =>
primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
+ case Negation(kind) => stack.pop; stack.push(kind)
+
case Test(_, kind, zero) =>
stack.pop
- if (!zero) stack.pop
+ if (!zero) { stack.pop }
stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
+
+ case Comparison(_, _) => stack.pop2; stack push INT
case Arithmetic(op, kind) =>
stack.pop
- if (op != NOT)
- stack.pop
+ if (op != NOT) { stack.pop }
val k = kind match {
case BYTE | SHORT | CHAR => INT
case _ => kind
}
stack push k
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
+ case Logical(op, kind) => stack.pop2; stack push kind
+ case Shift(op, kind) => stack.pop2; stack push kind
+ case Conversion(src, dst) => stack.pop; stack push dst
+ case ArrayLength(kind) => stack.pop; stack push INT
+ case StartConcat => stack.push(ConcatClass)
+ case EndConcat => stack.pop; stack.push(STRING)
+ case StringConcat(el) => stack.pop2; stack push ConcatClass
}
case cm @ CALL_METHOD(_, _) =>
stack pop cm.consumed
cm.producedTypes foreach (stack push _)
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
-
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
-
- case NEW(kind) =>
- stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
-
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
+ case BOX(kind) => stack.pop; stack.push(BOXED(kind))
+ case UNBOX(kind) => stack.pop; stack.push(kind)
- case SWITCH(tags, labels) =>
- stack.pop
+ case NEW(kind) => stack.push(kind)
- case JUMP(whereto) =>
- ()
+ case CREATE_ARRAY(elem, dims) => stack.pop(dims); stack.push(ARRAY(elem))
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
+ case IS_INSTANCE(tpe) => stack.pop; stack.push(BOOL)
+ case CHECK_CAST(tpe) => stack.pop; stack.push(tpe)
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
+ case _: SWITCH => stack.pop
+ case _: JUMP => ()
+ case _: CJUMP => stack.pop2
+ case _: CZJUMP => stack.pop
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
+ case RETURN(kind) => if (kind != UNIT) { stack.pop }
+ case THROW(_) => stack.pop
- case THROW(_) =>
- stack.pop
+ case DROP(kind) => stack.pop
+ case DUP(kind) => stack.push(stack.head)
- case DROP(kind) =>
- stack.pop
+ case MONITOR_ENTER() | MONITOR_EXIT() => stack.pop
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) => ()
case LOAD_EXCEPTION(clasz) =>
stack.pop(stack.length)
@@ -551,14 +482,24 @@ abstract class TypeFlowAnalysis {
val relevantBBs = mutable.Set.empty[BasicBlock]
+ /*
+ * Rationale for preventing certain methods from ever being inlined:
+ *
+ * (1) inlining getters and setters exposes a private field, which may itself
+ * prevent inlining of the caller (at best) or
+ * lead to situations like SI-5442 ("IllegalAccessError when mixing optimized and unoptimized bytecode")
+ *
+ * (2) only invocations with a receiver object are considered (i.e. static methods are never inlined).
+ * This is ensured by checking `isDynamic` (i.e. virtual method dispatch) and `Static(true)` (i.e. calls to private members)
+ */
private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
val msym = cm.method
val style = cm.style
- // Dynamic == normal invocations
- // Static(true) == calls to private members
- !msym.isConstructor && !blackballed(msym) &&
- (style.isDynamic || (style.hasInstance && style.isStatic))
- // && !(msym hasAnnotation definitions.ScalaNoInlineClass)
+
+ !blackballed(msym) &&
+ !msym.isConstructor &&
+ (!msym.isAccessor || inliner.isClosureClass(msym.owner)) &&
+ (style.isDynamic || (style.hasInstance && style.isStatic))
}
override def init(m: icodes.IMethod) {
@@ -575,7 +516,7 @@ abstract class TypeFlowAnalysis {
* but the effect on method size could be explored. */
putOnRadar(m.linearizedBlocks(linearizer))
populatePerimeter()
- assert(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock), "you gave me dead code")
+ // usually but not always true (counterexample in SI-6015) `(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock))`
}
def conclusives(b: BasicBlock): List[opcodes.CALL_METHOD] = {
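
A minimal, self-contained sketch of the pre-candidate filter documented above; `Sym` and `Style` are stand-ins for the compiler's `Symbol` and invocation styles, so this only illustrates the boolean shape of the test:

    object PreCandidateSketch {
      case class Sym(isConstructor: Boolean, isAccessor: Boolean, ownerIsClosureClass: Boolean)

      sealed trait Style { def isDynamic: Boolean; def hasInstance: Boolean; def isStatic: Boolean }
      case object Dynamic extends Style { val isDynamic = true; val hasInstance = true; val isStatic = false }
      case class Static(onInstance: Boolean) extends Style { val isDynamic = false; val hasInstance = onInstance; val isStatic = true }

      def isPreCandidate(msym: Sym, style: Style, blackballed: Sym => Boolean): Boolean =
        !blackballed(msym) &&
        !msym.isConstructor &&
        (!msym.isAccessor || msym.ownerIsClosureClass) &&          // rationale (1): keep getters/setters out
        (style.isDynamic || (style.hasInstance && style.isStatic)) // rationale (2): a receiver object is required

      def main(args: Array[String]): Unit = {
        val getter = Sym(isConstructor = false, isAccessor = true, ownerIsClosureClass = false)
        println(isPreCandidate(getter, Dynamic, _ => false)) // false: plain accessors are never inlined
        println(isPreCandidate(getter.copy(isAccessor = false), Static(onInstance = true), _ => false)) // true
      }
    }
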
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0c527fbaf4..8d243a1dd0 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -295,6 +295,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isHidden) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
@@ -458,6 +459,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -644,20 +649,102 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
} // end of class JBuilder
/** functionality for building plain and mirror classes */
abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ def debugLevel = settings.debuginfo.indexOfChoice
+
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
// -----------------------------------------------------------------------------------------
// more constants
// -----------------------------------------------------------------------------------------
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
-
val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
@@ -763,7 +850,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// without it. This is particularly bad because the availability of
// generic information could disappear as a consequence of a seemingly
// unrelated change.
- sym.isSynthetic
+ sym.isHidden
|| sym.isLiftedMethod
|| sym.isBridge
|| (sym.ownerChain exists (_.isImplClass))
@@ -969,86 +1056,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
/** Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -1316,7 +1323,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// Additional interface parents based on annotations and other cues
def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(JavaCloneableClass)
+ case CloneableAttr => Some(CloneableClass)
case RemoteAttr => Some(RemoteInterfaceClass)
case _ => None
}
@@ -1381,8 +1388,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// typestate: entering mode with valid call sequences:
// [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
- jclass.visitSource(c.cunit.source.toString,
- null /* SourceDebugExtension */)
+ if(emitSource) {
+ jclass.visitSource(c.cunit.source.toString,
+ null /* SourceDebugExtension */)
+ }
val enclM = getEnclosingMethodAttribute()
if(enclM != null) {
@@ -1510,12 +1519,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
jfield.visitEnd()
}
- def debugLevel = settings.debuginfo.indexOfChoice
-
- // val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
-
var method: IMethod = _
var jmethod: asm.MethodVisitor = _
var jMethodName: String = _
@@ -1894,6 +1897,15 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
i += 1
}
+ // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
+ i = 1
+ while (i < keys.length) {
+ if(keys(i-1) == keys(i)) {
+ abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
+ }
+ i += 1
+ }
+
val keyMin = keys(0)
val keyMax = keys(keys.length - 1)
@@ -2180,9 +2192,15 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val st = pending.getOrElseUpdate(lv, mutable.Stack.empty[Label])
st.push(start)
}
- def popScope(lv: Local, end: Label) {
- val start = pending(lv).pop()
- seen ::= LocVarEntry(lv, start, end)
+ def popScope(lv: Local, end: Label, iPos: Position) {
+ pending.get(lv) match {
+ case Some(st) if st.nonEmpty =>
+ val start = st.pop()
+ seen ::= LocVarEntry(lv, start, end)
+ case _ =>
+ // TODO SI-6049
+ getCurrentCUnit().warning(iPos, "Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6049")
+ }
}
def getMerged(): collection.Map[Local, List[Interval]] = {
@@ -2366,251 +2384,269 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- instr match {
- case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
-
- case CONSTANT(const) => genConstant(jmethod, const)
-
- case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
-
- case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
- jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- } else {
- jmethod.visitFieldInsn(
- Opcodes.GETSTATIC,
- javaName(module) /* + "$" */ ,
- strMODULE_INSTANCE_FIELD,
- descriptor(module)
- )
- }
+ (instr.category: @scala.annotation.switch) match {
+
+ case icodes.localsCat => (instr: @unchecked) match {
+ case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
+ case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+
+ case SCOPE_ENTER(lv) =>
+ // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val start = new asm.Label
+ jmethod.visitLabel(start)
+ scoping.pushScope(lv, start)
+ }
+
+ case SCOPE_EXIT(lv) =>
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if(relevant) {
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val end = new asm.Label
+ jmethod.visitLabel(end)
+ scoping.popScope(lv, end, instr.pos)
+ }
+ }
- case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case icodes.stackCat => (instr: @unchecked) match {
- case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ } else {
+ jmethod.visitFieldInsn(
+ Opcodes.GETSTATIC,
+ javaName(module) /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ descriptor(module)
+ )
+ }
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+ case DROP(kind) => emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+ case DUP(kind) => emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
- case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
+ case LOAD_EXCEPTION(_) => ()
+ }
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getInternalName
- jcode.invokevirtual(target, "clone", mdesc_arrayClone)
+ case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
+ case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
- case BOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case icodes.castsCat => (instr: @unchecked) match {
- case UNBOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case IS_INSTANCE(tpe) =>
+ val jtyp: asm.Type =
+ tpe match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jmethod.visitTypeInsn(Opcodes.NEW, className)
+ case CHECK_CAST(tpe) =>
+ tpe match {
- case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case REFERENCE(cls) =>
+ if (cls != ObjectClass) { // No need to checkcast for Objects
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
+ }
- case CREATE_ARRAY(elem, dims) =>
- jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ case ARRAY(elem) =>
+ val iname = javaArrayType(javaType(elem)).getInternalName
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
- case IS_INSTANCE(tpe) =>
- val jtyp: asm.Type =
- tpe match {
- case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
- case ARRAY(elem) => javaArrayType(javaType(elem))
case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
}
- jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
- case CHECK_CAST(tpe) =>
- tpe match {
+ }
- case REFERENCE(cls) =>
- if (cls != ObjectClass) { // No need to checkcast for Objects
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
- }
+ case icodes.objsCat => (instr: @unchecked) match {
- case ARRAY(elem) =>
- val iname = javaArrayType(javaType(elem)).getInternalName
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
+ case BOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case UNBOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- case sw @ SWITCH(tagss, branches) =>
- assert(branches.length == tagss.length + 1, sw)
- val flatSize = sw.flatTagsCount
- val flatKeys = new Array[Int](flatSize)
- val flatBranches = new Array[asm.Label](flatSize)
-
- var restTagss = tagss
- var restBranches = branches
- var k = 0 // ranges over flatKeys and flatBranches
- while(restTagss.nonEmpty) {
- val currLabel = labels(restBranches.head)
- for(cTag <- restTagss.head) {
- flatKeys(k) = cTag;
- flatBranches(k) = currLabel
- k += 1
- }
- restTagss = restTagss.tail
- restBranches = restBranches.tail
- }
- val defaultLabel = labels(restBranches.head)
- assert(restBranches.tail.isEmpty)
- debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
- jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
-
- case JUMP(whereto) =>
- if (nextBlock != whereto) {
- jcode goTo labels(whereto)
- }
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jmethod.visitTypeInsn(Opcodes.NEW, className)
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else {
- (kind: @unchecked) match {
- case LONG => emit(Opcodes.LCMP)
- case FLOAT =>
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
+ case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ }
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- // @unchecked because references aren't compared with GT, GE, LT, LE.
- ((cond, nextBlock) : @unchecked) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode goTo labels(failure)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode goTo labels(failure)
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- emit(Opcodes.LCONST_0)
- emit(Opcodes.LCMP)
- case FLOAT =>
- emit(Opcodes.FCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- emit(Opcodes.DCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
+ case icodes.fldsCat => (instr: @unchecked) match {
- case RETURN(kind) => jcode emitRETURN kind
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
- case THROW(_) => emit(Opcodes.ATHROW)
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
- case DROP(kind) =>
- emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
+ }
- case DUP(kind) =>
- emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
+ case icodes.mthdsCat => (instr: @unchecked) match {
- case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getInternalName
+ jcode.invokevirtual(target, "clone", mdesc_arrayClone)
- case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
- case SCOPE_ENTER(lv) =>
- // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val start = new asm.Label
- jmethod.visitLabel(start)
- scoping.pushScope(lv, start)
- }
+ }
- case SCOPE_EXIT(lv) =>
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) {
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val end = new asm.Label
- jmethod.visitLabel(end)
- scoping.popScope(lv, end)
- }
+ case icodes.arraysCat => (instr: @unchecked) match {
+ case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
+ case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ }
+
+ case icodes.jumpsCat => (instr: @unchecked) match {
+
+ case sw @ SWITCH(tagss, branches) =>
+ assert(branches.length == tagss.length + 1, sw)
+ val flatSize = sw.flatTagsCount
+ val flatKeys = new Array[Int](flatSize)
+ val flatBranches = new Array[asm.Label](flatSize)
+
+ var restTagss = tagss
+ var restBranches = branches
+ var k = 0 // ranges over flatKeys and flatBranches
+ while(restTagss.nonEmpty) {
+ val currLabel = labels(restBranches.head)
+ for(cTag <- restTagss.head) {
+ flatKeys(k) = cTag;
+ flatBranches(k) = currLabel
+ k += 1
+ }
+ restTagss = restTagss.tail
+ restBranches = restBranches.tail
+ }
+ val defaultLabel = labels(restBranches.head)
+ assert(restBranches.tail.isEmpty)
+ debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
+ jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto) {
+ jcode goTo labels(whereto)
+ }
+
+ case CJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF_ICMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ICMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ if (nextBlock == success) {
+ jcode.emitIF_ACMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ACMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ case FLOAT =>
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ case CZJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ val Success = success
+ val Failure = failure
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock) : @unchecked) match {
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
+ case (EQ, _) =>
+ jcode emitIFNULL labels(success)
+ jcode goTo labels(failure)
+ case (NE, _) =>
+ jcode emitIFNONNULL labels(success)
+ jcode goTo labels(failure)
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG =>
+ emit(Opcodes.LCONST_0)
+ emit(Opcodes.LCMP)
+ case FLOAT =>
+ emit(Opcodes.FCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(Opcodes.DCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ }
+
+ case icodes.retCat => (instr: @unchecked) match {
+ case RETURN(kind) => jcode emitRETURN kind
+ case THROW(_) => emit(Opcodes.ATHROW)
+ }
- case LOAD_EXCEPTION(_) =>
- ()
}
}
@@ -2893,8 +2929,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// typestate: entering mode with valid call sequences:
// [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
- mirrorClass.visitSource("" + cunit.source,
- null /* SourceDebugExtension */)
+ if(emitSource) {
+ mirrorClass.visitSource("" + cunit.source,
+ null /* SourceDebugExtension */)
+ }
val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
mirrorClass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
@@ -3033,9 +3071,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- // TODO no inner classes attribute is written. Confirm intent.
- assert(innerClassBuffer.isEmpty, innerClassBuffer)
-
+ addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
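
A standalone sketch of the SI-6011 guard introduced above: assuming the flattened `keys` array has already been sorted (as in the surrounding emitter code), any pair of equal adjacent keys would yield an invalid lookupswitch, so code generation aborts rather than silently dropping a branch:

    object SwitchKeysCheck {
      def assertNoDuplicateKeys(keys: Array[Int]): Unit = {
        var i = 1
        while (i < keys.length) {
          if (keys(i - 1) == keys(i))
            sys.error("duplicate key " + keys(i) + " in SWITCH, can't pick one branch arbitrarily (SI-6011)")
          i += 1
        }
      }

      def main(args: Array[String]): Unit = {
        assertNoDuplicateKeys(Array(1, 2, 3))    // passes silently
        // assertNoDuplicateKeys(Array(1, 2, 2)) // would throw with the message above
        println("no duplicate keys found")
      }
    }
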
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 21260d399c..9661ae6b3e 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -727,7 +727,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// without it. This is particularly bad because the availability of
// generic information could disappear as a consequence of a seemingly
// unrelated change.
- sym.isSynthetic
+ sym.isHidden
|| sym.isLiftedMethod
|| sym.isBridge
|| (sym.ownerChain exists (_.isImplClass))
@@ -1972,6 +1972,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isHidden) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d4ee9b6b48..5cc6e78e9d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -100,9 +100,29 @@ abstract class DeadCodeElimination extends SubComponent {
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
+
case LOAD_LOCAL(l) =>
defs = defs + Pair(((bb, idx)), rd.vars)
-// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs)
+
+ case STORE_LOCAL(_) =>
+ /* SI-4935 Check whether a module is on the stack top; if so, mark the instruction that loaded it
+ * (otherwise any side effects of the module's constructor would be lost).
+ * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
+ * are already marked (case clause below).
+ * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
+ * will have the module's load marked provided `isSideEffecting(m1)`.
+ * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+ */
+ val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
+ val (bb1, idx1) = p
+ bb1(idx1) match {
+ case LOAD_MODULE(module) => isLoadNeeded(module)
+ case _ => false
+ }
+ }
+ if (necessary) worklist += ((bb, idx))
+
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
@@ -129,6 +149,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
+ private def isLoadNeeded(module: Symbol): Boolean = {
+ module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
+ }
+
/** Mark useful instructions. Instructions in the worklist are each inspected and their
* dependencies are marked useful too, and added to the worklist.
*/
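
An illustrative-only sketch of the SI-4935 rule added above: a STORE_LOCAL is marked useful when one of the reaching definitions of the stored value is a LOAD_MODULE whose constructor may have side effects; the `Instr` cases below are stand-ins for ICode instructions:

    object ModuleStoreSketch {
      sealed trait Instr
      case class LoadModule(name: String, ctorSideEffects: Boolean) extends Instr
      case class Const(value: Int) extends Instr

      // plays the role of the rdef.findDefs(bb, idx, 1) lookup in the patch
      def storeIsNecessary(reachingDefs: List[Instr]): Boolean =
        reachingDefs exists {
          case LoadModule(_, sideEffecting) => sideEffecting
          case _                            => false
        }

      def main(args: Array[String]): Unit = {
        println(storeIsNecessary(List(LoadModule("Predef", ctorSideEffects = true)))) // true: keep the store
        println(storeIsNecessary(List(Const(42))))                                    // false: store may be eliminated
      }
    }
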
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index f332e8cfdd..44acfed411 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -12,6 +12,29 @@ import scala.tools.nsc.symtab._
import scala.reflect.internal.util.NoSourceFile
/**
+ * The inliner balances two competing goals:
+ * (a) aggressive inlining of:
+ * (a.1) the apply methods of anonymous closures, so that their anon-classes can be eliminated;
+ * (a.2) higher-order methods defined in an external library, e.g. `Range.foreach()` among many others.
+ * (b) circumventing the barrier to inter-library inlining that private accesses in the callee impose.
+ *
+ * Summing up the discussion in SI-5442 and SI-5891,
+ * the current implementation largely achieves both goals above, and
+ * overcomes a problem exhibited by previous versions:
+ *
+ * (1) Problem: attempting to access a private member `p` at runtime results in an `IllegalAccessError`
+ * when `p` is defined in a library L and accessed from a library C (for Client),
+ * where C was compiled against L', an optimized version of L in which the inliner made `p` public at the bytecode level.
+ * The only such members are fields, either synthetic or isParamAccessor, and thus have a dollar sign in their name
+ * (the accessibility of methods and constructors isn't touched by the inliner).
+ *
+ * Thus we add one more goal to our list:
+ * (c) Compile C (either optimized or not) against either L or L',
+ * so that it runs with either L or L' (in particular, compile against L' and run with L).
+ *
+ * The chosen strategy is described in some detail in the comments for `accessRequirements()` and `potentiallyPublicized()`.
+ * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2011Q4/Inliner.pdf
+ *
* @author Iulian Dragos
*/
abstract class Inliners extends SubComponent {
@@ -50,6 +73,8 @@ abstract class Inliners extends SubComponent {
)
def lookup(clazz: Symbol): Symbol = {
// println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
+ ", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
else sym.overridingSymbol(clazz) match {
case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
@@ -86,13 +111,27 @@ abstract class Inliners extends SubComponent {
def name = phaseName
val inliner = new Inliner
- override def apply(c: IClass) {
- inliner analyzeClass c
+ object iclassOrdering extends Ordering[IClass] {
+ def compare(a: IClass, b: IClass) = {
+ val sourceNamesComparison = (a.cunit.toString() compare b.cunit.toString())
+ if(sourceNamesComparison != 0) sourceNamesComparison
+ else {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
}
+ val queue = new mutable.PriorityQueue[IClass]()(iclassOrdering)
+
+ override def apply(c: IClass) { queue += c }
override def run() {
try {
super.run()
+ for(c <- queue) { inliner analyzeClass c }
} finally {
inliner.clearCaches()
}
@@ -138,7 +177,8 @@ abstract class Inliners extends SubComponent {
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
- private def getRecentTFA(incm: IMethod): (Boolean, analysis.MethodTFA) = {
+
+ private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
def containsRETURN(blocks: List[BasicBlock]) = blocks exists { bb => bb.lastInstruction.isInstanceOf[RETURN] }
@@ -154,7 +194,7 @@ abstract class Inliners extends SubComponent {
var a: analysis.MethodTFA = null
if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
- if(hasInline(incm.symbol)) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
+ if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
(hasRETURN, a)
}
@@ -174,12 +214,22 @@ abstract class Inliners extends SubComponent {
tfa.isOnWatchlist.clear()
}
+ object imethodOrdering extends Ordering[IMethod] {
+ def compare(a: IMethod, b: IMethod) = {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
+
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
debuglog("Analyzing " + cls)
this.currentIClazz = cls
- val ms = cls.methods filterNot { _.symbol.isConstructor }
+ val ms = cls.methods filterNot { _.symbol.isConstructor } sorted imethodOrdering
ms foreach { im =>
if(hasInline(im.symbol)) {
log("Not inlining into " + im.symbol.originalName.decode + " because it is marked @inline.")
@@ -221,7 +271,7 @@ abstract class Inliners extends SubComponent {
* The ensuing analysis of each candidate (performed by `analyzeInc()`)
* may result in a CFG isomorphic to that of the callee being inserted in place of the callsite
* (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG,
- * a situation we call "successful inlining").
+ * a substitution we call "successful inlining").
*
* (3) following iterations have `relevantBBs` updated to focus on the inlined basic blocks and their successors only.
* Details in `MTFAGrowable.reinit()`
@@ -280,20 +330,23 @@ abstract class Inliners extends SubComponent {
}
/**
- Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
- at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
-
+ * Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
+ * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
+ *
*/
def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
+ assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
+
var inlined = false
- val msym = i.method
+ val shouldWarn = hasInline(i.method)
- def warnNoInline(reason: String) = {
- if (hasInline(msym) && !caller.isBridge)
- warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
- }
+ def warnNoInline(reason: String) = {
+ if (shouldWarn) {
+ warn(i.pos, "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason))
+ }
+ }
- def isAvailable = icodes available concreteMethod.enclClass
+ var isAvailable = icodes available concreteMethod.enclClass
if (!isAvailable && shouldLoadImplFor(concreteMethod, receiver)) {
// Until r22824 this line was:
@@ -304,21 +357,23 @@ abstract class Inliners extends SubComponent {
// was the proximate cause for SI-3882:
// error: Illegal index: 0 overlaps List((variable par1,LONG))
// error: Illegal index: 0 overlaps List((variable par1,LONG))
- icodes.load(concreteMethod.enclClass)
+ isAvailable = icodes.load(concreteMethod.enclClass)
}
- def isCandidate = (
- isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinal
- || receiver.isEffectivelyFinal
- )
- def isApply = concreteMethod.name == nme.apply
- def isCountable = !(
- isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
+ def isCandidate = (
+ isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isEffectivelyFinal
+ )
+
+ def isApply = concreteMethod.name == nme.apply
+
+ def isCountable = !(
+ isClosureClass(receiver)
+ || isApply
+ || isMonadicMethod(concreteMethod)
+ || receiver.enclosingPackage == definitions.RuntimePackage
+ ) // only count non-closures
debuglog("Treating " + i
+ "\n\treceiver: " + receiver
@@ -327,42 +382,62 @@ abstract class Inliners extends SubComponent {
if (isAvailable && isCandidate) {
lookupIMethod(concreteMethod, receiver) match {
- case Some(callee) =>
+
+ case Some(callee) if callee.hasCode =>
val inc = new IMethodInfo(callee)
val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- if (pair isStampedForInlining stackLength) {
- retry = true
- inlined = true
- if (isCountable)
- count += 1
-
- pair.doInline(bb, i)
- if (!inc.inline || inc.isMonadic)
- caller.inlinedCalls += 1
- inlinedMethodCount(inc.sym) += 1
-
- /* Remove this method from the cache, as the calls-private relation
- * might have changed after the inlining.
- */
- usesNonPublics -= m
- recentTFAs -= m.symbol
- }
- else {
- if (settings.debug.value)
- pair logFailure stackLength
+ (pair isStampedForInlining stackLength) match {
+
+ case inlInfo if inlInfo.isSafe =>
+
+ (inlInfo: @unchecked) match {
+
+ case FeasibleInline(accessNeeded, toBecomePublic) =>
+ for(f <- toBecomePublic) {
+ debuglog("Making public (synthetic) field-symbol: " + f)
+ f setFlag Flags.notPRIVATE
+ f setFlag Flags.notPROTECTED
+ }
+ // only add to `knownSafe` after all `toBecomePublic` fields actually made public.
+ if(accessNeeded == NonPublicRefs.Public) { tfa.knownSafe += inc.sym }
+
+ case InlineableAtThisCaller => ()
+
+ }
+
+ retry = true
+ inlined = true
+ if (isCountable) { count += 1 };
+
+ pair.doInline(bb, i)
+ if (!pair.isInlineForced || inc.isMonadic) { caller.inlinedCalls += 1 };
+ inlinedMethodCount(inc.sym) += 1
- warnNoInline(pair failureReason stackLength)
+ // Remove the caller from the cache (this inlining might have changed its calls-private relation).
+ usesNonPublics -= m
+ recentTFAs -= m.symbol
+
+
+ case DontInlineHere(msg) =>
+ debuglog("inline failed, reason: " + msg)
+ warnNoInline(msg)
+
+ case NeverSafeToInline => ()
}
+
+ case Some(callee) =>
+ assert(!callee.hasCode, "The case clause right before this one should have handled this case.")
+ warnNoInline("callee (" + callee + ") has no code")
+ ()
+
case None =>
warnNoInline("bytecode was not available")
debuglog("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
}
+ } else {
+ warnNoInline(if(!isAvailable) "bytecode was not available" else "it can be overridden")
}
- else warnNoInline(
- if (!isAvailable) "bytecode was not available"
- else "it can be overridden"
- )
inlined
}
@@ -398,6 +473,7 @@ abstract class Inliners extends SubComponent {
tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
tfa.run
+
staleOut.clear()
splicedBlocks.clear()
staleIn.clear()
@@ -491,11 +567,8 @@ abstract class Inliners extends SubComponent {
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
- def inline = hasInline(sym)
- def noinline = hasNoInline(sym)
-
def isBridge = sym.isBridge
- def isInClosure = isClosureClass(owner)
+ val isInClosure = isClosureClass(owner)
val isHigherOrder = isHigherOrderMethod(sym)
def isMonadic = isMonadicMethod(sym)
@@ -511,20 +584,131 @@ abstract class Inliners extends SubComponent {
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
def hasHandlers = handlers.nonEmpty
+
+ def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
case _: Finalizer => true
case _ => false
}
- // the number of inlined calls in 'm', used by 'shouldInline'
+ // the number of inlined calls in 'm', used by 'isScoreOK'
var inlinedCalls = 0
def addLocals(ls: List[Local]) = m.locals ++= ls
def addLocal(l: Local) = addLocals(List(l))
def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
+
+ /**
+ * This method inspects the callee's instructions, finding out the most restrictive accessibility implied by them.
+ *
+ * Rather than giving up upon encountering an access to a private field `p`, it provisionally admits `p` as "can-be-made-public", provided:
+ * - `p` is being compiled as part of this compilation run, and
+ * - `p` is synthetic or param-accessor.
+ *
+ * This method is side-effect free, in particular it lets the invoker decide
+ * whether the accessibility of the `toBecomePublic` fields should be changed or not.
+ */
+ def accessRequirements: AccessReq = {
+
+ var toBecomePublic: List[Symbol] = Nil
+
+ def check(sym: Symbol, cond: Boolean) =
+ if (cond) Private
+ else if (sym.isProtected) Protected
+ else Public
+
+ def canMakePublic(f: Symbol): Boolean =
+ (m.sourceFile ne NoSourceFile) &&
+ (f.isSynthetic || f.isParamAccessor) &&
+ { toBecomePublic = f :: toBecomePublic; true }
+
+ /* A safety check to consider as private, for the purposes of inlining, a public field that:
+ * (1) is defined in an external library, and
+ * (2) can be presumed synthetic (due to a dollar sign in its name).
+ * Such a field was made public by `doMakePublic()` and we don't want to rely on that,
+ * because under other compilation conditions (i.e. no -optimize) that won't be the case anymore.
+ *
+ * This allows aggressive intra-library inlining (making public if needed)
+ * that does not break inter-library scenarios (see comment for `Inliners`).
+ *
+ * TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
+ * (eg by having ICodeReader use unpickler, see SI-5442).
+ * */
+ def potentiallyPublicized(f: Symbol): Boolean = {
+ (m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
+ }
+
+ def checkField(f: Symbol) = check(f, potentiallyPublicized(f) ||
+ (f.isPrivate && !canMakePublic(f)))
+ def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
+ def checkMethod(n: Symbol) = check(n, n.isPrivate)
+
+ def getAccess(i: Instruction) = i match {
+ case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
+ case CALL_METHOD(n, _) => checkMethod(n)
+ case LOAD_FIELD(f, _) => checkField(f)
+ case STORE_FIELD(f, _) => checkField(f)
+ case _ => Public
+ }
+
+ var seen = Public
+ val iter = instructions.iterator
+ while((seen ne Private) && iter.hasNext) {
+ val i = iter.next()
+ getAccess(i) match {
+ case Private =>
+ log("instruction " + i + " requires private access.")
+ toBecomePublic = Nil
+ seen = Private
+ case Protected => seen = Protected
+ case _ => ()
+ }
+ }
+
+ AccessReq(seen, toBecomePublic)
+ }
+
}
- class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+ /**
+ * Classifies a pair (caller, callee) into one of four categories:
+ *
+ * (a) inlining should be performed, classified in turn into:
+ * (a.1) `InlineableAtThisCaller`: unconditionally at this caller
+ * (a.2) `FeasibleInline`: it only remains for certain access requirements to be met (see `IMethodInfo.accessRequirements()`)
+ *
+ * (b) inlining shouldn't be performed, classified in turn into:
+ * (b.1) `DontInlineHere`: indicates that this particular occurrence of the callee at the caller shouldn't be inlined.
+ * - Nothing is said about the outcome for other callers, or for other occurrences of the callee for the same caller.
+ * - In particular inlining might be possible, but heuristics gave a low score for it.
+ * (b.2) `NeverSafeToInline`: the callee can't be inlined anywhere, irrespective of caller.
+ *
+ * The classification above is computed by `isStampedForInlining()` based on which `analyzeInc()` goes on to:
+ * - either log the reason for failure --- case (b) ---,
+ * - or perform inlining --- case (a) ---.
+ */
+ sealed abstract class InlineSafetyInfo {
+ def isSafe = false
+ def isUnsafe = !isSafe
+ }
+ case object NeverSafeToInline extends InlineSafetyInfo
+ case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
+ case class DontInlineHere(msg: String) extends InlineSafetyInfo
+ case class FeasibleInline(accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]) extends InlineSafetyInfo {
+ override def isSafe = true
+ }
+
+ case class AccessReq(
+ accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]
+ )
+
+ final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+
+ assert(!caller.isBridge && inc.m.hasCode,
+ "A guard in Inliner.analyzeClass() should have prevented from getting here.")
+
def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
private def freshName(s: String): TermName = {
@@ -532,6 +716,12 @@ abstract class Inliners extends SubComponent {
newTermName(s + fresh(s))
}
+ private def isKnownToInlineSafely: Boolean = { tfa.knownSafe(inc.sym) }
+
+ val isInlineForced = hasInline(inc.sym)
+ val isInlineForbidden = hasNoInline(inc.sym)
+ assert(!(isInlineForced && isInlineForbidden), "method ("+inc.m+") marked both @inline and @noinline.")
+
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
@@ -549,7 +739,7 @@ abstract class Inliners extends SubComponent {
def newLocal(baseName: String, kind: TypeKind) =
new Local(caller.sym.newVariable(freshName(baseName), targetPos), kind, false)
- val (hasRETURN, a) = getRecentTFA(inc.m)
+ val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
/* The exception handlers that are active at the current block. */
val activeHandlers = caller.handlers filter (_ covered block)
@@ -709,129 +899,94 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value) icodes.checkValid(caller.m)
}
- def isStampedForInlining(stackLength: Int) =
- !sameSymbols && inc.m.hasCode && shouldInline &&
- isSafeToInline(stackLength) // `isSafeToInline()` must be invoked last in this AND expr bc it mutates the `knownSafe` and `knownUnsafe` maps for good.
-
- def logFailure(stackLength: Int) = log(
- """|inline failed for %s:
- | pair.sameSymbols: %s
- | inc.numInlined < 2: %s
- | inc.m.hasCode: %s
- | isSafeToInline: %s
- | shouldInline: %s
- """.stripMargin.format(
- inc.m, sameSymbols, inlinedMethodCount(inc.sym) < 2,
- inc.m.hasCode, isSafeToInline(stackLength), shouldInline
- )
- )
-
- def failureReason(stackLength: Int) =
- if (!inc.m.hasCode) "bytecode was unavailable"
- else if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) "method is synchronized"
- else if (!isSafeToInline(stackLength)) "it is unsafe (target may reference private fields)"
- else "of a bug (run with -Ylog:inline -Ydebug for more information)"
+ def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
- def canAccess(level: NonPublicRefs.Value) = level match {
- case Private => caller.owner == inc.owner
- case Protected => caller.owner.tpe <:< inc.owner.tpe
- case Public => true
- }
- private def sameSymbols = caller.sym == inc.sym
- private def sameOwner = caller.owner == inc.owner
+ if(tfa.blackballed(inc.sym)) { return NeverSafeToInline }
- /** A method is safe to inline when:
- * - it does not contain calls to private methods when called from another class
- * - it is not inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * - it is not recursive
- * Note:
- * - synthetic private members are made public in this pass.
- */
- def isSafeToInline(stackLength: Int): Boolean = {
+ if(!isKnownToInlineSafely) {
- if(tfa.blackballed(inc.sym)) { return false }
- if(tfa.knownSafe(inc.sym)) { return true }
+ if(inc.openBlocks.nonEmpty) {
+ val msg = ("Encountered " + inc.openBlocks.size + " open block(s) in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller.m + ", callee = " + inc.m)
+ warn(inc.sym.pos, msg)
+ tfa.knownNever += inc.sym
+ return DontInlineHere("Open blocks in " + inc.m)
+ }
- if(helperIsSafeToInline(stackLength)) {
- tfa.knownSafe += inc.sym; true
- } else {
- tfa.knownUnsafe += inc.sym; false
- }
- }
+ val reasonWhyNever: String = {
+ var rs: List[String] = Nil
+ if(inc.isRecursive) { rs ::= "is recursive" }
+ if(isInlineForbidden) { rs ::= "is annotated @noinline" }
+ if(inc.isSynchronized) { rs ::= "is synchronized method" }
+ if(rs.isEmpty) null else rs.mkString("", ", and ", "")
+ }
- private def helperIsSafeToInline(stackLength: Int): Boolean = {
- def makePublic(f: Symbol): Boolean =
- /*
- * Completely disabling member publifying. This shouldn't have been done in the first place. :|
- */
- false
- // (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
- // debuglog("Making not-private symbol out of synthetic: " + f)
-
- // f setNotFlag Flags.PRIVATE
- // true
- // }
-
- if (!inc.m.hasCode || inc.isRecursive) { return false }
- if (inc.m.symbol.hasFlag(Flags.SYNCHRONIZED)) { return false }
-
- val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
- // Avoiding crashing the compiler if there are open blocks.
- inc.openBlocks foreach { b =>
- warn(inc.sym.pos,
- "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller.m + ", callee = " + inc.m
- )
- return false
+ if(reasonWhyNever != null) {
+ tfa.knownNever += inc.sym
+ // next time around NeverSafeToInline is returned, thus skipping the (duplicate) msg; this is intended.
+ return DontInlineHere(inc.m + " " + reasonWhyNever)
}
- def check(sym: Symbol, cond: Boolean) =
- if (cond) Private
- else if (sym.isProtected) Protected
- else Public
-
- def checkField(f: Symbol) = check(f, f.isPrivate && !makePublic(f))
- def checkSuper(m: Symbol) = check(m, m.isPrivate || !m.isClassConstructor)
- def checkMethod(m: Symbol) = check(m, m.isPrivate)
-
- def getAccess(i: Instruction) = i match {
- case CALL_METHOD(m, SuperCall(_)) => checkSuper(m)
- case CALL_METHOD(m, _) => checkMethod(m)
- case LOAD_FIELD(f, _) => checkField(f)
- case STORE_FIELD(f, _) => checkField(f)
- case _ => Public
+
+ if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
+ tfa.knownUnsafe += inc.sym;
+ return DontInlineHere("sameSymbols (ie caller == callee)")
}
- def iterate(): NonPublicRefs.Value = inc.instructions.foldLeft(Public)((res, inc) => getAccess(inc) match {
- case Private => log("instruction " + inc + " requires private access.") ; return Private
- case Protected => Protected
- case Public => res
- })
- iterate()
- })
+ }
- canAccess(accessNeeded) && {
- val isIllegalStack = (stackLength > inc.minimumStack && inc.hasNonFinalizerHandler)
+ /*
+ * From here on, two main categories of checks remain, (a) and (b) below:
+ * (a.1) either the scoring heuristics give green light; or
+ * (a.2) forced as candidate due to @inline.
+ * After that, safety proper is checked:
+ * (b.1) the callee does not contain calls to private methods when called from another class
+ * (b.2) the callee is not going to be inlined into a position with non-empty stack,
+ * while having a top-level finalizer (see liftedTry problem)
+ * As a result of (b), some synthetic private members can be chosen to become public.
+ */
- !isIllegalStack || {
- debuglog("method " + inc.sym + " is used on a non-empty stack with finalizer.")
- false
- }
+ if(!isInlineForced && !isScoreOK) {
+ // During inlining retry, a previous caller-callee pair that scored low may pass.
+ // Thus, adding the callee to tfa.knownUnsafe isn't warranted.
+ return DontInlineHere("too low score (heuristics)")
+ }
+
+ if(isKnownToInlineSafely) { return InlineableAtThisCaller }
+
+ if(stackLength > inc.minimumStack && inc.hasNonFinalizerHandler) {
+ val msg = "method " + inc.sym + " is used on a non-empty stack with finalizer."
+ debuglog(msg)
+ // FYI: not reason enough to add inc.sym to tfa.knownUnsafe (because at other callsite in this caller, inlining might be ok)
+ return DontInlineHere(msg)
+ }
+
+ val accReq = inc.accessRequirements
+ if(!canAccess(accReq.accessNeeded)) {
+ tfa.knownUnsafe += inc.sym
+ return DontInlineHere("access level required by callee not matched by caller")
}
+
+ FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
+
}
- /** Decide whether to inline or not. Heuristics:
+ def canAccess(level: NonPublicRefs.Value) = level match {
+ case Private => caller.owner == inc.owner
+ case Protected => caller.owner.tpe <:< inc.owner.tpe
+ case Public => true
+ }
+ private def sameSymbols = caller.sym == inc.sym
+ private def sameOwner = caller.owner == inc.owner
+
+ /** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
* - it's bad to inline large methods
* - it's good to inline higher order functions
 * - it's good to inline closure functions.
* - it's bad (useless) to inline inside bridge methods
*/
- private def neverInline = caller.isBridge || !inc.m.hasCode || inc.noinline
- private def alwaysInline = inc.inline
-
- def shouldInline: Boolean = !neverInline && (alwaysInline || {
- debuglog("shouldInline: " + caller.m + " with " + inc.m)
+ def isScoreOK: Boolean = {
+ debuglog("shouldInline: " + caller.m + " , callee:" + inc.m)
var score = 0
@@ -855,7 +1010,7 @@ abstract class Inliners extends SubComponent {
log("shouldInline(" + inc.m + ") score: " + score)
score > 0
- })
+ }
}
def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
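
The InlineSafetyInfo hierarchy introduced above separates a hard veto (NeverSafeToInline), a per-callsite refusal (DontInlineHere), and the two safe outcomes (InlineableAtThisCaller, FeasibleInline). A simplified standalone Scala sketch of how such a classification can be produced and consumed; the decision inputs and field names are placeholders, not the real compiler types:

    object InlineDecisionSketch {
      sealed abstract class InlineSafetyInfo { def isSafe = false }
      case object NeverSafeToInline extends InlineSafetyInfo
      case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
      case class DontInlineHere(msg: String) extends InlineSafetyInfo
      case class FeasibleInline(toBecomePublic: List[String]) extends InlineSafetyInfo {
        override def isSafe = true
      }

      // Hypothetical decision procedure mirroring the shape of isStampedForInlining:
      // veto first, then heuristics, then access requirements.
      def classify(recursive: Boolean, scoreOK: Boolean, privateFields: List[String]): InlineSafetyInfo =
        if (recursive) NeverSafeToInline
        else if (!scoreOK) DontInlineHere("too low score (heuristics)")
        else if (privateFields.isEmpty) InlineableAtThisCaller
        else FeasibleInline(privateFields)

      def main(args: Array[String]): Unit = {
        classify(recursive = false, scoreOK = true, privateFields = List("f$1")) match {
          case FeasibleInline(toMakePublic) => println("inline after publicizing " + toMakePublic)
          case InlineableAtThisCaller       => println("inline as-is")
          case DontInlineHere(msg)          => println("skip here: " + msg)
          case NeverSafeToInline            => println("never inline this callee")
        }
      }
    }
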
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index e2e1ddf065..3c92c3b4b6 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -81,20 +81,22 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
new { override val global: compiler.type = compiler }
with model.ModelFactory(compiler, settings)
with model.ModelFactoryImplicitSupport
+ with model.diagram.DiagramFactory
with model.comment.CommentFactory
with model.TreeFactory {
- override def templateShouldDocument(sym: compiler.Symbol) =
- extraTemplatesToDocument(sym) || super.templateShouldDocument(sym)
+ override def templateShouldDocument(sym: compiler.Symbol, inTpl: TemplateImpl) =
+ extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl)
}
)
modelFactory.makeModel match {
case Some(madeModel) =>
- if (settings.reportModel)
+ if (!settings.scaladocQuietRun)
println("model contains " + modelFactory.templatesCount + " documentable templates")
Some(madeModel)
case None =>
- println("no documentable class found in compilation units")
+ if (!settings.scaladocQuietRun)
+ println("no documentable class found in compilation units")
None
}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 4458889d55..31e49131f6 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -11,8 +11,9 @@ import java.lang.System
import language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
- * @param error A function that prints a string to the appropriate error stream. */
-class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
+ * @param error A function that prints a string to the appropriate error stream
+ * @param printMsg A function that prints the string, without any extra error boilerplate */
+class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
* `html`. */
@@ -104,6 +105,12 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
"(for example conversions that require Numeric[String] to be in scope)"
)
+ val docImplicitsSoundShadowing = BooleanSetting (
+ "-implicits-sound-shadowing",
+ "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
+ "only use it if you haven't defined usecase for implicitly inherited members."
+ )
+
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -116,10 +123,49 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
val docDiagramsDotPath = PathSetting (
"-diagrams-dot-path",
- "The path to the dot executable used to generate the inheritance diagrams. Ex: /usr/bin/dot",
+ "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
"dot" // by default, just pick up the system-wide dot
)
+ /** The maximum number of normal classes to show in the diagram */
+ val docDiagramsMaxNormalClasses = IntSetting(
+ "-diagrams-max-classes",
+ "The maximum number of superclasses or subclasses to show in a diagram",
+ 15,
+ None,
+ _ => None
+ )
+
+ /** The maximum number of implicit classes to show in the diagram */
+ val docDiagramsMaxImplicitClasses = IntSetting(
+ "-diagrams-max-implicits",
+ "The maximum number of implicitly converted classes to show in a diagram",
+ 10,
+ None,
+ _ => None
+ )
+
+ val docDiagramsDotTimeout = IntSetting(
+ "-diagrams-dot-timeout",
+ "The timeout before the graphviz dot util is forecefully closed, in seconds (default: 10)",
+ 10,
+ None,
+ _ => None
+ )
+
+ val docDiagramsDotRestart = IntSetting(
+ "-diagrams-dot-restart",
+ "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
+ 5,
+ None,
+ _ => None
+ )
+
+ val docRawOutput = BooleanSetting (
+ "-raw-output",
+ "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
+ )
+
// Somewhere slightly before r18708 scaladoc stopped building unless the
// self-type check was suppressed. I hijacked the slotted-for-removal-anyway
// suppress-vt-warnings option and renamed it for this purpose.
@@ -129,14 +175,16 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
docDiagrams, docDiagramsDebug, docDiagramsDotPath,
- docImplicits, docImplicitsDebug, docImplicitsShowAll
+ docDiagramsDotTimeout, docDiagramsDotRestart,
+ docImplicits, docImplicitsDebug, docImplicitsShowAll,
+ docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
override def isScaladoc = true
- // unset by the testsuite, we don't need to count the entities in the model
- var reportModel = true
+ // set by the testsuite, when checking test output
+ var scaladocQuietRun = false
/**
* This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
@@ -150,15 +198,16 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
* the function result should be a humanly-understandable description of the type class
*/
val knownTypeClasses: Map[String, String => String] = Map() +
- ("<root>.scala.package.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
- ("<root>.scala.package.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
- ("<root>.scala.package.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
- ("<root>.scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
- ("<root>.scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
- ("<root>.scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
- ("<root>.scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
- ("<root>.scala.reflect.AbsTypeTag" -> ((tparam: String) => tparam + " is accompanied by an AbsTypeTag, which is a runtime representation of its type that survives erasure")) +
- ("<root>.scala.reflect.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
+ // TODO: Bring up to date and test these
+ ("scala.package.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+ ("scala.package.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+ ("scala.package.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+ ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
+ ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.AbsTypeTag" -> ((tparam: String) => tparam + " is accompanied by an AbsTypeTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
/**
* Set of classes to exclude from index and diagrams
@@ -182,7 +231,8 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
"scala.Predef.any2stringfmt",
"scala.Predef.any2stringadd",
"scala.Predef.any2ArrowAssoc",
- "scala.Predef.any2Ensuring")
+ "scala.Predef.any2Ensuring",
+ "scala.collection.TraversableOnce.alternateImplicit")
/** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
val arraySkipConversions = List(
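
knownTypeClasses maps a qualified type-class name to a function that renders a readable sentence for a given type parameter. A standalone sketch (not from this patch) of how such a table might be consulted, with a hedged generic fallback for unknown type classes:

    object TypeClassDescriptionSketch {
      val knownTypeClasses: Map[String, String => String] = Map(
        "scala.package.Numeric"  -> ((tp: String) => tp + " is a numeric class, such as Int, Long, Float or Double"),
        "scala.reflect.ClassTag" -> ((tp: String) => tp + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")
      )

      // fall back to a generic sentence when the type class is not in the table
      def describe(typeClass: String, tparam: String): String =
        knownTypeClasses.get(typeClass) match {
          case Some(fmt) => fmt(tparam)
          case None      => tparam + " has an implicit instance of " + typeClass + " in scope"
        }

      def main(args: Array[String]): Unit =
        println(describe("scala.package.Numeric", "T"))
    }
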
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 914824d523..51c5793d46 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -11,6 +11,9 @@ import model._
import java.io.{ File => JFile }
import io.{ Streamable, Directory }
import scala.collection._
+import page.diagram._
+
+import html.page.diagram.DiagramGenerator
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
@@ -29,21 +32,27 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"jquery.js",
"jquery.layout.js",
"scheduler.js",
+ "diagrams.js",
"template.js",
"tools.tooltip.js",
+ "modernizr.custom.js",
"index.css",
"ref-index.css",
"template.css",
+ "diagrams.css",
"class.png",
"class_big.png",
+ "class_diagram.png",
"object.png",
"object_big.png",
+ "object_diagram.png",
"package.png",
"package_big.png",
"trait.png",
"trait_big.png",
+ "trait_diagram.png",
"class_to_object_big.png",
"object_to_class_big.png",
@@ -105,6 +114,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
finally out.close()
}
+ DiagramGenerator.initialize(universe.settings)
+
libResources foreach (s => copyResource("lib/" + s))
new page.Index(universe, index) writeFor this
@@ -115,14 +126,17 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
for (letter <- index.firstLetterIndex) {
new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
}
+
+ DiagramGenerator.cleanup()
}
def writeTemplates(writeForThis: HtmlPage => Unit) {
val written = mutable.HashSet.empty[DocTemplateEntity]
+ val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
- writeForThis(new page.Template(universe, tpl))
+ writeForThis(new page.Template(universe, diagramGenerator, tpl))
written += tpl
tpl.templates map writeTemplate
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index e3da8bddea..4a1a8cf898 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -13,7 +13,7 @@ import comment._
import xml.{XML, NodeSeq}
import xml.dtd.{DocType, PublicID}
import scala.collection._
-import java.nio.channels.Channels
+import java.io.Writer
/** An html page that is part of a Scaladoc site.
* @author David Bernard
@@ -52,17 +52,19 @@ abstract class HtmlPage extends Page { thisPage =>
</head>
{ body }
</html>
- val fos = createFileOutputStream(site)
- val w = Channels.newWriter(fos.getChannel, site.encoding)
- try {
+
+ writeFile(site) { (w: Writer) =>
w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
w.write(doctype.toString + "\n")
w.write(xml.Xhtml.toXhtml(html))
}
- finally {
- w.close()
- fos.close()
- }
+
+ if (site.universe.settings.docRawOutput.value)
+ writeFile(site, ".raw") {
+ // we're only interested in the body, as this will go into the diff
+ _.write(body.text)
+ }
+
//XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
}
@@ -116,11 +118,25 @@ abstract class HtmlPage extends Page { thisPage =>
case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
- case EntityLink(entity) => templateToHtml(entity)
case Monospace(in) => <code>{ inlineToHtml(in) }</code>
case Text(text) => xml.Text(text)
case Summary(in) => inlineToHtml(in)
case HtmlTag(tag) => xml.Unparsed(tag)
+ case EntityLink(target, template) => template() match {
+ case Some(tpl) =>
+ templateToHtml(tpl)
+ case None =>
+ xml.Text(target)
+ }
+ }
+
+ def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
+ case Nil =>
+ sys.error("Internal Scaladoc error")
+ case List(tpe) =>
+ typeToHtml(tpe, hasLinks)
+ case tpe :: rest =>
+ typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
}
def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
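
EntityLink now carries the link text plus a thunk returning an optional template, so rendering can degrade to plain text when the target cannot be resolved. A standalone sketch of that pattern, with TemplateEntity and templateToHtml reduced to string-based stand-ins:

    object LazyLinkSketch {
      case class TemplateEntity(qualifiedName: String)
      case class EntityLink(target: String, template: () => Option[TemplateEntity])

      def templateToHtml(tpl: TemplateEntity): String =
        "<a href=\"" + tpl.qualifiedName + ".html\">" + tpl.qualifiedName + "</a>"

      def render(link: EntityLink): String = link.template() match {
        case Some(tpl) => templateToHtml(tpl)  // resolved: emit a real link
        case None      => link.target          // unresolved: emit the raw text
      }

      def main(args: Array[String]): Unit = {
        println(render(EntityLink("scala.Option", () => Some(TemplateEntity("scala.Option")))))
        println(render(EntityLink("some.Unknown", () => None)))
      }
    }
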
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
index c5bf3e0e37..5e3ab87ccd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala
@@ -8,6 +8,8 @@ package scala.tools.nsc.doc.html
import scala.tools.nsc.doc.model._
import java.io.{FileOutputStream, File}
import scala.reflect.NameTransformer
+import java.nio.channels.Channels
+import java.io.Writer
abstract class Page {
thisPage =>
@@ -20,8 +22,8 @@ abstract class Page {
def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
- def createFileOutputStream(site: HtmlFactory) = {
- val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
+ def createFileOutputStream(site: HtmlFactory, suffix: String = "") = {
+ val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix)
val folder = file.getParentFile
if (! folder.exists) {
folder.mkdirs
@@ -29,6 +31,18 @@ abstract class Page {
new FileOutputStream(file.getPath)
}
+ def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = {
+ val fos = createFileOutputStream(site, suffix)
+ val w = Channels.newWriter(fos.getChannel, site.encoding)
+ try {
+ fn(w)
+ }
+ finally {
+ w.close()
+ fos.close()
+ }
+ }
+
/** Writes this page as a file. The file's location is relative to the
* generator's site root, and the encoding is also defined by the generator.
* @param generator The generator that is writing this page. */
@@ -44,7 +58,7 @@ abstract class Page {
def templateToPath(tpl: TemplateEntity): List[String] = {
def doName(tpl: TemplateEntity): String =
- NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
+ (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
def downPacks(pack: Package): List[String] =
if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
@@ -83,18 +97,4 @@ abstract class Page {
}
relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
}
-
- def isExcluded(dtpl: DocTemplateEntity) = {
- val qname = dtpl.qualifiedName
- ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
- qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
- ) && !(
- qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
- qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
- qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
- qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
- qname == "scala.runtime.AbstractFunction2"
- )
- )
- }
}
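
writeFile factors the create/write/close dance into a loan pattern: the caller only supplies a function from Writer to Unit. A standalone sketch of the same pattern using plain java.io (the real helper also resolves the page path against the site root and honours the site encoding):

    import java.io.{File, FileOutputStream, OutputStreamWriter, Writer}

    object WriteFileSketch {
      def writeFile(file: File, encoding: String = "UTF-8")(fn: Writer => Unit): Unit = {
        val fos = new FileOutputStream(file)
        val w   = new OutputStreamWriter(fos, encoding)
        try fn(w)
        finally {
          w.close()
          fos.close()
        }
      }

      def main(args: Array[String]): Unit =
        writeFile(new File("index.js")) { w =>
          w.write("Index.PACKAGES = {};")
        }
    }
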
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 8ed13e0da2..0e894a03bf 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -61,7 +61,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
}
<ol class="templates">{
val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates filter (t => !t.isPackage && !isExcluded(t) )) groupBy (_.name)
+ (pack.templates filter (t => !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) )) groupBy (_.name)
val placeholderSeq: NodeSeq = <div class="placeholder"></div>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
index 7edd4937c4..2b68ac2937 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -15,14 +15,8 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
def path = List("index.js")
override def writeFor(site: HtmlFactory) {
- val stream = createFileOutputStream(site)
- val writer = Channels.newWriter(stream.getChannel, site.encoding)
- try {
- writer.write("Index.PACKAGES = " + packages.toString() + ";")
- }
- finally {
- writer.close
- stream.close
+ writeFile(site) {
+ _.write("Index.PACKAGES = " + packages.toString() + ";")
}
}
@@ -68,7 +62,7 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
def allPackagesWithTemplates = {
Map(allPackages.map((key) => {
- key -> key.templates.filter(t => !t.isPackage && !isExcluded(t))
+ key -> key.templates.filter(t => !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName))
}) : _*)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 66189a6854..0d0410c7e2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -9,10 +9,17 @@ package html
package page
import model._
+import model.diagram._
+import diagram._
+
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
import language.postfixOps
-class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage {
+import model._
+import model.diagram._
+import diagram._
+
+class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
val path =
templateToPath(tpl)
@@ -29,10 +36,22 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
val headers =
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
<script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+ { if (universe.settings.docDiagrams.isSetByUser) {
+ <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
+ } else NodeSeq.Empty }
+ <script type="text/javascript">
+ if(top === self) {{
+ var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
+ var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
+ window.location.href = url + '#' + hash;
+ }}
+ </script>
</xml:group>
val valueMembers =
@@ -41,9 +60,12 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
- val (deprValueMembers, concValueMembers) =
+ val (deprValueMembers, nonDeprValueMembers) =
nonAbsValueMembers partition (_.deprecation.isDefined)
+ val (concValueMembers, shadowedImplicitMembers) =
+ nonDeprValueMembers partition (!Template.isShadowedOrAmbiguousImplicit(_))
+
val typeMembers =
tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted
@@ -84,7 +106,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</div>
{ signature(tpl, true) }
- { memberToCommentHtml(tpl, true) }
+ { memberToCommentHtml(tpl, tpl.inTemplate, true) }
<div id="mbrsel">
<div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
@@ -96,7 +118,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
{ if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
{
- if (!tpl.linearization.isEmpty)
+ if (!tpl.linearizationTemplates.isEmpty)
<div id="ancestors">
<span class="filtertype">Inherited<br/>
</span>
@@ -122,7 +144,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
- <a href="docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
+ <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
</div>
}
{
@@ -138,35 +160,42 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
{ if (constructors.isEmpty) NodeSeq.Empty else
<div id="constructors" class="members">
<h3>Instance Constructors</h3>
- <ol>{ constructors map (memberToHtml(_)) }</ol>
+ <ol>{ constructors map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (typeMembers.isEmpty) NodeSeq.Empty else
<div id="types" class="types members">
<h3>Type Members</h3>
- <ol>{ typeMembers map (memberToHtml(_)) }</ol>
+ <ol>{ typeMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (absValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>Abstract Value Members</h3>
- <ol>{ absValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (concValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
- <ol>{ concValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
+ </div>
+ }
+
+ { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
+ <div id="values" class="values members">
+ <h3>Shadowed Implicit Value Members</h3>
+ <ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>Deprecated Value Members</h3>
- <ol>{ deprValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
</div>
@@ -237,38 +266,39 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
tparamsToString(d.typeParams) + paramLists.mkString
}
- def memberToHtml(mbr: MemberEntity): NodeSeq = {
+ def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
val defParamsString = mbr match {
case d:MemberEntity with Def => defParamsToString(d)
case _ => ""
}
- val memberComment = memberToCommentHtml(mbr, false)
+ val memberComment = memberToCommentHtml(mbr, inTpl, false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString } fullComment={ if(memberComment.isEmpty) "no" else "yes" }>
+ data-isabs={ mbr.isAbstract.toString }
+ fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }>
<a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
{ signature(mbr, false) }
{ memberComment }
</li>
}
- def memberToCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
+ def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = {
mbr match {
case dte: DocTemplateEntity if isSelf =>
// comment of class itself
<xml:group>
- <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, isSelf = true) }</div>
+ <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
</xml:group>
case dte: DocTemplateEntity if mbr.comment.isDefined =>
// comment of inner, documented class (only short comment, full comment is on the class' own page)
memberToInlineCommentHtml(mbr, isSelf)
case _ =>
// comment of non-class member or non-documentented inner class
- val commentBody = memberToCommentBodyHtml(mbr, isSelf = false)
+ val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
if (commentBody.isEmpty)
NodeSeq.Empty
else {
val shortComment = memberToShortCommentHtml(mbr, isSelf)
- val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, isSelf)
+ val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf)
val includedLongComment = if (shortComment.text.trim == longComment.text.trim)
NodeSeq.Empty
@@ -298,7 +328,8 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
<p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
- def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ val s = universe.settings
val memberComment =
if (mbr.comment.isEmpty) NodeSeq.Empty
@@ -383,10 +414,39 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
<dd>
- This member is added by an implicit conversion from { typeToHtml(mbr.inTemplate.resultType, true) } to
+ This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
{ targetType } performed by method { conversionMethod } in { conversionOwner }.
{ constraintText }
</dd>
+ } ++ {
+ if (Template.isShadowedOrAmbiguousImplicit(mbr)) {
+ // These are the members that shadow the current implicit or make it ambiguous
+ // see ImplicitMemberShadowing trait for more information
+ val shadowingSuggestion = {
+ val params = mbr match {
+ case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
+ case _ => "" // no parameters
+ }
+ <br/> ++ xml.Text("To access this member you can use a ") ++
+ <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
+ target="_blank">type ascription</a> ++ xml.Text(":") ++
+ <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
+ }
+
+ val shadowingWarning: NodeSeq =
+ if (Template.isShadowedImplicit(mbr))
+ xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
+ "class.") ++ shadowingSuggestion
+ else if (Template.isAmbiguousImplicit(mbr))
+ xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
+ "inherited members have similar signatures, so calling this member may produce an ambiguous " +
+ "implicit conversion compiler error.") ++ shadowingSuggestion
+ else NodeSeq.Empty
+
+ <dt class="implicit">Shadowing</dt> ++
+ <dd>{ shadowingWarning }</dd>
+
+ } else NodeSeq.Empty
}
case _ =>
NodeSeq.Empty
@@ -404,7 +464,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
val definitionClasses: Seq[scala.xml.Node] = {
val inDefTpls = mbr.inDefinitionTemplates
- if ((inDefTpls.tail.isEmpty && (inDefTpls.head == mbr.inTemplate)) || isReduced) NodeSeq.Empty
+ if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
else {
<dt>Definition Classes</dt>
<dd>{ templatesToHtml(inDefTpls, xml.Text(" → ")) }</dd>
@@ -562,17 +622,29 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.subClasses.nonEmpty =>
+ case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
<div class="toggleContainer block">
<span class="toggle">Known Subclasses</span>
<div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.subClasses.sortBy(_.name), xml.Text(", "))
+ templatesToHtml(dtpl.allSubClasses.sortBy(_.name), xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
}
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses
+ val typeHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
+ case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.inheritanceDiagram.isDefined =>
+ makeDiagramHtml(dtpl, dtpl.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
+ case _ => NodeSeq.Empty
+ } else NodeSeq.Empty // diagrams not generated
+
+ val contentHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
+ case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.contentDiagram.isDefined =>
+ makeDiagramHtml(dtpl, dtpl.contentDiagram, "Content Hierarchy", "content-diagram")
+ case _ => NodeSeq.Empty
+ } else NodeSeq.Empty // diagrams not generated
+
+ memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
def kindToString(mbr: MemberEntity): String = {
@@ -605,13 +677,13 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("private")))
case PrivateInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("private["), EntityLink(owner), CText("]")))))
+ Some(Paragraph(Chain(List(CText("private["), EntityLink(owner.qualifiedName, () => Some(owner)), CText("]")))))
case ProtectedInInstance() =>
Some(Paragraph(CText("protected[this]")))
case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("protected")))
case ProtectedInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner), CText("]")))))
+ Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner.qualifiedName, () => Some(owner)), CText("]")))))
case Public() =>
None
}
@@ -627,7 +699,15 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</span>
<span class="symbol">
{
- val nameClass = if (mbr.byConversion.isDefined) "implicit" else "name"
+ val nameClass =
+ if (Template.isImplicit(mbr))
+ if (Template.isShadowedOrAmbiguousImplicit(mbr))
+ "implicit shadowed"
+ else
+ "implicit"
+ else
+ "name"
+
val nameHtml = {
val value = if (mbr.isConstructor) tpl.name else mbr.name
val span = if (mbr.deprecation.isDefined)
@@ -650,7 +730,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<a href={nameLink}>{nameHtml}</a>
else nameHtml
}{
- def tparamsToHtml(mbr: Entity): NodeSeq = mbr match {
+ def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
case hk: HigherKinded =>
val tpss = hk.typeParams
if (tpss.isEmpty) NodeSeq.Empty else {
@@ -662,7 +742,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
<span class="tparams">[{ tparams0(tpss) }]</span>
}
- case _ => NodeSeq.Empty
+ case _ => NodeSeq.Empty
}
tparamsToHtml(mbr)
}{
@@ -699,8 +779,8 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
}{ if (isReduced) NodeSeq.Empty else {
mbr match {
- case tpl: DocTemplateEntity if tpl.parentType.isDefined =>
- <span class="result"> extends { typeToHtml(tpl.parentType.get, hasLinks) }</span>
+ case tpl: DocTemplateEntity if !tpl.parentTypes.isEmpty =>
+ <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
<span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
@@ -871,4 +951,31 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
typeToHtml(ub.upperBound, true) ++ xml.Text(")")
}
+ def makeDiagramHtml(tpl: DocTemplateEntity, diagram: Option[Diagram], description: String, id: String) = {
+ val s = universe.settings
+ val diagramSvg = generator.generate(diagram.get, tpl, this)
+ if (diagramSvg != NodeSeq.Empty) {
+ <div class="toggleContainer block diagram-container" id={ id + "-container"}>
+ <span class="toggle diagram-link">{ description }</span>
+ <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
+ <div class="diagram" id={ id }>{
+ diagramSvg
+ }</div>
+ </div>
+ } else NodeSeq.Empty
+ }
+}
+
+object Template {
+ /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
+ * it won't be garbage collected and you'll end up filling the heap with garbage */
+
+ def isImplicit(mbr: MemberEntity) = mbr.byConversion.isDefined
+ def isShadowedImplicit(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isShadowed).getOrElse(false)).getOrElse(false)
+ def isAmbiguousImplicit(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isAmbiguous).getOrElse(false)).getOrElse(false)
+ def isShadowedOrAmbiguousImplicit(mbr: MemberEntity) = isShadowedImplicit(mbr) || isAmbiguousImplicit(mbr)
+
+ def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
}
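
The shadowing warning suggests a type ascription such as (list: TargetType).member(args), built from the template name via lowerFirstLetter. A standalone sketch of that string construction; the example names are illustrative only:

    object ShadowingHintSketch {
      def lowerFirstLetter(s: String) =
        if (s.length >= 1) s.substring(0, 1).toLowerCase() + s.substring(1) else s

      // assemble the hint shown to the user for a shadowed implicit member
      def suggestion(tplName: String, targetType: String, member: String, params: String): String =
        "(" + lowerFirstLetter(tplName) + ": " + targetType + ")." + member + params

      def main(args: Array[String]): Unit =
        // e.g. a member added to a List by an implicit conversion to a hypothetical TraversableOps
        println(suggestion("List", "TraversableOps[Int]", "sum", "(numeric)"))
    }
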
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
new file mode 100644
index 0000000000..61c1819d11
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
@@ -0,0 +1,53 @@
+/**
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import scala.xml.NodeSeq
+import scala.tools.nsc.doc.html.HtmlPage
+import scala.tools.nsc.doc.model.diagram.Diagram
+import scala.tools.nsc.doc.model.DocTemplateEntity
+
+trait DiagramGenerator {
+
+ /**
+ * Generates a visualization of the internal representation
+ * of a diagram.
+ *
+ * @param d The model of the diagram
+ * @param t The template the diagram belongs to
+ * @param p The page the diagram will be embedded in (needed for link generation)
+ * @return The HTML to be embedded in the Scaladoc page
+ */
+ def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
+}
+
+object DiagramGenerator {
+
+ // TODO: This is tailored towards the dot generator, since it's the only generator. In the future it should be more
+ // general.
+
+ private[this] var dotRunner: DotRunner = null
+ private[this] var settings: doc.Settings = null
+
+ def initialize(s: doc.Settings) =
+ settings = s
+
+ def getDotRunner() = {
+ if (dotRunner == null)
+ dotRunner = new DotRunner(settings)
+ dotRunner
+ }
+
+ def cleanup() = {
+ DiagramStats.printStats(settings)
+ if (dotRunner != null) {
+ dotRunner.cleanup()
+ dotRunner = null
+ }
+ }
+} \ No newline at end of file
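
The DiagramGenerator companion follows an initialize / lazily-create / cleanup lifecycle, so no dot process is started unless a diagram is actually generated. A standalone sketch of that lifecycle with the dot runner replaced by a trivial placeholder:

    object LifecycleSketch {
      final class Runner(val config: String) {
        def run(input: String): String = "processed " + input
        def cleanup(): Unit = println("runner shut down")
      }

      private[this] var runner: Runner = null
      private[this] var config: String = null

      def initialize(c: String): Unit = config = c

      // created on first use, so nothing external is started when never requested
      def getRunner(): Runner = {
        if (runner == null) runner = new Runner(config)
        runner
      }

      def cleanup(): Unit =
        if (runner != null) { runner.cleanup(); runner = null }

      def main(args: Array[String]): Unit = {
        initialize("dot")
        println(getRunner().run("digraph {}"))
        cleanup()
      }
    }
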
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
new file mode 100644
index 0000000000..ec00cace75
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
@@ -0,0 +1,66 @@
+/**
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc.doc
+package html.page.diagram
+
+object DiagramStats {
+
+ class TimeTracker(title: String) {
+ var totalTime: Long = 0l
+ var maxTime: Long = 0l
+ var instances: Int = 0
+
+ def addTime(ms: Long) = {
+ if (maxTime < ms)
+ maxTime = ms
+ totalTime += ms
+ instances += 1
+ }
+
+ def printStats(print: String => Unit) = {
+ if (instances == 0)
+ print(title + ": no stats gathered")
+ else {
+ print(" " + title)
+ print(" " + "=" * title.length)
+ print(" count: " + instances + " items")
+ print(" total time: " + totalTime + " ms")
+ print(" average time: " + (totalTime/instances) + " ms")
+ print(" maximum time: " + maxTime + " ms")
+ print("")
+ }
+ }
+ }
+
+ private[this] val filterTrack = new TimeTracker("diagrams model filtering")
+ private[this] val modelTrack = new TimeTracker("diagrams model generation")
+ private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
+ private[this] val dotRunTrack = new TimeTracker("dot process running")
+ private[this] val svgTrack = new TimeTracker("svg processing")
+ private[this] var brokenImages = 0
+ private[this] var fixedImages = 0
+
+ def printStats(settings: Settings) = {
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\nDiagram generation running time breakdown:\n")
+ filterTrack.printStats(settings.printMsg)
+ modelTrack.printStats(settings.printMsg)
+ dotGenTrack.printStats(settings.printMsg)
+ dotRunTrack.printStats(settings.printMsg)
+ svgTrack.printStats(settings.printMsg)
+ settings.printMsg(" Broken images: " + brokenImages)
+ settings.printMsg(" Fixed images: " + fixedImages)
+ settings.printMsg("")
+ }
+ }
+
+ def addFilterTime(ms: Long) = filterTrack.addTime(ms)
+ def addModelTime(ms: Long) = modelTrack.addTime(ms)
+ def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
+ def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
+ def addSvgTime(ms: Long) = svgTrack.addTime(ms)
+
+ def addBrokenImage(): Unit = brokenImages += 1
+ def addFixedImage(): Unit = fixedImages += 1
+} \ No newline at end of file
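
DiagramStats is fed through the negate-then-add timing idiom used throughout the generator (var t = -System.currentTimeMillis; ...; t += System.currentTimeMillis). A self-contained sketch of that idiom, with the sleep standing in for the real work:

    object TimingIdiomSketch {
      def main(args: Array[String]): Unit = {
        // negate the start time, do the work, add the end time:
        // the variable then holds the elapsed milliseconds
        var tWork = -System.currentTimeMillis
        Thread.sleep(25)                     // stand-in for dot generation / SVG parsing
        tWork += System.currentTimeMillis
        println("elapsed: " + tWork + " ms") // this value would go to e.g. addDotGenerationTime
      }
    }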
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
new file mode 100644
index 0000000000..dc6f941c30
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -0,0 +1,499 @@
+/**
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
+import scala.collection.immutable._
+import javax.xml.parsers.SAXParser
+import model._
+import model.diagram._
+
+class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
+
+ // the page where the diagram will be embedded
+ private var page: HtmlPage = null
+ // path to the "lib" folder relative to the page
+ private var pathToLib: String = null
+ // maps nodes to unique indices
+ private var node2Index: Map[Node, Int] = null
+ // maps an index to its corresponding node
+ private var index2Node: Map[Int, Node] = null
+ // true if the current diagram is a class diagram
+ private var isClassDiagram = false
+ // incoming implicit nodes (needed for determining the CSS class of a node)
+ private var incomingImplicitNodes: List[Node] = List()
+ // the suffix used when there are too many classes to show
+ private final val MultiSuffix = " classes/traits"
+ // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
+ private var counter = 0
+
+ def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
+ counter = counter + 1;
+ this.page = page
+ pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
+ val dot = generateDot(diagram)
+ val result = generateSVG(dot, template)
+ // clean things up a bit, so we don't leave garbage on the heap
+ this.page = null
+ node2Index = null
+ index2Node = null
+ incomingImplicitNodes = List()
+ result
+ }
+
+ /**
+ * Generates a dot string for a given diagram.
+ */
+ private def generateDot(d: Diagram) = {
+ // inheritance nodes (all nodes except thisNode and implicit nodes)
+ var nodes: List[Node] = null
+ // inheritance edges (all edges except implicit edges)
+ var edges: List[(Node, List[Node])] = null
+
+ // timing
+ var tDot = -System.currentTimeMillis
+
+ // variables specific to class diagrams:
+ // current node of a class diagram
+ var thisNode:Node = null
+ var subClasses = List[Node]()
+ var superClasses = List[Node]()
+ var incomingImplicits = List[Node]()
+ var outgoingImplicits = List[Node]()
+ isClassDiagram = false
+
+ d match {
+ case ClassDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
+
+ def textTypeEntity(text: String) =
+ new TypeEntity {
+ val name = text
+ def refEntity: SortedMap[Int, (TemplateEntity, Int)] = SortedMap()
+ }
+
+ // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string
+ // conservatively, we'll limit at 4000, to be sure:
+ def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
+
+ // avoid overcrowding the diagram:
+ // if there are too many super / sub / implicit nodes, represent
+ // them by one node with a corresponding tooltip
+ superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+ val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
+ List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None, superClassesTooltip))
+ } else _superClasses
+
+ subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+ val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
+ List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None, subClassesTooltip))
+ } else _subClasses
+
+ incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+ val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
+ List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None, incomingImplicitsTooltip))
+ } else _incomingImplicits
+
+ outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+ val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
+ List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None, outgoingImplicitsTooltip))
+ } else _outgoingImplicits
+
+ thisNode = _thisNode
+ nodes = List()
+ edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
+ node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
+ isClassDiagram = true
+ incomingImplicitNodes = incomingImplicits
+ case _ =>
+ nodes = d.nodes
+ edges = d.edges
+ node2Index = d.nodes.zipWithIndex.toMap
+ incomingImplicitNodes = List()
+ }
+ index2Node = node2Index map {_.swap}
+
+ val implicitsDot = {
+ if (!isClassDiagram) ""
+ else {
+ // dot cluster containing thisNode
+ val thisCluster = "subgraph clusterThis {\n" +
+ "style=\"invis\"\n" +
+ node2Dot(thisNode) +
+ "}"
+ // dot cluster containing incoming implicit nodes, if any
+ val incomingCluster = {
+ if(incomingImplicits.isEmpty) ""
+ else "subgraph clusterIncoming {\n" +
+ "style=\"invis\"\n" +
+ incomingImplicits.reverse.map(n => node2Dot(n)).mkString +
+ (if (incomingImplicits.size > 1)
+ incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+ " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+ else "") +
+ "}"
+ }
+ // dot cluster containing outgoing implicit nodes, if any
+ val outgoingCluster = {
+ if(outgoingImplicits.isEmpty) ""
+ else "subgraph clusterOutgoing {\n" +
+ "style=\"invis\"\n" +
+ outgoingImplicits.reverse.map(n => node2Dot(n)).mkString +
+ (if (outgoingImplicits.size > 1)
+ outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+ " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+ else "") +
+ "}"
+ }
+
+ // assemble clusters into another cluster
+ val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
+ val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
+ "subgraph clusterAll {\n" +
+ "style=\"invis\"\n" +
+ outgoingCluster + "\n" +
+ thisCluster + "\n" +
+ incomingCluster + "\n" +
+ // incoming implicit edge
+ (if (!incomingImplicits.isEmpty) {
+ val n = incomingImplicits.last
+ "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
+ " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
+ ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
+ } else "") +
+ // outgoing implicit edge
+ (if (!outgoingImplicits.isEmpty) {
+ val n = outgoingImplicits.head
+ "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
+ " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
+ ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
+ } else "") +
+ "}"
+ }
+ }
+
+ // assemble graph
+ val graph = "digraph G {\n" +
+ // graph / node / edge attributes
+ graphAttributesStr +
+ "node [" + nodeAttributesStr + "];\n" +
+ "edge [" + edgeAttributesStr + "];\n" +
+ implicitsDot + "\n" +
+ // inheritance nodes
+ nodes.map(n => node2Dot(n)).mkString +
+ subClasses.map(n => node2Dot(n)).mkString +
+ superClasses.map(n => node2Dot(n)).mkString +
+ // inheritance edges
+ edges.map{ case (from, tos) => tos.map(to => {
+ val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
+ // the X -> Y edge is inverted twice to keep the diagram flowing the right way
+ // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
+ "node" + node2Index(to) + " -> node" + node2Index(from) +
+ " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
+ "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
+ to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
+ }).mkString}.mkString +
+ "}"
+
+ tDot += System.currentTimeMillis
+ DiagramStats.addDotGenerationTime(tDot)
+
+ graph
+ }
+
+ /**
+ * Generates the dot string of a given node.
+ */
+ private def node2Dot(node: Node) = {
+
+ // escape HTML characters in node names
+ def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
+
+ // assemble node attributes in a map
+ var attr = scala.collection.mutable.Map[String, String]()
+
+ // link
+ node.doctpl match {
+ case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
+ case _ =>
+ }
+
+ // tooltip
+ node.tooltip match {
+ case Some(text) => attr += "tooltip" -> text
+ // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
+ case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
+ case _ =>
+ }
+
+ // styles
+ if(node.isImplicitNode)
+ attr ++= implicitStyle
+ else if(node.isOutsideNode)
+ attr ++= outsideStyle
+ else if(node.isTraitNode)
+ attr ++= traitStyle
+ else if(node.isClassNode)
+ attr ++= classStyle
+ else if(node.isObjectNode)
+ attr ++= objectStyle
+ else
+ attr ++= defaultStyle
+
+ // HTML label
+ var name = escape(node.name)
+ var img = ""
+ if(node.isTraitNode)
+ img = "trait_diagram.png"
+ else if(node.isClassNode)
+ img = "class_diagram.png"
+ else if(node.isObjectNode)
+ img = "object_diagram.png"
+
+ if(!img.equals("")) {
+ img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
+ name = name + " "
+ }
+ val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
+ "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
+ "</TABLE>>"
+
+ // dot does not allow specifying a CSS class, therefore
+ // set the id to "{class}|{id}", which will be used in
+ // the transform method
+ val id = "graph" + counter + "_" + node2Index(node)
+ attr += ("id" -> (cssClass(node) + "|" + id))
+
+ // return dot string
+ "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
+ }
+
+ /**
+ * Returns the CSS class for an edge connecting node1 and node2.
+ */
+ private def cssClass(node1: Node, node2: Node): String = {
+ if (node1.isImplicitNode && node2.isThisNode)
+ "implicit-incoming"
+ else if (node1.isThisNode && node2.isImplicitNode)
+ "implicit-outgoing"
+ else
+ "inheritance"
+ }
+
+ /**
+ * Returns the CSS class for a node.
+ */
+ private def cssClass(node: Node): String =
+ if (node.isImplicitNode && incomingImplicitNodes.contains(node))
+ "implicit-incoming" + cssBaseClass(node, "", " ")
+ else if (node.isImplicitNode)
+ "implicit-outgoing" + cssBaseClass(node, "", " ")
+ else if (node.isThisNode)
+ "this" + cssBaseClass(node, "", " ")
+ else if (node.isOutsideNode)
+ "outside" + cssBaseClass(node, "", " ")
+ else
+ cssBaseClass(node, "default", "")
+
+ private def cssBaseClass(node: Node, default: String, space: String) =
+ if (node.isClassNode)
+ space + "class"
+ else if (node.isTraitNode)
+ space + "trait"
+ else if (node.isObjectNode)
+ space + "object"
+ else
+ default
+
+ /**
+ * Calls dot with a given dot string and returns the SVG output.
+ */
+ private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
+ val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
+ var tSVG = -System.currentTimeMillis
+
+ val result = if (dotOutput != null) {
+ val src = scala.io.Source.fromString(dotOutput);
+ try {
+ val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
+ val doc = cpa.document()
+ if (doc != null)
+ transform(doc.docElem)
+ else
+ NodeSeq.Empty
+ } catch {
+ case exc =>
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\n\n**********************************************************************")
+ settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
+ settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
+ settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
+ settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
+ settings.printMsg("\n\n**********************************************************************")
+ } else {
+ settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
+ settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
+ }
+ NodeSeq.Empty
+ }
+ } else
+ NodeSeq.Empty
+
+ tSVG += System.currentTimeMillis
+ DiagramStats.addSvgTime(tSVG)
+
+ result
+ }
+
+ /**
+ * Transforms the SVG generated by dot:
+ * - adds a class attribute to the SVG element
+ * - changes the path of the node images from absolute to relative
+ * - assigns id and class attributes to nodes and edges
+ * - removes title elements
+ */
+ private def transform(e:scala.xml.Node): scala.xml.Node = e match {
+ // add an id and class attribute to the SVG element
+ case Elem(prefix, "svg", attribs, scope, child @ _*) => {
+ val klass = if (isClassDiagram) "class-diagram" else "package-diagram"
+ Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+ new UnprefixedAttribute("id", "graph" + counter, Null) %
+ new UnprefixedAttribute("class", klass, Null)
+ }
+ // change the path of the node images from absolute to relative
+ case img @ <image></image> => {
+ val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
+ val file = href.substring(href.lastIndexOf("/") + 1, href.size)
+ img.asInstanceOf[Elem] %
+ new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
+ }
+ // assign id and class attributes to edges and nodes:
+ // the id attribute generated by dot has the format: "{class}|{id}"
+ case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
+ var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+ val dotId = (g \ "@id").toString
+ if (dotId.count(_ == '|') == 1) {
+ val Array(klass, id) = dotId.toString.split("\\|")
+ /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
+ * tests like executing it 20K times and diffing the output don't trigger the bug -- so it's up to us to place the image
+ * back in the node */
+ val kind = getKind(klass)
+ if (kind != "")
+ if (((g \ "a" \ "image").isEmpty)) {
+ DiagramStats.addBrokenImage()
+ val xposition = getPosition(g, "x", -22)
+ val yposition = getPosition(g, "y", -11.3334)
+ if (xposition.isDefined && yposition.isDefined) {
+ val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
+ val anchorNode = (g \ "a") match {
+ case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
+ transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+ case _ =>
+ g \ "a"
+ }
+ res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+ DiagramStats.addFixedImage()
+ }
+ }
+ res % new UnprefixedAttribute("id", id, Null) %
+ new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
+ }
+ else res
+ }
+ // remove titles
+ case <title>{ _* }</title> =>
+ scala.xml.Text("")
+ // apply recursively
+ case Elem(prefix, label, attribs, scope, child @ _*) =>
+ Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+ case x => x
+ }
+
+ def getKind(klass: String): String =
+ if (klass.contains("class")) "class"
+ else if (klass.contains("trait")) "trait"
+ else if (klass.contains("object")) "object"
+ else ""
+
+ def getPosition(g: xml.Node, axis: String, offset: Double): Option[Double] = {
+ val node = g \ "a" \ "text" \ ("@" + axis)
+ if (node.isEmpty)
+ None
+ else
+ Some(node.toString.toDouble + offset)
+ }
+
+ /* graph / node / edge attributes */
+
+ private val graphAttributes: Map[String, String] = Map(
+ "compound" -> "true",
+ "rankdir" -> "TB"
+ )
+
+ private val nodeAttributes = Map(
+ "shape" -> "rectangle",
+ "style" -> "filled",
+ "penwidth" -> "1",
+ "margin" -> "0.08,0.01",
+ "width" -> "0.0",
+ "height" -> "0.0",
+ "fontname" -> "Arial",
+ "fontsize" -> "10.00"
+ )
+
+ private val edgeAttributes = Map(
+ "color" -> "#d4d4d4",
+ "arrowsize" -> "0.5",
+ "fontcolor" -> "#aaaaaa",
+ "fontsize" -> "10.00",
+ "fontname" -> "Arial"
+ )
+
+ private val defaultStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d",
+ "margin" -> "0.1,0.04"
+ )
+
+ private val implicitStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d"
+ )
+
+ private val outsideStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d"
+ )
+
+ private val traitStyle = Map(
+ "color" -> "#37657D",
+ "fillcolor" -> "#498AAD",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private val classStyle = Map(
+ "color" -> "#115F3B",
+ "fillcolor" -> "#0A955B",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private val objectStyle = Map(
+ "color" -> "#102966",
+ "fillcolor" -> "#3556a7",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
+
+ private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
+ private val nodeAttributesStr = flatten(nodeAttributes)
+ private val edgeAttributesStr = flatten(edgeAttributes)
+} \ No newline at end of file
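
Since dot has no notion of CSS classes, the generator smuggles them through the id attribute as "{cssClass}|{domId}" and transform() later splits them apart again when post-processing the SVG. A small standalone sketch of that round trip; the concrete values are illustrative only.

    object DotIdEncodingSketch {
      def main(args: Array[String]): Unit = {
        val encoded = "trait|graph1_3"          // as written into the dot id attribute by node2Dot
        if (encoded.count(_ == '|') == 1) {
          val Array(klass, id) = encoded.split("\\|")
          println("CSS class: " + klass)        // becomes the SVG element's class attribute
          println("element id: " + id)          // becomes the SVG element's id attribute
        }
      }
    }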
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
new file mode 100644
index 0000000000..37600fa908
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -0,0 +1,227 @@
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import java.io.InputStream
+import java.io.OutputStream
+import java.io.InputStreamReader
+import java.io.OutputStreamWriter
+import java.io.BufferedWriter
+import java.io.BufferedReader
+import java.io.IOException
+import scala.sys.process._
+import scala.concurrent.SyncVar
+
+import model._
+import model.diagram._
+
+/** This class takes care of running the graphviz dot utility */
+class DotRunner(settings: doc.Settings) {
+
+ private[this] var dotRestarts = 0
+ private[this] var dotProcess: DotProcess = null
+
+ def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
+
+ if (dotProcess == null) {
+ if (dotRestarts < settings.docDiagramsDotRestart.value) {
+ if (dotRestarts != 0)
+ settings.printMsg("A new graphviz dot process will be created...\n")
+ dotRestarts += 1
+ dotProcess = new DotProcess(settings)
+ } else
+ return null
+ }
+
+ val tStart = System.currentTimeMillis
+ val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
+ val tFinish = System.currentTimeMillis
+ DiagramStats.addDotRunningTime(tFinish - tStart)
+
+ if (result == null) {
+ dotProcess.cleanup()
+ dotProcess = null
+ if (dotRestarts == settings.docDiagramsDotRestart.value) {
+ settings.printMsg("\n")
+ settings.printMsg("**********************************************************************")
+ settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
+ settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
+ settings.printMsg("")
+ val baseList = List(settings.docDiagramsDebug,
+ settings.docDiagramsDotPath,
+ settings.docDiagramsDotRestart,
+ settings.docDiagramsDotTimeout)
+ val width = (baseList map (_.helpSyntax.length)).max
+ def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s").format(s.helpSyntax) + " " + s.helpDescription
+ baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
+ settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
+ settings.printMsg("**********************************************************************\n\n")
+
+ }
+ }
+
+ result
+ }
+
+ def cleanup() =
+ if (dotProcess != null)
+ dotProcess.cleanup()
+}
+
+class DotProcess(settings: doc.Settings) {
+
+ @volatile var error: Boolean = false // signal an error
+ val inputString = new SyncVar[String] // used for the dot process input
+ val outputString = new SyncVar[String] // used for the dot process output
+ val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
+
+ // set in only one place, in the main thread
+ var process: Process = null
+ var templateName: String = ""
+ var templateInput: String = ""
+
+ def feedToDot(input: String, template: String): String = {
+
+ templateName = template
+ templateInput = input
+
+ try {
+
+ // process creation
+ if (process == null) {
+ val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
+ val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
+ process = processBuilder.run(procIO)
+ }
+
+ // pass the input and wait for the output
+ assert(!inputString.isSet)
+ assert(!outputString.isSet)
+ inputString.put(input)
+ var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
+ if (error) result = null
+
+ result
+
+ } catch {
+ case exc =>
+ errorBuffer.append(" Main thread in " + templateName + ": " +
+ (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
+ error = true
+ return null
+ }
+ }
+
+ def cleanup(): Unit = {
+
+ // we'll need to know if there was any error for reporting
+ val _error = error
+
+ if (process != null) {
+ // if there's no error, this should exit cleanly
+ if (!error) feedToDot("<finish>", "<finishing>")
+
+ // just in case there's any thread hanging, this will take it out of the loop
+ error = true
+ process.destroy()
+ // we'll need to unblock the input again
+ if (!inputString.isSet) inputString.put("")
+ if (outputString.isSet) outputString.take()
+ }
+
+ if (_error) {
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\n**********************************************************************")
+ settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
+ settings.printMsg("\nThe following is the log of the failure:")
+ settings.printMsg(errorBuffer.toString)
+ settings.printMsg(" Cleanup: Last template: " + templateName)
+ settings.printMsg(" Cleanup: Last dot input: \n " + templateInput.replaceAll("\n","\n ") + "\n")
+ settings.printMsg(" Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
+ if (process != null)
+ settings.printMsg(" Cleanup: Dot exit code: " + process.exitValue)
+ settings.printMsg("**********************************************************************")
+ } else {
+ // we shouldn't just sit there for 50s not reporting anything, no?
+ settings.printMsg("Graphviz dot encountered an error when generating the diagram for")
+ settings.printMsg(templateName + ". Use the " + settings.docDiagramsDebug.name + " flag")
+ settings.printMsg("for more information.")
+ }
+ }
+ }
+
+ /* The standard input passing function */
+ private[this] def inputFn(stdin: OutputStream): Unit = {
+ val writer = new BufferedWriter(new OutputStreamWriter(stdin))
+ try {
+ var input = inputString.take()
+
+ while (!error) {
+ if (input == "<finish>") {
+ // the <finish> marker => close dot's stdin and stop
+ stdin.close()
+ return
+ } else {
+ // send the input to dot
+ writer.write(input + "\n\n")
+ writer.flush()
+ }
+
+ if (!error) input = inputString.take()
+ }
+ stdin.close()
+ } catch {
+ case exc =>
+ error = true
+ stdin.close()
+ errorBuffer.append(" Input thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+
+ private[this] def outputFn(stdOut: InputStream): Unit = {
+ val reader = new BufferedReader(new InputStreamReader(stdOut))
+ var buffer: StringBuilder = new StringBuilder()
+ try {
+ var line = reader.readLine
+ while (!error && line != null) {
+ buffer.append(line + "\n")
+ // signal the last element in the svg (only for output)
+ if (line == "</svg>") {
+ outputString.put(buffer.toString)
+ buffer.setLength(0)
+ }
+ if (error) { stdOut.close(); return }
+ line = reader.readLine
+ }
+ assert(!outputString.isSet)
+ outputString.put(buffer.toString)
+ stdOut.close()
+ } catch {
+ case exc =>
+ error = true
+ stdOut.close()
+ errorBuffer.append(" Output thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+
+ private[this] def errorFn(stdErr: InputStream): Unit = {
+ val reader = new BufferedReader(new InputStreamReader(stdErr))
+ var buffer: StringBuilder = new StringBuilder()
+ try {
+ var line = reader.readLine
+ while (line != null) {
+ errorBuffer.append(" DOT <error console>: " + line + "\n")
+ error = true
+ line = reader.readLine
+ }
+ stdErr.close()
+ } catch {
+ case exc =>
+ error = true
+ stdErr.close()
+ errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+} \ No newline at end of file
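
DotProcess coordinates the calling thread and the stdout-reader thread through a pair of SyncVars: the caller put()s the dot source and blocks on a timed take() of the output, which the reader thread put()s once it sees the closing </svg>. A stripped-down sketch of that handshake; the sleep and the literal SVG string are placeholders, and the timed take(ms) is the one this codebase's scala.concurrent.SyncVar provides and the patch itself uses.

    import scala.concurrent.SyncVar

    object SyncVarHandshakeSketch {
      def main(args: Array[String]): Unit = {
        val output = new SyncVar[String]
        val reader = new Thread(new Runnable {
          def run(): Unit = {
            Thread.sleep(50)              // stand-in for reading dot's stdout line by line
            output.put("<svg>...</svg>")  // put once the "</svg>" marker is seen
          }
        })
        reader.start()
        val result = output.take(10 * 1000) // caller blocks, at most 10s (cf. docDiagramsDotTimeout)
        println(result)
      }
    }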
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
new file mode 100644
index 0000000000..9d7aec792b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
new file mode 100644
index 0000000000..04d29580b7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
@@ -0,0 +1,135 @@
+.diagram-container
+{
+ display: none;
+}
+
+.diagram
+{
+ overflow: hidden;
+ padding-top:15px;
+}
+
+.diagram svg
+{
+ display: block;
+ position: absolute;
+ visibility: hidden;
+ margin: auto;
+}
+
+.diagram-help
+{
+ float:right;
+ display:none;
+}
+
+.magnifying
+{
+ cursor: -webkit-zoom-in ! important;
+ cursor: -moz-zoom-in ! important;
+ cursor: pointer;
+}
+
+#close-link
+{
+ position: absolute;
+ z-index: 100;
+ font-family: Arial, sans-serif;
+ font-size: 10pt;
+ text-decoration: underline;
+ color: #315479;
+}
+
+#close:hover
+{
+ text-decoration: none;
+}
+
+svg a
+{
+ cursor:pointer;
+}
+
+svg text
+{
+ font-size: 10px;
+}
+
+/* try to move the node text 1px in order to be vertically
+ centered (does not work in all browsers) */
+svg .node text
+{
+ transform: translate(0px,1px);
+ -ms-transform: translate(0px,1px);
+ -webkit-transform: translate(0px,1px);
+ -o-transform: translate(0px,1px);
+ -moz-transform: translate(0px,1px);
+}
+
+/* hover effect for edges */
+
+svg .edge.over text,
+svg .edge.implicit-incoming.over polygon,
+svg .edge.implicit-outgoing.over polygon
+{
+ fill: #202020;
+}
+
+svg .edge.over path,
+svg .edge.over polygon
+{
+ stroke: #202020;
+}
+
+/* hover effect for nodes in class diagrams */
+
+svg.class-diagram .node
+{
+ opacity: 0.75;
+}
+
+svg.class-diagram .node.this
+{
+ opacity: 1.0;
+}
+
+svg.class-diagram .node.over
+{
+ opacity: 1.0;
+}
+
+svg .node.over polygon
+{
+ stroke: #202020;
+}
+
+/* hover effect for nodes in package diagrams */
+
+svg.package-diagram .node.class.over polygon,
+svg.class-diagram .node.this.class.over polygon
+{
+ fill: #098552;
+ fill: #04663e;
+}
+
+svg.package-diagram .node.trait.over polygon,
+svg.class-diagram .node.this.trait.over polygon
+{
+ fill: #3c7b9b;
+ fill: #235d7b;
+}
+
+svg.package-diagram .node.object.over polygon
+{
+ fill: #183377;
+}
+
+svg.package-diagram .node.outside.over polygon
+{
+ fill: #d4d4d4;
+}
+
+svg.package-diagram .node.default.over polygon
+{
+ fill: #d4d4d4;
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
new file mode 100644
index 0000000000..478f2e38ac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
@@ -0,0 +1,324 @@
+/**
+ * JavaScript functions enhancing the SVG diagrams.
+ *
+ * @author Damien Obrist
+ */
+
+var diagrams = {};
+
+/**
+ * Initializes the diagrams in the main window.
+ */
+$(document).ready(function()
+{
+ // hide diagrams in browsers not supporting SVG
+ if(Modernizr && !Modernizr.inlinesvg)
+ return;
+
+ // only execute this in the main window
+ if(diagrams.isPopup)
+ return;
+
+ if($("#content-diagram").length)
+ $("#inheritance-diagram").css("padding-bottom", "20px");
+
+ $(".diagram-container").css("display", "block");
+
+ $(".diagram").each(function() {
+ // store initial dimensions
+ $(this).data("width", $("svg", $(this)).width());
+ $(this).data("height", $("svg", $(this)).height());
+ // store unscaled clone of SVG element
+ $(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true));
+ });
+
+ // collapse the diagrams until they are toggled open, but make the SVGs themselves visible
+ $(".diagram").css("display", "none");
+ $(".diagram svg").css({
+ "position": "static",
+ "visibility": "visible",
+ "z-index": "auto"
+ });
+
+ // enable linking to diagrams
+ if($(location).attr("hash") == "#inheritance-diagram") {
+ diagrams.toggle($("#inheritance-diagram-container"), true);
+ } else if($(location).attr("hash") == "#content-diagram") {
+ diagrams.toggle($("#content-diagram-container"), true);
+ }
+
+ $(".diagram-link").click(function() {
+ diagrams.toggle($(this).parent());
+ });
+
+ // register resize function
+ $(window).resize(diagrams.resize);
+
+ // don't bubble event to parent div
+ // when clicking on a node of a resized
+ // diagram
+ $("svg a").click(function(e) {
+ e.stopPropagation();
+ });
+
+ diagrams.initHighlighting();
+});
+
+/**
+ * Initializes the diagrams in the popup.
+ */
+diagrams.initPopup = function(id)
+{
+ // copy diagram from main window
+ if(!jQuery.browser.msie)
+ $("body").append(opener.$("#" + id).data("svg"));
+
+ // positioning
+ $("svg").css("position", "absolute");
+ $(window).resize(function()
+ {
+ var svg_w = $("svg").css("width").replace("px", "");
+ var svg_h = $("svg").css("height").replace("px", "");
+ var x = $(window).width() / 2 - svg_w / 2;
+ if(x < 0) x = 0;
+ var y = $(window).height() / 2 - svg_h / 2;
+ if(y < 0) y = 0;
+ $("svg").css("left", x + "px");
+ $("svg").css("top", y + "px");
+ });
+ $(window).resize();
+
+ diagrams.initHighlighting();
+ $("svg a").click(function(e) {
+ opener.diagrams.redirectFromPopup(this.href.baseVal);
+ window.close();
+ });
+ $(document).keyup(function(e) {
+ if (e.keyCode == 27) window.close();
+ });
+}
+
+/**
+ * Initializes highlighting for nodes and edges.
+ */
+diagrams.initHighlighting = function()
+{
+ // helper function since $.hover doesn't work in IE
+
+ function hover(elements, fn)
+ {
+ elements.mouseover(fn);
+ elements.mouseout(fn);
+ }
+
+ // inheritance edges
+
+ hover($("svg .edge.inheritance"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ var parts = $(this).attr("id").split("_");
+ toggleClass($("#" + parts[0] + "_" + parts[1]));
+ toggleClass($("#" + parts[0] + "_" + parts[2]));
+ toggleClass($(this));
+ });
+
+ // nodes
+
+ hover($("svg .node"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ var parts = $(this).attr("id").split("_");
+ var index = parts[1];
+ $("svg#" + parts[0] + " .edge.inheritance").each(function(){
+ var parts2 = $(this).attr("id").split("_");
+ if(parts2[1] == index)
+ {
+ toggleClass($("#" + parts2[0] + "_" + parts2[2]));
+ toggleClass($(this));
+ } else if(parts2[2] == index)
+ {
+ toggleClass($("#" + parts2[0] + "_" + parts2[1]));
+ toggleClass($(this));
+ }
+ });
+ });
+
+ // incoming implicits
+
+ hover($("svg .node.implicit-incoming"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .edge.implicit-incoming"));
+ toggleClass($("svg .node.this"));
+ });
+
+ hover($("svg .edge.implicit-incoming"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .node.this"));
+ $("svg .node.implicit-incoming").each(function(){
+ toggleClass($(this));
+ });
+ });
+
+ // implicit outgoing nodes
+
+ hover($("svg .node.implicit-outgoing"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .edge.implicit-outgoing"));
+ toggleClass($("svg .node.this"));
+ });
+
+ hover($("svg .edge.implicit-outgoing"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .node.this"));
+ $("svg .node.implicit-outgoing").each(function(){
+ toggleClass($(this));
+ });
+ });
+};
+
+/**
+ * Resizes the diagrams according to the available width.
+ */
+diagrams.resize = function()
+{
+ // available width
+ var availableWidth = $("body").width() - 20;
+
+ $(".diagram-container").each(function() {
+ // unregister click event on whole div
+ $(".diagram", this).unbind("click");
+ var diagramWidth = $(".diagram", this).data("width");
+ var diagramHeight = $(".diagram", this).data("height");
+
+ if(diagramWidth > availableWidth)
+ {
+ // resize diagram
+ var height = diagramHeight / diagramWidth * availableWidth;
+ $(".diagram svg", this).width(availableWidth);
+ $(".diagram svg", this).height(height);
+
+ // register click event on whole div
+ $(".diagram", this).click(function() {
+ diagrams.popup($(this));
+ });
+ $(".diagram", this).addClass("magnifying");
+ }
+ else
+ {
+ // restore full size of diagram
+ $(".diagram svg", this).width(diagramWidth);
+ $(".diagram svg", this).height(diagramHeight);
+ // don't show custom cursor any more
+ $(".diagram", this).removeClass("magnifying");
+ }
+ });
+};
+
+/**
+ * Shows or hides a diagram depending on its current state.
+ */
+diagrams.toggle = function(container, dontAnimate)
+{
+ // change class of link
+ $(".diagram-link", container).toggleClass("open");
+ // get element to show / hide
+ var div = $(".diagram", container);
+ if (div.is(':visible'))
+ {
+ $(".diagram-help", container).hide();
+ div.unbind("click");
+ div.removeClass("magnifying");
+ div.slideUp(100);
+ }
+ else
+ {
+ diagrams.resize();
+ if(dontAnimate)
+ div.show();
+ else
+ div.slideDown(100);
+ $(".diagram-help", container).show();
+ }
+};
+
+/**
+ * Opens a popup containing a copy of a diagram.
+ */
+diagrams.windows = {};
+diagrams.popup = function(diagram)
+{
+ var id = diagram.attr("id");
+ if(!diagrams.windows[id] || diagrams.windows[id].closed) {
+ var title = $(".symbol .name", $("#signature")).text();
+ // cloning from parent window to popup somehow doesn't work in IE
+ // therefore include the SVG as a string into the HTML
+ var svgIE = jQuery.browser.msie ? $("<div />").append(diagram.data("svg")).html() : "";
+ var html = '' +
+ '<?xml version="1.0" encoding="UTF-8"?>\n' +
+ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' +
+ '<html>\n' +
+ ' <head>\n' +
+ ' <title>' + title + '</title>\n' +
+ ' <link href="' + $("#diagrams-css").attr("href") + '" media="screen" type="text/css" rel="stylesheet" />\n' +
+ ' <script type="text/javascript" src="' + $("#jquery-js").attr("src") + '"></script>\n' +
+ ' <script type="text/javascript" src="' + $("#diagrams-js").attr("src") + '"></script>\n' +
+ ' <script type="text/javascript">\n' +
+ ' diagrams.isPopup = true;\n' +
+ ' </script>\n' +
+ ' </head>\n' +
+ ' <body onload="diagrams.initPopup(\'' + id + '\');">\n' +
+ ' <a href="#" onclick="window.close();" id="close-link">Close this window</a>\n' +
+ ' ' + svgIE + '\n' +
+ ' </body>\n' +
+ '</html>';
+
+ var padding = 30;
+ var screenHeight = screen.availHeight;
+ var screenWidth = screen.availWidth;
+ var w = Math.min(screenWidth, diagram.data("width") + 2 * padding);
+ var h = Math.min(screenHeight, diagram.data("height") + 2 * padding);
+ var left = (screenWidth - w) / 2;
+ var top = (screenHeight - h) / 2;
+ var parameters = "height=" + h + ", width=" + w + ", left=" + left + ", top=" + top + ", scrollbars=yes, location=no, resizable=yes";
+ var win = window.open("about:blank", "_blank", parameters);
+ win.document.open();
+ win.document.write(html);
+ win.document.close();
+ diagrams.windows[id] = win;
+ }
+ diagrams.windows[id].focus();
+};
+
+/**
+ * This method is called from within the popup when a node is clicked.
+ */
+diagrams.redirectFromPopup = function(url)
+{
+ window.location = url;
+};
+
+/**
+ * Helper method that adds a class to a SVG element.
+ */
+diagrams.addClass = function(svgElem, newClass) {
+ newClass = newClass || "over";
+ var classes = svgElem.attr("class");
+ if ($.inArray(newClass, classes.split(/\s+/)) == -1) {
+ classes += (classes ? ' ' : '') + newClass;
+ svgElem.attr("class", classes);
+ }
+};
+
+/**
+ * Helper method that removes a class from a SVG element.
+ */
+diagrams.removeClass = function(svgElem, oldClass) {
+ oldClass = oldClass || "over";
+ var classes = svgElem.attr("class");
+ classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' ');
+ svgElem.attr("class", classes);
+};
+
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
new file mode 100644
index 0000000000..4688d633fe
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
@@ -0,0 +1,4 @@
+/* Modernizr 2.5.3 (Custom Build) | MIT & BSD
+ * Build: http://www.modernizr.com/download/#-inlinesvg
+ */
+;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="<svg/>",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
new file mode 100644
index 0000000000..6e9f2f743f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
new file mode 100644
index 0000000000..d30dbad858
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
@@ -0,0 +1,10 @@
+// ┌────────────────────────────────────────────────────────────────────┐ \\
+// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\
+// ├────────────────────────────────────────────────────────────────────┤ \\
+// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\
+// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\
+// ├────────────────────────────────────────────────────────────────────┤ \\
+// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\
+// └────────────────────────────────────────────────────────────────────┘ \\
+
+(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;t<u;t++)"zIndex"in f[t]&&(o.push(f[t].zIndex),f[t].zIndex<0&&(p[f[t].zIndex]=f[t]));o.sort(g);while(o[l]<0){n=p[o[l++]],q.push(n.apply(b,e));if(i){i=d;return q}}for(t=0;t<u;t++){n=f[t];if("zIndex"in n)if(n.zIndex==o[l]){q.push(n.apply(b,e));if(i)break;do{l++,n=p[o[l]],n&&q.push(n.apply(b,e));if(i)break}while(n)}else p[n.zIndex]=n;else{q.push(n.apply(b,e));if(i)break}}i=d,h=r;return q.length?q:null};k.listeners=function(a){var b=a.split(d),c=j,f,g,h,i,k,l,m,n,o=[c],p=[];for(i=0,k=b.length;i<k;i++){n=[];for(l=0,m=o.length;l<m;l++){c=o[l].n,g=[c[b[i]],c[e]],h=2;while(h--)f=g[h],f&&(n.push(f),p=p.concat(f.f||[]))}o=n}return p},k.on=function(a,b){var c=a.split(d),e=j;for(var g=0,h=c.length;g<h;g++)e=e.n,!e[c[g]]&&(e[c[g]]={n:{}}),e=e[c[g]];e.f=e.f||[];for(g=0,h=e.f.length;g<h;g++)if(e.f[g]==b)return f;e.f.push(b);return function(a){+a==+a&&(b.zIndex=+a)}},k.stop=function(){i=1},k.nt=function(a){if(a)return(new RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)")).test(h);return h},k.off=k.unbind=function(a,b){var f=a.split(d),g,h,i,k,l,m,n,o=[j];for(k=0,l=f.length;k<l;k++)for(m=0;m<o.length;m+=i.length-2){i=[m,1],g=o[m].n;if(f[k]!=e)g[f[k]]&&i.push(g[f[k]]);else for(h in g)g[c](h)&&i.push(g[h]);o.splice.apply(o,i)}for(k=0,l=o.length;k<l;k++){g=o[k];while(g.n){if(b){if(g.f){for(m=0,n=g.f.length;m<n;m++)if(g.f[m]==b){g.f.splice(m,1);break}!g.f.length&&delete g.f}for(h in g.n)if(g.n[c](h)&&g.n[h].f){var p=g.n[h].f;for(m=0,n=p.length;m<n;m++)if(p[m]==b){p.splice(m,1);break}!p.length&&delete g.n[h].f}}else{delete g.f;for(h in g.n)g.n[c](h)&&g.n[h].f&&delete g.n[h].f}g=g.n}}},k.once=function(a,b){var c=function(){var d=b.apply(this,arguments);k.unbind(a,c);return d};return k.on(a,c)},k.version=b,k.toString=function(){return"You are running Eve "+b},typeof module!="undefined"&&module.exports?module.exports=k:typeof define!="undefined"?define("eve",[],function(){return k}):a.eve=k})(this),function(){function cF(a){for(var b=0;b<cy.length;b++)cy[b].el.paper==a&&cy.splice(b--,1)}function cE(b,d,e,f,h,i){e=Q(e);var j,k,l,m=[],o,p,q,t=b.ms,u={},v={},w={};if(f)for(y=0,z=cy.length;y<z;y++){var x=cy[y];if(x.el.id==d.id&&x.anim==b){x.percent!=e?(cy.splice(y,1),l=1):k=x,d.attr(x.totalOrigin);break}}else f=+v;for(var y=0,z=b.percents.length;y<z;y++){if(b.percents[y]==e||b.percents[y]>f*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;y<z;y++){w[A][y]=[0];for(var F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(E[y][F]-u[A][y][F])/t}break;case"transform":var H=d._,I=ca(H[A],v[A]);if(I){u[A]=I.from,v[A]=I.to,w[A]=[],w[A].real=!0;for(y=0,z=u[A].length;y<z;y++){w[A][y]=[u[A][y][0]];for(F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(v[A][y][F]-u[A][y][F])/t}}else{var J=d.matrix||new cb,K={_:{transform:H.transform},getBBox:function(){return 
d.getBBox(1)}};u[A]=[J.a,J.b,J.c,J.d,J.e,J.f],b$(K,v[A]),v[A]=K._.transform,w[A]=[(K.matrix.a-J.a)/t,(K.matrix.b-J.b)/t,(K.matrix.c-J.c)/t,(K.matrix.d-J.d)/t,(K.matrix.e-J.e)/t,(K.matrix.f-J.f)/t]}break;case"csv":var L=r(j[A])[s](c),M=r(u[A])[s](c);if(A=="clip-rect"){u[A]=M,w[A]=[],y=M.length;while(y--)w[A][y]=(L[y]-u[A][y])/t}v[A]=L;break;default:L=[][n](j[A]),M=[][n](u[A]),w[A]=[],y=d.paper.customAttributes[A].length;while(y--)w[A][y]=((L[y]||0)-(M[y]||0))/t}}var O=j.easing,P=a.easing_formulas[O];if(!P){P=r(O).match(N);if(P&&P.length==5){var R=P;P=function(a){return cC(a,+R[1],+R[2],+R[3],+R[4],t)}}else P=bf}q=j.start||b.start||+(new Date),x={anim:b,percent:e,timestamp:q,start:q+(b.del||0),status:0,initstatus:f||0,stop:!1,ms:t,easing:P,from:u,diff:w,to:v,el:d,callback:j.callback,prev:p,next:o,repeat:i||b.times,origin:d.attr(),totalOrigin:h},cy.push(x);if(f&&!k&&!l){x.stop=!0,x.start=new Date-t*f;if(cy.length==1)return cA()}l&&(x.start=new Date-x.ms*f),cy.length==1&&cz(cA)}else k.initstatus=f,k.start=new Date-k.ms*f;eve("raphael.anim.start."+d.id,d,b)}}function cD(a,b){var c=[],d={};this.ms=b,this.times=1;if(a){for(var e in a)a[g](e)&&(d[Q(e)]=a[e],c.push(Q(e)));c.sort(bd)}this.anim=d,this.top=c[c.length-1],this.percents=c}function cC(a,b,c,d,e,f){function o(a,b){var c,d,e,f,j,k;for(e=a,k=0;k<8;k++){f=m(e)-a;if(z(f)<b)return e;j=(3*i*e+2*h)*e+g;if(z(j)<1e-6)break;e=e-f/j}c=0,d=1,e=a;if(e<c)return c;if(e>d)return d;while(c<d){f=m(e);if(z(f-a)<b)return e;a>f?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" × "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p<q;p++){var r=b[p];if(r[0]=="M")e=i=r[1],f=j=r[2];else{r[0]=="C"?(m=[e,f].concat(r.slice(1)),e=m[6],f=m[7]):(m=[e,f,e,f,i,j,i,j],e=i,f=j);for(var s=0,t=c.length;s<t;s++){var u=c[s];if(u[0]=="M")g=k=u[1],h=l=u[2];else{u[0]=="C"?(n=[g,h].concat(u.slice(1)),g=n[6],h=n[7]):(n=[g,h,g,h,k,l,k,l],g=k,h=l);var v=bG(m,n,d);if(d)o+=v;else{for(var w=0,x=v.length;w<x;w++)v[w].segment1=p,v[w].segment2=s,v[w].bez1=m,v[w].bez2=n;o=o.concat(v)}}}}}return o}function bG(b,c,d){var e=a.bezierBBox(b),f=a.bezierBBox(c);if(!a.isBBoxIntersect(e,f))return d?0:[];var g=bB.apply(0,b),h=bB.apply(0,c),i=~~(g/5),j=~~(h/5),k=[],l=[],m={},n=d?0:[];for(var o=0;o<i+1;o++){var p=a.findDotsAtSegment.apply(a,b.concat(o/i));k.push({x:p.x,y:p.y,t:o/i})}for(o=0;o<j+1;o++)p=a.findDotsAtSegment.apply(a,c.concat(o/j)),l.push({x:p.x,y:p.y,t:o/j});for(o=0;o<i;o++)for(var q=0;q<j;q++){var r=k[o],s=k[o+1],t=l[q],u=l[q+1],v=z(s.x-r.x)<.001?"y":"x",w=z(u.x-t.x)<.001?"y":"x",x=bD(r.x,r.y,s.x,s.y,t.x,t.y,u.x,u.y);if(x){if(m[x.x.toFixed(4)]==x.y.toFixed(4))continue;m[x.x.toFixed(4)]=x.y.toFixed(4);var y=r.t+z((x[v]-r[v])/(s[v]-r[v]))*(s.t-r.t),A=t.t+z((x[w]-t[w])/(u[w]-t[w]))*(u.t-t.t);y>=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)<y(e,g)||y(a,c)>x(e,g)||x(b,d)<y(f,h)||y(b,d)>x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var 
l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)<i)){var j=1,k=j/2,l=j-k,m,n=.01;m=bB(a,b,c,d,e,f,g,h,l);while(z(m-i)>n)k/=2,l+=(m<i?1:-1)*k,m=bB(a,b,c,d,e,f,g,h,l);return l}}function bB(a,b,c,d,e,f,g,h,i){i==null&&(i=1),i=i>1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;o<k;o++){var p=j*l[o]+j,q=bA(p,a,c,e,g),r=bA(p,b,d,f,h),s=q*q+r*r;n+=m[o]*w.sqrt(s)}return j*n}function bA(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function by(a,b){var c=[];for(var d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function bm(a){if(Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[g](c)&&(b[c]=bm(a[c]));return b}function a(c){if(a.is(c,"function"))return b?c():eve.on("raphael.DOMload",c);if(a.is(c,E))return a._engine.create[m](a,c.splice(0,3+a.is(c[0],C))).add(c);var d=Array.prototype.slice.call(arguments,0);if(a.is(d[d.length-1],"function")){var e=d.pop();return b?e.call(a._engine.create[m](a,d)):eve.on("raphael.DOMload",function(){e.call(a._engine.create[m](a,d))})}return a._engine.create[m](a,arguments)}a.version="2.1.0",a.eve=eve;var b,c=/[, ]+/,d={circle:1,rect:1,path:1,ellipse:1,text:1,image:1},e=/\{(\d+)\}/g,f="prototype",g="hasOwnProperty",h={doc:document,win:window},i={was:Object.prototype[g].call(h.win,"Raphael"),is:h.win.Raphael},j=function(){this.ca=this.customAttributes={}},k,l="appendChild",m="apply",n="concat",o="createTouch"in h.doc,p="",q=" ",r=String,s="split",t="click dblclick mousedown mousemove mouseout mouseover mouseup touchstart touchmove touchend 
touchcancel"[s](q),u={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},v=r.prototype.toLowerCase,w=Math,x=w.max,y=w.min,z=w.abs,A=w.pow,B=w.PI,C="number",D="string",E="array",F="toString",G="fill",H=Object.prototype.toString,I={},J="push",K=a._ISURL=/^url\(['"]?([^\)]+?)['"]?\)$/i,L=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\))\s*$/i,M={NaN:1,Infinity:1,"-Infinity":1},N=/^(?:cubic-)?bezier\(([^,]+),([^,]+),([^,]+),([^\)]+)\)/,O=w.round,P="setAttribute",Q=parseFloat,R=parseInt,S=r.prototype.toUpperCase,T=a._availableAttrs={"arrow-end":"none","arrow-start":"none",blur:0,"clip-rect":"0 0 1e9 1e9",cursor:"default",cx:0,cy:0,fill:"#fff","fill-opacity":1,font:'10px "Arial"',"font-family":'"Arial"',"font-size":"10","font-style":"normal","font-weight":400,gradient:0,height:0,href:"http://raphaeljs.com/","letter-spacing":0,opacity:1,path:"M0,0",r:0,rx:0,ry:0,src:"",stroke:"#000","stroke-dasharray":"","stroke-linecap":"butt","stroke-linejoin":"butt","stroke-miterlimit":0,"stroke-opacity":1,"stroke-width":1,target:"_blank","text-anchor":"middle",title:"Raphael",transform:"",width:0,x:0,y:0},U=a._availableAnimAttrs={blur:C,"clip-rect":"csv",cx:C,cy:C,fill:"colour","fill-opacity":C,"font-size":C,height:C,opacity:C,path:"path",r:C,rx:C,ry:C,stroke:"colour","stroke-opacity":C,"stroke-width":C,transform:"transform",width:C,x:C,y:C},V=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]/g,W=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/,X={hs:1,rg:1},Y=/,?([achlmqrstvxz]),?/gi,Z=/([achlmrqstvz])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,$=/([rstm])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,_=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/ig,ba=a._radial_gradient=/^r(?:\(([^,]+?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u20
00\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*([^\)]+?)\))?/,bb={},bc=function(a,b){return a.key-b.key},bd=function(a,b){return Q(a)-Q(b)},be=function(){},bf=function(a){return a},bg=a._rectPath=function(a,b,c,d,e){if(e)return[["M",a+e,b],["l",c-e*2,0],["a",e,e,0,0,1,e,e],["l",0,d-e*2],["a",e,e,0,0,1,-e,e],["l",e*2-c,0],["a",e,e,0,0,1,-e,-e],["l",0,e*2-d],["a",e,e,0,0,1,e,-e],["z"]];return[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]]},bh=function(a,b,c,d){d==null&&(d=c);return[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]]},bi=a._getPath={path:function(a){return a.attr("path")},circle:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.r)},ellipse:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.rx,b.ry)},rect:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height,b.r)},image:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height)},text:function(a){var b=a._getBBox();return bg(b.x,b.y,b.width,b.height)}},bj=a.mapPath=function(a,b){if(!b)return a;var c,d,e,f,g,h,i;a=bR(a);for(e=0,g=a.length;e<g;e++){i=a[e];for(f=1,h=i.length;f<h;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d}return a};a._g=h,a.type=h.win.SVGAngle||h.doc.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure","1.1")?"SVG":"VML";if(a.type=="VML"){var bk=h.doc.createElement("div"),bl;bk.innerHTML='<v:shape adj="1"/>',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(f<d)return c-f;if(f>b-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write("<body>"),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="Raphaël Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var 
e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return 
a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r<t)&&(z+=180);return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:x,y:y},alpha:z}},a.bezierBBox=function(b,c,d,e,f,g,h,i){a.is(b,"array")||(b=[b,c,d,e,f,g,h,i]);var j=bQ.apply(null,b);return{x:j.min.x,y:j.min.y,x2:j.max.x,y2:j.max.y,width:j.max.x-j.min.x,height:j.max.y-j.min.y}},a.isPointInsideBBox=function(a,b,c){return b>=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.x<c.x2&&b.x>c.x||c.x<b.x2&&c.x>b.x)&&(b.y<c.y2&&b.y>c.y||c.y<b.y2&&c.y>b.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"Raphaël: you are calling to method “"+a+"” of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h<i;h++){g=a[h];if(g[0]=="M")c=g[1],d=g[2],e.push(c),f.push(d);else{var j=bQ(c,d,g[1],g[2],g[3],g[4],g[5],g[6]);e=e[n](j.min.x,j.max.x),f=f[n](j.min.y,j.max.y),c=g[5],d=g[6]}}var k=y[m](0,e),l=y[m](0,f),o=x[m](0,e),p=x[m](0,f),q={x:k,y:l,x2:o,y2:p,width:o-k,height:p-l};b.bbox=bm(q);return q},bJ=function(b){var c=bm(b);c.toString=a._path2string;return c},bK=a._pathToRelative=function(b){var c=bz(b);if(c.rel)return bJ(c.rel);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=b[0][1],f=b[0][2],g=e,h=f,i++,d.push(["M",e,f]));for(var j=i,k=b.length;j<k;j++){var 
l=d[j]=[],m=b[j];if(m[0]!=v.call(m[0])){l[0]=v.call(m[0]);switch(l[0]){case"a":l[1]=m[1],l[2]=m[2],l[3]=m[3],l[4]=m[4],l[5]=m[5],l[6]=+(m[6]-e).toFixed(3),l[7]=+(m[7]-f).toFixed(3);break;case"v":l[1]=+(m[1]-f).toFixed(3);break;case"m":g=m[1],h=m[2];default:for(var n=1,o=m.length;n<o;n++)l[n]=+(m[n]-(n%2?e:f)).toFixed(3)}}else{l=d[j]=[],m[0]=="m"&&(g=m[1]+e,h=m[2]+f);for(var p=0,q=m.length;p<q;p++)d[j][p]=m[p]}var r=d[j].length;switch(d[j][0]){case"z":e=g,f=h;break;case"h":e+=+d[j][r-1];break;case"v":f+=+d[j][r-1];break;default:e+=+d[j][r-2],f+=+d[j][r-1]}}d.toString=a._path2string,c.rel=bJ(d);return d},bL=a._pathToAbsolute=function(b){var c=bz(b);if(c.abs)return bJ(c.abs);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);if(!b||!b.length)return[["M",0,0]];var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=+b[0][1],f=+b[0][2],g=e,h=f,i++,d[0]=["M",e,f]);var j=b.length==3&&b[0][0]=="M"&&b[1][0].toUpperCase()=="R"&&b[2][0].toUpperCase()=="Z";for(var k,l,m=i,o=b.length;m<o;m++){d.push(k=[]),l=b[m];if(l[0]!=S.call(l[0])){k[0]=S.call(l[0]);switch(k[0]){case"A":k[1]=l[1],k[2]=l[2],k[3]=l[3],k[4]=l[4],k[5]=l[5],k[6]=+(l[6]+e),k[7]=+(l[7]+f);break;case"V":k[1]=+l[1]+f;break;case"H":k[1]=+l[1]+e;break;case"R":var p=[e,f][n](l.slice(1));for(var q=2,r=p.length;q<r;q++)p[q]=+p[q]+e,p[++q]=+p[q]+f;d.pop(),d=d[n](by(p,j));break;case"M":g=+l[1]+e,h=+l[2]+f;default:for(q=1,r=l.length;q<r;q++)k[q]=+l[q]+(q%2?e:f)}}else if(l[0]=="R")p=[e,f][n](l.slice(1)),d.pop(),d=d[n](by(p,j)),k=["R"][n](l.slice(-2));else for(var s=0,t=l.length;s<t;s++)k[s]=l[s];switch(k[0]){case"Z":e=g,f=h;break;case"H":e=k[1];break;case"V":f=k[1];break;case"M":g=k[k.length-2],h=k[k.length-1];default:e=k[k.length-2],f=k[k.length-1]}}d.toString=a._path2string,c.abs=bJ(d);return d},bM=function(a,b,c,d){return[a,b,c,d,c,d]},bN=function(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]},bO=function(a,b,c,d,e,f,g,h,i,j){var k=B*120/180,l=B/180*(+e||0),m=[],o,p=bv(function(a,b,c){var d=a*w.cos(c)-b*w.sin(c),e=a*w.sin(c)+b*w.cos(c);return{x:d,y:e}});if(!j){o=p(a,b,-l),a=o.x,b=o.y,o=p(h,i,-l),h=o.x,i=o.y;var q=w.cos(B/180*e),r=w.sin(B/180*e),t=(a-h)/2,u=(b-i)/2,v=t*t/(c*c)+u*u/(d*d);v>1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=a<C?B-E:E,F=h<C?B-F:F,E<0&&(E=B*2+E),F<0&&(F=B*2+F),g&&E>F&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W<X;W++)V[W]=W%2?p(m[W-1],m[W],l).y:p(m[W],m[W+1],l).x;return V},bP=function(a,b,c,d,e,f,g,h,i){var j=1-i;return{x:A(j,3)*a+A(j,2)*3*i*c+j*3*i*i*e+A(i,3)*g,y:A(j,3)*b+A(j,2)*3*i*d+j*3*i*i*f+A(i,3)*h}},bQ=bv(function(a,b,c,d,e,f,g,h){var 
i=e-2*c+a-(g-2*e+c),j=2*(c-a)-2*(e-c),k=a-c,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,o=[b,h],p=[a,g],q;z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var k=0,l=x(d.length,e&&e.length||0);k<l;k++){d[k]=h(d[k],f),i(d,k),e&&(e[k]=h(e[k],g)),e&&i(e,k),j(d,e,f,g,k),j(e,d,g,f,k);var o=d[k],p=e&&e[k],q=o.length,r=e&&p.length;f.x=o[q-2],f.y=o[q-1],f.bx=Q(o[q-4])||f.x,f.by=Q(o[q-3])||f.y,g.bx=e&&(Q(p[r-4])||g.x),g.by=e&&(Q(p[r-3])||g.y),g.x=e&&p[r-2],g.y=e&&p[r-1]}e||(c.curve=bJ(d));return e?[d,e]:d},null,bJ),bS=a._parseDots=bv(function(b){var c=[];for(var d=0,e=b.length;d<e;d++){var f={},g=b[d].match(/^([^:]*):?([\d\.]*)/);f.color=a.getRGB(g[1]);if(f.color.error)return null;f.color=f.color.hex,g[2]&&(f.offset=g[2]+"%"),c.push(f)}for(d=1,e=c.length-1;d<e;d++)if(!c[d].offset){var h=Q(c[d-1].offset||0),i=0;for(var j=d+1;j<e;j++)if(c[j].offset){i=c[j].offset;break}i||(i=100,j=e),i=Q(i);var k=(i-h)/(j-d+1);for(;d<j;d++)h+=k,c[d].offset=h+"%"}return c}),bT=a._tear=function(a,b){a==b.top&&(b.top=a.prev),a==b.bottom&&(b.bottom=a.next),a.next&&(a.next.prev=a.prev),a.prev&&(a.prev.next=a.next)},bU=a._tofront=function(a,b){b.top!==a&&(bT(a,b),a.next=null,a.prev=b.top,b.top.next=a,b.top=a)},bV=a._toback=function(a,b){b.bottom!==a&&(bT(a,b),a.next=b.bottom,a.prev=null,b.bottom.prev=a,b.bottom=a)},bW=a._insertafter=function(a,b,c){bT(a,c),b==c.top&&(c.top=a),b.next&&(b.next.prev=a),a.next=b.next,a.prev=b,b.next=a},bX=a._insertbefore=function(a,b,c){bT(a,c),b==c.bottom&&(c.bottom=a),b.prev&&(b.prev.next=a),a.prev=b.prev,b.prev=a,a.next=b},bY=a.toMatrix=function(a,b){var c=bI(a),d={_:{transform:p},getBBox:function(){return c}};b$(d,b);return d.matrix},bZ=a.transformPath=function(a,b){return bj(a,bY(a,b))},b$=a._extractTransform=function(b,c){if(c==null)return b._.transform;c=r(c).replace(/\.{3}|\u2026/g,b._.transform||p);var d=a.parseTransformString(c),e=0,f=0,g=0,h=1,i=1,j=b._,k=new 
cb;j.transform=d||[];if(d)for(var l=0,m=d.length;l<m;l++){var n=d[l],o=n.length,q=r(n[0]).toLowerCase(),s=n[0]!=q,t=s?k.invert():0,u,v,w,x,y;q=="t"&&o==3?s?(u=t.x(0,0),v=t.y(0,0),w=t.x(n[1],n[2]),x=t.y(n[1],n[2]),k.translate(w-u,x-v)):k.translate(n[1],n[2]):q=="r"?o==2?(y=y||b.getBBox(1),k.rotate(n[1],y.x+y.width/2,y.y+y.height/2),e+=n[1]):o==4&&(s?(w=t.x(n[2],n[3]),x=t.y(n[2],n[3]),k.rotate(n[1],w,x)):k.rotate(n[1],n[2],n[3]),e+=n[1]):q=="s"?o==2||o==3?(y=y||b.getBBox(1),k.scale(n[1],n[o-1],y.x+y.width/2,y.y+y.height/2),h*=n[1],i*=n[o-1]):o==5&&(s?(w=t.x(n[3],n[4]),x=t.y(n[3],n[4]),k.scale(n[1],n[2],w,x)):k.scale(n[1],n[2],n[3],n[4]),h*=n[1],i*=n[2]):q=="m"&&o==7&&k.add(n[1],n[2],n[3],n[4],n[5],n[6]),j.dirtyT=1,b.matrix=k}b.matrix=k,j.sx=h,j.sy=i,j.deg=e,j.dx=f=k.e,j.dy=g=k.f,h==1&&i==1&&!e&&j.bbox?(j.bbox.x+=+f,j.bbox.y+=+g):j.dirtyT=1},b_=function(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return a.length==4?[b,0,a[2],a[3]]:[b,0];case"s":return a.length==5?[b,1,1,a[3],a[4]]:a.length==3?[b,1,1]:[b,1]}},ca=a._equaliseTransform=function(b,c){c=r(c).replace(/\.{3}|\u2026/g,b),b=a.parseTransformString(b)||[],c=a.parseTransformString(c)||[];var d=x(b.length,c.length),e=[],f=[],g=0,h,i,j,k;for(;g<d;g++){j=b[g]||b_(c[g]),k=c[g]||b_(j);if(j[0]!=k[0]||j[0].toLowerCase()=="r"&&(j[2]!=k[2]||j[3]!=k[3])||j[0].toLowerCase()=="s"&&(j[3]!=k[3]||j[4]!=k[4]))return;e[g]=[],f[g]=[];for(h=0,i=x(j.length,k.length);h<i;h++)h in j&&(e[g][h]=j[h]),h in k&&(f[g][h]=k[h])}return{from:e,to:f}};a._getContainer=function(b,c,d,e){var f;f=e==null&&!a.is(b,"object")?h.doc.getElementById(b):b;if(f!=null){if(f.tagName)return c==null?{container:f,width:f.style.pixelWidth||f.offsetWidth,height:f.style.pixelHeight||f.offsetHeight}:{container:f,width:c,height:d};return{container:1,x:b,y:c,width:d,height:e}}},a.pathToRelative=bK,a._engine={},a.path2curve=bR,a.matrix=function(a,b,c,d,e,f){return new cb(a,b,c,d,e,f)},function(b){function d(a){var b=w.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}function c(a){return a[0]*a[0]+a[1]*a[1]}b.add=function(a,b,c,d,e,f){var g=[[],[],[]],h=[[this.a,this.c,this.e],[this.b,this.d,this.f],[0,0,1]],i=[[a,c,e],[b,d,f],[0,0,1]],j,k,l,m;a&&a instanceof cb&&(i=[[a.a,a.c,a.e],[a.b,a.d,a.f],[0,0,1]]);for(j=0;j<3;j++)for(k=0;k<3;k++){m=0;for(l=0;l<3;l++)m+=h[j][l]*i[l][k];g[j][k]=m}this.a=g[0][0],this.b=g[1][0],this.c=g[0][1],this.d=g[1][1],this.e=g[0][2],this.f=g[1][2]},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new cb(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new cb(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){this.add(1,0,0,1,a,b)},b.scale=function(a,b,c,d){b==null&&(b=a),(c||d)&&this.add(1,0,0,1,c,d),this.add(a,0,0,b,0,0),(c||d)&&this.add(1,0,0,1,-c,-d)},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+w.cos(b).toFixed(9),f=+w.sin(b).toFixed(9);this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[r.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return a.svg?"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")":[this.get(0),this.get(2),this.get(1),this.get(3),0,0].join()},b.toFilter=function(){return"progid:DXImageTransform.Microsoft.Matrix(M11="+this.get(0)+", M12="+this.get(2)+", M21="+this.get(1)+", M22="+this.get(3)+", Dx="+this.get(4)+", Dy="+this.get(5)+", 
sizingmethod='auto expand')"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.c],[this.b,this.d]];b.scalex=w.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=w.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley;var f=-e[0][1],g=e[1][1];g<0?(b.rotate=a.deg(w.acos(g)),f<0&&(b.rotate=360-b.rotate)):b.rotate=a.deg(w.asin(f)),b.isSimple=!+b.shear.toFixed(9)&&(b.scalex.toFixed(9)==b.scaley.toFixed(9)||!b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate;return b},b.toTransformString=function(a){var b=a||this[s]();if(b.isSimple){b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4);return(b.dx||b.dy?"t"+[b.dx,b.dy]:p)+(b.scalex!=1||b.scaley!=1?"s"+[b.scalex,b.scaley,0,0]:p)+(b.rotate?"r"+[b.rotate,0,0]:p)}return"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]}}(cb.prototype);var cc=navigator.userAgent.match(/Version\/(.*?)\s/)||navigator.userAgent.match(/Chrome\/(\d+)/);navigator.vendor=="Apple Computer, Inc."&&(cc&&cc[1]<4||navigator.platform.slice(0,2)=="iP")||navigator.vendor=="Google Inc."&&cc&&cc[1]<8?k.safari=function(){var a=this.rect(-99,-99,this.width+99,this.height+99).attr({stroke:"none"});setTimeout(function(){a.remove()})}:k.safari=be;var cd=function(){this.returnValue=!1},ce=function(){return this.originalEvent.preventDefault()},cf=function(){this.cancelBubble=!0},cg=function(){return this.originalEvent.stopPropagation()},ch=function(){if(h.doc.addEventListener)return function(a,b,c,d){var e=o&&u[b]?u[b]:b,f=function(e){var f=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,i=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,j=e.clientX+i,k=e.clientY+f;if(o&&u[g](b))for(var l=0,m=e.targetTouches&&e.targetTouches.length;l<m;l++)if(e.targetTouches[l].target==a){var n=e;e=e.targetTouches[l],e.originalEvent=n,e.preventDefault=ce,e.stopPropagation=cg;break}return c.call(d,e,j,k)};a.addEventListener(e,f,!1);return function(){a.removeEventListener(e,f,!1);return!0}};if(h.doc.attachEvent)return function(a,b,c,d){var e=function(a){a=a||h.win.event;var b=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f=a.clientX+e,g=a.clientY+b;a.preventDefault=a.preventDefault||cd,a.stopPropagation=a.stopPropagation||cf;return c.call(d,a,f,g)};a.attachEvent("on"+b,e);var f=function(){a.detachEvent("on"+b,e);return!0};return f}}(),ci=[],cj=function(a){var b=a.clientX,c=a.clientY,d=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f,g=ci.length;while(g--){f=ci[g];if(o){var i=a.touches.length,j;while(i--){j=a.touches[i];if(j.identifier==f.el._drag.id){b=j.clientX,c=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}}else a.preventDefault();var k=f.el.node,l,m=k.nextSibling,n=k.parentNode,p=k.style.display;h.win.opera&&n.removeChild(k),k.style.display="none",l=f.el.paper.getElementByPoint(b,c),k.style.display=p,h.win.opera&&(m?n.insertBefore(k,m):n.appendChild(k)),l&&eve("raphael.drag.over."+f.el.id,f.el,l),b+=e,c+=d,eve("raphael.drag.move."+f.el.id,f.move_scope||f.el,b-f.el._drag.x,c-f.el._drag.y,b,c,a)}},ck=function(b){a.unmousemove(cj).unmouseup(ck);var 
c=ci.length,d;while(c--)d=ci[c],d.el._drag={},eve("raphael.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,b);ci=[]},cl=a.el={};for(var cm=t.length;cm--;)(function(b){a[b]=cl[b]=function(c,d){a.is(c,"function")&&(this.events=this.events||[],this.events.push({name:b,f:c,unbind:ch(this.shape||this.node||h.doc,b,c,d||this)}));return this},a["un"+b]=cl["un"+b]=function(a){var c=this.events||[],d=c.length;while(d--)if(c[d].name==b&&c[d].f==a){c[d].unbind(),c.splice(d,1),!c.length&&delete this.events;return this}return this}})(t[cm]);cl.data=function(b,c){var d=bb[this.id]=bb[this.id]||{};if(arguments.length==1){if(a.is(b,"object")){for(var e in b)b[g](e)&&this.data(e,b[e]);return this}eve("raphael.data.get."+this.id,this,d[b],b);return d[b]}d[b]=c,eve("raphael.data.set."+this.id,this,c,b);return this},cl.removeData=function(a){a==null?bb[this.id]={}:bb[this.id]&&delete bb[this.id][a];return this},cl.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},cl.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var cn=[];cl.drag=function(b,c,d,e,f,g){function i(i){(i.originalEvent||i).preventDefault();var j=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,k=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft;this._drag.x=i.clientX+k,this._drag.y=i.clientY+j,this._drag.id=i.identifier,!ci.length&&a.mousemove(cj).mouseup(ck),ci.push({el:this,move_scope:e,start_scope:f,end_scope:g}),c&&eve.on("raphael.drag.start."+this.id,c),b&&eve.on("raphael.drag.move."+this.id,b),d&&eve.on("raphael.drag.end."+this.id,d),eve("raphael.drag.start."+this.id,f||e||this,i.clientX+k,i.clientY+j,i)}this._drag={},cn.push({el:this,start:i}),this.mousedown(i);return this},cl.onDragOver=function(a){a?eve.on("raphael.drag.over."+this.id,a):eve.unbind("raphael.drag.over."+this.id)},cl.undrag=function(){var b=cn.length;while(b--)cn[b].el==this&&(this.unmousedown(cn[b].start),cn.splice(b,1),eve.unbind("raphael.drag.*."+this.id));!cn.length&&a.unmousemove(cj).unmouseup(ck)},k.circle=function(b,c,d){var e=a._engine.circle(this,b||0,c||0,d||0);this.__set__&&this.__set__.push(e);return e},k.rect=function(b,c,d,e,f){var g=a._engine.rect(this,b||0,c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.ellipse=function(b,c,d,e){var f=a._engine.ellipse(this,b||0,c||0,d||0,e||0);this.__set__&&this.__set__.push(f);return f},k.path=function(b){b&&!a.is(b,D)&&!a.is(b[0],E)&&(b+=p);var c=a._engine.path(a.format[m](a,arguments),this);this.__set__&&this.__set__.push(c);return c},k.image=function(b,c,d,e,f){var g=a._engine.image(this,b||"about:blank",c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.text=function(b,c,d){var e=a._engine.text(this,b||0,c||0,r(d));this.__set__&&this.__set__.push(e);return e},k.set=function(b){!a.is(b,"array")&&(b=Array.prototype.splice.call(arguments,0,arguments.length));var c=new cG(b);this.__set__&&this.__set__.push(c);return c},k.setStart=function(a){this.__set__=a||this.set()},k.setFinish=function(a){var b=this.__set__;delete this.__set__;return b},k.setSize=function(b,c){return a._engine.setSize.call(this,b,c)},k.setViewBox=function(b,c,d,e,f){return a._engine.setViewBox.call(this,b,c,d,e,f)},k.top=k.bottom=null,k.raphael=a;var co=function(a){var 
b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,g=e.clientLeft||d.clientLeft||0,i=b.top+(h.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(h.win.pageXOffset||e.scrollLeft||d.scrollLeft)-g;return{y:i,x:j}};k.getElementByPoint=function(a,b){var c=this,d=c.canvas,e=h.doc.elementFromPoint(a,b);if(h.win.opera&&e.tagName=="svg"){var f=co(d),g=d.createSVGRect();g.x=a-f.x,g.y=b-f.y,g.width=g.height=1;var i=d.getIntersectionList(g,null);i.length&&(e=i[i.length-1])}if(!e)return null;while(e.parentNode&&e!=d.parentNode&&!e.raphael)e=e.parentNode;e==c.canvas.parentNode&&(e=d),e=e&&e.raphael?c.getById(e.raphaelid):null;return e},k.getById=function(a){var b=this.bottom;while(b){if(b.id==a)return b;b=b.next}return null},k.forEach=function(a,b){var c=this.bottom;while(c){if(a.call(b,c)===!1)return this;c=c.next}return this},k.getElementsByPoint=function(a,b){var c=this.set();this.forEach(function(d){d.isPointInside(a,b)&&c.push(d)});return c},cl.isPointInside=function(b,c){var d=this.realPath=this.realPath||bi[this.type](this);return a.isPointInsidePath(d,b,c)},cl.getBBox=function(a){if(this.removed)return{};var b=this._;if(a){if(b.dirty||!b.bboxwt)this.realPath=bi[this.type](this),b.bboxwt=bI(this.realPath),b.bboxwt.toString=cq,b.dirty=0;return b.bboxwt}if(b.dirty||b.dirtyT||!b.bbox){if(b.dirty||!this.realPath)b.bboxwt=0,this.realPath=bi[this.type](this);b.bbox=bI(bj(this.realPath,this.matrix)),b.bbox.toString=cq,b.dirty=b.dirtyT=0}return b.bbox},cl.clone=function(){if(this.removed)return null;var a=this.paper[this.type]().attr(this.attr());this.__set__&&this.__set__.push(a);return a},cl.glow=function(a){if(this.type=="text")return null;a=a||{};var b={width:(a.width||10)+(+this.attr("stroke-width")||1),fill:a.fill||!1,opacity:a.opacity||.5,offsetx:a.offsetx||0,offsety:a.offsety||0,color:a.color||"#000"},c=b.width/2,d=this.paper,e=d.set(),f=this.realPath||bi[this.type](this);f=this.matrix?bj(f,this.matrix):f;for(var g=1;g<c+1;g++)e.push(d.path(f).attr({stroke:b.color,fill:b.fill?b.color:"none","stroke-linejoin":"round","stroke-linecap":"round","stroke-width":+(b.width/c*g).toFixed(3),opacity:+(b.opacity/c).toFixed(3)}));return e.insertBefore(this).translate(b.offsetx,b.offsety)};var cr={},cs=function(b,c,d,e,f,g,h,i,j){return j==null?bB(b,c,d,e,f,g,h,i):a.findDotsAtSegment(b,c,d,e,f,g,h,i,bC(b,c,d,e,f,g,h,i,j))},ct=function(b,c){return function(d,e,f){d=bR(d);var g,h,i,j,k="",l={},m,n=0;for(var o=0,p=d.length;o<p;o++){i=d[o];if(i[0]=="M")g=+i[1],h=+i[2];else{j=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6]);if(n+j>e){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return 
a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c<cy.length;c++){var d=cy[c];if(d.el.removed||d.paused)continue;var e=b-d.start,f=d.ms,h=d.easing,i=d.from,j=d.diff,k=d.to,l=d.t,m=d.el,o={},p,r={},s;d.initstatus?(e=(d.initstatus*d.anim.top-d.prev)/(d.percent-d.prev)*f,d.status=d.initstatus,delete d.initstatus,d.stop&&cy.splice(c--,1)):d.status=(d.prev+(d.percent-d.prev)*(e/f))/d.anim.top;if(e<0)continue;if(e<f){var t=h(e/f);for(var u in i)if(i[g](u)){switch(U[u]){case C:p=+i[u]+t*f*j[u];break;case"colour":p="rgb("+[cB(O(i[u].r+t*f*j[u].r)),cB(O(i[u].g+t*f*j[u].g)),cB(O(i[u].b+t*f*j[u].b))].join(",")+")";break;case"path":p=[];for(var v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(var x=1,y=i[u][v].length;x<y;x++)p[v][x]=+i[u][v][x]+t*f*j[u][v][x];p[v]=p[v].join(q)}p=p.join(q);break;case"transform":if(j[u].real){p=[];for(v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(x=1,y=i[u][v].length;x<y;x++)p[v][x]=i[u][v][x]+t*f*j[u][v][x]}}else{var z=function(a){return+i[u][a]+t*f*j[u][a]};p=[["m",z(0),z(1),z(2),z(3),z(4),z(5)]]}break;case"csv":if(u=="clip-rect"){p=[],v=4;while(v--)p[v]=+i[u][v]+t*f*j[u][v]}break;default:var A=[][n](i[u]);p=[],v=m.paper.customAttributes[u].length;while(v--)p[v]=+A[v]+t*f*j[u][v]}o[u]=p}m.attr(o),function(a,b,c){setTimeout(function(){eve("raphael.anim.frame."+a,b,c)})}(m.id,m,d.anim)}else{(function(b,c,d){setTimeout(function(){eve("raphael.anim.frame."+c.id,c,d),eve("raphael.anim.finish."+c.id,c,d),a.is(b,"function")&&b.call(c)})})(d.callback,m,d.anim),m.attr(k),cy.splice(c--,1);if(d.repeat>1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l<m;l++)if(cy[l].anim==c&&cy[l].el==b){cy[m-1].start=cy[l].start;break}return h},cl.onAnimation=function(a){a?eve.on("raphael.anim.frame."+this.id,a):eve.unbind("raphael.anim.frame."+this.id);return this},cD.prototype.delay=function(a){var b=new cD(this.anim,this.ms);b.times=this.times,b.del=+a||0;return b},cD.prototype.repeat=function(a){var b=new cD(this.anim,this.ms);b.del=this.del,b.times=w.floor(x(a,0))||1;return b},a.animation=function(b,c,d,e){if(b instanceof cD)return 
b;if(a.is(d,"function")||!d)e=e||d||null,d=null;b=Object(b),c=+c||0;var f={},h,i;for(i in b)b[g](i)&&Q(i)!=i&&Q(i)+"%"!=i&&(h=!0,f[i]=b[i]);if(!h)return new cD(b,c);d&&(f.easing=d),e&&(f.callback=e);return new cD({100:f},c)},cl.animate=function(b,c,d,e){var f=this;if(f.removed){e&&e.call(f);return f}var g=b instanceof cD?b:a.animation(b,c,d,e);cE(g,f,g.percents[0],null,f.attr());return f},cl.setTime=function(a,b){a&&b!=null&&this.status(a,y(b,a.ms)/a.ms);return this},cl.status=function(a,b){var c=[],d=0,e,f;if(b!=null){cE(a,this,-1,y(b,1));return this}e=cy.length;for(;d<e;d++){f=cy[d];if(f.el.id==this.id&&(!a||f.anim==a)){if(a)return f.status;c.push({anim:f.anim,status:f.status})}}if(a)return 0;return c},cl.pause=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.pause."+this.id,this,cy[b].anim)!==!1&&(cy[b].paused=!0);return this},cl.resume=function(a){for(var b=0;b<cy.length;b++)if(cy[b].el.id==this.id&&(!a||cy[b].anim==a)){var c=cy[b];eve("raphael.anim.resume."+this.id,this,c.anim)!==!1&&(delete c.paused,this.status(c.anim,c.status))}return this},cl.stop=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.stop."+this.id,this,cy[b].anim)!==!1&&cy.splice(b--,1);return this},eve.on("raphael.remove",cF),eve.on("raphael.clear",cF),cl.toString=function(){return"Raphaël’s object"};var cG=function(a){this.items=[],this.length=0,this.type="set";if(a)for(var b=0,c=a.length;b<c;b++)a[b]&&(a[b].constructor==cl.constructor||a[b].constructor==cG)&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},cH=cG.prototype;cH.push=function(){var a,b;for(var c=0,d=arguments.length;c<d;c++)a=arguments[c],a&&(a.constructor==cl.constructor||a.constructor==cG)&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},cH.pop=function(){this.length&&delete this[this.length--];return this.items.pop()},cH.forEach=function(a,b){for(var c=0,d=this.items.length;c<d;c++)if(a.call(b,this.items[c],c)===!1)return this;return this};for(var cI in cl)cl[g](cI)&&(cH[cI]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a][m](c,b)})}}(cI));cH.attr=function(b,c){if(b&&a.is(b,E)&&a.is(b[0],"object"))for(var d=0,e=b.length;d<e;d++)this.items[d].attr(b[d]);else for(var f=0,g=this.items.length;f<g;f++)this.items[f].attr(b,c);return this},cH.clear=function(){while(this.length)this.pop()},cH.splice=function(a,b,c){a=a<0?x(this.length+a,0):a,b=x(0,y(this.length-a,b));var d=[],e=[],f=[],g;for(g=2;g<arguments.length;g++)f.push(arguments[g]);for(g=0;g<b;g++)e.push(this[a+g]);for(;g<this.length-a;g++)d.push(this[a+g]);var h=f.length;for(g=0;g<h+d.length;g++)this.items[a+g]=this[a+g]=g<h?f[g]:d[g-h];g=this.items.length=this.length-=b-h;while(this[g])delete this[g++];return new cG(e)},cH.exclude=function(a){for(var b=0,c=this.length;b<c;b++)if(this[b]==a){this.splice(b,1);return!0}},cH.animate=function(b,c,d,e){(a.is(d,"function")||!d)&&(e=d||null);var f=this.items.length,g=f,h,i=this,j;if(!f)return this;e&&(j=function(){!--f&&e.call(i)}),d=a.is(d,D)?d:j;var k=a.animation(b,c,d,j);h=this.items[--g].animate(k);while(g--)this.items[g]&&!this.items[g].removed&&this.items[g].animateWith(h,k,k);return this},cH.insertAfter=function(a){var b=this.items.length;while(b--)this.items[b].insertAfter(a);return this},cH.getBBox=function(){var a=[],b=[],c=[],d=[];for(var e=this.items.length;e--;)if(!this.items[e].removed){var 
f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}a=y[m](0,a),b=y[m](0,b),c=x[m](0,c),d=x[m](0,d);return{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b}},cH.clone=function(a){a=new cG;for(var b=0,c=this.items.length;b<c;b++)a.push(this.items[b].clone());return a},cH.toString=function(){return"Raphaël‘s set"},a.registerFont=function(a){if(!a.face)return a;this.fonts=this.fonts||{};var b={w:a.w,face:{},glyphs:{}},c=a.face["font-family"];for(var d in a.face)a.face[g](d)&&(b.face[d]=a.face[d]);this.fonts[c]?this.fonts[c].push(b):this.fonts[c]=[b];if(!a.svg){b.face["units-per-em"]=R(a.face["units-per-em"],10);for(var e in a.glyphs)if(a.glyphs[g](e)){var f=a.glyphs[e];b.glyphs[e]={w:f.w,k:{},d:f.d&&"M"+f.d.replace(/[mlcxtrv]/g,function(a){return{l:"L",c:"C",x:"z",t:"m",r:"l",v:"c"}[a]||"M"})+"z"};if(f.k)for(var h in f.k)f[g](h)&&(b.glyphs[e].k[h]=f.k[h])}}return a},k.getFont=function(b,c,d,e){e=e||"normal",d=d||"normal",c=+c||{normal:400,bold:700,lighter:300,bolder:800}[c]||400;if(!!a.fonts){var f=a.fonts[b];if(!f){var h=new RegExp("(^|\\s)"+b.replace(/[^\w\d\s+!~.:_-]/g,p)+"(\\s|$)","i");for(var i in a.fonts)if(a.fonts[g](i)&&h.test(i)){f=a.fonts[i];break}}var j;if(f)for(var k=0,l=f.length;k<l;k++){j=f[k];if(j.face["font-weight"]==c&&(j.face["font-style"]==d||!j.face["font-style"])&&j.face["font-stretch"]==e)break}return j}},k.print=function(b,d,e,f,g,h,i){h=h||"middle",i=x(y(i||0,1),-1);var j=r(e)[s](p),k=0,l=0,m=p,n;a.is(f,e)&&(f=this.getFont(f));if(f){n=(g||16)/f.face["units-per-em"];var o=f.face.bbox[s](c),q=+o[0],t=o[3]-o[1],u=0,v=+o[1]+(h=="baseline"?t+ +f.face.descent:t/2);for(var w=0,z=j.length;w<z;w++){if(j[w]=="\n")k=0,B=0,l=0,u+=t;else{var A=l&&f.glyphs[j[w-1]]||{},B=f.glyphs[j[w]];k+=l?(A.w||f.w)+(A.k&&A.k[j[w]]||0)+f.w*i:0,l=1}B&&B.d&&(m+=a.transformPath(B.d,["t",k*n,u*n,"s",n,n,q,v,"t",(b-q)/n,(d-v)/n]))}}return this.path(m).attr({fill:"#000",stroke:"none"})},k.add=function(b){if(a.is(b,"array")){var c=this.set(),e=0,f=b.length,h;for(;e<f;e++)h=b[e]||{},d[g](h.type)&&c.push(this[h.type]().attr(h))}return c},a.format=function(b,c){var d=a.is(c,E)?[0][n](c):arguments;b&&a.is(b,D)&&d.length-1&&(b=b.replace(e,function(a,b){return d[++b]==null?p:d[b]}));return b||p},a.fullfill=function(){var a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),typeof e=="function"&&f&&(e=e()))}),e=(e==null||e==d?a:e)+"";return e};return function(b,d){return String(b).replace(a,function(a,b){return c(a,b,d)})}}(),a.ninja=function(){i.was?h.win.Raphael=i.is:delete Raphael;return a},a.st=cH,function(b,c,d){function e(){/in/.test(b.readyState)?setTimeout(e,9):a.eve("raphael.DOMload")}b.readyState==null&&b.addEventListener&&(b.addEventListener(c,d=function(){b.removeEventListener(c,d,!1),b.readyState="complete"},!1),b.readyState="loading"),e()}(document,"DOMContentLoaded"),i.was?h.win.Raphael=a:Raphael=a,eve.on("raphael.DOMload",function(){b=!0})}(),window.Raphael.svg&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=parseInt,f=Math,g=f.max,h=f.abs,i=f.pow,j=/[, ]+/,k=a.eve,l="",m=" ",n="http://www.w3.org/1999/xlink",o={block:"M5,0 0,2.5 5,5z",classic:"M5,0 0,2.5 5,5 3.5,3 3.5,2z",diamond:"M2.5,0 5,2.5 2.5,5 0,2.5z",open:"M6,1 1,3.5 6,6",oval:"M2.5,0A2.5,2.5,0,0,1,2.5,5 2.5,2.5,0,0,1,2.5,0z"},p={};a.toString=function(){return"Your browser supports SVG.\nYou are running Raphaël "+this.version};var q=function(d,e){if(e){typeof d=="string"&&(d=q(d));for(var 
f in e)e[b](f)&&(f.substring(0,6)=="xlink:"?d.setAttributeNS(n,f.substring(6),c(e[f])):d.setAttribute(f,c(e[f])))}else d=a._g.doc.createElementNS("http://www.w3.org/2000/svg",d),d.style&&(d.style.webkitTapHighlightColor="rgba(0,0,0,0)");return d},r=function(b,e){var j="linear",k=b.id+e,m=.5,n=.5,o=b.node,p=b.paper,r=o.style,s=a._g.doc.getElementById(k);if(!s){e=c(e).replace(a._radial_gradient,function(a,b,c){j="radial";if(b&&c){m=d(b),n=d(c);var e=(n>.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x<y;x++)s.appendChild(q("stop",{offset:w[x].offset?w[x].offset:x?"100%":"0%","stop-color":w[x].color||"#fff"}))}}q(o,{fill:"url(#"+k+")",opacity:1,"fill-opacity":1}),r.fill=l,r.opacity=1,r.fillOpacity=1;return 1},s=function(a){var b=a.getBBox(1);q(a.pattern,{patternTransform:a.matrix.invert()+" translate("+b.x+","+b.y+")"})},t=function(d,e,f){if(d.type=="path"){var g=c(e).toLowerCase().split("-"),h=d.paper,i=f?"end":"start",j=d.node,k=d.attrs,m=k["stroke-width"],n=g.length,r="classic",s,t,u,v,w,x=3,y=3,z=5;while(n--)switch(g[n]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":r=g[n];break;case"wide":y=5;break;case"narrow":y=2;break;case"long":x=5;break;case"short":x=2}r=="open"?(x+=2,y+=2,z+=2,u=1,v=f?4:1,w={fill:"none",stroke:k.stroke}):(v=u=x/2,w={fill:k.stroke,stroke:"none"}),d._.arrows?f?(d._.arrows.endPath&&p[d._.arrows.endPath]--,d._.arrows.endMarker&&p[d._.arrows.endMarker]--):(d._.arrows.startPath&&p[d._.arrows.startPath]--,d._.arrows.startMarker&&p[d._.arrows.startMarker]--):d._.arrows={};if(r!="none"){var A="raphael-marker-"+r,B="raphael-marker-"+i+r+x+y;a._g.doc.getElementById(A)?p[A]++:(h.defs.appendChild(q(q("path"),{"stroke-linecap":"round",d:o[r],id:A})),p[A]=1);var C=a._g.doc.getElementById(B),D;C?(p[B]++,D=C.getElementsByTagName("use")[0]):(C=q(q("marker"),{id:B,markerHeight:y,markerWidth:x,orient:"auto",refX:v,refY:y/2}),D=q(q("use"),{"xlink:href":"#"+A,transform:(f?"rotate(180 "+x/2+" "+y/2+") ":l)+"scale("+x/z+","+y/z+")","stroke-width":(1/((x/z+y/z)/2)).toFixed(4)}),C.appendChild(D),h.defs.appendChild(C),p[B]=1),q(D,w);var F=u*(r!="diamond"&&r!="oval");f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-F*m):(s=F*m,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),w={},w["marker-"+i]="url(#"+B+")";if(t||s)w.d=Raphael.getSubpath(k.path,s,t);q(j,w),d._.arrows[i+"Path"]=A,d._.arrows[i+"Marker"]=B,d._.arrows[i+"dx"]=F,d._.arrows[i+"Type"]=r,d._.arrows[i+"String"]=e}else f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-s):(s=0,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),d._.arrows[i+"Path"]&&q(j,{d:Raphael.getSubpath(k.path,s,t)}),delete d._.arrows[i+"Path"],delete d._.arrows[i+"Marker"],delete d._.arrows[i+"dx"],delete d._.arrows[i+"Type"],delete d._.arrows[i+"String"];for(w in p)if(p[b](w)&&!p[w]){var 
G=a._g.doc.getElementById(w);G&&G.parentNode.removeChild(G)}}},u={"":[0],none:[0],"-":[3,1],".":[1,1],"-.":[3,1,1,1],"-..":[3,1,1,1,1,1],". ":[1,3],"- ":[4,3],"--":[8,3],"- .":[4,3,1,3],"--.":[8,3,1,3],"--..":[8,3,1,3,1,3]},v=function(a,b,d){b=u[c(b).toLowerCase()];if(b){var e=a.attrs["stroke-width"]||"1",f={round:e,square:e,butt:0}[a.attrs["stroke-linecap"]||d["stroke-linecap"]]||0,g=[],h=b.length;while(h--)g[h]=b[h]*e+(h%2?1:-1)*f;q(a.node,{"stroke-dasharray":g.join(",")})}},w=function(d,f){var i=d.node,k=d.attrs,m=i.style.visibility;i.style.visibility="hidden";for(var o in f)if(f[b](o)){if(!a._availableAttrs[b](o))continue;var p=f[o];k[o]=p;switch(o){case"blur":d.blur(p);break;case"href":case"title":case"target":var u=i.parentNode;if(u.tagName.toLowerCase()!="a"){var w=q("a");u.insertBefore(w,i),w.appendChild(i),u=w}o=="target"?u.setAttributeNS(n,"show",p=="blank"?"new":p):u.setAttributeNS(n,o,p);break;case"cursor":i.style.cursor=p;break;case"transform":d.transform(p);break;case"arrow-start":t(d,p);break;case"arrow-end":t(d,p,1);break;case"clip-rect":var x=c(p).split(j);if(x.length==4){d.clip&&d.clip.parentNode.parentNode.removeChild(d.clip.parentNode);var z=q("clipPath"),A=q("rect");z.id=a.createUUID(),q(A,{x:x[0],y:x[1],width:x[2],height:x[3]}),z.appendChild(A),d.paper.defs.appendChild(z),q(i,{"clip-path":"url(#"+z.id+")"}),d.clip=A}if(!p){var B=i.getAttribute("clip-path");if(B){var C=a._g.doc.getElementById(B.replace(/(^url\(#|\)$)/g,l));C&&C.parentNode.removeChild(C),q(i,{"clip-path":l}),delete d.clip}}break;case"path":d.type=="path"&&(q(i,{d:p?k.path=a._pathToAbsolute(p):"M0,0"}),d._.dirty=1,d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1)));break;case"width":i.setAttribute(o,p),d._.dirty=1;if(k.fx)o="x",p=k.x;else break;case"x":k.fx&&(p=-k.x-(k.width||0));case"rx":if(o=="rx"&&d.type=="rect")break;case"cx":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"height":i.setAttribute(o,p),d._.dirty=1;if(k.fy)o="y",p=k.y;else break;case"y":k.fy&&(p=-k.y-(k.height||0));case"ry":if(o=="ry"&&d.type=="rect")break;case"cy":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"r":d.type=="rect"?q(i,{rx:p,ry:p}):i.setAttribute(o,p),d._.dirty=1;break;case"src":d.type=="image"&&i.setAttributeNS(n,"href",p);break;case"stroke-width":if(d._.sx!=1||d._.sy!=1)p/=g(h(d._.sx),h(d._.sy))||1;d.paper._vbSize&&(p*=d.paper._vbSize),i.setAttribute(o,p),k["stroke-dasharray"]&&v(d,k["stroke-dasharray"],f),d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"stroke-dasharray":v(d,p,f);break;case"fill":var D=c(p).match(a._ISURL);if(D){z=q("pattern");var F=q("image");z.id=a.createUUID(),q(z,{x:0,y:0,patternUnits:"userSpaceOnUse",height:1,width:1}),q(F,{x:0,y:0,"xlink:href":D[1]}),z.appendChild(F),function(b){a._preload(D[1],function(){var a=this.offsetWidth,c=this.offsetHeight;q(b,{width:a,height:c}),q(F,{width:a,height:c}),d.paper.safari()})}(z),d.paper.defs.appendChild(z),q(i,{fill:"url(#"+z.id+")"}),d.pattern=z,d.pattern&&s(d);break}var G=a.getRGB(p);if(!G.error)delete f.gradient,delete k.gradient,!a.is(k.opacity,"undefined")&&a.is(f.opacity,"undefined")&&q(i,{opacity:k.opacity}),!a.is(k["fill-opacity"],"undefined")&&a.is(f["fill-opacity"],"undefined")&&q(i,{"fill-opacity":k["fill-opacity"]});else if((d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p)){if("opacity"in k||"fill-opacity"in k){var 
H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l));if(H){var I=H.getElementsByTagName("stop");q(I[I.length-1],{"stop-opacity":("opacity"in k?k.opacity:1)*("fill-opacity"in k?k["fill-opacity"]:1)})}}k.gradient=p,k.fill="none";break}G[b]("opacity")&&q(i,{"fill-opacity":G.opacity>1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n<o;n++)m=q("tspan"),n&&q(m,{dy:i*x,x:g.x}),m.appendChild(a._g.doc.createTextNode(j[n])),h.appendChild(m),k[n]=m}else{k=h.getElementsByTagName("tspan");for(n=0,o=k.length;n<o;n++)n?q(k[n],{dy:i*x,x:g.x}):q(k[0],{dy:0})}q(h,{x:g.x,y:g.y}),d._.dirty=1;var p=d._getBBox(),r=g.y-(p.y+p.height/2);r&&a.is(r,"finite")&&q(k[0],{dy:r})}},z=function(b,c){var d=0,e=0;this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.matrix=a.matrix(),this.realPath=null,this.paper=c,this.attrs=this.attrs||{},this._={transform:[],sx:1,sy:1,deg:0,dx:0,dy:0,dirty:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},A=a.el;z.prototype=A,A.constructor=z,a._engine.path=function(a,b){var c=q("path");b.canvas&&b.canvas.appendChild(c);var d=new z(c,b);d.type="path",w(d,{fill:"none",stroke:"#000",path:a});return d},A.rotate=function(a,b,e){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this.transform(this._.transform.concat([["r",a,b,e]]));return this},A.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3])),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]]));return this},A.translate=function(a,b){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this.transform(this._.transform.concat([["t",a,b]]));return this},A.transform=function(c){var d=this._;if(c==null)return d.transform;a._extractTransform(this,c),this.clip&&q(this.clip,{transform:this.matrix.invert()}),this.pattern&&s(this),this.node&&q(this.node,{transform:this.matrix});if(d.sx!=1||d.sy!=1){var e=this.attrs[b]("stroke-width")?this.attrs["stroke-width"]:1;this.attr({"stroke-width":e})}return 
this},A.hide=function(){!this.removed&&this.paper.safari(this.node.style.display="none");return this},A.show=function(){!this.removed&&this.paper.safari(this.node.style.display="");return this},A.remove=function(){if(!this.removed&&!!this.node.parentNode){var b=this.paper;b.__set__&&b.__set__.exclude(this),k.unbind("raphael.*.*."+this.id),this.gradient&&b.defs.removeChild(this.gradient),a._tear(this,b),this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.removeChild(this.node.parentNode):this.node.parentNode.removeChild(this.node);for(var c in this)this[c]=typeof this[c]=="function"?a._removedFactory(c):null;this.removed=!0}},A._getBBox=function(){if(this.node.style.display=="none"){this.show();var a=!0}var b={};try{b=this.node.getBBox()}catch(c){}finally{b=b||{}}a&&this.hide();return b},A.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c=="fill"&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;if(c=="transform")return this._.transform;var g=c.split(j),h={};for(var i=0,l=g.length;i<l;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return l-1?h:h[g[0]]}if(d==null&&a.is(c,"array")){h={};for(i=0,l=c.length;i<l;i++)h[c[i]]=this.attr(c[i]);return h}if(d!=null){var m={};m[c]=d}else c!=null&&a.is(c,"object")&&(m=c);for(var n in m)k("raphael.attr."+n+"."+this.id,this,m[n]);for(n in this.paper.customAttributes)if(this.paper.customAttributes[b](n)&&m[b](n)&&a.is(this.paper.customAttributes[n],"function")){var o=this.paper.customAttributes[n].apply(this,[].concat(m[n]));this.attrs[n]=m[n];for(var p in o)o[b](p)&&(m[p]=o[p])}w(this,m);return this},A.toFront=function(){if(this.removed)return this;this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.appendChild(this.node.parentNode):this.node.parentNode.appendChild(this.node);var b=this.paper;b.top!=this&&a._tofront(this,b);return this},A.toBack=function(){if(this.removed)return this;var b=this.node.parentNode;b.tagName.toLowerCase()=="a"?b.parentNode.insertBefore(this.node.parentNode,this.node.parentNode.parentNode.firstChild):b.firstChild!=this.node&&b.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper);var c=this.paper;return this},A.insertAfter=function(b){if(this.removed)return this;var c=b.node||b[b.length-1].node;c.nextSibling?c.parentNode.insertBefore(this.node,c.nextSibling):c.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},A.insertBefore=function(b){if(this.removed)return this;var c=b.node||b[0].node;c.parentNode.insertBefore(this.node,c),a._insertbefore(this,b,this.paper);return this},A.blur=function(b){var c=this;if(+b!==0){var d=q("filter"),e=q("feGaussianBlur");c.attrs.blur=b,d.id=a.createUUID(),q(e,{stdDeviation:+b||1.5}),d.appendChild(e),c.paper.defs.appendChild(d),c._blur=d,q(c.node,{filter:"url(#"+d.id+")"})}else c._blur&&(c._blur.parentNode.removeChild(c._blur),delete c._blur,delete c.attrs.blur),c.node.removeAttribute("filter")},a._engine.circle=function(a,b,c,d){var e=q("circle");a.canvas&&a.canvas.appendChild(e);var f=new z(e,a);f.attrs={cx:b,cy:c,r:d,fill:"none",stroke:"#000"},f.type="circle",q(e,f.attrs);return f},a._engine.rect=function(a,b,c,d,e,f){var 
g=q("rect");a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:b,y:c,width:d,height:e,r:f||0,rx:f||0,ry:f||0,fill:"none",stroke:"#000"},h.type="rect",q(g,h.attrs);return h},a._engine.ellipse=function(a,b,c,d,e){var f=q("ellipse");a.canvas&&a.canvas.appendChild(f);var g=new z(f,a);g.attrs={cx:b,cy:c,rx:d,ry:e,fill:"none",stroke:"#000"},g.type="ellipse",q(f,g.attrs);return g},a._engine.image=function(a,b,c,d,e,f){var g=q("image");q(g,{x:c,y:d,width:e,height:f,preserveAspectRatio:"none"}),g.setAttributeNS(n,"href",b),a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:c,y:d,width:e,height:f,src:b},h.type="image";return h},a._engine.text=function(b,c,d,e){var f=q("text");b.canvas&&b.canvas.appendChild(f);var g=new z(f,b);g.attrs={x:c,y:d,"text-anchor":"middle",text:e,font:a._availableAttrs.font,stroke:"none",fill:"#000"},g.type="text",w(g,g.attrs);return g},a._engine.setSize=function(a,b){this.width=a||this.width,this.height=b||this.height,this.canvas.setAttribute("width",this.width),this.canvas.setAttribute("height",this.height),this._viewBox&&this.setViewBox.apply(this,this._viewBox);return this},a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b&&b.container,d=b.x,e=b.y,f=b.width,g=b.height;if(!c)throw new Error("SVG container not found.");var h=q("svg"),i="overflow:hidden;",j;d=d||0,e=e||0,f=f||512,g=g||342,q(h,{height:g,version:1.1,width:f,xmlns:"http://www.w3.org/2000/svg"}),c==1?(h.style.cssText=i+"position:absolute;left:"+d+"px;top:"+e+"px",a._g.doc.body.appendChild(h),j=1):(h.style.cssText=i+"position:relative",c.firstChild?c.insertBefore(h,c.firstChild):c.appendChild(h)),c=new a._Paper,c.width=f,c.height=g,c.canvas=h,c.clear(),c._left=c._top=0,j&&(c.renderfix=function(){}),c.renderfix();return c},a._engine.setViewBox=function(a,b,c,d,e){k("raphael.setViewBox",this,this._viewBox,[a,b,c,d,e]);var f=g(c/this.width,d/this.height),h=this.top,i=e?"meet":"xMinYMin",j,l;a==null?(this._vbSize&&(f=1),delete this._vbSize,j="0 0 "+this.width+m+this.height):(this._vbSize=f,j=a+m+b+m+c+m+d),q(this.canvas,{viewBox:j,preserveAspectRatio:i});while(f&&h)l="stroke-width"in h.attrs?h.attrs["stroke-width"]:1,h.attr({"stroke-width":l}),h._.dirty=1,h._.dirtyT=1,h=h.prev;this._viewBox=[a,b,c,d,!!e];return this},a.prototype.renderfix=function(){var a=this.canvas,b=a.style,c;try{c=a.getScreenCTM()||a.createSVGMatrix()}catch(d){c=a.createSVGMatrix()}var e=-c.e%1,f=-c.f%1;if(e||f)e&&(this._left=(this._left+e)%1,b.left=this._left+"px"),f&&(this._top=(this._top+f)%1,b.top=this._top+"px")},a.prototype.clear=function(){a.eve("raphael.clear",this);var b=this.canvas;while(b.firstChild)b.removeChild(b.firstChild);this.bottom=this.top=null,(this.desc=q("desc")).appendChild(a._g.doc.createTextNode("Created with Raphaël "+a.version)),b.appendChild(this.desc),b.appendChild(this.defs=q("defs"))},a.prototype.remove=function(){k("raphael.remove",this),this.canvas.parentNode&&this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null};var B=a.st;for(var C in A)A[b](C)&&!B[b](C)&&(B[C]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(C))}(window.Raphael),window.Raphael.vml&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=Math,f=e.round,g=e.max,h=e.min,i=e.abs,j="fill",k=/[, ]+/,l=a.eve,m=" progid:DXImageTransform.Microsoft",n=" ",o="",p={M:"m",L:"l",C:"c",Z:"x",m:"t",l:"r",c:"v",z:"x"},q=/([clmz]),?([^clmz]*)/gi,r=/ 
progid:\S+Blur\([^\)]+\)/g,s=/-?[^,\s-]+/g,t="position:absolute;left:0;top:0;width:1px;height:1px",u=21600,v={path:1,rect:1,image:1},w={circle:1,ellipse:1},x=function(b){var d=/[ahqstv]/ig,e=a._pathToAbsolute;c(b).match(d)&&(e=a._path2curve),d=/[clmz]/g;if(e==a._pathToAbsolute&&!c(b).match(d)){var g=c(b).replace(q,function(a,b,c){var d=[],e=b.toLowerCase()=="m",g=p[b];c.replace(s,function(a){e&&d.length==2&&(g+=d+p[b=="m"?"l":"L"],d=[]),d.push(f(a*u))});return g+d});return g}var h=e(b),i,j;g=[];for(var k=0,l=h.length;k<l;k++){i=h[k],j=h[k][0].toLowerCase(),j=="z"&&(j="x");for(var m=1,r=i.length;m<r;m++)j+=f(i[m]*u)+(m!=r-1?",":o);g.push(j)}return g.join(n)},y=function(b,c,d){var e=a.matrix();e.rotate(-b,.5,.5);return{dx:e.x(c,d),dy:e.y(c,d)}},z=function(a,b,c,d,e,f){var g=a._,h=a.matrix,k=g.fillpos,l=a.node,m=l.style,o=1,p="",q,r=u/b,s=u/c;m.visibility="hidden";if(!!b&&!!c){l.coordsize=i(r)+n+i(s),m.rotation=f*(b*c<0?-1:1);if(f){var t=y(f,d,e);d=t.dx,e=t.dy}b<0&&(p+="x"),c<0&&(p+=" y")&&(o=-1),m.flip=p,l.coordorigin=d*-r+n+e*-s;if(k||g.fillsize){var v=l.getElementsByTagName(j);v=v&&v[0],l.removeChild(v),k&&(t=y(f,h.x(k[0],k[1]),h.y(k[0],k[1])),v.position=t.dx*o+n+t.dy*o),g.fillsize&&(v.size=g.fillsize[0]*i(b)+n+g.fillsize[1]*i(c)),l.appendChild(v)}m.visibility="visible"}};a.toString=function(){return"Your browser doesn’t support SVG. Falling down to VML.\nYou are running Raphaël "+this.version};var A=function(a,b,d){var e=c(b).toLowerCase().split("-"),f=d?"end":"start",g=e.length,h="classic",i="medium",j="medium";while(g--)switch(e[g]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":h=e[g];break;case"wide":case"narrow":j=e[g];break;case"long":case"short":i=e[g]}var k=a.node.getElementsByTagName("stroke")[0];k[f+"arrow"]=h,k[f+"arrowlength"]=i,k[f+"arrowwidth"]=j},B=function(e,i){e.attrs=e.attrs||{};var l=e.node,m=e.attrs,p=l.style,q,r=v[e.type]&&(i.x!=m.x||i.y!=m.y||i.width!=m.width||i.height!=m.height||i.cx!=m.cx||i.cy!=m.cy||i.rx!=m.rx||i.ry!=m.ry||i.r!=m.r),s=w[e.type]&&(m.cx!=i.cx||m.cy!=i.cy||m.r!=i.r||m.rx!=i.rx||m.ry!=i.ry),t=e;for(var y in i)i[b](y)&&(m[y]=i[y]);r&&(m.path=a._getPath[e.type](e),e._.dirty=1),i.href&&(l.href=i.href),i.title&&(l.title=i.title),i.target&&(l.target=i.target),i.cursor&&(p.cursor=i.cursor),"blur"in i&&e.blur(i.blur);if(i.path&&e.type=="path"||r)l.path=x(~c(m.path).toLowerCase().indexOf("r")?a._pathToAbsolute(m.path):m.path),e.type=="image"&&(e._.fillpos=[m.x,m.y],e._.fillsize=[m.width,m.height],z(e,1,1,0,0,0));"transform"in i&&e.transform(i.transform);if(s){var B=+m.cx,D=+m.cy,E=+m.rx||+m.r||0,G=+m.ry||+m.r||0;l.path=a.format("ar{0},{1},{2},{3},{4},{1},{4},{1}x",f((B-E)*u),f((D-G)*u),f((B+E)*u),f((D+G)*u),f(B*u))}if("clip-rect"in i){var H=c(i["clip-rect"]).split(k);if(H.length==4){H[2]=+H[2]+ +H[0],H[3]=+H[3]+ +H[1];var I=l.clipRect||a._g.doc.createElement("div"),J=I.style;J.clip=a.format("rect({1}px {2}px {3}px {0}px)",H),l.clipRect||(J.position="absolute",J.top=0,J.left=0,J.width=e.paper.width+"px",J.height=e.paper.height+"px",l.parentNode.insertBefore(I,l),I.appendChild(l),l.clipRect=I)}i["clip-rect"]||l.clipRect&&(l.clipRect.style.clip="auto")}if(e.textpath){var K=e.textpath.style;i.font&&(K.font=i.font),i["font-family"]&&(K.fontFamily='"'+i["font-family"].split(",")[0].replace(/^['"]+|['"]+$/g,o)+'"'),i["font-size"]&&(K.fontSize=i["font-size"]),i["font-weight"]&&(K.fontWeight=i["font-weight"]),i["font-style"]&&(K.fontStyle=i["font-style"])}"arrow-start"in i&&A(t,i["arrow-start"]),"arrow-end"in 
i&&A(t,i["arrow-end"],1);if(i.opacity!=null||i["stroke-width"]!=null||i.fill!=null||i.src!=null||i.stroke!=null||i["stroke-width"]!=null||i["stroke-opacity"]!=null||i["fill-opacity"]!=null||i["stroke-dasharray"]!=null||i["stroke-miterlimit"]!=null||i["stroke-linejoin"]!=null||i["stroke-linecap"]!=null){var L=l.getElementsByTagName(j),M=!1;L=L&&L[0],!L&&(M=L=F(j)),e.type=="image"&&i.src&&(L.src=i.src),i.fill&&(L.on=!0);if(L.on==null||i.fill=="none"||i.fill===null)L.on=!1;if(L.on&&i.fill){var N=c(i.fill).match(a._ISURL);if(N){L.parentNode==l&&l.removeChild(L),L.rotate=!0,L.src=N[1],L.type="tile";var O=e.getBBox(1);L.position=O.x+n+O.y,e._.fillpos=[O.x,O.y],a._preload(N[1],function(){e._.fillsize=[this.offsetWidth,this.offsetHeight]})}else L.color=a.getRGB(i.fill).hex,L.src=o,L.type="solid",a.getRGB(i.fill).error&&(t.type in{circle:1,ellipse:1}||c(i.fill).charAt()!="r")&&C(t,i.fill,L)&&(m.fill="none",m.gradient=i.fill,L.rotate=!1)}if("fill-opacity"in i||"opacity"in i){var P=((+m["fill-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+a.getRGB(i.fill).o+1||2)-1);P=h(g(P,0),1),L.opacity=P,L.src&&(L.color="none")}l.appendChild(L);var Q=l.getElementsByTagName("stroke")&&l.getElementsByTagName("stroke")[0],T=!1;!Q&&(T=Q=F("stroke"));if(i.stroke&&i.stroke!="none"||i["stroke-width"]||i["stroke-opacity"]!=null||i["stroke-dasharray"]||i["stroke-miterlimit"]||i["stroke-linejoin"]||i["stroke-linecap"])Q.on=!0;(i.stroke=="none"||i.stroke===null||Q.on==null||i.stroke==0||i["stroke-width"]==0)&&(Q.on=!1);var U=a.getRGB(i.stroke);Q.on&&i.stroke&&(Q.color=U.hex),P=((+m["stroke-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+U.o+1||2)-1);var V=(d(i["stroke-width"])||1)*.75;P=h(g(P,0),1),i["stroke-width"]==null&&(V=m["stroke-width"]),i["stroke-width"]&&(Q.weight=V),V&&V<1&&(P*=V)&&(Q.weight=1),Q.opacity=P,i["stroke-linejoin"]&&(Q.joinstyle=i["stroke-linejoin"]||"miter"),Q.miterlimit=i["stroke-miterlimit"]||8,i["stroke-linecap"]&&(Q.endcap=i["stroke-linecap"]=="butt"?"flat":i["stroke-linecap"]=="square"?"square":"round");if(i["stroke-dasharray"]){var W={"-":"shortdash",".":"shortdot","-.":"shortdashdot","-..":"shortdashdotdot",". 
":"dot","- ":"dash","--":"longdash","- .":"dashdot","--.":"longdashdot","--..":"longdashdotdot"};Q.dashstyle=W[b](i["stroke-dasharray"])?W[i["stroke-dasharray"]]:o}T&&l.appendChild(Q)}if(t.type=="text"){t.paper.canvas.style.display=o;var X=t.paper.span,Y=100,Z=m.font&&m.font.match(/\d+(?:\.\d*)?(?=px)/);p=X.style,m.font&&(p.font=m.font),m["font-family"]&&(p.fontFamily=m["font-family"]),m["font-weight"]&&(p.fontWeight=m["font-weight"]),m["font-style"]&&(p.fontStyle=m["font-style"]),Z=d(m["font-size"]||Z&&Z[0])||10,p.fontSize=Z*Y+"px",t.textpath.string&&(X.innerHTML=c(t.textpath.string).replace(/</g,"&#60;").replace(/&/g,"&#38;").replace(/\n/g,"<br>"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba<bb;ba++)if(_[ba]in i){t._.dirty=1;break}switch(m["text-anchor"]){case"start":t.textpath.style["v-text-align"]="left",t.bbx=t.W/2;break;case"end":t.textpath.style["v-text-align"]="right",t.bbx=-t.W/2;break;default:t.textpath.style["v-text-align"]="center",t.bbx=0}t.textpath.style["v-text-kern"]=!0}},C=function(b,f,g){b.attrs=b.attrs||{};var h=b.attrs,i=Math.pow,j,k,l="linear",m=".5 .5";b.attrs.gradient=f,f=c(f).replace(a._radial_gradient,function(a,b,c){l="radial",b&&c&&(b=d(b),c=d(c),i(b-.5,2)+i(c-.5,2)>.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s<t;s++)q[s].offset&&r.push(q[s].offset+n+q[s].color);g.colors=r.length?r.join():"0% "+g.color,l=="radial"?(g.type="gradientTitle",g.focus="100%",g.focussize="0 0",g.focusposition=m,g.angle=0):(g.type="gradient",g.angle=(270-p)%360),b.appendChild(g)}return 1},D=function(b,c){this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.X=0,this.Y=0,this.attrs={},this.paper=c,this.matrix=a.matrix(),this._={transform:[],sx:1,sy:1,dx:0,dy:0,deg:0,dirty:1,dirtyT:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},E=a.el;D.prototype=E,E.constructor=D,E.transform=function(b){if(b==null)return this._.transform;var d=this.paper._viewBoxShift,e=d?"s"+[d.scale,d.scale]+"-1-1t"+[d.dx,d.dy]:o,f;d&&(f=b=c(b).replace(/\.{3}|\u2026/g,this._.transform||o)),a._extractTransform(this,e+b);var g=this.matrix.clone(),h=this.skew,i=this.node,j,k=~c(this.attrs.fill).indexOf("-"),l=!c(this.attrs.fill).indexOf("url(");g.translate(-0.5,-0.5);if(l||k||this.type=="image"){h.matrix="1 0 0 1",h.offset="0 0",j=g.split();if(k&&j.noRotation||!j.isSimple){i.style.filter=g.toFilter();var m=this.getBBox(),p=this.getBBox(1),q=m.x-p.x,r=m.y-p.y;i.coordorigin=q*-u+n+r*-u,z(this,1,1,q,r,0)}else i.style.filter=o,z(this,j.scalex,j.scaley,j.dx,j.dy,j.rotate)}else i.style.filter=o,h.matrix=c(g),h.offset=g.offset();f&&(this._.transform=f);return this},E.rotate=function(a,b,e){if(this.removed)return this;if(a!=null){a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this._.dirtyT=1,this.transform(this._.transform.concat([["r",a,b,e]]));return 
this}},E.translate=function(a,b){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this._.bbox&&(this._.bbox.x+=a,this._.bbox.y+=b),this.transform(this._.transform.concat([["t",a,b]]));return this},E.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3]),isNaN(e)&&(e=null),isNaN(f)&&(f=null)),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]])),this._.dirtyT=1;return this},E.hide=function(){!this.removed&&(this.node.style.display="none");return this},E.show=function(){!this.removed&&(this.node.style.display=o);return this},E._getBBox=function(){if(this.removed)return{};return{x:this.X+(this.bbx||0)-this.W/2,y:this.Y-this.H,width:this.W,height:this.H}},E.remove=function(){if(!this.removed&&!!this.node.parentNode){this.paper.__set__&&this.paper.__set__.exclude(this),a.eve.unbind("raphael.*.*."+this.id),a._tear(this,this.paper),this.node.parentNode.removeChild(this.node),this.shape&&this.shape.parentNode.removeChild(this.shape);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;this.removed=!0}},E.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c==j&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;var g=c.split(k),h={};for(var i=0,m=g.length;i<m;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return m-1?h:h[g[0]]}if(this.attrs&&d==null&&a.is(c,"array")){h={};for(i=0,m=c.length;i<m;i++)h[c[i]]=this.attr(c[i]);return h}var n;d!=null&&(n={},n[c]=d),d==null&&a.is(c,"object")&&(n=c);for(var o in n)l("raphael.attr."+o+"."+this.id,this,n[o]);if(n){for(o in this.paper.customAttributes)if(this.paper.customAttributes[b](o)&&n[b](o)&&a.is(this.paper.customAttributes[o],"function")){var p=this.paper.customAttributes[o].apply(this,[].concat(n[o]));this.attrs[o]=n[o];for(var q in p)p[b](q)&&(n[q]=p[q])}n.text&&this.type=="text"&&(this.textpath.string=n.text),B(this,n)}return this},E.toFront=function(){!this.removed&&this.node.parentNode.appendChild(this.node),this.paper&&this.paper.top!=this&&a._tofront(this,this.paper);return this},E.toBack=function(){if(this.removed)return this;this.node.parentNode.firstChild!=this.node&&(this.node.parentNode.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper));return this},E.insertAfter=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[b.length-1]),b.node.nextSibling?b.node.parentNode.insertBefore(this.node,b.node.nextSibling):b.node.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},E.insertBefore=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[0]),b.node.parentNode.insertBefore(this.node,b.node),a._insertbefore(this,b,this.paper);return this},E.blur=function(b){var c=this.node.runtimeStyle,d=c.filter;d=d.replace(r,o),+b!==0?(this.attrs.blur=b,c.filter=d+n+m+".Blur(pixelradius="+(+b||1.5)+")",c.margin=a.format("-{0}px 0 0 -{0}px",f(+b||1.5))):(c.filter=d,c.margin=0,delete this.attrs.blur)},a._engine.path=function(a,b){var 
c=F("shape");c.style.cssText=t,c.coordsize=u+n+u,c.coordorigin=b.coordorigin;var d=new D(c,b),e={fill:"none",stroke:"#000"};a&&(e.path=a),d.type="path",d.path=[],d.Path=o,B(d,e),b.canvas.appendChild(c);var f=F("skew");f.on=!0,c.appendChild(f),d.skew=f,d.transform(o);return d},a._engine.rect=function(b,c,d,e,f,g){var h=a._rectPath(c,d,e,f,g),i=b.path(h),j=i.attrs;i.X=j.x=c,i.Y=j.y=d,i.W=j.width=e,i.H=j.height=f,j.r=g,j.path=h,i.type="rect";return i},a._engine.ellipse=function(a,b,c,d,e){var f=a.path(),g=f.attrs;f.X=b-d,f.Y=c-e,f.W=d*2,f.H=e*2,f.type="ellipse",B(f,{cx:b,cy:c,rx:d,ry:e});return f},a._engine.circle=function(a,b,c,d){var e=a.path(),f=e.attrs;e.X=b-d,e.Y=c-d,e.W=e.H=d*2,e.type="circle",B(e,{cx:b,cy:c,r:d});return e},a._engine.image=function(b,c,d,e,f,g){var h=a._rectPath(d,e,f,g),i=b.path(h).attr({stroke:"none"}),k=i.attrs,l=i.node,m=l.getElementsByTagName(j)[0];k.src=c,i.X=k.x=d,i.Y=k.y=e,i.W=k.width=f,i.H=k.height=g,k.path=h,i.type="image",m.parentNode==l&&l.removeChild(m),m.rotate=!0,m.src=c,m.type="tile",i._.fillpos=[d,e],i._.fillsize=[f,g],l.appendChild(m),z(i,1,1,0,0,0);return i},a._engine.text=function(b,d,e,g){var h=F("shape"),i=F("path"),j=F("textpath");d=d||0,e=e||0,g=g||"",i.v=a.format("m{0},{1}l{2},{1}",f(d*u),f(e*u),f(d*u)+1),i.textpathok=!0,j.string=c(g),j.on=!0,h.style.cssText=t,h.coordsize=u+n+u,h.coordorigin="0 0";var k=new D(h,b),l={fill:"#000",stroke:"none",font:a._availableAttrs.font,text:g};k.shape=h,k.path=i,k.textpath=j,k.type="text",k.attrs.text=c(g),k.attrs.x=d,k.attrs.y=e,k.attrs.w=1,k.attrs.h=1,B(k,l),h.appendChild(j),h.appendChild(i),b.canvas.appendChild(h);var m=F("skew");m.on=!0,h.appendChild(m),k.skew=m,k.transform(o);return k},a._engine.setSize=function(b,c){var d=this.canvas.style;this.width=b,this.height=c,b==+b&&(b+="px"),c==+c&&(c+="px"),d.width=b,d.height=c,d.clip="rect(0 "+b+" "+c+" 0)",this._viewBox&&a._engine.setViewBox.apply(this,this._viewBox);return this},a._engine.setViewBox=function(b,c,d,e,f){a.eve("raphael.setViewBox",this,this._viewBox,[b,c,d,e,f]);var h=this.width,i=this.height,j=1/g(d/h,e/i),k,l;f&&(k=i/e,l=h/d,d*k<h&&(b-=(h-d*k)/2/k),e*l<i&&(c-=(i-e*l)/2/l)),this._viewBox=[b,c,d,e,!!f],this._viewBoxShift={dx:-b,dy:-c,scale:j},this.forEach(function(a){a.transform("...")});return this};var F;a._engine.initWin=function(a){var b=a.document;b.createStyleSheet().addRule(".rvml","behavior:url(#default#VML)");try{!b.namespaces.rvml&&b.namespaces.add("rvml","urn:schemas-microsoft-com:vml"),F=function(a){return b.createElement("<rvml:"+a+' class="rvml">')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return 
i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 5a1779bba5..1bee55313b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -163,7 +163,7 @@ text-decoration: none;
background: url("arrow-right.png") no-repeat 0 3px transparent;
}
-.toggleContainer.open .toggle {
+.toggleContainer .toggle.open {
background: url("arrow-down.png") no-repeat 0 3px transparent;
}
@@ -171,10 +171,6 @@ text-decoration: none;
margin-top: 5px;
}
-.toggleContainer .showElement {
- padding-left: 15px;
-}
-
.value #definition {
background-color: #2C475C; /* blue */
background-image:url('defbg-blue.gif');
@@ -333,15 +329,15 @@ div.members > ol > li:last-child {
color: darkgreen;
}
-.signature .symbol .params > .implicit {
- font-style: italic;
+.signature .symbol .shadowed {
+ color: darkseagreen;
}
-.signature .symbol .implicit.deprecated {
- text-decoration: line-through;
+.signature .symbol .params > .implicit {
+ font-style: italic;
}
-.signature .symbol .name.deprecated {
+.signature .symbol .deprecated {
text-decoration: line-through;
}
@@ -802,4 +798,4 @@ div.fullcomment dl.paramcmts > dd {
#mbrsel .showall span {
color: #4C4C4C;
font-weight: bold;
-}*/ \ No newline at end of file
+}*/
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 33fbd83bee..c418c3280b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -8,7 +8,8 @@ $(document).ready(function(){
name == 'scala.Predef.any2stringfmt' ||
name == 'scala.Predef.any2stringadd' ||
name == 'scala.Predef.any2ArrowAssoc' ||
- name == 'scala.Predef.any2Ensuring'
+ name == 'scala.Predef.any2Ensuring' ||
+ name == 'scala.collection.TraversableOnce.alternateImplicit'
};
$("#linearization li:gt(0)").filter(function(){
@@ -184,21 +185,18 @@ $(document).ready(function(){
});
/* Linear super types and known subclasses */
- function toggleShowContentFct(outerElement){
- var content = $(".hiddenContent", outerElement);
- var vis = $(":visible", content);
- if (vis.length > 0) {
+ function toggleShowContentFct(e){
+ e.toggleClass("open");
+ var content = $(".hiddenContent", e.parent().get(0));
+ if (content.is(':visible')) {
content.slideUp(100);
- $(".showElement", outerElement).show();
- $(".hideElement", outerElement).hide();
}
else {
content.slideDown(100);
- $(".showElement", outerElement).hide();
- $(".hideElement", outerElement).show();
}
};
- $(".toggleContainer").click(function() {
+
+ $(".toggle:not(.diagram-link)").click(function() {
toggleShowContentFct($(this));
});
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
new file mode 100644
index 0000000000..88983254ce
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 6488847049..2901daafd6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -10,7 +10,7 @@ package model
import scala.collection._
import comment._
-
+import diagram._
/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
* compiler. Entities model the following Scala concepts:
@@ -24,6 +24,9 @@ import comment._
* - annotations. */
trait Entity {
+ /** A unique identifier for this entity (similar to symbol ids), so we can track entities */
+ def id: Int
+
/** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
* instead. */
def name : String
@@ -48,6 +51,8 @@ trait Entity {
/** The annotations attached to this entity, if any. */
def annotations: List[Annotation]
+ /** The kind of the entity */
+ def kind: String
}
object Entity {
@@ -86,9 +91,14 @@ trait TemplateEntity extends Entity {
/** Whether this template is a case class. */
def isCaseClass: Boolean
+ /** Whether or not the template was defined in a package object */
+ def inPackageObject: Boolean
+
/** The self-type of this template, if it differs from the template type. */
def selfType : Option[TypeEntity]
+ /** The type of this entity, with type members */
+ def ownType: TypeEntity
}
@@ -167,6 +177,10 @@ trait MemberEntity extends Entity {
/** Whether this member is abstract. */
def isAbstract: Boolean
+ /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
+ * signature and the complete parameter descriptions. */
+ def useCaseOf: Option[MemberEntity] = None
+
/** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
def byConversion: Option[ImplicitConversion]
}
@@ -177,7 +191,7 @@ object MemberEntity {
}
/** An entity that is parameterized by types */
-trait HigherKinded extends Entity {
+trait HigherKinded {
/** The type parameters of this entity. */
def typeParams: List[TypeParam]
@@ -187,8 +201,14 @@ trait HigherKinded extends Entity {
/** A template (class, trait, object or package) which is referenced in the universe, but for which no further
* documentation is available. Only templates for which a source file is given are documented by Scaladoc. */
-trait NoDocTemplate extends TemplateEntity
+trait NoDocTemplate extends TemplateEntity {
+ def kind = "<no doc>"
+}
+/** TODO: Document */
+trait NoDocTemplateMemberEntity extends TemplateEntity with MemberEntity {
+ def kind = "<no doc, mbr>"
+}
/** A template (class, trait, object or package) for which documentation is available. Only templates for which
* a source file is given are documented by Scaladoc. */
@@ -206,11 +226,10 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* only if the `docsourceurl` setting has been set. */
def sourceUrl: Option[java.net.URL]
- /** The direct super-type of this template. */
- def parentType: Option[TypeEntity]
-
- @deprecated("Use `linearizationTemplates` and `linearizationTypes` instead", "2.9.0")
- def linearization: List[(TemplateEntity, TypeEntity)]
+ /** The direct super-types of this template
+ e.g.: {{{class A extends B[C[Int]] with D[E]}}} has two direct parents: class B and D
+ NOTE: we are dropping the refinement here! */
+ def parentTypes: List[(TemplateEntity, TypeEntity)]
/** All class, trait and object templates which are part of this template's linearization, in linearization order.
* This template's linearization contains all of its direct and indirect super-classes and super-traits. */
@@ -220,9 +239,13 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
- /**All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def subClasses: List[DocTemplateEntity]
+ /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
+ * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+ def allSubClasses: List[DocTemplateEntity]
+
+ /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
+ * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+ def directSubClasses: List[DocTemplateEntity]
/** All members of this template. If this template is a package, only templates for which documentation is available
* in the universe (`DocTemplateEntity`) are listed. */
@@ -250,11 +273,29 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
/** The implicit conversions this template (class or trait, objects and packages are not affected) */
def conversions: List[ImplicitConversion]
+
+ /** The shadowing information for the implicitly added members */
+ def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
+
+ /** Classes that can be implicitly converted to this class */
+ def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
+
+ /** Classes to which this class can be implicitly converted
+ NOTE: Some classes might not be included in the scaladoc run, so they will be NoDocTemplateEntities */
+ def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)]
+
+ /** If this template takes part in inheritance and implicit conversion relations, it will be shown in this diagram */
+ def inheritanceDiagram: Option[Diagram]
+
+ /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
+ def contentDiagram: Option[Diagram]
}
/** A trait template. */
-trait Trait extends DocTemplateEntity with HigherKinded
+trait Trait extends DocTemplateEntity with HigherKinded {
+ def kind = "trait"
+}
/** A class template. */
@@ -270,11 +311,14 @@ trait Class extends Trait with HigherKinded {
* parameters cannot be curried, the outer list has exactly one element. */
def valueParams: List[List[ValueParam]]
+ override def kind = "class"
}
/** An object template. */
-trait Object extends DocTemplateEntity
+trait Object extends DocTemplateEntity {
+ def kind = "object"
+}
/** A package template. A package is in the universe if it is declared as a package object, or if it
@@ -290,6 +334,8 @@ trait Package extends Object {
/** All packages that are member of this package. */
def packages: List[Package]
+
+ override def kind = "package"
}
@@ -305,10 +351,6 @@ trait NonTemplateMemberEntity extends MemberEntity {
* It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
def isUseCase: Boolean
- /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
- * signature and the complete parameter descriptions. */
- def useCaseOf: Option[MemberEntity]
-
/** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
* and should not appear in ScalaDoc. */
def isBridge: Boolean
@@ -323,6 +365,7 @@ trait Def extends NonTemplateMemberEntity with HigherKinded {
* Each parameter block is a list of value parameters. */
def valueParams : List[List[ValueParam]]
+ def kind = "method"
}
@@ -337,11 +380,14 @@ trait Constructor extends NonTemplateMemberEntity {
* element. */
def valueParams : List[List[ValueParam]]
+ def kind = "constructor"
}
/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */
-trait Val extends NonTemplateMemberEntity
+trait Val extends NonTemplateMemberEntity {
+ def kind = "[lazy] value/variable"
+}
/** An abstract type member of a template. */
@@ -353,6 +399,7 @@ trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
/** The upper bound for this abstract type, if it has been defined. */
def hi: Option[TypeEntity]
+ def kind = "abstract type"
}
@@ -362,18 +409,14 @@ trait AliasType extends NonTemplateMemberEntity with HigherKinded {
/** The type aliased by this type alias. */
def alias: TypeEntity
+ def kind = "type alias"
}
/** A parameter to an entity. */
-trait ParameterEntity extends Entity {
-
- /** Whether this parameter is a type parameter. */
- def isTypeParam: Boolean
-
- /** Whether this parameter is a value parameter. */
- def isValueParam: Boolean
+trait ParameterEntity {
+ def name: String
}
@@ -388,7 +431,6 @@ trait TypeParam extends ParameterEntity with HigherKinded {
/** The upper bound for this type parameter, if it has been defined. */
def hi: Option[TypeEntity]
-
}
@@ -403,7 +445,6 @@ trait ValueParam extends ParameterEntity {
/** Whether this value parameter is implicit. */
def isImplicit: Boolean
-
}
@@ -416,6 +457,7 @@ trait Annotation extends Entity {
/** The arguments passed to the constructor of the annotation class. */
def arguments: List[ValueArgument]
+ def kind = "annotation"
}
/** A trait that signals the member results from an implicit conversion */
@@ -427,6 +469,15 @@ trait ImplicitConversion {
/** The result type after the conversion */
def targetType: TypeEntity
+ /** The template corresponding to the result type of the conversion, if it exists
+ * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
+ * types, so callers need to handle the None case.
+ */
+ def targetTemplate: Option[TemplateEntity]
+
+ /** The components of the implicit conversion type parents */
+ def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
+
/** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
def convertorMethod: Either[MemberEntity, String]
@@ -446,12 +497,30 @@ trait ImplicitConversion {
def members: List[MemberEntity]
}
-/** A trait that encapsulates a constraint necessary for implicit conversion */
-trait Constraint {
- // /** The implicit conversion during which this constraint appears */
- // def conversion: ImplicitConversion
+/** Shadowing captures the information that a member is shadowed by other members.
+ * There are two cases of shadowing for implicitly added members:
+ * 1) shadowing by an original class member (the class already has that member):
+ * in this case it won't be possible to call the member directly; the type checker will fail attempting to adapt
+ * the call arguments (or, if they do fit, it will call the original class' method)
+ * 2) shadowing by other applicable implicit conversions:
+ * this will result in an ambiguous implicit conversion error
+ */
+trait ImplicitMemberShadowing {
+ /** The members that shadow the current entry; use .inTemplate to get to the template name */
+ def shadowingMembers: List[MemberEntity]
+
+ /** The members that make this implicit conversion ambiguous
+ Note: for ambiguatingMembers you have the following invariant:
+ assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
+ def ambiguatingMembers: List[MemberEntity]
+
+ def isShadowed: Boolean = !shadowingMembers.isEmpty
+ def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
}
+/** A trait that encapsulates a constraint necessary for implicit conversion */
+trait Constraint
+
/** A constraint involving a type parameter which must be in scope */
trait ImplicitInScopeConstraint extends Constraint {
/** The type of the implicit value required */
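
The Entity API grown in this diff (kind, parentTypes, allSubClasses/directSubClasses, the shadowing and diagram members) is what the HTML generator walks. As a rough illustration only, the sketch below shows how such a hierarchy could be traversed; it uses minimal stand-in traits and made-up sample instances, not the real scala.tools.nsc.doc.model types.

    // Minimal stand-ins for members added in this diff; the real traits live in
    // scala.tools.nsc.doc.model and carry many more members.
    trait Entity { def name: String; def kind: String }
    trait DocTemplateEntity extends Entity {
      def directSubClasses: List[DocTemplateEntity]   // direct subclasses only
      def allSubClasses: List[DocTemplateEntity]      // direct and indirect subclasses
    }

    // Illustrative consumer: print each template with its kind and direct subclasses.
    object PrintHierarchy extends App {
      case class Tpl(name: String, kind: String,
                     directSubClasses: List[DocTemplateEntity]) extends DocTemplateEntity {
        def allSubClasses = directSubClasses.flatMap(s => s :: s.allSubClasses)
      }

      val hashMap = Tpl("HashMap", "class", Nil)
      val map     = Tpl("Map",     "trait", List(hashMap))

      def walk(tpl: DocTemplateEntity, indent: String = ""): Unit = {
        println(indent + tpl.kind + " " + tpl.name)
        tpl.directSubClasses.foreach(walk(_, indent + "  "))
      }

      walk(map)   // prints: trait Map, then class HashMap
    }
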
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 3dd77d47da..9fa6619e9f 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -6,6 +6,8 @@ package model
import comment._
+import diagram._
+
import scala.collection._
import scala.util.matching.Regex
@@ -17,16 +19,16 @@ import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory with ModelFactoryImplicitSupport with CommentFactory with TreeFactory =>
+ thisFactory: ModelFactory with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with TreeFactory =>
import global._
import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
- import rootMirror.{ RootPackage, EmptyPackage }
+ import rootMirror.{ RootPackage, RootClass, EmptyPackage }
- private var droppedPackages = 0
- def templatesCount = templatesCache.size - droppedPackages
+ def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size
- private var modelFinished = false
+ private var _modelFinished = false
+ def modelFinished: Boolean = _modelFinished
private var universe: Universe = null
private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
@@ -43,62 +45,62 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
}
- private lazy val noSubclassCache = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
-
- /** */
def makeModel: Option[Universe] = {
val universe = new Universe { thisUniverse =>
thisFactory.universe = thisUniverse
val settings = thisFactory.settings
- private val rootPackageMaybe = makeRootPackage
- val rootPackage = rootPackageMaybe.orNull
+ val rootPackage = modelCreation.createRootPackage
}
- modelFinished = true
+ _modelFinished = true
+ // complete the links between model entities: everything that couldn't have been done before
+ universe.rootPackage.completeModel
+
Some(universe) filter (_.rootPackage != null)
}
- /** */
- protected val templatesCache =
- new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
-
- def findTemplate(query: String): Option[DocTemplateImpl] = {
- if (!modelFinished) sys.error("cannot find template in unfinished universe")
- templatesCache.values find { tpl => tpl.qualifiedName == query && !tpl.isObject }
- }
+ // state:
+ var ids = 0
+ private val droppedPackages = mutable.Set[PackageImpl]()
+ protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
+ protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl]
+ protected var typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
def optimize(str: String): String =
if (str.length < 16) str.intern else str
/* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
- abstract class EntityImpl(val sym: Symbol, inTpl: => TemplateImpl) extends Entity {
+ abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
+ val id = { ids += 1; ids }
val name = optimize(sym.nameString)
+ val universe = thisFactory.universe
+
+ // Debugging:
+ // assert(id != 36, sym + " " + sym.getClass)
+ //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString("."))
+
def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- val universe = thisFactory.universe
def annotations = sym.annotations.map(makeAnnotation)
}
trait TemplateImpl extends EntityImpl with TemplateEntity {
override def qualifiedName: String =
- if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+ if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
def isPackage = sym.isPackage
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
def isObject = sym.isModule && !sym.isPackage
def isCaseClass = sym.isCaseClass
def isRootPackage = false
+ def ownType = makeType(sym.tpe, this)
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
+ def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
}
- class NoDocTemplateImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with NoDocTemplate {
- def isDocTemplate = false
- }
-
- abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionImpl = null, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
- lazy val comment =
- if (inTpl == null) None else thisFactory.comment(sym, inTpl)
+ abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ lazy val comment = if (inTpl != null) thisFactory.comment(sym, inTpl) else None
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
def inDefinitionTemplates = this match {
@@ -106,9 +108,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
mb.useCaseOf.get.inDefinitionTemplates
case _ =>
if (inTpl == null)
- makeRootPackage.toList
+ List(makeRootPackage)
else
- makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
}
def visibility = {
if (sym.isPrivateLocal) PrivateInInstance()
@@ -189,16 +191,40 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def byConversion = if (implConv ne null) Some(implConv) else None
}
+ /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class
+ * exists, but should not be documented (either it's not included in the source or it's not visible)
+ */
+ class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
+ assert(modelFinished)
+ assert(!(noDocTemplatesCache isDefinedAt sym))
+ noDocTemplatesCache += (sym -> this)
+
+ def isDocTemplate = false
+ }
+
+ /** An inherited template that was not documented in its original owner - example:
+ * in classpath: trait T { class C } -- T (and implicitly C) are not documented
+ * in the source: trait U extends T -- C appears in U as a NoDocTemplateMemberImpl -- that is, U has a member for it
+ * but C doesn't get its own page
+ */
+ class NoDocTemplateMemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, null, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplateMemberEntity {
+ assert(modelFinished)
+
+ def isDocTemplate = false
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
+ }
+
/** The instantiation of `TemplateImpl` triggers the creation of the following entities:
* All ancestors of the template and all non-package members.
*/
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, null, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
- //if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, null, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
+ assert(!modelFinished)
+ assert(!(docTemplatesCache isDefinedAt sym), sym)
+ docTemplatesCache += (sym -> this)
+
if (settings.verbose.value)
inform("Creating doc template for " + sym)
- templatesCache += (sym -> this)
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
def inSource =
if (sym.sourceFile != null && ! sym.isSynthetic)
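
The templates above register themselves in docTemplatesCache or noDocTemplatesCache from their constructors and assert which phase they may be created in. A minimal, self-contained sketch of that construction-time caching idea, with invented names (it is not the factory's actual code):

    import scala.collection.mutable

    object PhaseAwareCaches {
      var modelFinished = false   // flipped once the core model is built, as in ModelFactory

      val docTemplates   = mutable.LinkedHashMap[String, DocTpl]()
      val noDocTemplates = mutable.LinkedHashMap[String, NoDocTpl]()

      // Created only while the model is being built; registers itself exactly once.
      class DocTpl(val name: String) {
        assert(!modelFinished, s"$name: doc templates must be created before the model is finished")
        assert(!docTemplates.isDefinedAt(name), s"$name was created twice")
        docTemplates += (name -> this)
      }

      // Created only during later lookups, for symbols that will not get their own page.
      class NoDocTpl(val name: String) {
        assert(modelFinished, s"$name: no-doc templates are created only after the model is finished")
        noDocTemplates += (name -> this)
      }
    }
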
@@ -226,14 +252,26 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else None
}
- def parentType = {
- if (sym.isPackage || sym == AnyClass) None else {
+
+ def parentTemplates =
+ if (sym.isPackage || sym == AnyClass)
+ List()
+ else
+ sym.tpe.parents.flatMap { tpe: Type =>
+ val tSym = tpe.typeSymbol
+ if (tSym != NoSymbol)
+ List(makeTemplate(tSym))
+ else
+ List()
+ } filter (_.isInstanceOf[DocTemplateEntity])
+
+ def parentTypes =
+ if (sym.isPackage || sym == AnyClass) List() else {
val tps = sym.tpe.parents map { _.asSeenFrom(sym.thisType, sym) }
- Some(makeType(RefinedType(tps, EmptyScope), inTpl))
+ makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl)
}
- }
- protected def linearizationFromSymbol(symbol: Symbol) = {
+ protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
symbol.ancestors map { ancestor =>
val typeEntity = makeType(symbol.info.baseType(ancestor), this)
val tmplEntity = makeTemplate(ancestor) match {
@@ -244,59 +282,134 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- val linearization = linearizationFromSymbol(sym)
+ lazy val linearization = linearizationFromSymbol(sym)
def linearizationTemplates = linearization map { _._1 }
def linearizationTypes = linearization map { _._2 }
+ /* Subclass cache */
private lazy val subClassesCache = (
- if (noSubclassCache(sym)) null
+ if (sym == AnyRefClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
if (subClassesCache != null)
subClassesCache += sc
}
- def subClasses = if (subClassesCache == null) Nil else subClassesCache.toList
+ def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
+ def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
+
+ /* Implicitly convertible class cache */
+ private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateEntity, ImplicitConversionImpl)] = null
+ def registerImplicitlyConvertibleClass(dtpl: DocTemplateEntity, conv: ImplicitConversionImpl): Unit = {
+ if (implicitlyConvertibleClassesCache == null)
+ implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateEntity, ImplicitConversionImpl)]()
+ implicitlyConvertibleClassesCache += ((dtpl, conv))
+ }
- val conversions = if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+ def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversionImpl)] =
+ if (implicitlyConvertibleClassesCache == null)
+ List()
+ else
+ implicitlyConvertibleClassesCache.toList
+
+ // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
+ // lazily, on completeModel
+ val conversions: List[ImplicitConversionImpl] =
+ if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+
+ // members as given by the compiler
+ lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this))
+
+ // the inherited templates (classes, traits or objects)
+ var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOnwer(t, this))
+ // the direct members (methods, values, vars, types and directly contained templates)
+ var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
+ // the members generated by the symbols in memberSymsEager
+ val ownMembers = (memberSyms.flatMap(makeMember(_, null, this)))
+
+ // all the members that are documented PLUS the members inherited by implicit conversions
+ var members: List[MemberImpl] = ownMembers
+
+ def templates = members collect { case c: DocTemplateEntity => c }
+ def methods = members collect { case d: Def => d }
+ def values = members collect { case v: Val => v }
+ def abstractTypes = members collect { case t: AbstractType => t }
+ def aliasTypes = members collect { case t: AliasType => t }
+
+ /**
+ * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
+ * inherited templates and implicit members are added to the members at this point.
+ */
+ def completeModel: Unit = {
+ // DFS completion
+ for (member <- members)
+ member match {
+ case d: DocTemplateImpl => d.completeModel
+ case _ =>
+ }
+
+ members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, inTpl))
+
+ // force the linearization and the implicit conversions, so subclasses and convertible classes get registered
+ linearization
+ outgoingImplicitlyConvertedClasses
+
+ // the members generated by the symbols in memberSymsEager PLUS the members from the usecases
+ val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
+ implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
+ // finally, add the members generated by implicit conversions
+ members :::= conversions.flatMap(_.memberImpls)
+ }
- lazy val memberSyms =
- // Only this class's constructors are part of its members, inherited constructors are not.
- sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym) && !isPureBridge(sym) )
+ var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]()
- val members = (memberSyms.flatMap(makeMember(_, null, this))) :::
- (conversions.flatMap((_.members))) // also take in the members from implicit conversions
+ lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] =
+ conversions flatMap (conv =>
+ if (!implicitExcluded(conv.conversionQualifiedName))
+ conv.targetTypeComponents map {
+ case pair@(template, tpe) =>
+ template match {
+ case d: DocTemplateImpl => d.registerImplicitlyConvertibleClass(this, conv)
+ case _ => // nothing
+ }
+ (pair._1, pair._2, conv)
+ }
+ else List()
+ )
- val templates = members collect { case c: DocTemplateEntity => c }
- val methods = members collect { case d: Def => d }
- val values = members collect { case v: Val => v }
- val abstractTypes = members collect { case t: AbstractType => t }
- val aliasTypes = members collect { case t: AliasType => t }
override def isTemplate = true
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
def isDocTemplate = true
def companion = sym.companionSymbol match {
case NoSymbol => None
case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
- Some(makeDocTemplate(comSym, inTpl))
+ makeTemplate(comSym) match {
+ case d: DocTemplateImpl => Some(d)
+ case _ => None
+ }
case _ => None
}
+
+ // We make the diagram a lazy val, since we're not sure we'll include the diagrams in the page
+ lazy val inheritanceDiagram = makeInheritanceDiagram(this)
+ lazy val contentDiagram = makeContentDiagram(this)
}
- abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
+ abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override val linearization = {
+ override lazy val linearization = {
val symbol = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
linearizationFromSymbol(symbol)
}
- val packages = members collect { case p: Package => p }
+ def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
- abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
+ abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
lazy val definitionName =
if (implConv == null) optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
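
completeModel makes member population two-phased: a template is created with its own members only, and the inherited templates plus implicit members are appended once every DocTemplate exists. A small sketch of that depth-first completion, using invented names rather than the factory's real types:

    // Phase 1: build the tree with eager members only.
    // Phase 2: completeModel() walks it depth-first and appends the deferred members.
    class TemplateSketch(val name: String, var members: List[String],
                         val nested: List[TemplateSketch]) {
      private var deferred: List[String] = Nil
      def defer(ms: List[String]): Unit = deferred :::= ms

      def completeModel(): Unit = {
        nested.foreach(_.completeModel())   // DFS completion, as in DocTemplateImpl
        members :::= deferred               // inherited / implicit members land only now
      }
    }

    object TwoPhaseDemo extends App {
      val inner = new TemplateSketch("Inner", List("ownDef"), Nil)
      val outer = new TemplateSketch("Outer", List("ownVal"), List(inner))
      inner.defer(List("inheritedFromClasspath"))
      outer.completeModel()
      println(inner.members)   // List(inheritedFromClasspath, ownDef)
    }
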
@@ -305,7 +418,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isBridge = sym.isBridge
}
- abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
def valueParams = {
val info = if (implConv eq null) sym.info else implConv.toType memberInfo sym
info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
@@ -314,28 +427,31 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
- override def inTemplate = inTpl
+ abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends ParameterEntity {
+ val name = optimize(sym.nameString)
}
- private trait TypeBoundsImpl extends EntityImpl {
+ private trait TypeBoundsImpl {
+ def sym: Symbol
+ def inTpl: TemplateImpl
def lo = sym.info.bounds match {
case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
- Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTemplate, sym))
+ Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym))
case _ => None
}
def hi = sym.info.bounds match {
case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
- Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTemplate, sym))
+ Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym))
case _ => None
}
}
- trait HigherKindedImpl extends EntityImpl with HigherKinded {
+ trait HigherKindedImpl extends HigherKinded {
+ def sym: Symbol
+ def inTpl: TemplateImpl
def typeParams =
- sym.typeParams map (makeTypeParam(_, inTemplate))
+ sym.typeParams map (makeTypeParam(_, inTpl))
}
-
/* ============== MAKER METHODS ============== */
/** */
@@ -352,145 +468,133 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
aSym
}
- def makeRootPackage: Option[PackageImpl] =
- makePackage(RootPackage, null)
+ /**
+ * These are all model construction methods. Please do not use them directly: they call each other recursively,
+ * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember and makeType should only be used
+ * after the model has been created (modelFinished=true), otherwise assertions will start failing.
+ */
+ object modelCreation {
- /** Creates a package entity for the given symbol or returns `None` if the symbol does not denote a package that
- * contains at least one ''documentable'' class, trait or object. Creating a package entity */
- def makePackage(aSym: Symbol, inTpl: => PackageImpl): Option[PackageImpl] = {
- val bSym = normalizeTemplate(aSym)
- if (templatesCache isDefinedAt (bSym))
- Some(templatesCache(bSym) match {case p: PackageImpl => p})
- else {
- val pack =
- if (bSym == RootPackage)
- new RootPackageImpl(bSym) {
- override lazy val comment =
- if(settings.docRootContent.isDefault) None
- else {
- import Streamable._
- Path(settings.docRootContent.value) match {
- case f : File => {
- val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition))
- Some(rootComment)
- }
- case _ => None
- }
- }
- override val name = "root"
- override def inTemplate = this
- override def toRoot = this :: Nil
- override def qualifiedName = "_root_"
- override def inheritedFrom = Nil
- override def isRootPackage = true
- override lazy val memberSyms =
- (bSym.info.members ++ EmptyPackage.info.members) filter { s =>
- s != EmptyPackage && s != RootPackage
- }
- }
- else
- new PackageImpl(bSym, inTpl) {}
- if (pack.templates.isEmpty) {
- droppedPackages += 1
- None
- }
- else Some(pack)
+ def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match {
+ case Some(root: PackageImpl) => root
+ case _ => modelCreation.createTemplate(RootPackage, null).asInstanceOf[PackageImpl]
}
- }
-
- /** */
- def makeTemplate(aSym: Symbol): TemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- if (bSym == RootPackage)
- makeRootPackage.get
- else if (bSym.isPackage)
- makeTemplate(bSym.owner) match {
- case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
- case inNoDocTpl: NoDocTemplateImpl => new NoDocTemplateImpl(bSym, inNoDocTpl)
- case _ => throw new Error("'" + bSym + "' must be in a package")
- }
- else if (templateShouldDocument(bSym))
- makeTemplate(bSym.owner) match {
- case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
- case inNoDocTpl: NoDocTemplateImpl => new NoDocTemplateImpl(bSym, inNoDocTpl)
- case _ => throw new Error("'" + bSym + "' must be in documentable template")
- }
- else
- new NoDocTemplateImpl(bSym, makeTemplate(bSym.owner))
- }
-
- /** */
- def makeDocTemplate(aSym: Symbol, inTpl: => DocTemplateImpl): DocTemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- val minimumInTpl =
- if (bSym.owner != inTpl.sym)
- makeTemplate(aSym.owner) match {
- case inDTpl: DocTemplateImpl => inDTpl
- case inNDTpl => throw new Error("'" + bSym + "' is owned by '" + inNDTpl + "' which is not documented")
+ /**
+ * Create a template, either a package, class, trait or object
+ */
+ def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
+ // don't call this after the model is finished!
+ assert(!modelFinished)
+
+ def createRootPackageComment: Option[Comment] =
+ if(settings.docRootContent.isDefault) None
+ else {
+ import Streamable._
+ Path(settings.docRootContent.value) match {
+ case f : File => {
+ val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition))
+ Some(rootComment)
+ }
+ case _ => None
+ }
}
- else
- inTpl
- if (templatesCache isDefinedAt (bSym))
- templatesCache(bSym)
- else if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
- new DocTemplateImpl(bSym, minimumInTpl) with Object
- else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
- new DocTemplateImpl(bSym, minimumInTpl) with Trait
- else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
- new DocTemplateImpl(bSym, minimumInTpl) with Class {
- def valueParams =
- // we don't want params on a class (non case class) signature
- if (isCaseClass) List(sym.constrParamAccessors map (makeValueParam(_, this)))
- else List.empty
- val constructors =
- members collect { case d: Constructor => d }
- def primaryConstructor = constructors find { _.isPrimary }
+
+ def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
+ if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
+ new DocTemplateImpl(bSym, inTpl) with Object
+ else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
+ new DocTemplateImpl(bSym, inTpl) with Trait
+ else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
+ new DocTemplateImpl(bSym, inTpl) with Class {
+ def valueParams =
+ // we don't want params on a class (non case class) signature
+ if (isCaseClass) List(sym.constrParamAccessors map (makeValueParam(_, this)))
+ else List.empty
+ val constructors =
+ members collect { case d: Constructor => d }
+ def primaryConstructor = constructors find { _.isPrimary }
+ }
+ else
+ sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template")
}
- else
- throw new Error("'" + bSym + "' that isn't a class, trait or object cannot be built as a documentable template")
- }
- /** */
- def makeAnnotation(annot: AnnotationInfo): Annotation = {
- val aSym = annot.symbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
- lazy val annotationClass =
- makeTemplate(annot.symbol)
- val arguments = { // lazy
- def noParams = annot.args map { _ => None }
- val params: List[Option[ValueParam]] = annotationClass match {
- case aClass: Class =>
- (aClass.primaryConstructor map { _.valueParams.head }) match {
- case Some(vps) => vps map { Some(_) }
- case None => noParams
+ val bSym = normalizeTemplate(aSym)
+ if (docTemplatesCache isDefinedAt bSym)
+ return docTemplatesCache(bSym)
+
+ /* Three cases of templates:
+ * (1) root package -- special cased for bootstrapping
+ * (2) package
+ * (3) class/object/trait
+ */
+ if (bSym == RootPackage) // (1)
+ new RootPackageImpl(bSym) {
+ override lazy val comment = createRootPackageComment
+ override val name = "root"
+ override def inTemplate = this
+ override def toRoot = this :: Nil
+ override def qualifiedName = "_root_"
+ override def inheritedFrom = Nil
+ override def isRootPackage = true
+ override lazy val memberSyms =
+ (bSym.info.members ++ EmptyPackage.info.members) filter { s =>
+ s != EmptyPackage && s != RootPackage
}
- case _ => noParams
}
- assert(params.length == annot.args.length)
- (params zip annot.args) flatMap { case (param, arg) =>
- makeTree(arg) match {
- case Some(tree) =>
- Some(new ValueArgument {
- def parameter = param
- def value = tree
- })
- case None => None
- }
+ else if (bSym.isPackage) // (2)
+ inTpl match {
+ case inPkg: PackageImpl =>
+ val pack = new PackageImpl(bSym, inPkg) {}
+ if (pack.templates.isEmpty && pack.memberSymsLazy.isEmpty)
+ droppedPackages += pack
+ pack
+ case _ =>
+ sys.error("'" + bSym + "' must be in a package")
}
+ else {
+ // no class inheritance at this point
+ assert(inOriginalOnwer(bSym, inTpl))
+ createDocTemplate(bSym, inTpl)
}
}
+
+ /**
+ * After the model is completed, no more DocTemplateEntities are created.
+ * Therefore any symbol that still appears is:
+ * - NoDocTemplateMemberEntity (created here)
+ * - NoDocTemplateEntity (created in makeTemplate)
+ */
+ def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = {
+ assert(modelFinished)
+ val bSym = normalizeTemplate(aSym)
+
+ if (docTemplatesCache isDefinedAt bSym)
+ docTemplatesCache(bSym)
+ else
+ docTemplatesCache.get(bSym.owner) match {
+ case Some(inTpl) =>
+ val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr })
+ assert(mbrs.length == 1)
+ mbrs.head
+ case _ =>
+ // move the class completely to the new location
+ new NoDocTemplateMemberImpl(aSym, inTpl)
+ }
+ }
}
- /** */
+ /** Get the root package */
+ def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
+
// TODO: Should be able to override the type
- def makeMember(aSym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+ def makeMember(aSym: Symbol, implConv: ImplicitConversionImpl, inTpl: DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, _useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
- thisFactory.comment(bSym.accessed, inTpl) // This hack should be removed after analyser is fixed.
+ thisFactory.comment(bSym.accessed, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
override def useCaseOf = _useCaseOf
})
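
createLazyTemplateMember above resolves a symbol after the model is finished in three steps: a cached doc template, an existing member of its owner, or a fresh NoDocTemplateMemberImpl. A rough, self-contained sketch of that lookup order, with plain strings standing in for compiler Symbols:

    import scala.collection.mutable

    object LazyLookupSketch {
      val docTemplatesCache = mutable.LinkedHashMap[String, String]()   // sym -> documented template
      val membersByOwner    = mutable.Map[String, List[String]]()       // owner sym -> member syms

      def createLazyTemplateMember(sym: String, owner: String): String =
        docTemplatesCache.getOrElse(sym,                        // (1) already a documented template
          membersByOwner.getOrElse(owner, Nil) match {
            case ms if ms contains sym => sym                   // (2) an existing member of its owner
            case _                     => "<no doc, mbr> " + sym  // (3) fall back to a no-doc member
          })
    }
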
@@ -504,10 +608,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (bSym == definitions.Object_synchronized) {
val cSymInfo = (bSym.info: @unchecked) match {
case PolyType(ts, MethodType(List(bp), mt)) =>
- val cp = bp.cloneSymbol.setInfo(definitions.byNameType(bp.info))
+ val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
PolyType(ts, MethodType(List(cp), mt))
}
- bSym.cloneSymbol.setInfo(cSymInfo)
+ bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
}
else bSym
}
@@ -538,10 +642,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
override def useCaseOf = _useCaseOf
})
- else if (bSym.isPackage)
- inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
- else if ((bSym.isClass || bSym.isModule || bSym == AnyRefClass) && templateShouldDocument(bSym))
- Some(makeDocTemplate(bSym, inTpl))
+ else if (bSym.isPackage && !modelFinished)
+ inTpl match {
+ case inPkg: PackageImpl => modelCreation.createTemplate(bSym, inTpl) match {
+ case p: PackageImpl if droppedPackages contains p => None
+ case p: PackageImpl => Some(p)
+ case _ => sys.error("'" + bSym + "' must be a package")
+ }
+ case _ =>
+ sys.error("'" + bSym + "' must be in a package")
+ }
+ else if (!modelFinished && templateShouldDocument(bSym, inTpl) && inOriginalOnwer(bSym, inTpl))
+ Some(modelCreation.createTemplate(bSym, inTpl))
else
None
}
@@ -561,14 +673,78 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// Use cases replace the original definitions - SI-5054
allSyms flatMap { makeMember0(_, member) }
}
+ }
+
+ def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
+ val tplSym = normalizeTemplate(aSym.owner)
+ inTpl.members.find(_.sym == aSym)
+ }
+
+ def findTemplate(query: String): Option[DocTemplateImpl] = {
+ assert(modelFinished)
+ docTemplatesCache.values find { (tpl: TemplateImpl) => tpl.qualifiedName == query && !tpl.isObject }
+ }
+
+ def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
+ assert(modelFinished)
+ docTemplatesCache.get(normalizeTemplate(aSym))
+ }
+
+ def makeTemplate(aSym: Symbol): TemplateImpl = {
+ assert(modelFinished)
+ def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
+ val bSym = normalizeTemplate(aSym)
+ noDocTemplatesCache.get(bSym) match {
+ case Some(noDocTpl) => noDocTpl
+ case None => new NoDocTemplateImpl(bSym, inTpl)
+ }
+ }
+
+ findTemplateMaybe(aSym) match {
+ case Some(dtpl) =>
+ dtpl
+ case None =>
+ val bSym = normalizeTemplate(aSym)
+ makeNoDocTemplate(bSym, makeTemplate(bSym.owner))
+ }
+ }
+
+
+ /** */
+ def makeAnnotation(annot: AnnotationInfo): Annotation = {
+ val aSym = annot.symbol
+ new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
+ lazy val annotationClass =
+ makeTemplate(annot.symbol)
+ val arguments = { // lazy
+ def noParams = annot.args map { _ => None }
+ val params: List[Option[ValueParam]] = annotationClass match {
+ case aClass: Class =>
+ (aClass.primaryConstructor map { _.valueParams.head }) match {
+ case Some(vps) => vps map { Some(_) }
+ case None => noParams
+ }
+ case _ => noParams
+ }
+ assert(params.length == annot.args.length)
+ (params zip annot.args) flatMap { case (param, arg) =>
+ makeTree(arg) match {
+ case Some(tree) =>
+ Some(new ValueArgument {
+ def parameter = param
+ def value = tree
+ })
+ case None => None
+ }
+ }
+ }
+ }
}
/** */
- def makeTypeParam(aSym: Symbol, inTpl: => TemplateImpl): TypeParam =
+ def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam =
new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
- def isTypeParam = true
- def isValueParam = false
def variance: String = {
if (sym hasFlag Flags.COVARIANT) "+"
else if (sym hasFlag Flags.CONTRAVARIANT) "-"
@@ -577,16 +753,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl): ValueParam = {
+ def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = {
makeValueParam(aSym, inTpl, aSym.nameString)
}
+
/** */
- def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl, newName: String): ValueParam =
+ def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam =
new ParameterImpl(aSym, inTpl) with ValueParam {
override val name = newName
- def isTypeParam = false
- def isValueParam = true
def defaultValue =
if (aSym.hasDefault) {
// units.filter should return only one element
@@ -601,12 +776,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else None
def resultType =
- makeTypeInTemplateContext(sym.tpe, inTpl, sym)
+ makeTypeInTemplateContext(aSym.tpe, inTpl, aSym)
def isImplicit = aSym.isImplicit
}
/** */
- def makeTypeInTemplateContext(aType: Type, inTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
+ def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
val tpe =
@@ -619,11 +794,30 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
makeType(tpe, inTpl)
}
+ /** Get the types of the parents of the current class, ignoring the refinements */
+ def makeParentTypes(aType: Type, tpl: Option[DocTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
+ case RefinedType(parents, defs) =>
+ val ignoreParents = Set[Symbol](AnyRefClass, ObjectClass)
+ val filtParents =
+ // we don't want to expose too many links to AnyRef; that would just be redundant information
+ if (tpl.isDefined && (!tpl.get.isObject && parents.length < 2))
+ parents
+ else
+ parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+ filtParents.map(parent => {
+ val templateEntity = makeTemplate(parent.typeSymbol)
+ val typeEntity = makeType(parent, inTpl)
+ (templateEntity, typeEntity)
+ })
+ case _ =>
+ List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
+ }
+
/** */
- def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity = {
+ def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
def templatePackage = closestPackage(inTpl.sym)
- new TypeEntity {
+ def createTypeEntity = new TypeEntity {
private val nameBuffer = new StringBuilder
private var refBuffer = new immutable.TreeMap[Int, (TemplateEntity, Int)]
private def appendTypes0(types: List[Type], sep: String): Unit = types match {
@@ -669,7 +863,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// nameBuffer append stripPrefixes.foldLeft(pre.prefixString)(_ stripPrefix _)
// }
val bSym = normalizeTemplate(aSym)
- if (bSym.isNonClassType) {
+ if (bSym.isNonClassType && bSym != AnyRefClass) {
nameBuffer append bSym.decodedName
} else {
val tpl = makeTemplate(bSym)
@@ -719,23 +913,87 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
val refEntity = refBuffer
val name = optimize(nameBuffer.toString)
}
- }
- def templateShouldDocument(aSym: Symbol): Boolean = {
- // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
- (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
- ( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) ) && !isEmptyJavaObject(aSym)
+ if (aType.isTrivial)
+ typeCache.get(aType) match {
+ case Some(typeEntity) => typeEntity
+ case None =>
+ val typeEntity = createTypeEntity
+ typeCache += aType -> typeEntity
+ typeEntity
+ }
+ else
+ createTypeEntity
}
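The makeType change above memoizes TypeEntity instances only for "trivial" types, since non-trivial types can depend on the template they are rendered in. A rough standalone sketch of that conditional-caching pattern (the names isCacheable and render are illustrative stand-ins, not compiler API):

    import scala.collection.mutable

    object ConditionalMemo {
      private val cache = mutable.Map.empty[String, String]

      // pretend "trivial" inputs are the ones that always render the same way
      def isCacheable(tpe: String): Boolean = !tpe.contains("#")

      def render(tpe: String): String = s"<<$tpe>>" // stand-in for building a TypeEntity

      def makeType(tpe: String): String =
        if (isCacheable(tpe)) cache.getOrElseUpdate(tpe, render(tpe))
        else render(tpe) // context-dependent types are rebuilt every time

      def main(args: Array[String]): Unit = {
        println(makeType("scala.Int")) // computed and cached
        println(makeType("scala.Int")) // served from the cache
        println(makeType("A#Inner"))   // never cached
      }
    }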
- def isEmptyJavaObject(aSym: Symbol): Boolean = {
- def hasMembers = aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
- aSym.isModule && aSym.isJavaDefined && !hasMembers
- }
+ def normalizeOwner(aSym: Symbol): Symbol =
+ /*
+ * Okay, here's the explanation of what happens. The code:
+ *
+ * package foo {
+ * object `package` {
+ * class Bar
+ * }
+ * }
+ *
+ * will yield this Symbol structure:
+ *
+ * +---------------+ +--------------------------+
+ * | package foo#1 ----(1)---> module class foo#2 |
+ * +---------------+ | +----------------------+ | +-------------------------+
+ * | | package object foo#3 ------(1)---> module class package#4 |
+ * | +----------------------+ | | +---------------------+ |
+ * +--------------------------+ | | class package$Bar#5 | |
+ * | +---------------------+ |
+ * +-------------------------+
+ * (1) sourceModule
+ * (2) you get out of owners with .owner
+ */
+ normalizeTemplate(aSym) match {
+ case bSym if bSym.isPackageObject =>
+ normalizeOwner(bSym.owner)
+ case bSym =>
+ bSym
+ }
- def localShouldDocument(aSym: Symbol): Boolean = {
+ def inOriginalOnwer(aSym: Symbol, inTpl: TemplateImpl): Boolean =
+ normalizeOwner(aSym.owner) == normalizeOwner(inTpl.sym)
+
+ def templateShouldDocument(aSym: Symbol, inTpl: TemplateImpl): Boolean =
+ (aSym.isClass || aSym.isModule || aSym == AnyRefClass) &&
+ localShouldDocument(aSym) &&
+ !isEmptyJavaObject(aSym) &&
+ // either it's inside the original owner or we can document it later:
+ (!inOriginalOnwer(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null)))
+
+ def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) =
+ // pruning modules that shouldn't be documented
+ // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc
+ // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we
+ // somehow prune the tree. And isInitialized is a good heuristic for pruning -- if the package was not explored
+ // during typer and refchecks, it's not necessary for the current application and there's no need to explore it.
+ (!sym.isModule || sym.moduleClass.isInitialized) &&
+ // documenting only public and protected members
+ localShouldDocument(sym) &&
+ // Only this class's constructors are part of its members, inherited constructors are not.
+ (!sym.isConstructor || sym.owner == inTpl.sym) &&
+ // If the @bridge annotation overrides a normal member, show it
+ !isPureBridge(sym)
+
+ def isEmptyJavaObject(aSym: Symbol): Boolean =
+ aSym.isModule && aSym.isJavaDefined &&
+ aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
+
+ def localShouldDocument(aSym: Symbol): Boolean =
!aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
- }
/** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
+
+ // the classes that are excluded from the index should also be excluded from the diagrams
+ def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
+
+ // the implicit conversions that are excluded from the pages should not appear in the diagram
+ def implicitExcluded(convertorMethod: String): Boolean = settings.hardcoded.commonConversionTargets.contains(convertorMethod)
}
+
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index c3525037cd..8cbf2ac1b6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -58,13 +58,14 @@ trait ModelFactoryImplicitSupport {
import global._
import global.analyzer._
import global.definitions._
+ import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
import settings.hardcoded
// debugging:
val DEBUG: Boolean = settings.docImplicitsDebug.value
val ERROR: Boolean = true // currently we show all errors
- @inline final def debug(msg: => String) = if (DEBUG) println(msg)
- @inline final def error(msg: => String) = if (ERROR) println(msg)
+ @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
+ @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
/** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
* For example, if an implicit conversion requires that there is a Numeric[T] in scope:
@@ -79,80 +80,8 @@ trait ModelFactoryImplicitSupport {
* - not be generated at all, since there's no Numeric[String] in scope (if ran without -implicits-show-all)
* - generated with a *weird* constraint, Numeric[String] as the user might add it by hand (if flag is enabled)
*/
- val implicitsShowAll: Boolean = settings.docImplicitsShowAll.value
class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
- /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
-
- class ImplicitConversionImpl(
- val sym: Symbol,
- val convSym: Symbol,
- val toType: Type,
- val constrs: List[Constraint],
- inTpl: => DocTemplateImpl)
- extends ImplicitConversion {
-
- def source: DocTemplateEntity = inTpl
-
- def targetType: TypeEntity = makeType(toType, inTpl)
-
- def convertorOwner: TemplateEntity =
- if (convSym != NoSymbol)
- makeTemplate(convSym.owner)
- else {
- error("Scaladoc implicits: Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + " = NoSymbol!")
- makeRootPackage.get // surely the root package was created :)
- }
-
- def convertorMethod: Either[MemberEntity, String] = {
- var convertor: MemberEntity = null
-
- convertorOwner match {
- case doc: DocTemplateImpl =>
- val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
- if (convertors.length == 1)
- convertor = convertors.head
- case _ =>
- }
- if (convertor ne null)
- Left(convertor)
- else
- Right(convSym.nameString)
- }
-
- def conversionShortName = convSym.nameString
-
- def conversionQualifiedName = convertorOwner.qualifiedName + "." + convSym.nameString
-
- lazy val constraints: List[Constraint] = constrs
-
- val members: List[MemberEntity] = {
- // Obtain the members inherited by the implicit conversion
- var memberSyms = toType.members.filter(implicitShouldDocument(_))
- val existingMembers = sym.info.members
-
- // Debugging part :)
- debug(sym.nameString + "\n" + "=" * sym.nameString.length())
- debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
-
- // Members inherited by implicit conversions cannot override actual members
- memberSyms = memberSyms.filterNot((sym1: Symbol) =>
- existingMembers.exists(sym2 => sym1.name == sym2.name &&
- !isDistinguishableFrom(toType.memberInfo(sym1), sym.info.memberInfo(sym2))))
-
- debug(" -> full type: " + toType)
- if (constraints.length != 0) {
- debug(" -> constraints: ")
- constraints foreach { constr => debug(" - " + constr) }
- }
- debug(" -> members:")
- memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
- debug("")
-
- memberSyms.flatMap((makeMember(_, this, inTpl)))
- }
- }
-
/* ============== MAKER METHODS ============== */
/**
@@ -162,7 +91,7 @@ trait ModelFactoryImplicitSupport {
* default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
* future we might want to extend this to more complex scopes.
*/
- def makeImplicitConversions(sym: Symbol, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
// Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
// But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null
if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
@@ -171,16 +100,17 @@ trait ModelFactoryImplicitSupport {
val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
- conversions = conversions.filterNot(_.members.isEmpty)
+ // also keep empty conversions, so they appear in diagrams
+ // conversions = conversions.filter(!_.members.isEmpty)
// Filter out specialized conversions from array
if (sym == ArrayClass)
- conversions = conversions.filterNot((conv: ImplicitConversion) =>
+ conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
// Filter out non-sensical conversions from value types
if (isPrimitiveValueType(sym.tpe))
- conversions = conversions.filter((ic: ImplicitConversion) =>
+ conversions = conversions.filter((ic: ImplicitConversionImpl) =>
hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
// Put the class-specific conversions in front
@@ -218,7 +148,7 @@ trait ModelFactoryImplicitSupport {
* - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
* appears as a constraint
*/
- def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
if (result.tree == EmptyTree) Nil
else {
// `result` will contain the type of the view (= implicit conversion method)
@@ -276,11 +206,11 @@ trait ModelFactoryImplicitSupport {
}
}
- def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: => DocTemplateImpl): List[Constraint] =
+ def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] =
types.flatMap((tpe:Type) => {
// TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
val implType = typeVarToOriginOrWildcard(tpe)
- val qualifiedName = implType.typeSymbol.ownerChain.reverse.map(_.nameString).mkString(".")
+ val qualifiedName = makeQualifiedName(implType.typeSymbol)
var available: Option[Boolean] = None
@@ -319,7 +249,7 @@ trait ModelFactoryImplicitSupport {
available match {
case Some(true) =>
Nil
- case Some(false) if (!implicitsShowAll) =>
+ case Some(false) if (!settings.docImplicitsShowAll.value) =>
// if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
throw new ImplicitNotFound(implType)
case _ =>
@@ -333,26 +263,26 @@ trait ModelFactoryImplicitSupport {
case Some(explanation) =>
List(new KnownTypeClassConstraint {
val typeParamName = targ.nameString
- val typeExplanation = explanation
- val typeClassEntity = makeTemplate(sym)
- val implicitType: TypeEntity = makeType(implType, inTpl)
+ lazy val typeExplanation = explanation
+ lazy val typeClassEntity = makeTemplate(sym)
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
})
case None =>
List(new TypeClassConstraint {
val typeParamName = targ.nameString
- val typeClassEntity = makeTemplate(sym)
- val implicitType: TypeEntity = makeType(implType, inTpl)
+ lazy val typeClassEntity = makeTemplate(sym)
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
})
}
case _ =>
List(new ImplicitInScopeConstraint{
- val implicitType: TypeEntity = makeType(implType, inTpl)
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
})
}
}
})
- def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: => DocTemplateImpl): List[Constraint] =
+ def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] =
(subst.from zip subst.to) map {
case (from, to) =>
new EqualTypeParamConstraint {
@@ -362,7 +292,7 @@ trait ModelFactoryImplicitSupport {
}
}
- def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: => DocTemplateImpl): List[Constraint] =
+ def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] =
(tparams zip constrs) flatMap {
case (tparam, constr) => {
uniteConstraints(constr) match {
@@ -372,23 +302,23 @@ trait ModelFactoryImplicitSupport {
case (List(lo), List(up)) if (lo == up) =>
List(new EqualTypeParamConstraint {
val typeParamName = tparam.nameString
- val rhs = makeType(lo, inTpl)
+ lazy val rhs = makeType(lo, inTpl)
})
case (List(lo), List(up)) =>
List(new BoundedTypeParamConstraint {
val typeParamName = tparam.nameString
- val lowerBound = makeType(lo, inTpl)
- val upperBound = makeType(up, inTpl)
+ lazy val lowerBound = makeType(lo, inTpl)
+ lazy val upperBound = makeType(up, inTpl)
})
case (List(lo), Nil) =>
List(new LowerBoundedTypeParamConstraint {
val typeParamName = tparam.nameString
- val lowerBound = makeType(lo, inTpl)
+ lazy val lowerBound = makeType(lo, inTpl)
})
case (Nil, List(up)) =>
List(new UpperBoundedTypeParamConstraint {
val typeParamName = tparam.nameString
- val upperBound = makeType(up, inTpl)
+ lazy val upperBound = makeType(up, inTpl)
})
case other =>
// this is likely an error on the lub/glb side
@@ -399,6 +329,176 @@ trait ModelFactoryImplicitSupport {
}
}
+ def makeQualifiedName(sym: Symbol): String = {
+ val remove = Set[Symbol](RootPackage, RootClass, EmptyPackage, EmptyPackageClass)
+ sym.ownerChain.filterNot(remove.contains(_)).reverse.map(_.nameString).mkString(".")
+ }
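makeQualifiedName walks the owner chain, drops the synthetic root and empty packages, and joins the remaining names with dots. A plain-data illustration of the same filtering (the chain below is written innermost-first, mirroring ownerChain, and the sentinel names are made up for the example):

    object QualifiedNameSketch {
      val remove = Set("<root>", "<empty>")

      def makeQualifiedName(ownerChain: List[String]): String =
        ownerChain.filterNot(remove).reverse.mkString(".")

      def main(args: Array[String]): Unit = {
        // owner chain of scala.collection.List, innermost first, ending at the root package
        println(makeQualifiedName(List("List", "collection", "scala", "<root>"))) // scala.collection.List
      }
    }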
+
+ /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+ class ImplicitConversionImpl(
+ val sym: Symbol,
+ val convSym: Symbol,
+ val toType: Type,
+ val constrs: List[Constraint],
+ inTpl: DocTemplateImpl)
+ extends ImplicitConversion {
+
+ def source: DocTemplateEntity = inTpl
+
+ def targetType: TypeEntity = makeType(toType, inTpl)
+
+ def convertorOwner: TemplateEntity =
+ if (convSym != NoSymbol)
+ makeTemplate(convSym.owner)
+ else {
+ error("Scaladoc implicits: Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + " = NoSymbol!")
+ makeRootPackage
+ }
+
+ def targetTemplate: Option[TemplateEntity] = toType match {
+ // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
+ // such as refinement types because the template can't represent the type correctly (a template corresponds to a
+ // package, class, trait or object)
+ case t: TypeRef => Some(makeTemplate(t.sym))
+ case RefinedType(parents, decls) => None
+ case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
+ }
+
+ def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
+
+ def convertorMethod: Either[MemberEntity, String] = {
+ var convertor: MemberEntity = null
+
+ convertorOwner match {
+ case doc: DocTemplateImpl =>
+ val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
+ if (convertors.length == 1)
+ convertor = convertors.head
+ case _ =>
+ }
+ if (convertor ne null)
+ Left(convertor)
+ else
+ Right(convSym.nameString)
+ }
+
+ def conversionShortName = convSym.nameString
+
+ def conversionQualifiedName = makeQualifiedName(convSym)
+
+ lazy val constraints: List[Constraint] = constrs
+
+ lazy val memberImpls: List[MemberImpl] = {
+ // Obtain the members inherited by the implicit conversion
+ val memberSyms = toType.members.filter(implicitShouldDocument(_))
+ val existingSyms = sym.info.members
+
+ // Debugging part :)
+ debug(sym.nameString + "\n" + "=" * sym.nameString.length())
+ debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+
+ debug(" -> full type: " + toType)
+ if (constraints.length != 0) {
+ debug(" -> constraints: ")
+ constraints foreach { constr => debug(" - " + constr) }
+ }
+ debug(" -> members:")
+ memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
+ debug("")
+
+ memberSyms.flatMap({ aSym =>
+ makeTemplate(aSym.owner) match {
+ case d: DocTemplateImpl =>
+ // we can't just pick up nodes from the previous template, although that would be very convenient:
+ // they need the byConversion field to be attached to themselves -- this is a design decision I should
+ // revisit soon
+ //
+ // d.ownMembers.collect({
+ // // it's either a member or has a couple of usecases it's hidden behind
+ // case m: MemberImpl if m.sym == aSym =>
+ // m // the member itself
+ // case m: MemberImpl if m.useCaseOf.isDefined && m.useCaseOf.get.asInstanceOf[MemberImpl].sym == aSym =>
+ // m.useCaseOf.get.asInstanceOf[MemberImpl] // the usecase
+ // })
+ makeMember(aSym, this, d)
+ case _ =>
+ // should only happen if the code for this template is not part of the scaladoc run =>
+ // members won't have any comments
+ makeMember(aSym, this, inTpl)
+ }
+ })
+ }
+
+ lazy val members: List[MemberEntity] = memberImpls
+ }
+
+ /* ========================= HELPER METHODS ========================== */
+ /**
+ * Computes the shadowing table for all the members in the implicit conversions
+ * @param mbrs All template's members, including usecases and full signature members
+ * @param convs All the conversions the template takes part in
+ * @param inTpl the template the members and conversions belong to
+ */
+ def makeShadowingTable(mbrs: List[MemberImpl],
+ convs: List[ImplicitConversionImpl],
+ inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
+ assert(modelFinished)
+
+ var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
+
+ for (conv <- convs) {
+ val otherConvs = convs.filterNot(_ == conv)
+
+ for (member <- conv.memberImpls) {
+ // for each member in our list
+ val sym1 = member.sym
+ val tpe1 = conv.toType.memberInfo(sym1)
+
+ // check if it's shadowed by a member in the original class
+ var shadowedBySyms: List[Symbol] = List()
+ for (mbr <- mbrs) {
+ val sym2 = mbr.sym
+ if (sym1.name == sym2.name) {
+ val shadowed = !settings.docImplicitsSoundShadowing.value || {
+ val tpe2 = inTpl.sym.info.memberInfo(sym2)
+ !isDistinguishableFrom(tpe1, tpe2)
+ }
+ if (shadowed)
+ shadowedBySyms ::= sym2
+ }
+ }
+
+ val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
+
+ // check if it's shadowed by another member
+ var ambiguousByMembers: List[MemberEntity] = List()
+ for (conv <- otherConvs)
+ for (member2 <- conv.memberImpls) {
+ val sym2 = member2.sym
+ if (sym1.name == sym2.name) {
+ val tpe2 = conv.toType.memberInfo(sym2)
+ // Ambiguity should be an equivalence relation
+ val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
+ if (ambiguated)
+ ambiguousByMembers ::= member2
+ }
+ }
+
+ // we finally have the shadowing info
+ val shadowing = new ImplicitMemberShadowing {
+ def shadowingMembers: List[MemberEntity] = shadowedByMembers
+ def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
+ }
+
+ shadowingTable += (member -> shadowing)
+ }
+ }
+
+ shadowingTable
+ }
+
+
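makeShadowingTable above pairs every member contributed by an implicit conversion with the original members that shadow it and with same-named members coming from the other conversions (ambiguity). A simplified, self-contained sketch of the same bookkeeping over plain strings (member names stand in for symbols, and the signature check is reduced to name equality; all names here are illustrative):

    object ShadowingSketch {
      final case class Shadowing(shadowedBy: List[String], ambiguousWith: List[String])

      def shadowingTable(originalMembers: Set[String],
                         conversions: Map[String, List[String]]): Map[String, Shadowing] = {
        val table = for {
          (conv, members) <- conversions.toList
          member          <- members
        } yield {
          // shadowed by a member of the original class with the same name
          val shadowedBy = originalMembers.filter(_ == member).toList
          // ambiguous with a same-named member coming from another conversion
          val ambiguous = for {
            (otherConv, otherMembers) <- conversions.toList if otherConv != conv
            other                     <- otherMembers if other == member
          } yield s"$otherConv.$other"
          s"$conv.$member" -> Shadowing(shadowedBy, ambiguous)
        }
        table.toMap
      }

      def main(args: Array[String]): Unit = {
        val table = shadowingTable(
          originalMembers = Set("length", "head"),
          conversions = Map("toOps" -> List("length", "mkString"), "toRich" -> List("mkString")))
        table.toSeq.sortBy(_._1).foreach(println)
      }
    }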
/**
* uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
*
@@ -493,8 +593,8 @@ trait ModelFactoryImplicitSupport {
// - common methods (in Any, AnyRef, Object) as they are automatically removed
// - private and protected members (not accessible following an implicit conversion)
// - members starting with _ (usually reserved for internal stuff)
- localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != ObjectClass) &&
- (aSym.owner != AnyClass) && (aSym.owner != AnyRefClass) &&
+ localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
+ (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
(!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
(aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
(aSym.nameString != "getClass")
@@ -506,15 +606,18 @@ trait ModelFactoryImplicitSupport {
* The trick here is that the resultType does not matter - the condition for removal is that the paramss have the same
* structure (A => B => C may not override (A, B) => C) and that all the parameter types of the
* implicit conversion's member are subtypes of the corresponding parameter types of the parent member */
- def isDistinguishableFrom(t1: Type, t2: Type): Boolean =
+ def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
+ // Vlad: I tried using matches but it's not exactly what we need:
+ // (p: AnyRef)AnyRef matches (t: String)AnyRef returns false -- but we want that to be true
+ // !(t1 matches t2)
if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
- if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
- return true // if on the corresponding parameter you give a type that is in t1 but not in t2
- // example:
- // def foo(a: Either[Int, Double]): Int = 3
- // def foo(b: Left[T1]): Int = 6
- // a.foo(Right(4.5d)) prints out 3 :)
+ if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
+ return true // if on the corresponding parameter you give a type that is in t1 but not in t2
+ // def foo(a: Either[Int, Double]): Int = 3
+ // def foo(b: Left[T1]): Int = 6
+ // a.foo(Right(4.5d)) prints out 3 :)
false
} else true // the member structure is different foo(3, 5) vs foo(3)(5)
+ }
} \ No newline at end of file
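isDistinguishableFrom treats two signatures as distinct when either their parameter-list shapes differ or some corresponding parameter of the first accepts a value the second does not. A standalone sketch of that rule over a toy subtyping relation (the subtype table is made up for the example; result types are ignored on purpose, as in the method above):

    object DistinguishableSketch {
      // toy subtype relation: (a, b) means "a is a subtype of b"
      val subtypes: Set[(String, String)] =
        Set(("Left", "Either"), ("Right", "Either"), ("Left", "Left"), ("Right", "Right"), ("Either", "Either"))

      def isSubType(a: String, b: String): Boolean = subtypes((a, b))

      /** A signature is just its parameter lists. */
      def isDistinguishableFrom(t1: List[List[String]], t2: List[List[String]]): Boolean =
        if (t1.map(_.length) == t2.map(_.length)) {
          // same shape: distinguishable only if some t1 parameter is NOT a subtype of the matching t2 parameter
          (t1.flatten zip t2.flatten).exists { case (p1, p2) => !isSubType(p1, p2) }
        } else true // different shapes, e.g. foo(3, 5) vs foo(3)(5)

      def main(args: Array[String]): Unit = {
        println(isDistinguishableFrom(List(List("Either")), List(List("Left")))) // true: Either also accepts Right(...)
        println(isDistinguishableFrom(List(List("Left")), List(List("Either")))) // false: everything Left takes, Either takes
        println(isDistinguishableFrom(List(List("Left", "Left")), List(List("Left"), List("Left")))) // true: shapes differ
      }
    }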
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
index fe586c4996..bd7534ded4 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -52,7 +52,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
if (asym.isSetter) asym = asym.getter(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
- val mbrs: List[MemberImpl] = makeMember(asym, null, docTmpl)
+ val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
mbrs foreach { mbr => refs += ((start, (mbr,end))) }
case _ =>
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
index ef4047cebf..ecc3273903 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
@@ -67,8 +67,8 @@ final case class Bold(text: Inline) extends Inline
final case class Underline(text: Inline) extends Inline
final case class Superscript(text: Inline) extends Inline
final case class Subscript(text: Inline) extends Inline
+final case class EntityLink(target: String, template: () => Option[TemplateEntity]) extends Inline
final case class Link(target: String, title: Inline) extends Inline
-final case class EntityLink(target: TemplateEntity) extends Inline
final case class Monospace(text: Inline) extends Inline
final case class Text(text: String) extends Inline
final case class HtmlTag(data: String) extends Inline {
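The EntityLink change above stores a thunk instead of a resolved TemplateEntity, so link targets are looked up only when the comment is rendered, after the model is complete. A minimal standalone illustration of the same shape (TemplateEntity is reduced to a name here, and the lookup closure is a stand-in for findTemplate):

    object LazyLinkSketch {
      final case class TemplateEntity(qualifiedName: String)
      // mirrors the new signature: the target is kept as text, resolution is deferred
      final case class EntityLink(target: String, template: () => Option[TemplateEntity])

      def main(args: Array[String]): Unit = {
        var model: Map[String, TemplateEntity] = Map.empty

        // the link is created while the model is still being built ...
        val link = EntityLink("scala.Option", () => model.get("scala.Option"))

        // ... and only resolved once the model is finished
        model = Map("scala.Option" -> TemplateEntity("scala.Option"))
        println(link.template().map(_.qualifiedName).getOrElse(link.target))
      }
    }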
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
index 914275dd8d..7b70683db5 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -108,6 +108,12 @@ abstract class Comment {
/** A description for the primary constructor */
def constructor: Option[Body]
+ /** A set of diagram directives for the inheritance diagram */
+ def inheritDiagram: List[String]
+
+ /** A set of diagram directives for the content diagram */
+ def contentDiagram: List[String]
+
override def toString =
body.toString + "\n" +
(authors map ("@author " + _.toString)).mkString("\n") +
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 996223b9f9..2099315cc6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -30,12 +30,12 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment]
- def addCommentBody(sym: global.Symbol, inTpl: => TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = {
+ def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = {
commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos)
sym
}
- def comment(sym: global.Symbol, inTpl: => DocTemplateImpl): Option[Comment] = {
+ def comment(sym: global.Symbol, inTpl: DocTemplateImpl): Option[Comment] = {
val key = (sym, inTpl)
if (commentCache isDefinedAt key)
Some(commentCache(key))
@@ -50,7 +50,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
* cases we have to give some `inTpl` comments (parent class for example)
* to the comment of the symbol.
* This function manages some of those cases : Param accessor and Primary constructor */
- def defineComment(sym: global.Symbol, inTpl: => DocTemplateImpl):Option[Comment] = {
+ def defineComment(sym: global.Symbol, inTpl: DocTemplateImpl):Option[Comment] = {
//param accessor case
// We just need the @param argument, we put it into the body
@@ -97,37 +97,41 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
/* Creates comments with necessary arguments */
def createComment (
- body0: Option[Body] = None,
- authors0: List[Body] = List.empty,
- see0: List[Body] = List.empty,
- result0: Option[Body] = None,
- throws0: Map[String,Body] = Map.empty,
- valueParams0: Map[String,Body] = Map.empty,
- typeParams0: Map[String,Body] = Map.empty,
- version0: Option[Body] = None,
- since0: Option[Body] = None,
- todo0: List[Body] = List.empty,
- deprecated0: Option[Body] = None,
- note0: List[Body] = List.empty,
- example0: List[Body] = List.empty,
- constructor0: Option[Body] = None,
- source0: Option[String] = None
+ body0: Option[Body] = None,
+ authors0: List[Body] = List.empty,
+ see0: List[Body] = List.empty,
+ result0: Option[Body] = None,
+ throws0: Map[String,Body] = Map.empty,
+ valueParams0: Map[String,Body] = Map.empty,
+ typeParams0: Map[String,Body] = Map.empty,
+ version0: Option[Body] = None,
+ since0: Option[Body] = None,
+ todo0: List[Body] = List.empty,
+ deprecated0: Option[Body] = None,
+ note0: List[Body] = List.empty,
+ example0: List[Body] = List.empty,
+ constructor0: Option[Body] = None,
+ source0: Option[String] = None,
+ inheritDiagram0: List[String] = List.empty,
+ contentDiagram0: List[String] = List.empty
) : Comment = new Comment{
- val body = if(body0 isDefined) body0.get else Body(Seq.empty)
- val authors = authors0
- val see = see0
- val result = result0
- val throws = throws0
- val valueParams = valueParams0
- val typeParams = typeParams0
- val version = version0
- val since = since0
- val todo = todo0
- val deprecated = deprecated0
- val note = note0
- val example = example0
- val constructor = constructor0
- val source = source0
+ val body = if(body0 isDefined) body0.get else Body(Seq.empty)
+ val authors = authors0
+ val see = see0
+ val result = result0
+ val throws = throws0
+ val valueParams = valueParams0
+ val typeParams = typeParams0
+ val version = version0
+ val since = since0
+ val todo = todo0
+ val deprecated = deprecated0
+ val note = note0
+ val example = example0
+ val constructor = constructor0
+ val source = source0
+ val inheritDiagram = inheritDiagram0
+ val contentDiagram = contentDiagram0
}
protected val endOfText = '\u0003'
@@ -186,6 +190,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
protected val safeTagMarker = '\u000E'
+ /** A Scaladoc tag not linked to a symbol and not followed by text */
+ protected val SingleTag =
+ new Regex("""\s*@(\S+)\s*""")
+
/** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
protected val SimpleTag =
new Regex("""\s*@(\S+)\s+(.*)""")
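The new SingleTag regex matches a tag with no trailing text (such as a bare @contentDiagram line), while the existing SimpleTag still requires text after the tag name. A quick standalone check of the two patterns exactly as defined above (the classify helper is only for the demonstration):

    import scala.util.matching.Regex

    object TagRegexSketch {
      val SingleTag = new Regex("""\s*@(\S+)\s*""")
      val SimpleTag = new Regex("""\s*@(\S+)\s+(.*)""")

      def classify(line: String): String = line match {
        case SimpleTag(name, rest) => s"simple tag @$name with text '$rest'"
        case SingleTag(name)       => s"single tag @$name with no text"
        case _                     => "not a tag line"
      }

      def main(args: Array[String]): Unit = {
        println(classify("@author Jane Doe")) // simple tag
        println(classify("@contentDiagram"))  // single tag
        println(classify("just prose"))       // not a tag
      }
    }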
@@ -306,6 +314,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ case SingleTag(name) :: ls if (!inCodeBlock) =>
+ val key = SimpleTagKey(name)
+ val value = "" :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
case line :: ls if (lastTagKey.isDefined) =>
val key = lastTagKey.get
val value =
@@ -321,9 +334,24 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
case Nil =>
+ // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
+ val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
+ val contentDiagramTag = SimpleTagKey("contentDiagram")
+
+ val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val tagsWithoutDiagram = tags.filterNot(pair => pair._1 == inheritDiagramTag || pair._1 == contentDiagramTag)
val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tags mapValues {tag => tag map (parseWiki(_, pos))} toSeq: _*)
+ mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWiki(_, pos))} toSeq: _*)
def oneTag(key: SimpleTagKey): Option[Body] =
((bodyTags remove key): @unchecked) match {
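The Nil case above pulls the inheritanceDiagram and contentDiagram tags out of the tag map before the remaining tags go through wiki parsing, since diagram directives are interpreted separately. A small self-contained sketch of that split on a plain Map (keys and sample values are illustrative):

    object DiagramTagSplitSketch {
      def splitDiagramTags(tags: Map[String, List[String]])
          : (List[String], List[String], Map[String, List[String]]) = {
        val inherit = tags.getOrElse("inheritanceDiagram", Nil)
        val content = tags.getOrElse("contentDiagram", Nil)
        val rest    = tags -- List("inheritanceDiagram", "contentDiagram")
        (inherit, content, rest)
      }

      def main(args: Array[String]): Unit = {
        val tags = Map(
          "author"             -> List("Jane Doe"),
          "contentDiagram"     -> List("hideNodes \"scala.*\""),
          "inheritanceDiagram" -> List("hideSubclasses"))
        val (inherit, content, rest) = splitDiagramTags(tags)
        println(s"inherit=$inherit content=$content rest=${rest.keySet}")
      }
    }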
@@ -356,21 +384,23 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
val com = createComment (
- body0 = Some(parseWiki(docBody.toString, pos)),
- authors0 = allTags(SimpleTagKey("author")),
- see0 = allTags(SimpleTagKey("see")),
- result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
- valueParams0 = allSymsOneTag(SimpleTagKey("param")),
- typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
- version0 = oneTag(SimpleTagKey("version")),
- since0 = oneTag(SimpleTagKey("since")),
- todo0 = allTags(SimpleTagKey("todo")),
- deprecated0 = oneTag(SimpleTagKey("deprecated")),
- note0 = allTags(SimpleTagKey("note")),
- example0 = allTags(SimpleTagKey("example")),
- constructor0 = oneTag(SimpleTagKey("constructor")),
- source0 = Some(clean(src).mkString("\n"))
+ body0 = Some(parseWiki(docBody.toString, pos)),
+ authors0 = allTags(SimpleTagKey("author")),
+ see0 = allTags(SimpleTagKey("see")),
+ result0 = oneTag(SimpleTagKey("return")),
+ throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ valueParams0 = allSymsOneTag(SimpleTagKey("param")),
+ typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
+ version0 = oneTag(SimpleTagKey("version")),
+ since0 = oneTag(SimpleTagKey("since")),
+ todo0 = allTags(SimpleTagKey("todo")),
+ deprecated0 = oneTag(SimpleTagKey("deprecated")),
+ note0 = allTags(SimpleTagKey("note")),
+ example0 = allTags(SimpleTagKey("example")),
+ constructor0 = oneTag(SimpleTagKey("constructor")),
+ source0 = Some(clean(src).mkString("\n")),
+ inheritDiagram0 = inheritDiagramText,
+ contentDiagram0 = contentDiagramText
)
for ((key, _) <- bodyTags)
@@ -686,13 +716,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
)
}
- def entityLink(query: String): Inline = findTemplate(query) match {
- case Some(tpl) =>
- EntityLink(tpl)
- case None =>
- Text(query)
- }
-
def link(): Inline = {
val SchemeUri = """([^:]+:.*)""".r
jump("[[")
@@ -717,7 +740,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
if (!qualName.contains(".") && !definitions.packageExists(qualName))
reportError(pos, "entity link to " + qualName + " should be a fully qualified name")
- entityLink(qualName)
+ // move the template resolution as late as possible
+ EntityLink(qualName, () => findTemplate(qualName))
}
}
@@ -733,8 +757,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
nextChar()
}
- /**
- * Eliminates the (common) leading spaces in all lines, based on the first line
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line
* For indented pieces of code, it reduces the indent to the least whitespace prefix:
* {{{
* indented example
@@ -757,11 +781,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
while (index < code.length) {
code(index) match {
case ' ' =>
- if (wsArea)
+ if (wsArea)
crtSkip += 1
case c =>
wsArea = (c == '\n')
- maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+ maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
crtSkip = if (c == '\n') 0 else crtSkip
firstLine = if (c == '\n') false else firstLine
emptyLine = if (c == '\n') true else false
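The clean-up pass above computes the smallest indentation over the non-empty lines after the first and strips that many leading spaces, so indented code examples keep only their relative indentation. A compact standalone version of the same idea (splitting into lines here is simplified compared to the character-by-character loop in the factory):

    object IndentStripSketch {
      /** Drop the common leading spaces of every line after the first, keeping relative indentation. */
      def stripCommonIndent(code: String): String = {
        val lines = code.linesIterator.toList
        lines match {
          case first :: rest =>
            val indents = rest.filter(_.trim.nonEmpty).map(_.takeWhile(_ == ' ').length)
            val maxSkip = if (indents.isEmpty) 0 else indents.min
            (first :: rest.map(_.drop(maxSkip))).mkString("\n")
          case Nil => code
        }
      }

      def main(args: Array[String]): Unit = {
        val example = "val x = {\n      indented example\n        still indented\n    }"
        println(stripCommonIndent(example))
      }
    }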
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
new file mode 100644
index 0000000000..8527ca4039
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -0,0 +1,144 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+
+/**
+ * The diagram base classes
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+abstract class Diagram {
+ def nodes: List[Node]
+ def edges: List[(Node, List[Node])]
+ def isPackageDiagram = false
+ def isClassDiagram = false
+ def depthInfo: DepthInfo
+}
+
+case class PackageDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
+ override def isPackageDiagram = true
+ lazy val depthInfo = new PackageDiagramDepth(this)
+}
+
+/** A class diagram */
+case class ClassDiagram(thisNode: ThisNode,
+ superClasses: List[/*Class*/Node],
+ subClasses: List[/*Class*/Node],
+ incomingImplicits: List[ImplicitNode],
+ outgoingImplicits: List[ImplicitNode]) extends Diagram {
+ def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
+ def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
+ (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
+
+ override def isClassDiagram = true
+ lazy val depthInfo = new DepthInfo {
+ def maxDepth = 3
+ def nodeDepth(node: Node) =
+ if (node == thisNode) 1
+ else if (superClasses.contains(node)) 0
+ else if (subClasses.contains(node)) 2
+ else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
+ else -1
+ }
+}
+
+trait DepthInfo {
+ /** Gives the maximum depth */
+ def maxDepth: Int
+ /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
+ def nodeDepth(node: Node): Int
+}
+
+abstract class Node {
+ def name = tpe.name
+ def tpe: TypeEntity
+ def tpl: Option[TemplateEntity]
+ /** shortcut to get a DocTemplateEntity */
+ def doctpl: Option[DocTemplateEntity] = tpl match {
+ case Some(tpl) => tpl match {
+ case d: DocTemplateEntity => Some(d)
+ case _ => None
+ }
+ case _ => None
+ }
+ /* shortcuts to find the node type without matching */
+ def isThisNode = false
+ def isNormalNode = false
+ def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
+ def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
+ def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false
+ def isOtherNode = !(isClassNode || isTraitNode || isObjectNode)
+ def isImplicitNode = false
+ def isOutsideNode = false
+ def tooltip: Option[String]
+}
+
+// different matchers, allowing you to use the pattern matcher against any node
+// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
+// case class specification -- thus a complete match would be:
+// node match {
+// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */
+// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
+// case _ => node match {
+// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */
+// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */
+// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */
+// }
+// }
+object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
+object ClassNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None }
+object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None }
+object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None }
+object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
+object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None }
+
+
+
+/** The node for the current class */
+case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity], tooltip: Option[String] = None) extends Node { override def isThisNode = true }
+
+/** The usual node */
+case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity], tooltip: Option[String] = None) extends Node { override def isNormalNode = true }
+
+/** A class or trait the thisnode can be converted to by an implicit conversion
+ * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
+ * since some implicit conversions convert the class to complex types that cannot be represented as a single template
+ */
+case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity], tooltip: Option[String] = None) extends Node { override def isImplicitNode = true }
+
+/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
+ * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */
+case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity], tooltip: Option[String] = None) extends Node { override def isOutsideNode = true }
+
+
+// Computing and offering node depth information
+class PackageDiagramDepth(pack: PackageDiagram) extends DepthInfo {
+ private[this] var _maxDepth = 0
+ private[this] var _nodeDepth = Map[Node, Int]()
+ private[this] var seedNodes = Set[Node]()
+ private[this] val invertedEdges: Map[Node, List[Node]] =
+ pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
+ private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
+
+ // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
+ seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
+
+ while (!seedNodes.isEmpty) {
+ var newSeedNodes = Set[Node]()
+ for (node <- seedNodes) {
+ val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
+ if (depth != _nodeDepth.getOrElse(node, -1)) {
+ _nodeDepth += (node -> depth)
+ newSeedNodes ++= invertedEdges(node)
+ if (depth > _maxDepth) _maxDepth = depth
+ }
+ }
+ seedNodes = newSeedNodes
+ }
+
+ val maxDepth = _maxDepth
+ def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
+} \ No newline at end of file
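PackageDiagramDepth above assigns each node a depth of one plus the maximum depth of the nodes it points to, starting from the nodes with no outgoing edges and re-processing, via the inverted edges, whoever points at an updated node until a fixed point is reached. The same worklist computation on a plain Map of edges (node names stand in for diagram nodes; the example graph is made up):

    object DepthSketch {
      /** depth(n) = 1 + max depth of n's direct targets; nodes with no targets get depth 0. */
      def depths(edges: Map[String, List[String]], nodes: Set[String]): Map[String, Int] = {
        val direct   = edges.withDefaultValue(Nil)
        val inverted = edges.toList
          .flatMap { case (from, tos) => tos.map(_ -> from) }
          .groupBy(_._1).map { case (k, v) => k -> v.map(_._2) }
          .withDefaultValue(Nil)

        var depth = Map.empty[String, Int]
        var seeds = nodes.filter(direct(_).isEmpty) // base nodes: nothing to point to
        while (seeds.nonEmpty) {
          var next = Set.empty[String]
          for (node <- seeds) {
            val d = 1 + (-1 :: direct(node).map(depth.getOrElse(_, -1))).max
            if (depth.getOrElse(node, -1) != d) {
              depth += node -> d
              next ++= inverted(node) // re-process whoever points at this node
            }
          }
          seeds = next
        }
        depth
      }

      def main(args: Array[String]): Unit = {
        val nodes = Set("Any", "AnyRef", "String", "List")
        val edges = Map("AnyRef" -> List("Any"), "String" -> List("AnyRef"), "List" -> List("AnyRef"))
        println(depths(edges, nodes)) // Any -> 0, AnyRef -> 1, String -> 2, List -> 2
      }
    }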
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
new file mode 100644
index 0000000000..49cfaffc2e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -0,0 +1,262 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+import comment.CommentFactory
+import java.util.regex.{Pattern, Matcher}
+import scala.util.matching.Regex
+
+// statistics
+import html.page.diagram.DiagramStats
+
+/**
+ * This trait takes care of parsing @{inheritance, content}Diagram annotations
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+trait DiagramDirectiveParser {
+ this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
+
+ import this.global.definitions.AnyRefClass
+
+ ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
+
+ /**
+ * The DiagramFilter trait tells the diagram engine how the diagram should be displayed
+ *
+ * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from
+ * all diagrams at once. So why did I choose to allow you to only control the diagrams at class level? So, the
+ * reason is you would break the separate scaladoc compilation:
+ * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B,
+ * A will not appear in B's diagram. But if you run scaladoc only on B, A's comment will not be parsed and the
+ * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the
+ * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
+ * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
+ * file, could we? (Turns out we could, but that's another story)
+ *
+ * Any flaming for this decision should go to scala-internals@googlegroups.com
+ */
+ trait DiagramFilter {
+ /** A flag to hide the diagram completely */
+ def hideDiagram: Boolean
+ /** Hide incoming implicit conversions (for type hierarchy diagrams) */
+ def hideIncomingImplicits: Boolean
+ /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
+ def hideOutgoingImplicits: Boolean
+ /** Hide superclasses (for type hierarchy diagrams) */
+ def hideSuperclasses: Boolean
+ /** Hide subclasses (for type hierarchy diagrams) */
+ def hideSubclasses: Boolean
+ /** Hide related classes from other objects/traits/packages (for content diagrams) */
+ def hideInheritedNodes: Boolean
+ /** Hide a node from the diagram */
+ def hideNode(clazz: Node): Boolean
+ /** Hide an edge from the diagram */
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean
+ }
+
+ /** Main entry point into this trait: generate the filter for inheritance diagrams */
+ def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+
+ val defaultFilter =
+ if (template.isClass || template.isTrait || template.sym == AnyRefClass)
+ FullDiagram
+ else
+ NoDiagramAtAll
+
+ if (template.comment.isDefined)
+ makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
+ else
+ defaultFilter
+ }
+
+ /** Main entry point into this trait: generate the filter for content diagrams */
+ def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+ val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
+ if (template.comment.isDefined)
+ makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
+ else
+ defaultFilter
+ }
+
+ protected var tFilter = 0l
+ protected var tModel = 0l
+
+ /** Show the entire diagram, no filtering */
+ case object FullDiagram extends DiagramFilter {
+ val hideDiagram: Boolean = false
+ val hideIncomingImplicits: Boolean = false
+ val hideOutgoingImplicits: Boolean = false
+ val hideSuperclasses: Boolean = false
+ val hideSubclasses: Boolean = false
+ val hideInheritedNodes: Boolean = false
+ def hideNode(clazz: Node): Boolean = false
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = false
+ }
+
+ /** Hide the diagram completely, no need for special filtering */
+ case object NoDiagramAtAll extends DiagramFilter {
+ val hideDiagram: Boolean = true
+ val hideIncomingImplicits: Boolean = true
+ val hideOutgoingImplicits: Boolean = true
+ val hideSuperclasses: Boolean = true
+ val hideSubclasses: Boolean = true
+ val hideInheritedNodes: Boolean = true
+ def hideNode(clazz: Node): Boolean = true
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = true
+ }
+
+ /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation
+ * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */
+ case class AnnotationDiagramFilter(hideDiagram: Boolean,
+ hideIncomingImplicits: Boolean,
+ hideOutgoingImplicits: Boolean,
+ hideSuperclasses: Boolean,
+ hideSubclasses: Boolean,
+ hideInheritedNodes: Boolean,
+ hideNodesFilter: List[Pattern],
+ hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
+
+ private[this] def getName(n: Node): String =
+ if (n.tpl.isDefined)
+ n.tpl.get.qualifiedName
+ else
+ n.name
+
+ def hideNode(clazz: Node): Boolean = {
+ val qualifiedName = getName(clazz)
+ for (hideFilter <- hideNodesFilter)
+ if (hideFilter.matcher(qualifiedName).matches) {
+ // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
+ return true
+ }
+ false
+ }
+
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = {
+ val clazz1Name = getName(clazz1)
+ val clazz2Name = getName(clazz2)
+ for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
+ if (clazz1Filter.matcher(clazz1Name).matches &&
+ clazz2Filter.matcher(clazz2Name).matches) {
+ // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
+ // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
+ return true
+ }
+ }
+ false
+ }
+ }
+
+ // TODO: This could certainly be improved -- right now the only wildcard is *, but there's no way to match a single identifier
+ private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
+ private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
+ private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
+ private val EdgeSpecPattern = Pattern.compile(EdgeSpecRegex)
+ // And the composed regexes:
+ private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
+ private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
+
+ private def makeDiagramFilter(template: DocTemplateImpl,
+ directives: List[String],
+ defaultFilter: DiagramFilter,
+ isInheritanceDiagram: Boolean): DiagramFilter = directives match {
+
+ // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
+ case Nil =>
+ defaultFilter
+
+ // compute the exact filters. By including the annotation, the diagram is automatically added
+ case _ =>
+ tFilter -= System.currentTimeMillis
+ var hideDiagram0: Boolean = false
+ var hideIncomingImplicits0: Boolean = false
+ var hideOutgoingImplicits0: Boolean = false
+ var hideSuperclasses0: Boolean = false
+ var hideSubclasses0: Boolean = false
+ var hideInheritedNodes0: Boolean = false
+ var hideNodesFilter0: List[Pattern] = Nil
+ var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
+
+ def warning(message: String) = {
+ // we need the position from the package object (well, ideally its comment, but yeah ...)
+ val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+ assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
+ global.reporter.warning(sym.pos, message)
+ }
+
+ def preparePattern(className: String) =
+ "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
+
+ // separate entries:
+ val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
+ for (entry <- entries)
+ entry match {
+ case "hideDiagram" =>
+ hideDiagram0 = true
+ case "hideIncomingImplicits" if isInheritanceDiagram =>
+ hideIncomingImplicits0 = true
+ case "hideOutgoingImplicits" if isInheritanceDiagram =>
+ hideOutgoingImplicits0 = true
+ case "hideSuperclasses" if isInheritanceDiagram =>
+ hideSuperclasses0 = true
+ case "hideSubclasses" if isInheritanceDiagram =>
+ hideSubclasses0 = true
+ case "hideInheritedNodes" if !isInheritanceDiagram =>
+ hideInheritedNodes0 = true
+ case HideNodesRegex(last) =>
+ val matcher = NodeSpecPattern.matcher(entry)
+ while (matcher.find()) {
+ val classPattern = Pattern.compile(preparePattern(matcher.group()))
+ hideNodesFilter0 ::= classPattern
+ }
+ case HideEdgesRegex(last) =>
+ val matcher = NodeSpecPattern.matcher(entry)
+ while (matcher.find()) {
+ val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
+ assert(matcher.find()) // it's got to be there, just matched it!
+ val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
+ hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
+ }
+ case "" =>
+ // don't need to do anything about it
+ case _ =>
+ warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName +
+ ": unmatched entry \"" + entry + "\".\n" +
+ " This could be because:\n" +
+ " - you forgot to separate entries by commas\n" +
+ " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
+ " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
+ }
+ val result =
+ if (hideDiagram0)
+ NoDiagramAtAll
+ else if ((hideNodesFilter0.isEmpty) &&
+ (hideEdgesFilter0.isEmpty) &&
+ (hideIncomingImplicits0 == false) &&
+ (hideOutgoingImplicits0 == false) &&
+ (hideSuperclasses0 == false) &&
+ (hideSubclasses0 == false) &&
+ (hideInheritedNodes0 == false) &&
+ (hideDiagram0 == false))
+ FullDiagram
+ else
+ AnnotationDiagramFilter(
+ hideDiagram = hideDiagram0,
+ hideIncomingImplicits = hideIncomingImplicits0,
+ hideOutgoingImplicits = hideOutgoingImplicits0,
+ hideSuperclasses = hideSuperclasses0,
+ hideSubclasses = hideSubclasses0,
+ hideInheritedNodes = hideInheritedNodes0,
+ hideNodesFilter = hideNodesFilter0,
+ hideEdgesFilter = hideEdgesFilter0)
+
+ if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
+ settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
+ tFilter += System.currentTimeMillis
+
+ result
+ }
+} \ No newline at end of file
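For reference, the directive-pattern handling above can be exercised in isolation. A minimal standalone sketch (only the string transformation is taken from the code above; the enclosing object and main method are illustrative):

  import java.util.regex.Pattern

  object PreparePatternSketch {
    // Same transformation as preparePattern above: strip surrounding quotes,
    // escape dots, turn '*' into '.*', and anchor the whole pattern.
    def preparePattern(className: String): String = {
      val body = className.stripPrefix("\"").stripSuffix("\"")
        .replaceAll("\\.", "\\\\.")
        .replaceAll("\\*", ".*")
      "^" + body + "$"
    }

    def main(args: Array[String]): Unit = {
      val p = Pattern.compile(preparePattern("\"scala.collection.*\""))
      println(p.matcher("scala.collection.immutable.List").matches()) // true
      println(p.matcher("scala.Option").matches())                    // false
    }
  }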
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
new file mode 100644
index 0000000000..1a8ad193aa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -0,0 +1,258 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+import comment.CommentFactory
+import collection.mutable
+
+// statistics
+import html.page.diagram.DiagramStats
+
+import scala.collection.immutable.SortedMap
+
+/**
+ * This trait takes care of generating the diagram for classes and packages
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+trait DiagramFactory extends DiagramDirectiveParser {
+ this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
+
+ import this.global.definitions._
+ import this.global._
+
+ // the following can be used for hardcoding different relations into the diagram, for bootstrapping purposes
+ lazy val AnyNode = normalNode(AnyClass)
+ lazy val AnyRefNode = normalNode(AnyRefClass)
+ lazy val AnyValNode = normalNode(AnyValClass)
+ lazy val NullNode = normalNode(NullClass)
+ lazy val NothingNode = normalNode(NothingClass)
+ def normalNode(sym: Symbol) =
+ NormalNode(makeTemplate(sym).ownType, Some(makeTemplate(sym)))
+ def aggregationNode(text: String) =
+ NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (TemplateEntity, Int)]() }, None)
+
+ /** Create the inheritance diagram for this template */
+ def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
+
+ tFilter = 0
+ tModel = -System.currentTimeMillis
+
+ // the diagram filter
+ val diagramFilter = makeInheritanceDiagramFilter(tpl)
+
+ def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) =
+ Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method "
+ + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName)
+
+ val result =
+ if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ // the main node
+ val thisNode = ThisNode(tpl.ownType, Some(tpl), Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
+
+ // superclasses
+ var superclasses: List[Node] =
+ tpl.parentTypes.collect {
+ case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))
+ }.reverse
+
+ // incoming implicit conversions
+ lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
+ case (incomingTpl, conv) =>
+ ImplicitNode(incomingTpl.ownType, Some(incomingTpl), implicitTooltip(from=incomingTpl, to=tpl, conv=conv))
+ }
+
+ // subclasses
+ var subclasses: List[Node] =
+ tpl.directSubClasses.flatMap {
+ case d: TemplateEntity if !classExcluded(d) => List(NormalNode(d.ownType, Some(d)))
+ case _ => Nil
+ }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
+
+ // outgoing implicit conversions
+ lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
+ case (outgoingTpl, outgoingType, conv) =>
+ ImplicitNode(outgoingType, Some(outgoingTpl), implicitTooltip(from=tpl, to=tpl, conv=conv))
+ }
+
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+ // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges
+ // The implementation would need to add the annotations and the logic to select nodes (or create new ones)
+ // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days
+ // at most) and it would be a great add to the diagrams.
+ if (tpl.sym == AnyRefClass)
+ subclasses = List(aggregationNode("All user-defined classes and traits"))
+
+ val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
+ val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
+ val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
+ val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes
+
+ // final diagram filter
+ filterDiagram(ClassDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
+ }
+
+ tModel += System.currentTimeMillis
+ DiagramStats.addFilterTime(tFilter)
+ DiagramStats.addModelTime(tModel-tFilter)
+
+ result
+ }
+
+ /** Create the content diagram for this template */
+ def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
+
+ tFilter = 0
+ tModel = -System.currentTimeMillis
+
+ // the diagram filter
+ val diagramFilter = makeContentDiagramFilter(pack)
+
+ val result =
+ if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ var mapNodes = Map[TemplateEntity, Node]()
+ var nodesShown = Set[TemplateEntity]()
+ var edgesAll = List[(TemplateEntity, List[TemplateEntity])]()
+
+ // nodesAll is the entire set of classes and traits in the package; it is the superset of the nodes in the diagram
+ // we collect classes, traits and objects without a companion, which are usually used as values (e.g. scala.None)
+ val nodesAll = pack.members collect {
+ case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
+ }
+
+ // for each node, add the edges from the node to its parents (superclasses)
+ for (node <- nodesAll if !classExcluded(node)) {
+ node match {
+ case dnode: DocTemplateImpl =>
+ var superClasses = dnode.parentTypes.map(_._1)
+
+ superClasses = superClasses.filter(nodesAll.contains(_))
+
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+ if (pack.sym == ScalaPackage)
+ if (dnode.sym == NullClass)
+ superClasses = List(makeTemplate(AnyRefClass))
+ else if (dnode.sym == NothingClass)
+ superClasses = (List(NullClass) ::: ScalaValueClasses).map(makeTemplate(_))
+
+ if (!superClasses.isEmpty) {
+ nodesShown += dnode
+ nodesShown ++= superClasses
+ }
+ edgesAll ::= dnode -> superClasses
+ case _ =>
+ }
+
+ mapNodes += node -> (if (node.inTemplate == pack) NormalNode(node.ownType, Some(node)) else OutsideNode(node.ownType, Some(node)))
+ }
+
+ if (nodesShown.isEmpty)
+ None
+ else {
+ val nodes = nodesAll.filter(nodesShown.contains(_)).map(mapNodes(_))
+ val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty)
+ val diagram =
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+ if (pack.sym == ScalaPackage) {
+ // Tried it, but it doesn't look good:
+ // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass)))
+ // var dirty = true
+ // do {
+ // val length = anyRefSubtypes.length
+ // anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 }
+ // anyRefSubtypes = anyRefSubtypes.distinct
+ // dirty = (anyRefSubtypes.length != length)
+ // } while (dirty)
+ // println(anyRefSubtypes)
+ val anyRefSubtypes = Nil
+ val allAnyRefTypes = aggregationNode("All AnyRef subtypes")
+ val nullTemplate = makeTemplate(NullClass)
+ PackageDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate)))
+ } else
+ PackageDiagram(nodes, edges)
+
+ filterDiagram(diagram, diagramFilter)
+ }
+ }
+
+ tModel += System.currentTimeMillis
+ DiagramStats.addFilterTime(tFilter)
+ DiagramStats.addModelTime(tModel-tFilter)
+
+ result
+ }
+
+ /** Diagram filtering logic */
+ private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
+ tFilter -= System.currentTimeMillis
+
+ val result =
+ if (diagramFilter == FullDiagram)
+ Some(diagram)
+ else if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ // Final diagram, with the filtered nodes and edges
+ diagram match {
+ case ClassDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) =>
+ None
+
+ case ClassDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
+
+ def hideIncoming(node: Node): Boolean =
+ diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode)
+
+ def hideOutgoing(node: Node): Boolean =
+ diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node)
+
+ // println(thisNode)
+ // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n"))
+ // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n"))
+ Some(ClassDiagram(thisNode,
+ superClasses.filterNot(hideOutgoing(_)),
+ subClasses.filterNot(hideIncoming(_)),
+ incomingImplicits.filterNot(hideIncoming(_)),
+ outgoingImplicits.filterNot(hideOutgoing(_))))
+
+ case PackageDiagram(nodes0, edges0) =>
+ // Filter out all edges that:
+ // (1) start from a hidden node
+ // (2) are manually hidden by the user
+ // (3) end at a hidden node
+ val edges: List[(Node, List[Node])] =
+ diagram.edges.flatMap({
+ case (source, dests) if !diagramFilter.hideNode(source) =>
+ val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest })
+ if (dests2 != Nil)
+ List((source, dests2))
+ else
+ Nil
+ case _ => Nil
+ })
+
+ // Only show the non-isolated nodes
+ // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
+ // TODO: Does .distinct cause any stability issues?
+ val sourceNodes = edges.map(_._1)
+ val sinkNodes = edges.map(_._2).flatten
+ val nodes = (sourceNodes ::: sinkNodes).distinct
+ Some(PackageDiagram(nodes, edges))
+ }
+ }
+
+ tFilter += System.currentTimeMillis
+
+ // eliminate all empty diagrams
+ if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
+ None
+ else
+ result
+ }
+
+}
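The PackageDiagram branch of filterDiagram above reduces to a flatMap over the edge list. A minimal standalone sketch of that step, with plain strings as nodes and hypothetical hideNode/hideEdge predicates standing in for the DiagramFilter:

  object EdgeFilterSketch {
    def filterEdges(edges: List[(String, List[String])],
                    hideNode: String => Boolean,
                    hideEdge: (String, String) => Boolean): List[(String, List[String])] =
      edges.flatMap {
        case (source, dests) if !hideNode(source) =>
          // keep only destinations that are neither hidden themselves
          // nor reached through a manually hidden edge
          val kept = dests.filterNot(d => hideNode(d) || hideEdge(source, d))
          if (kept.nonEmpty) List(source -> kept) else Nil
        case _ => Nil
      }

    def main(args: Array[String]): Unit = {
      val edges = List("A" -> List("B", "C"), "D" -> List("B"))
      println(filterEdges(edges, hideNode = _ == "C", hideEdge = (_, _) => false))
      // List((A,List(B)), (D,List(B)))
    }
  }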
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 82ce59d075..8f287a5c7a 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -485,8 +485,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
} catch {
case ex: FreshRunReq => throw ex // propagate a new run request
case ShutdownReq => throw ShutdownReq // propagate a shutdown request
-
- case ex =>
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
println("[%s]: exception during background compile: ".format(unit.source) + ex)
ex.printStackTrace()
for (r <- waitLoadedTypeResponses(unit.source)) {
@@ -755,7 +755,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val tp1 = pre.memberType(alt) onTypeError NoType
val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
matchesType(tp1, tp2, false)
- } catch {
+ }
+ catch {
+ case ex: ControlThrowable => throw ex
case ex: Throwable =>
println("error in hyperlinking: " + ex)
ex.printStackTrace()
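Both hunks above apply the same idiom: re-throw control-flow throwables before a broad Throwable handler logs and recovers. A minimal standalone sketch of the pattern (scala.util.control.NonFatal, available since 2.10, packages the same check):

  import scala.util.control.{ControlThrowable, NonFatal}

  object CatchIdiomSketch {
    def run(body: => Unit): Unit =
      try body
      catch {
        // propagate control-flow exceptions (break, non-local return, ...) untouched
        case ce: ControlThrowable => throw ce
        case ex: Throwable        => println("recovering from: " + ex)
      }

    def runNonFatal(body: => Unit): Unit =
      try body
      catch { case NonFatal(ex) => println("recovering from: " + ex) }

    def main(args: Array[String]): Unit = {
      run { throw new RuntimeException("boom") }
      runNonFatal { throw new IllegalStateException("boom") }
    }
  }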
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index f622f11ffd..afb8985700 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -82,13 +82,17 @@ abstract class InteractiveTest
/** Test's entry point */
def main(args: Array[String]) {
+ try execute()
+ finally shutdown()
+ }
+
+ protected def execute(): Unit = {
loadSources()
- runTests()
- shutdown()
+ runDefaultTests()
}
/** Load all sources before executing the test. */
- private def loadSources() {
+ protected def loadSources() {
// ask the presentation compiler to track all sources. We do
// not wait for the file to be entirely typed because we do want
// to exercise the presentation compiler on scoped type requests.
@@ -100,7 +104,7 @@ abstract class InteractiveTest
}
/** Run all defined `PresentationCompilerTestDef` */
- protected def runTests() {
+ protected def runDefaultTests() {
//TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
testActions.foreach(_.runTest())
}
@@ -109,7 +113,7 @@ abstract class InteractiveTest
private def randomTests(n: Int, files: Array[SourceFile]) {
val tester = new Tester(n, files, settings) {
override val compiler = self.compiler
- override val reporter = compilerReporter
+ override val reporter = new reporters.StoreReporter
}
tester.run()
}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
index 36671555d1..4d85ab9d88 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -4,10 +4,8 @@ package tests
import java.io.File.pathSeparatorChar
import java.io.File.separatorChar
-
import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
-import scala.tools.nsc.io.File
-
+import scala.tools.nsc.io.{File,Path}
import core.Reporter
import core.TestSettings
@@ -46,6 +44,11 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
println("error processing arguments (unprocessed: %s)".format(rest))
case _ => ()
}
+
+ // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
+ if(settings.sourcepath.isSetByUser)
+ settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
+
adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index 8ccb5aa075..5c1837b3bf 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -2,12 +2,15 @@ package scala.tools.nsc
package interactive
package tests.core
-import reporters.StoreReporter
+import reporters.{Reporter => CompilerReporter}
+import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
-private[tests] trait PresentationCompilerInstance {
+private[tests] trait PresentationCompilerInstance extends TestSettings {
protected val settings = new Settings
- protected val compilerReporter = new StoreReporter
+ protected val compilerReporter: CompilerReporter = new InteractiveReporter {
+ override def compiler = PresentationCompilerInstance.this.compiler
+ }
protected lazy val compiler: Global = {
prepareSettings(settings)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
index 37e4dfaea4..7f5e09842a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
@@ -93,7 +93,7 @@ trait ILoopInit {
postInitThunks foreach (f => addThunk(f()))
runThunks()
} catch {
- case ex =>
+ case ex: Throwable =>
val message = new java.io.StringWriter()
ex.printStackTrace(new java.io.PrintWriter(message))
initError = message.toString
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 2a7adbe781..b385787cce 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -751,7 +751,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private def load(path: String): Class[_] = {
try Class.forName(path, true, classLoader)
- catch { case ex => evalError(path, unwrap(ex)) }
+ catch { case ex: Throwable => evalError(path, unwrap(ex)) }
}
var evalCaught: Option[Throwable] = None
@@ -989,7 +989,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** load and run the code using reflection */
def loadAndRun: (String, Boolean) = {
try { ("" + (lineRep call sessionNames.print), true) }
- catch { case ex => (lineRep.bindError(ex), false) }
+ catch { case ex: Throwable => (lineRep.bindError(ex), false) }
}
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
index 0c26aa8b28..adb1a2be04 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
package interpreter
+import scala.util.control.ControlThrowable
import util.Exceptional.unwrap
import util.stackTraceString
@@ -38,7 +39,8 @@ trait ReplConfig {
private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
- case t =>
+ case t: ControlThrowable => throw t
+ case t: Throwable =>
repldbg(label + ": " + unwrap(t))
repltrace(stackTraceString(unwrap(t)))
alt
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 9b223a13ba..aa30a7a45b 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -195,6 +195,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
+ val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
/** Groups of Settings.
*/
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index 1bb0948168..f0ee8b11f3 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -40,7 +40,9 @@ trait StandardScalaSettings {
val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.")
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
- val target = ChoiceSetting ("-target", "target", "Target platform for object files.", List("jvm-1.5", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"), "jvm-1.5")
+ val target = ChoiceSetting ("-target", "target", "Target platform for object files.",
+ List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
+ "jvm-1.5-asm")
val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked (erasure) warnings.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index d8bf23f4fe..e6499c05a6 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -613,8 +613,8 @@ abstract class ClassfileParser {
parseAttributes(sym, info)
getScope(jflags).enter(sym)
- // sealed java enums (experimental)
- if (isEnum && opt.experimental) {
+ // sealed java enums
+ if (isEnum) {
val enumClass = sym.owner.linkedClassOfClass
if (!enumClass.isSealed)
enumClass setFlag (SEALED | ABSTRACT)
@@ -862,7 +862,7 @@ abstract class ClassfileParser {
}
else in.skip(attrLen)
case tpnme.SyntheticATTR =>
- sym.setFlag(SYNTHETIC)
+ sym.setFlag(SYNTHETIC | HIDDEN)
in.skip(attrLen)
case tpnme.BridgeATTR =>
sym.setFlag(BRIDGE)
@@ -879,7 +879,7 @@ abstract class ClassfileParser {
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
case tpnme.ScalaATTR =>
@@ -897,7 +897,7 @@ abstract class ClassfileParser {
case Some(san: AnnotationInfo) =>
val bytes =
san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
case None =>
throw new RuntimeException("Scala class file does not contain Scala annotation")
}
@@ -1013,9 +1013,16 @@ abstract class ClassfileParser {
} catch {
case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
case ex: Throwable =>
- debuglog("dropping annotation on " + sym + ", an error occured during parsing (e.g. annotation class not found)")
-
- None // ignore malformed annotations ==> t1135
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception.
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after an NPE (for example),
+ // and that should never be swallowed silently.
+ warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ if (settings.debug.value) ex.printStackTrace()
+
+ None // ignore malformed annotations
}
/**
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 192cc94b90..cc5ed0f129 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -503,7 +503,7 @@ abstract class Pickler extends SubComponent {
private def writeSymInfo(sym: Symbol) {
writeRef(sym.name)
writeRef(localizedOwner(sym))
- writeLongNat((rawFlagsToPickled(sym.flags & PickledFlags)))
+ writeLongNat((rawToPickledFlags(sym.flags & PickledFlags)))
if (sym.hasAccessBoundary) writeRef(sym.privateWithin)
writeRef(sym.info)
}
@@ -966,7 +966,7 @@ abstract class Pickler extends SubComponent {
TREE
case Modifiers(flags, privateWithin, _) =>
- val pflags = rawFlagsToPickled(flags)
+ val pflags = rawToPickledFlags(flags)
writeNat((pflags >> 32).toInt)
writeNat((pflags & 0xFFFFFFFF).toInt)
writeRef(privateWithin)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 1276d62995..5115c49c87 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -351,7 +351,7 @@ abstract class Erasure extends AddInterfaces
List())
if cast.symbol == Object_asInstanceOf &&
tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.firstParamAccessor =>
+ sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
Some(arg)
case _ =>
None
@@ -498,7 +498,8 @@ abstract class Erasure extends AddInterfaces
ldef setType ldef.rhs.tpe
case _ =>
val tree1 = tree.tpe match {
- case ErasedValueType(clazz) =>
+ case ErasedValueType(tref) =>
+ val clazz = tref.sym
tree match {
case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
log("shortcircuiting unbox -> box "+arg); arg
@@ -554,25 +555,26 @@ abstract class Erasure extends AddInterfaces
ldef setType ldef.rhs.tpe
case _ =>
val tree1 = pt match {
- case ErasedValueType(clazz) =>
+ case ErasedValueType(tref) =>
tree match {
case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
log("shortcircuiting box -> unbox "+arg)
arg
case _ =>
+ val clazz = tref.sym
log("not boxed: "+tree)
val tree0 = adaptToType(tree, clazz.tpe)
- cast(Apply(Select(tree0, clazz.firstParamAccessor), List()), pt)
+ cast(Apply(Select(tree0, clazz.derivedValueClassUnbox), List()), pt)
}
case _ =>
pt.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) UNIT
- else BLOCK(tree, UNIT)
- case x =>
- assert(x != ArrayClass)
- // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
- Apply(unboxMethod(pt.typeSymbol), tree)
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) UNIT
+ else BLOCK(tree, UNIT)
+ case x =>
+ assert(x != ArrayClass)
+ // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
+ Apply(unboxMethod(pt.typeSymbol), tree)
}
}
typedPos(tree.pos)(tree1)
@@ -601,7 +603,7 @@ abstract class Erasure extends AddInterfaces
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
- if (settings.debug.value && pt != WildcardType)
+ //if (settings.debug.value && pt != WildcardType)
log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
if (tree.tpe <:< pt)
tree
@@ -629,7 +631,7 @@ abstract class Erasure extends AddInterfaces
* - `x != y` for != in class Any becomes `!(x equals y)` with equals in class Object.
* - x.asInstanceOf[T] becomes x.$asInstanceOf[T]
* - x.isInstanceOf[T] becomes x.$isInstanceOf[T]
- * - x.isInstanceOf[ErasedValueType(clazz)] becomes x.isInstanceOf[clazz.tpe]
+ * - x.isInstanceOf[ErasedValueType(tref)] becomes x.isInstanceOf[tref.sym.tpe]
* - x.m where m is some other member of Any becomes x.m where m is a member of class Object.
* - x.m where x has unboxed value type T and m is not a directly translated member of T becomes T.box(x).m
* - x.m where x is a reference type and m is a directly translated member of value type T becomes x.TValue().m
@@ -651,12 +653,33 @@ abstract class Erasure extends AddInterfaces
atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
else
*/
- if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) unbox(qual1, targ.tpe)
- else tree
+ if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
+ val noNullCheckNeeded = targ.tpe match {
+ case ErasedValueType(tref) =>
+ atPhase(currentRun.erasurePhase) {
+ isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
+ }
+ case _ =>
+ true
+ }
+ if (noNullCheckNeeded) unbox(qual1, targ.tpe)
+ else {
+ def nullConst = Literal(Constant(null)) setType NullClass.tpe
+ val untyped =
+// util.trace("new asinstanceof test") {
+ gen.evalOnce(qual1, context.owner, context.unit) { qual =>
+ If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+ Literal(Constant(null)) setType targ.tpe,
+ unbox(qual(), targ.tpe))
+ }
+// }
+ typed(untyped)
+ }
+ } else tree
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
- case ErasedValueType(clazz) => targ.setType(clazz.tpe)
+ case ErasedValueType(tref) => targ.setType(tref.sym.tpe)
case _ =>
}
tree
@@ -711,17 +734,22 @@ abstract class Erasure extends AddInterfaces
val tree1 = try {
tree match {
case InjectDerivedValue(arg) =>
- val clazz = tree.symbol
- val result = typed1(arg, mode, underlyingOfValueClass(clazz)) setType ErasedValueType(clazz)
- log("transforming inject "+arg+":"+underlyingOfValueClass(clazz)+"/"+ErasedValueType(clazz)+" = "+result)
- return result
+ (tree.attachments.get[TypeRefAttachment]: @unchecked) match {
+ case Some(itype) =>
+ val tref = itype.tpe
+ val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ log(s"transforming inject $arg -> $tref/$argPt")
+ val result = typed(arg, mode, argPt)
+ log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
+ return result setType ErasedValueType(tref)
+ }
case _ =>
- super.typed1(adaptMember(tree), mode, pt)
+ super.typed1(adaptMember(tree), mode, pt)
}
} catch {
case er: TypeError =>
- Console.println("exception when typing " + tree)
+ Console.println("exception when typing " + tree+"/"+tree.getClass)
Console.println(er.msg + " in file " + context.owner.sourceFile)
er.printStackTrace
abort("unrecoverable error")
@@ -731,6 +759,7 @@ abstract class Erasure extends AddInterfaces
finally throw ex
throw ex
}
+
def adaptCase(cdef: CaseDef): CaseDef = {
val newCdef = deriveCaseDef(cdef)(adaptToType(_, tree1.tpe))
newCdef setType newCdef.body.tpe
@@ -970,8 +999,11 @@ abstract class Erasure extends AddInterfaces
else
tree
+
case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
- InjectDerivedValue(arg) setSymbol tpt.tpe.typeSymbol
+// println("inject derived: "+arg+" "+tpt.tpe)
+ InjectDerivedValue(arg) addAttachment //@@@ setSymbol tpt.tpe.typeSymbol
+ new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
case Apply(fn, args) =>
def qualifier = fn match {
case Select(qual, _) => qual
@@ -1125,4 +1157,6 @@ abstract class Erasure extends AddInterfaces
}
}
}
+
+ private class TypeRefAttachment(val tpe: TypeRef)
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 1b8513373d..ab7bbc591b 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -92,7 +92,7 @@ abstract class ExplicitOuter extends InfoTransform
else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
}
def newOuterAccessor(clazz: Symbol) = {
- val accFlags = SYNTHETIC | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val accFlags = SYNTHETIC | HIDDEN | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
@@ -101,7 +101,7 @@ abstract class ExplicitOuter extends InfoTransform
sym setInfo MethodType(Nil, restpe)
}
def newOuterField(clazz: Symbol) = {
- val accFlags = SYNTHETIC | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
+ val accFlags = SYNTHETIC | HIDDEN | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
val sym = clazz.newValue(nme.OUTER_LOCAL, clazz.pos, accFlags)
sym setInfo clazz.outerClass.thisType
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index ef158a71f6..999d00520d 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -21,7 +21,8 @@ trait PostErasure extends InfoTransform with TypingTransformers {
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
- case ErasedValueType(clazz) => erasure.underlyingOfValueClass(clazz)
+ case ErasedValueType(tref) =>
+ atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
case _ => mapOver(tp)
}
}
@@ -38,7 +39,7 @@ trait PostErasure extends InfoTransform with TypingTransformers {
acc), List())
if atPhase(currentRun.erasurePhase) {
tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.firstParamAccessor
+ sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
} =>
if (settings.debug.value) log("Removing "+tree+" -> "+arg)
arg
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 1d820afe11..ffcb682cf7 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1355,8 +1355,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+
+ def reportError[T](body: =>T)(handler: TypeError => T): T =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.msg)
+ handler(te)
+ }
- override def transform(tree: Tree): Tree = {
+ override def transform(tree: Tree): Tree =
+ reportError { transform1(tree) } {_ => tree}
+
+ def transform1(tree: Tree) = {
val symbol = tree.symbol
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
@@ -1382,14 +1393,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else None
}
- def reportError[T](body: =>T)(handler: TypeError => T): T =
- try body
- catch {
- case te: TypeError =>
- reporter.error(tree.pos, te.msg)
- handler(te)
- }
-
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -1434,7 +1437,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
- case None => super.transform(tree)
+ case None =>
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ // super.transform(tree)
}
case Select(Super(_, _), name) if illegalSpecializedInheritance(currentClass) =>
@@ -1501,13 +1507,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
- def reportTypeError(body: =>Tree) =
- try body
- catch {
- case te: TypeError =>
- reporter.error(te.pos, te.toString)
- ddef
- }
+ def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
+
if (symbol.isConstructor) {
val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 60cc9e5fb8..ba6c43f9d3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -106,7 +106,7 @@ trait ContextErrors {
def errMsg = {
val paramName = param.name
val paramTp = param.tpe
- paramTp.typeSymbol match {
+ paramTp.typeSymbolDirect match {
case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
case _ =>
"could not find implicit value for "+
@@ -562,7 +562,9 @@ trait ContextErrors {
// SelectFromTypeTree
def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
- issueNormalTypeError(tree, "illegal type selection from volatile type "+qual.tpe)
+ val hiBound = qual.tpe.bounds.hi
+ val addendum = if (hiBound =:= qual.tpe) "" else s" (with upper bound ${hiBound})"
+ issueNormalTypeError(tree, s"illegal type selection from volatile type ${qual.tpe}${addendum}")
setError(tree)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index e1fb683aa9..69b27045ab 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -88,9 +88,11 @@ trait EtaExpansion { self: Analyzer =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe))
- // zipAll: with repeated params, there might be more args than params
- val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) }
+ val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift
+ val newArgs = mapWithIndex(args) { (arg, i) =>
+ // with repeated params, there might be more or fewer args than params
+ liftout(arg, byName(i).getOrElse(false))
+ }
treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
@@ -107,11 +109,20 @@ trait EtaExpansion { self: Analyzer =>
*/
def expand(tree: Tree, tpe: Type): Tree = tpe match {
case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
- val params = paramSyms map (sym =>
- ValDef(Modifiers(SYNTHETIC | PARAM),
- sym.name.toTermName, TypeTree(sym.tpe) , EmptyTree))
+ val params: List[(ValDef, Boolean)] = paramSyms.map {
+ sym =>
+ val origTpe = sym.tpe
+ val isRepeated = definitions.isRepeatedParamType(origTpe)
+ // SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
+ val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+ val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
+ (valDef, isRepeated)
+ }
atPos(tree.pos.makeTransparent) {
- Function(params, expand(Apply(tree, params map gen.paramToArg), restpe))
+ val args = params.map {
+ case (valDef, isRepeated) => gen.paramToArg(Ident(valDef.name), isRepeated)
+ }
+ Function(params.map(_._1), expand(Apply(tree, args), restpe))
}
case _ =>
tree
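At the source level, the repeated-parameter handling above (SI-4176) means that eta-expanding a vararg method produces a function over Seq instead of leaking the A* type into the function type. A minimal sketch, assuming the new default behaviour (settings.etaExpandKeepsStar off):

  object EtaExpansionSketch {
    def sum(xs: Int*): Int = xs.sum

    def main(args: Array[String]): Unit = {
      // Eta-expansion of a vararg method: the parameter type is Seq[Int],
      // not the Int* type, which is only legal in parameter position.
      val f: Seq[Int] => Int = sum _
      println(f(Seq(1, 2, 3))) // 6
    }
  }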
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index f7e00109ae..da045e1a48 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1125,7 +1125,7 @@ trait Implicits {
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: Infoss = {
- Statistics.incCounter(implicitCacheHits)
+ Statistics.incCounter(implicitCacheAccs)
implicitsCache get pt match {
case Some(implicitInfoss) =>
Statistics.incCounter(implicitCacheHits)
@@ -1224,7 +1224,9 @@ trait Implicits {
* reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
- private def manifestOfType(tp: Type, full: Boolean): SearchResult = {
+ private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = {
+ val full = flavor == FullManifestClass
+ val opt = flavor == OptManifestClass
/** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
@@ -1256,7 +1258,7 @@ trait Implicits {
if (containsExistential(tp1)) EmptyTree
else manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
case ConstantType(value) =>
- manifestOfType(tp1.deconst, full)
+ manifestOfType(tp1.deconst, FullManifestClass)
case TypeRef(pre, sym, args) =>
if (isPrimitiveValueClass(sym) || isPhantomClass(sym)) {
findSingletonManifest(sym.name.toString)
@@ -1299,22 +1301,13 @@ trait Implicits {
mot(tp1.skolemizeExistential, from, to)
case _ =>
EmptyTree
-/* !!! the following is almost right, but we have to splice nested manifest
- * !!! types into this type. This requires a substantial extension of
- * !!! reifiers.
- val reifier = new Reifier()
- val rtree = reifier.reifyTopLevel(tp1)
- manifestFactoryCall("apply", tp, rtree)
-*/
}
}
- val tagInScope =
- if (full) resolveTypeTag(pos, NoType, tp, concrete = true, allowMaterialization = false)
- else resolveClassTag(pos, tp, allowMaterialization = false)
- if (tagInScope.isEmpty) mot(tp, Nil, Nil)
- else {
- if (full) {
+ if (full) {
+ val tagInScope = resolveTypeTag(pos, NoType, tp, concrete = true, allowMaterialization = false)
+ if (tagInScope.isEmpty) mot(tp, Nil, Nil)
+ else {
if (ReflectRuntimeUniverse == NoSymbol) {
// todo. write a test for this
context.error(pos, s"""
@@ -1330,44 +1323,62 @@ trait Implicits {
|to proceed add a class tag to the type `$tp` (e.g. by introducing a context bound) and recompile.""".trim.stripMargin)
return SearchFailure
}
+ val cm = typed(Ident(ReflectRuntimeCurrentMirror))
+ val interop = gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
+ wrapResult(interop)
+ }
+ } else {
+ mot(tp, Nil, Nil) match {
+ case SearchFailure if opt => wrapResult(gen.mkAttributedRef(NoManifest))
+ case result => result
}
-
- val interop =
- if (full) {
- val cm = typed(Ident(ReflectRuntimeCurrentMirror))
- gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
- } else gen.mkMethodCall(ReflectBasis, nme.classTagToClassManifest, List(tp), List(tagInScope))
- wrapResult(interop)
}
}
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** The tag corresponding to type `pt`, provided `pt` is a flavor of a tag.
+ /** Materializes implicits of magic types (currently, manifests and tags).
+ * Will be replaced by implicit macros once we fix them.
*/
- private def implicitTagOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
- case TypeRef(pre, sym, arg :: Nil) if ManifestSymbols(sym) =>
- manifestOfType(arg, sym == FullManifestClass) match {
- case SearchFailure if sym == OptManifestClass => wrapResult(gen.mkAttributedRef(NoManifest))
- case result => result
- }
- case TypeRef(pre, sym, arg :: Nil) if TagSymbols(sym) =>
- tagOfType(pre, arg, sym)
- case tp@TypeRef(_, sym, _) if sym.isAbstractType =>
- implicitTagOrOfExpectedType(tp.bounds.lo) // #3977: use tp (==pt.dealias), not pt (if pt is a type alias, pt.bounds.lo == pt)
- case _ =>
+ private def materializeImplicit(pt: Type): SearchResult = {
+ def fallback = {
searchImplicit(implicitsOfExpectedType, false)
// shouldn't we pass `pt` to `implicitsOfExpectedType`, or is the recursive case
// for an abstract type really only meant for tags?
+ }
+
+ pt match {
+ case TypeRef(_, sym, _) if sym.isAbstractType =>
+ materializeImplicit(pt.dealias.bounds.lo) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.bounds.lo == pt)
+ case pt @ TypeRef(pre, sym, arg :: Nil) =>
+ sym match {
+ case sym if ManifestSymbols(sym) => manifestOfType(arg, sym)
+ case sym if TagSymbols(sym) => tagOfType(pre, arg, sym)
+ // as of late ClassManifest is an alias of ClassTag
+ // hence we need to take extra care when performing dealiasing
+ // because it might destroy the flavor of the manifest requested by the user
+ // when the user wants ClassManifest[T], we should invoke `manifestOfType` not `tagOfType`
+ // hence we don't do `pt.dealias` as we did before, but rather do `pt.betaReduce`
+ // unlike `dealias`, `betaReduce` performs at most one step of dealiasing
+ // while dealias pops all aliases in a single invocation
+ case sym if sym.isAliasType => materializeImplicit(pt.betaReduce)
+ case _ => fallback
+ }
+ case _ =>
+ fallback
+ }
}
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
- * // [Eugene] the following two lines should be deleted after we migrate delegate tag materialization to implicit macros
+ * // [Eugene] the following lines should be deleted after we migrate delegate tag materialization to implicit macros
* If that fails, and `pt` is an instance of a ClassTag, try to construct a class tag.
* If that fails, and `pt` is an instance of a TypeTag, try to construct a type tag.
+ * If that fails, and `pt` is an instance of a ClassManifest, try to construct a class manifest.
+ * If that fails, and `pt` is an instance of a Manifest, try to construct a manifest.
+ * If that fails, and `pt` is an instance of an OptManifest, try to construct a class manifest and return NoManifest if construction fails.
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
@@ -1387,7 +1398,7 @@ trait Implicits {
val failstart = Statistics.startTimer(oftypeFailNanos)
val succstart = Statistics.startTimer(oftypeSucceedNanos)
- result = implicitTagOrOfExpectedType(pt)
+ result = materializeImplicit(pt)
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
@@ -1434,7 +1445,16 @@ trait Implicits {
}
object ImplicitNotFoundMsg {
- def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg map (m => (new Message(sym, m)))
+ def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match {
+ case Some(m) => Some(new Message(sym, m))
+ case None if sym.isAliasType =>
+ // perform exactly one step of dealiasing
+ // this is necessary because ClassManifests are now aliased to ClassTags
+ // but we don't want to intimidate users by showing unrelated error messages
+ unapply(sym.info.resultType.betaReduce.typeSymbolDirect)
+ case _ => None
+ }
+
// check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 688dcd91ac..960c210649 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1104,7 +1104,9 @@ trait Infer {
try {
// debuglog("TVARS "+ (tvars map (_.constr)))
// look at the argument types of the primary constructor corresponding to the pattern
- val variances = undetparams map varianceInType(ctorTp.paramTypes.headOption getOrElse ctorTp)
+ val variances =
+ if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
+ else undetparams map varianceInTypes(ctorTp.paramTypes)
val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
@@ -1612,6 +1614,13 @@ trait Infer {
val saved = context.state
var fallback = false
context.setBufferErrors()
+ // We cache the current buffer because it is impossible to
+ // distinguish errors that occurred before entering tryTwice
+ // from those of our first attempt in 'withImplicitsDisabled'. If the
+ // first attempt fails we retry with implicits on *and* a clean
+ // buffer, but that would also flush any valid pre-tryTwice
+ // errors, hence some manual buffer tweaking is necessary.
+ val errorsToRestore = context.flushAndReturnBuffer()
try {
context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
@@ -1625,8 +1634,10 @@ trait Infer {
case ex: TypeError => // recoverable cyclic references
context.restoreState(saved)
if (!fallback) infer(true) else ()
+ } finally {
+ context.restoreState(saved)
+ context.updateBuffer(errorsToRestore)
}
- context.restoreState(saved)
}
else infer(true)
}
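The buffering discipline described in the new comment can be sketched independently of the typer: set aside whatever was buffered before the attempt, run the attempt against a clean buffer, and splice the earlier contents back in a finally block. The buffer type and helper name below are made up for illustration:

  import scala.collection.mutable.ListBuffer

  object BufferRestoreSketch {
    // Hypothetical helper mirroring the save/flush/restore dance in tryTwice above.
    def withCleanBuffer[T](errors: ListBuffer[String])(attempt: => T): T = {
      val preExisting = errors.toList // "flush and return": keep earlier errors aside
      errors.clear()
      try attempt
      finally errors.insertAll(0, preExisting) // restore them whatever the attempt did
    }

    def main(args: Array[String]): Unit = {
      val errors = ListBuffer("earlier error")
      withCleanBuffer(errors) { errors += "error from this attempt" }
      println(errors) // ListBuffer(earlier error, error from this attempt)
    }
  }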
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index d157666e47..322b9ebb25 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1221,7 +1221,10 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case ex: Throwable =>
None
}
- } getOrElse realex.getMessage
+ } getOrElse {
+ val msg = realex.getMessage
+ if (msg != null) msg else realex.getClass.getName
+ }
fail(typer, expandee, msg = "exception during macro expansion: " + message)
}
}
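The getMessage fallback added above guards against exceptions whose message is null; a standalone equivalent for reference:

  object MessageFallbackSketch {
    // Same effect as the getOrElse block above: never report a bare "null".
    def describe(t: Throwable): String =
      Option(t.getMessage).getOrElse(t.getClass.getName)

    def main(args: Array[String]): Unit = {
      println(describe(new NullPointerException))       // java.lang.NullPointerException
      println(describe(new RuntimeException("kaboom"))) // kaboom
    }
  }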
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index bde3ad98c9..3eff5ef024 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -86,10 +86,6 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
- /** RETmode is set when we are typing a return expression.
- */
- final val RETmode = 0x20000
-
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index decd18b599..9580cd5676 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -52,27 +52,6 @@ trait Namers extends MethodSynthesis {
def newNamerFor(context: Context, tree: Tree): Namer =
newNamer(context.makeNewScope(tree, tree.symbol))
- // In the typeCompleter (templateSig) of a case class (resp it's module),
- // synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
- // their signatures, the corresponding ClassDef is needed.
- // During naming, for each case class module symbol, the corresponding ClassDef
- // is stored in this map. The map is cleared lazily, i.e. when the new symbol
- // is created with the same name, the old one (if present) is wiped out, or the
- // entry is deleted when it is used and no longer needed.
- private val classOfModuleClass = perRunCaches.newWeakMap[Symbol, WeakReference[ClassDef]]()
-
- // Default getters of constructors are added to the companion object in the
- // typeCompleter of the constructor (methodSig). To compute the signature,
- // we need the ClassDef. To create and enter the symbols into the companion
- // object, we need the templateNamer of that module class.
- // This map is extended during naming of classes, the Namer is added in when
- // it's available, i.e. in the type completer (templateSig) of the module class.
- private[typechecker] val classAndNamerOfModule = perRunCaches.newMap[Symbol, (ClassDef, Namer)]()
-
- def resetNamer() {
- classAndNamerOfModule.clear()
- }
-
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
import NamerErrorGen._
@@ -502,32 +481,29 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethodOrGetter(tree: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = tree.symbol
- val lazyType = completerOf(tree, tparams)
- def completeCopyFirst = sym.isSynthetic && (!sym.hasDefault || sym.owner.info.member(nme.copy).isSynthetic)
- def completeCopyMethod(clazz: Symbol) {
- // the 'copy' method of case classes needs a special type completer to make
- // bug0054.scala (and others) work. the copy method has to take exactly the same
- // parameter types as the primary constructor.
+ def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
+ val sym = copyDefDef.symbol
+ val lazyType = completerOf(copyDefDef, tparams)
+
+ /** Assign the types of the class parameters to the parameters of the
+ * copy method. See comment in `Unapplies.caseClassCopyMeth` */
+ def assignParamTypes() {
+ val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
- val vparamss = tree match { case x: DefDef => x.vparamss ; case _ => Nil }
- val cparamss = constructorType.paramss
-
- map2(vparamss, cparamss)((vparams, cparams) =>
- map2(vparams, cparams)((param, cparam) =>
- // need to clone the type cparam.tpe???
- // problem is: we don't have the new owner yet (the new param symbol)
- param.tpt setType subst(cparam.tpe)
+ val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val classParamss = constructorType.paramss
+ val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
+
+ map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyParams, classParams)((copyP, classP) =>
+ copyP.tpt setType subst(classP.tpe)
)
)
}
- sym setInfo {
- mkTypeCompleter(tree) { copySym =>
- if (completeCopyFirst)
- completeCopyMethod(copySym.owner)
+ sym setInfo {
+ mkTypeCompleter(copyDefDef) { sym =>
+ assignParamTypes()
lazyType complete sym
}
}
@@ -604,8 +580,8 @@ trait Namers extends MethodSynthesis {
val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
- if (name == nme.copy || tree.symbol.name.startsWith(nme.copy + nme.DEFAULT_GETTER_STRING))
- enterCopyMethodOrGetter(tree, tparams)
+ if (name == nme.copy && sym.isSynthetic)
+ enterCopyMethod(tree, tparams)
else
sym setInfo completerOf(tree, tparams)
}
@@ -621,7 +597,7 @@ trait Namers extends MethodSynthesis {
MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
- classOfModuleClass(m.moduleClass) = new WeakReference(tree)
+ m.moduleClass.addAttachment(new ClassForCaseCompanionAttachment(tree))
}
val hasDefault = impl.body exists {
case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
@@ -629,7 +605,7 @@ trait Namers extends MethodSynthesis {
}
if (hasDefault) {
val m = ensureCompanionObject(tree)
- classAndNamerOfModule(m) = (tree, null)
+ m.addAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
@@ -660,7 +636,8 @@ trait Namers extends MethodSynthesis {
if (sym.isLazy)
sym.lazyAccessor andAlso enterIfNotThere
- defaultParametersOfMethod(sym) foreach { symRef => enterIfNotThere(symRef()) }
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
}
this.context
}
@@ -849,23 +826,20 @@ trait Namers extends MethodSynthesis {
// add apply and unapply methods to companion objects of case classes,
// unless they exist already; here, "clazz" is the module class
if (clazz.isModuleClass) {
- Namers.this.classOfModuleClass get clazz foreach { cdefRef =>
- val cdef = cdefRef()
- if (cdef.mods.isCase) addApplyUnapply(cdef, templateNamer)
- classOfModuleClass -= clazz
+ clazz.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
+ val cdef = cma.caseClass
+ assert(cdef.mods.isCase, "expected case class: "+ cdef)
+ addApplyUnapply(cdef, templateNamer)
}
}
// add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because
// the namer phase must traverse this copy method to create default getters for its parameters.
// here, clazz is the ClassSymbol of the case class (not the module).
- // @check: this seems to work only if the type completer of the class runs before the one of the
- // module class: the one from the module class removes the entry from classOfModuleClass (see above).
if (clazz.isClass && !clazz.hasModuleFlag) {
val modClass = companionSymbolOf(clazz, context).moduleClass
- Namers.this.classOfModuleClass get modClass map { cdefRef =>
- val cdef = cdefRef()
-
+ modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
+ val cdef = cma.caseClass
def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
if (cdef.mods.isCase && !hasCopy(decls) &&
!parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
@@ -877,9 +851,8 @@ trait Namers extends MethodSynthesis {
// if default getters (for constructor defaults) need to be added to that module, here's the namer
// to use. clazz is the ModuleClass. sourceModule works also for classes defined in methods.
val module = clazz.sourceModule
- classAndNamerOfModule get module foreach {
- case (cdef, _) =>
- classAndNamerOfModule(module) = (cdef, templateNamer)
+ for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
+ cda.companionModuleClassNamer = templateNamer
}
ClassInfoType(parents, decls, clazz)
}
@@ -1100,13 +1073,15 @@ trait Namers extends MethodSynthesis {
val module = companionSymbolOf(clazz, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
- classAndNamerOfModule get module match {
- case s @ Some((cdef, nmr)) if nmr != null =>
- moduleNamer = s
- (cdef, nmr)
+ module.attachments.get[ConstructorDefaultsAttachment] match {
+ // by martin: the null case can happen in IDE; this is really an ugly hack on top of an ugly hack but it seems to work
+ // later by lukas: disabled when fixing SI-5975, i think it cannot happen anymore
+ case Some(cda) /*if cma.companionModuleClassNamer == null*/ =>
+ val p = (cda.classWithDefault, cda.companionModuleClassNamer)
+ moduleNamer = Some(p)
+ p
case _ =>
return // fix #3649 (prevent crash in erroneous source code)
- // nmr == null can happen in IDE; this is really an ugly hack on top[ of an ugly hack but it seems to work
}
}
deftParams = cdef.tparams map copyUntypedInvariant
@@ -1144,11 +1119,14 @@ trait Namers extends MethodSynthesis {
clazz.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
- // enter into map from method symbols to default arguments.
- // if compiling the same local block several times (which can happen in interactive mode)
- // we might otherwise not find the default symbol, because the second time it the
- // method symbol will be re-entered in the scope but the default parameter will not.
- defaultParametersOfMethod(meth) += new WeakReference(default)
+ // save the default getters as attachments in the method symbol. If compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time the method
+ // symbol will be re-entered in the scope but the default parameter will not.
+ val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.addAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
}
} else if (baseHasDefault) {
// the parameter does not have a default itself, but the
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 61443faba0..a0c1342026 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -21,8 +21,19 @@ trait NamesDefaults { self: Analyzer =>
import definitions._
import NamesDefaultsErrorsGen._
- val defaultParametersOfMethod =
- perRunCaches.newWeakMap[Symbol, Set[WeakReference[Symbol]]]() withDefaultValue Set()
+ // Default getters of constructors are added to the companion object in the
+ // typeCompleter of the constructor (methodSig). To compute the signature,
+ // we need the ClassDef. To create and enter the symbols into the companion
+ // object, we need the templateNamer of that module class. These two are stored
+ // as an attachment in the companion module symbol
+ class ConstructorDefaultsAttachment(val classWithDefault: ClassDef, var companionModuleClassNamer: Namer)
+
+ // To attach the default getters of local (term-owned) methods to the method symbol.
+ // Used in Namer.enterExistingSym: it needs to re-enter the method symbol and also
+ // default getters, which could not be found otherwise.
+ class DefaultsOfLocalMethodAttachment(val defaultGetters: mutable.Set[Symbol]) {
+ def this(default: Symbol) = this(mutable.Set(default))
+ }
case class NamedApplyInfo(
qual: Option[Tree],
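
The two attachment classes above replace the old side maps (classOfModuleClass, classAndNamerOfModule, defaultParametersOfMethod) by hanging the data off the relevant symbol itself. A minimal stand-alone sketch of that pattern, assuming nothing about the compiler's real Attachments API (Attachable, Sym and LocalDefaults below are hypothetical names, not compiler classes):

import scala.collection.mutable
import scala.reflect.ClassTag

// Simplified stand-in for symbol attachments: at most one value per runtime
// class, looked up by type, the way the diff uses attachments.get[...].
class Attachable {
  private val atts = mutable.Map.empty[Class[_], Any]
  def addAttachment(a: Any): this.type = { atts(a.getClass) = a; this }
  def attachment[T](implicit tag: ClassTag[T]): Option[T] =
    atts.get(tag.runtimeClass).map(_.asInstanceOf[T])
}

final class Sym(val name: String) extends Attachable

// Hypothetical analogue of DefaultsOfLocalMethodAttachment: accumulate the
// default getters of a local method on the method symbol itself.
final class LocalDefaults(val defaultGetters: mutable.Set[Sym]) {
  def this(first: Sym) = this(mutable.Set(first))
}

object AttachmentDemo extends App {
  val meth    = new Sym("f")
  val default = new Sym("f$default$1")
  meth.attachment[LocalDefaults] match {
    case Some(att) => att.defaultGetters += default
    case None      => meth.addAttachment(new LocalDefaults(default))
  }
  println(meth.attachment[LocalDefaults].map(_.defaultGetters.map(_.name)))
  // Some(Set(f$default$1))
}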
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 9b8ddffb49..b1e68e2757 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
package typechecker
import symtab._
-import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC}
+import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, HIDDEN}
import language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
@@ -47,7 +47,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val phaseName: String = "patmat"
- def patmatDebug(msg: String) = println(msg)
+ // TODO: the inliner fails to inline the closures to patmatDebug
+ // private val printPatmat = settings.Ypatmatdebug.value
+ // @inline final def patmatDebug(s: => String) = if (printPatmat) println(s)
def newTransformer(unit: CompilationUnit): Transformer =
if (opt.virtPatmat) new MatchTransformer(unit)
@@ -123,7 +125,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def zero: M[Nothing]
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
@@ -137,7 +138,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
}
*/
@@ -145,6 +145,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val typer: Typer
val matchOwner = typer.context.owner
+ def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportMissingCases(pos: Position, counterExamples: List[String]) = {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
def inMatchMonad(tp: Type): Type
def pureType(tp: Type): Type
final def matchMonadResult(tp: Type): Type =
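
The message shaping in reportMissingCases is easy to see in isolation; a small, compiler-independent restatement (the object and method names below are mine):

object MissingCases {
  // singular "input:" for one counter-example, plural "inputs:" otherwise,
  // mirroring the reportMissingCases helper added above
  def message(counterExamples: List[String]): String = {
    val ceString =
      if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
      else "inputs: " + counterExamples.mkString(", ")
    "match may not be exhaustive.\nIt would fail on the following " + ceString
  }
}

// MissingCases.message(List("Blue"))
//   ==> "match may not be exhaustive.\nIt would fail on the following input: Blue"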
@@ -177,7 +186,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// (that would require more sophistication when generating trees,
// and the only place that emits Matches after typers is for exception handling anyway)
if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
- // patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
+ // patmatDebug ("translating "+ cases.mkString("{", "\n", "}"))
def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
@@ -302,13 +311,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
// if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+ // patmatDebug ("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+
// must use type `tp`, which is provided by extractor's result, not the type expected by binder,
// as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
// (it will later result in a type test when `tp` is not a subtype of `b.info`)
// TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // patmatDebug("changing "+ b +" : "+ b.info +" -> "+ tp);
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
+ // patmatDebug ("changing "+ b +" : "+ b.info +" -> "+ tp)
+ b setInfo tp
+ }
- // patmatDebug("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
val (typeTestTreeMaker, patBinderOrCasted) =
@@ -412,7 +425,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- // patmatDebug("WARNING: Bind tree with unbound symbol "+ patTree)
+ // patmatDebug ("WARNING: Bind tree with unbound symbol "+ patTree)
noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
// case Star(_) | ArrayValue | This => error("stone age pattern relics encountered!")
@@ -794,7 +807,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) =
(cases, Nil)
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
None
// for catch (no need to customize match failure)
@@ -815,7 +828,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
if (currSub ne null) {
- // patmatDebug("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ // patmatDebug ("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
Thread.dumpStack()
}
else currSub = outerSubst >> substitution
@@ -936,7 +949,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
// if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC | HIDDEN
(Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
}
@@ -981,7 +994,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
**/
case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
import TypeTestTreeMaker._
- // patmatDebug("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+ // patmatDebug ("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
lazy val outerTestNeeded = (
!((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
@@ -1105,11 +1118,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
}
+ // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
fixerUpper(owner, scrut.pos){
- val ptDefined = if (isFullyDefined(pt)) pt else NoType
def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- // patmatDebug("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+ // patmatDebug ("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (unchecked, requireSwitch) =
if (settings.XnoPatmatAnalysis.value) (true, false)
@@ -1122,7 +1135,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
(false, false)
}
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked).getOrElse{
if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
if (casesNoSubstOnly nonEmpty) {
@@ -1163,12 +1176,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
t match {
case Function(_, _) if t.symbol == NoSymbol =>
t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- // patmatDebug("new symbol for "+ (t, t.symbol.ownerChain))
+ // patmatDebug ("new symbol for "+ (t, t.symbol.ownerChain))
case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- // patmatDebug("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- // patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor)
d.symbol.lazyAccessor.owner = currentOwner
@@ -1178,7 +1191,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- // patmatDebug("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ // patmatDebug ("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
case _ =>
}
super.traverse(t)
@@ -1210,6 +1223,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// local / context-free
def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree
def _equals(checker: Tree, binder: Symbol): Tree
def _isInstanceOf(b: Symbol, tp: Type): Tree
def and(a: Tree, b: Tree): Tree
@@ -1329,10 +1343,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var currId = 0
}
case class Test(cond: Cond, treeMaker: TreeMaker) {
- // def <:<(other: Test) = cond <:< other.cond
- // def andThen_: (prev: List[Test]): List[Test] =
- // prev.filterNot(this <:< _) :+ this
-
// private val reusedBy = new collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
@@ -1346,38 +1356,20 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
"T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
}
+ // TODO: remove Cond, replace by Prop from Logic
object Cond {
- // def refines(self: Cond, other: Cond): Boolean = (self, other) match {
- // case (Bottom, _) => true
- // case (Havoc , _) => true
- // case (_ , Top) => true
- // case (_ , _) => false
- // }
var currId = 0
}
abstract class Cond {
- // def testedPath: Tree
- // def <:<(other: Cond) = Cond.refines(this, other)
-
val id = { Cond.currId += 1; Cond.currId}
}
- // does not contribute any knowledge
- case object Top extends Cond {override def toString = "T"}
-
-
- // takes away knowledge. e.g., a user-defined guard
- case object Havoc extends Cond {override def toString = "_|_"}
-
- // we know everything! everything!
- // this either means the case is unreachable,
- // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives
- // case object Bottom extends Cond
-
+ case object TrueCond extends Cond {override def toString = "T"}
+ case object FalseCond extends Cond {override def toString = "F"}
case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
- case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
+ case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
object EqualityCond {
private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
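
Top and Havoc give way to the plainer TrueCond/FalseCond lattice. A stand-alone sketch of the resulting algebra (ids and hash-consing omitted), including the empty-collection defaults for /\ and \/ that the diff adjusts further down:

// Simplified model of the condition language used by the approximation:
// TrueCond contributes no knowledge, FalseCond means "definitely not / not
// statically modellable, be pessimistic", And/Or combine conditions structurally.
sealed trait Cond
case object TrueCond  extends Cond { override def toString = "T" }
case object FalseCond extends Cond { override def toString = "F" }
final case class AndCond(a: Cond, b: Cond) extends Cond { override def toString = s"$a/\\$b" }
final case class OrCond(a: Cond, b: Cond)  extends Cond { override def toString = s"($a) \\/ ($b)" }

object CondOps {
  // empty conjunction is vacuously true, empty disjunction is unsatisfiable
  def /\(conds: Iterable[Cond]): Cond =
    if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
  def \/(conds: Iterable[Cond]): Cond =
    if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
}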
@@ -1385,12 +1377,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
}
class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
-
override def toString = testedPath +" == "+ rhs +"#"+ id
}
@@ -1409,11 +1395,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
}
class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
override def toString = testedPath +" : "+ pt +"#"+ id
}
@@ -1436,9 +1417,27 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => false
})
+ object IrrefutableExtractorTreeMaker {
+ // will an extractor with an unapply method of method type `tp` always succeed?
+ // note: this assumes the other side-conditions implied by the extractor are met
+ // (argument of the right type, length check succeeds for unapplySeq,...)
+ def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+ case TypeRef(_, SomeClass, _) => true
+ // probably not useful since this type won't be inferred nor can it be written down (yet)
+ case ConstantType(Constant(true)) => true
+ case _ => false
+ }
+
+ def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol, Substitution)] = xtm match {
+ case ExtractorTreeMaker(extractor, None, nextBinder, subst) if irrefutableExtractorType(extractor.tpe) =>
+ Some(extractor, nextBinder, subst)
+ case _ =>
+ None
+ }
+ }
// returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
- abstract class TreeMakersToConds(val root: Symbol) {
+ class TreeMakersToConds(val root: Symbol) {
def discard() = {
pointsToBound.clear()
trees.clear()
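
The new IrrefutableExtractorTreeMaker recognises extractors that cannot fail because their unapply returns Some[...] (or the constant true). A small user-level example of the kind of extractor that now contributes TrueCond instead of forcing the analysis to give up (User and HasName are made-up names for illustration):

// HasName.unapply returns Some[String], never None, so a match on it is
// irrefutable once the scrutinee already has type User.
final case class User(name: String, age: Int)
object HasName {
  def unapply(u: User): Some[String] = Some(u.name)
}

object IrrefutableDemo extends App {
  val who = User("Ada", 36) match {
    case HasName(n) => n     // single case, yet exhaustive: it always succeeds
  }
  println(who)               // Ada
}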
@@ -1473,20 +1472,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// reverse substitution that would otherwise replace a variable we already encountered by a new variable
// NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
- // patmatDebug("normalize: "+ normalize)
+ // patmatDebug ("normalize subst: "+ normalize)
val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
- // patmatDebug("pointsToBound: "+ pointsToBound)
+ // patmatDebug ("pointsToBound: "+ pointsToBound)
accumSubst >>= okSubst
- // patmatDebug("accumSubst: "+ accumSubst)
+ // patmatDebug ("accumSubst: "+ accumSubst)
}
// hashconsing trees (modulo value-equality)
def unique(t: Tree, tpOverride: Type = NoType): Tree =
trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) => orig // patmatDebug("unique: "+ (t eq orig, orig));
+ case Some(orig) =>
+ // patmatDebug("unique: "+ (t eq orig, orig))
+ orig
case _ =>
trees += t
if (tpOverride != NoType) t setType tpOverride
@@ -1506,27 +1507,20 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
final def binderToUniqueTree(b: Symbol) =
unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
- @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) Top else conds.reduceLeft(AndCond(_, _))
- @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) Havoc else conds.reduceLeft(OrCond(_, _))
+ @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
+ @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
// note that the sequencing of operations is important: must visit in same order as match execution
// binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
- final protected def treeMakerToCond(tm: TreeMaker, condMaker: CondMaker): Cond = {
- updateSubstitution(tm.substitution)
- condMaker(tm)(treeMakerToCond(_, condMaker))
- }
+ final def treeMakerToCond(tm: TreeMaker, handleUnknown: TreeMaker => Cond, updateSubst: Boolean, rewriteNil: Boolean = false): Cond = {
+ if (updateSubst) updateSubstitution(tm.substitution)
- final protected def treeMakerToCondNoSubst(tm: TreeMaker, condMaker: CondMaker): Cond =
- condMaker(tm)(treeMakerToCondNoSubst(_, condMaker))
-
- type CondMaker = TreeMaker => (TreeMaker => Cond) => Cond
- final def makeCond(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = {
tm match {
case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
type Result = Cond
def and(a: Result, b: Result) = AndCond(a, b)
- def outerTest(testedBinder: Symbol, expectedTp: Type) = Top // TODO OuterEqCond(testedBinder, expectedType)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = TrueCond // TODO OuterEqCond(testedBinder, expectedType)
def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
val p = binderToUniqueTree(b); AndCond(NonNullCond(p), TypeCond(p, uniqueTp(pt)))
}
@@ -1536,34 +1530,56 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map recurse)))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map (treeMakerToCond(_, handleUnknown, updateSubst)))))
case ProductExtractorTreeMaker(testedBinder, None, subst) => NonNullCond(binderToUniqueTree(testedBinder))
- case ExtractorTreeMaker(_, _, _, _)
- | GuardTreeMaker(_)
- | ProductExtractorTreeMaker(_, Some(_), _)
- | BodyTreeMaker(_, _) => Havoc
- case SubstOnlyTreeMaker(_, _) => Top
+ case IrrefutableExtractorTreeMaker(_, _, _) =>
+ // the extra condition is None, the extractor's result indicates it always succeeds,
+ // and the potential type-test for the argument is represented by a separate TypeTestTreeMaker
+ TrueCond
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => TrueCond
+ case ConstantType(Constant(false)) => FalseCond
+ case _ => handleUnknown(tm)
+ }
+ case p @ ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
+ p.checkedLength match {
+ // special-case: interpret pattern `List()` as `Nil`
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
+ case Some(0) if rewriteNil && testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
+ EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ case _ => handleUnknown(tm)
+ }
+ case SubstOnlyTreeMaker(_, _) => TrueCond
+ case ProductExtractorTreeMaker(_, Some(_), _) |
+ ExtractorTreeMaker(_, _, _, _) | BodyTreeMaker(_, _) => handleUnknown(tm)
}
}
- final def approximateMatch(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCond(tm, condMaker), tm)) }
+ val constFalse = (_: TreeMaker) => FalseCond
+ val constTrue = (_: TreeMaker) => TrueCond
- final def approximateMatchAgain(cases: List[List[TreeMaker]], condMaker: CondMaker = makeCond): List[List[Test]] = cases.map { _ map (tm => Test(treeMakerToCondNoSubst(tm, condMaker), tm)) }
+ final def approximateMatch(cases: List[List[TreeMaker]], handleUnknown: TreeMaker => Cond = constFalse, rewriteNil: Boolean = false): List[List[Test]] =
+ cases.map { _ map (tm => Test(treeMakerToCond(tm, handleUnknown, updateSubst = true, rewriteNil), tm)) }
+
+ final def approximateMatchAgain(cases: List[List[TreeMaker]], handleUnknown: TreeMaker => Cond = constFalse, rewriteNil: Boolean = false): List[List[Test]] =
+ cases.map { _ map (tm => Test(treeMakerToCond(tm, handleUnknown, updateSubst = false, rewriteNil), tm)) }
}
+
def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
object approximator extends TreeMakersToConds(root)
approximator.approximateMatch(cases)
}
def showTreeMakers(cases: List[List[TreeMaker]]) = {
- patmatDebug("treeMakers:")
- patmatDebug(alignAcrossRows(cases, ">>"))
+ // patmatDebug ("treeMakers:")
+ // patmatDebug (alignAcrossRows(cases, ">>"))
}
def showTests(testss: List[List[Test]]) = {
- patmatDebug("tests: ")
- patmatDebug(alignAcrossRows(testss, "&"))
+ // patmatDebug ("tests: ")
+ // patmatDebug (alignAcrossRows(testss, "&"))
}
}
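
The makeCondOptimistic/makeCondPessimistic pair is folded into a single treeMakerToCond that takes a handleUnknown policy: reachability passes constTrue (assume opaque guards and extractors succeed), exhaustivity passes constFalse (assume they fail, and back off). A toy model of that parameterisation, with tree makers reduced to a two-valued Step type (all names below are mine):

// Toy model: each step of a case is either statically modellable or opaque
// (a user guard, an arbitrary extractor). The shared approximation only
// differs in how it treats the opaque steps.
sealed trait Step
case object Modellable extends Step
case object Opaque     extends Step

object ApproxDemo extends App {
  def approximate(steps: List[Step], handleUnknown: Step => Boolean): Boolean =
    steps.forall {
      case Modellable => true
      case opaque     => handleUnknown(opaque)
    }

  val constTrue  = (_: Step) => true    // reachability: be optimistic about unknowns
  val constFalse = (_: Step) => false   // exhaustivity: be pessimistic about unknowns

  val guardedCase = List(Modellable, Opaque)
  println(approximate(guardedCase, constTrue))   // true  -- still considered reachable
  println(approximate(guardedCase, constFalse))  // false -- not counted as covering inputs
}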
@@ -1599,24 +1615,47 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class Eq(p: Var, q: Const) extends Prop
type Const <: AbsConst
-
trait AbsConst {
+ // when we know V = C, which other equalities must hold
+ // in general, equality to some type implies equality to its supertypes
+ // (this multi-valued kind of equality is necessary for unreachability)
+ // note that we use subtyping as a model for implication between instanceof tests
+ // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ // unfortunately this is not true in general (see e.g. SI-6022)
def implies(other: Const): Boolean
+
+ // does V = C preclude V having value `other`? V = null is an exclusive assignment,
+ // but V = 1 does not preclude V = Int, or V = Any
def excludes(other: Const): Boolean
}
+ type TypeConst <: Const
+ def TypeConst: TypeConstExtractor
+ trait TypeConstExtractor {
+ def apply(tp: Type): Const
+ }
+
+ def NullConst: Const
+
type Var <: AbsVar
trait AbsVar {
// indicate we may later require a prop for V = C
def registerEquality(c: Const): Unit
- // indicates null is part of the domain
- def considerNull: Unit
+ // call this to indicate null is part of the domain
+ def registerNull: Unit
- // compute the domain and return it (call considerNull first!)
+ // can this variable be null?
+ def mayBeNull: Boolean
+
+ // compute the domain and return it (call registerNull first!)
def domainSyms: Option[Set[Sym]]
+ // the symbol for this variable being equal to its statically known type
+ // (only available if registerEquality has been called for that type before)
+ def symForStaticTp: Option[Sym]
+
// for this var, call it V, turn V = C into the equivalent proposition in boolean logic
// registerEquality(c) must have been called prior to this call
// in fact, all equalities relevant to this variable must have been registered
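
The comments added to AbsConst pin down the contract: implies follows (an approximation of) subtyping, and excludes says when two assignments to the same variable cannot both hold. A toy, compiler-independent reading of those rules over a few hand-picked constants (the Const encoding below is made up for illustration, not the compiler's):

sealed trait Const
final case class ValueConst(v: Any)      extends Const   // V = 1, V = "a", ...
final case class TypeConst(name: String) extends Const   // V : Int, V : Any, ...
case object NullConst                    extends Const   // V = null

object ConstDemo extends App {
  // V = 1 implies V : Int and V : Any (equality to a value implies equality
  // to its type and all supertypes); nothing else is implied in this toy.
  def implies(c: Const, other: Const): Boolean = (c, other) match {
    case (a, b) if a == b                       => true
    case (ValueConst(_: Int), TypeConst("Int")) => true
    case (_: ValueConst, TypeConst("Any"))      => true
    case _                                      => false
  }

  // V = null excludes any other value; two distinct values exclude each other;
  // but V = 1 does not exclude V : Int or V : Any.
  def excludes(c: Const, other: Const): Boolean = (c, other) match {
    case (NullConst, _: ValueConst) | (_: ValueConst, NullConst) => true
    case (ValueConst(a), ValueConst(b))                          => a != b
    case _                                                       => false
  }

  println(implies(ValueConst(1), TypeConst("Int")))   // true
  println(excludes(ValueConst(1), TypeConst("Int")))  // false
  println(excludes(NullConst, ValueConst(1)))         // true
}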
@@ -1694,7 +1733,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
//
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
- def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = Statistics.startTimer(patmatAnaVarEq)
val vars = new collection.mutable.HashSet[Var]
@@ -1716,7 +1755,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
props foreach gatherEqualities.apply
- if (considerNull) vars foreach (_.considerNull)
+ if (modelNull) vars foreach (_.registerNull)
val pure = props map rewriteEqualsToProp.apply
@@ -1732,7 +1771,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def hashCode = a.hashCode ^ b.hashCode
}
- // patmatDebug("vars: "+ vars)
+ // patmatDebug ("removeVarEq vars: "+ vars)
vars.foreach { v =>
val excludedPair = new collection.mutable.HashSet[ExcludedPair]
@@ -1743,15 +1782,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// coverage is formulated as: A \/ B \/ C and the implications are
v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+ // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
+ // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
+ v.symForStaticTp foreach { symForStaticTp =>
+ if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
+ else addAxiom(symForStaticTp)
+ }
+
val syms = v.equalitySyms
- // patmatDebug("eqSyms "+(v, syms))
+ // patmatDebug ("eqSyms "+(v, syms))
syms foreach { sym =>
// if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
// (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
val todo = syms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
val (excluded, notExcluded) = todo partition (b => sym.const.excludes(b.const))
val implied = notExcluded filter (b => sym.const.implies(b.const))
- // patmatDebug("implications: "+ (sym.const, excluded, implied, syms))
+ // patmatDebug ("eq axioms for: "+ sym.const)
+ // patmatDebug ("excluded: "+ excluded)
+ // patmatDebug ("implied: "+ implied)
// when this symbol is true, what must hold...
implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
@@ -1764,8 +1812,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- // patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
- // patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
+ // patmatDebug ("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
+ // patmatDebug ("pure:"+ pure.map(p => cnfString(eqFreePropToSolvable(p))).mkString("\n"))
Statistics.stopTimer(patmatAnaVarEq, start)
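
The new axiom about the static type reads, in propositional form: if the variable may be null, (V = null) \/ (V : staticTp); otherwise simply (V : staticTp). A tiny sketch with a simplified Prop type (mine, not the compiler's):

sealed trait Prop
final case class SymP(name: String)   extends Prop   // an equality symbol, e.g. "V = null"
final case class Or(a: Prop, b: Prop) extends Prop

object StaticTpAxiom {
  // mirrors: if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
  //          else addAxiom(symForStaticTp)
  def apply(mayBeNull: Boolean, isNull: Prop, hasStaticTp: Prop): Prop =
    if (mayBeNull) Or(isNull, hasStaticTp) else hasStaticTp
}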
@@ -1780,7 +1828,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// may throw a CNFBudgetExceeded
def propToSolvable(p: Prop) = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), considerNull = false)
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
eqFreePropToSolvable(And(eqAxioms, pure))
}
@@ -1907,12 +1955,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
if (recursionDepthAllowed == 0) models
else {
- // patmatDebug("solving\n"+ cnfString(f))
+ // patmatDebug ("find all models for\n"+ cnfString(f))
val model = findModelFor(f)
// if we found a solution, conjunct the formula with the model's negation and recurse
if (model ne NoModel) {
val unassigned = (vars -- model.keySet).toList
- // patmatDebug("unassigned "+ unassigned +" in "+ model)
+ // patmatDebug ("unassigned "+ unassigned +" in "+ model)
def force(lit: Lit) = {
val model = withLit(findModelFor(dropUnit(f, lit)), lit)
if (model ne NoModel) List(model)
@@ -1921,7 +1969,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val forced = unassigned flatMap { s =>
force(Lit(s, true)) ++ force(Lit(s, false))
}
- // patmatDebug("forced "+ forced)
+ // patmatDebug ("forced "+ forced)
val negated = negateModel(model)
findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
}
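
findAllModels enumerates satisfying assignments by repeatedly negating the model just found (plus forcing each unassigned variable both ways, up to a recursion budget). The semantics it computes can be stated without DPLL; below is a brute-force equivalent over a tiny CNF encoding (signed Ints as literals -- my own encoding for illustration, not the compiler's):

object AllModelsDemo extends App {
  type Lit    = Int                   // +v: variable v is true, -v: v is false
  type Clause = Set[Lit]
  type Model  = Map[Int, Boolean]

  // every clause must contain at least one literal made true by the model
  def satisfies(m: Model, cnf: List[Clause]): Boolean =
    cnf.forall(_.exists(l => m(math.abs(l)) == (l > 0)))

  def allModels(vars: Set[Int], cnf: List[Clause]): List[Model] =
    vars.toList
      .foldLeft(List(Map.empty[Int, Boolean])) { (ms, v) =>
        ms.flatMap(m => List(m + (v -> true), m + (v -> false)))
      }
      .filter(satisfies(_, cnf))

  // (x1 \/ x2) /\ -x1  has the single model  x1 = false, x2 = true
  println(allModels(Set(1, 2), List(Set(1, 2), Set(-1))))
  // List(Map(1 -> false, 2 -> true))
}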
@@ -1944,7 +1992,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def findModelFor(f: Formula): Model = {
@inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
- // patmatDebug("dpll\n"+ cnfString(f))
+ // patmatDebug ("DPLL\n"+ cnfString(f))
val start = Statistics.startTimer(patmatAnaDPLL)
@@ -2006,7 +2054,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private val uniques = new collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
- class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar {
+ class Var(val path: Tree, staticTp: Type) extends AbsVar {
private[this] val id: Int = Var.nextId
// private[this] var canModify: Option[Array[StackTraceElement]] = None
@@ -2018,40 +2066,40 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
// when looking at the domain, we only care about types we can check at run time
- val domainTp: Type = checkableType(fullTp)
+ val staticTpCheckable: Type = checkableType(staticTp)
- private[this] var _considerNull = false
- def considerNull: Unit = { ensureCanModify; if (NullTp <:< domainTp) _considerNull = true }
+ private[this] var _mayBeNull = false
+ def registerNull: Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
// case Some(List(tps: _*)) => domain is exactly tps
// we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[Set[Const]] =
- if (!checked) None
- else {
- val subConsts = enumerateSubtypes(fullTp).map{ tps =>
- tps.toSet[Type].map{ tp =>
- val domainC = TypeConst(tp)
- registerEquality(domainC)
- domainC
- }
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
}
+ }
- val allConsts =
- if (! _considerNull) subConsts
- else {
- registerEquality(NullConst)
- subConsts map (_ + NullConst)
- }
+ val allConsts =
+ if (mayBeNull) {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ } else
+ subConsts
- observed; allConsts
- }
+ observed; allConsts
+ }
- // accessing after calling considerNull will result in inconsistencies
+ // accessing after calling registerNull will result in inconsistencies
lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
@@ -2065,8 +2113,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
- // don't call until all equalities have been registered and considerNull has been called (if needed)
- def describe = toString + ": " + fullTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
+ // don't call until all equalities have been registered and registerNull has been called (if needed)
+ def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
override def toString = "V"+ id
}
@@ -2075,7 +2123,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
// equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
object Const {
- def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear() } // patmatDebug("RESET")
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
private var _nextTypeId = 0
def nextTypeId = {_nextTypeId += 1; _nextTypeId}
@@ -2087,9 +2135,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
- case Some((_, c)) => c
+ case Some((_, c)) =>
+ // patmatDebug ("unique const: "+ (tp, c))
+ c
case _ =>
val fresh = mkFresh
+ // patmatDebug ("uniqued const: "+ (tp, fresh))
uniques(tp) = fresh
fresh
})
@@ -2105,28 +2156,47 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (!t.symbol.isStable) t.tpe.narrow
else trees find (a => a.correspondsStructure(t)(sameValue)) match {
case Some(orig) =>
- // patmatDebug("unique: "+ (orig, orig.tpe))
+ // patmatDebug ("unique tp for tree: "+ (orig, orig.tpe))
orig.tpe
case _ =>
// duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- // patmatDebug("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ // patmatDebug ("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
trees += treeWithNarrowedType
treeWithNarrowedType.tpe
}
}
sealed abstract class Const extends AbsConst {
- protected def tp: Type
+ def tp: Type
protected def wideTp: Type
def isAny = wideTp.typeSymbol == AnyClass
+ // we use subtyping as a model for implication between instanceof tests
+ // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ // unfortunately this is not true in general:
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ private def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
+ val tpValue = tp.typeSymbol.isPrimitiveValueClass
+
+ // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
+ // (and the subtype is respectively a value type or not a value type)
+ // this allows us to reuse subtyping as a model for implication between instanceOf tests
+ // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
+ val tpImpliedNormalizedToAny =
+ if ((tpValue && tpImplied =:= AnyValClass.tpe) ||
+ (!tpValue && tpImplied =:= AnyRefClass.tpe)) AnyClass.tpe
+ else tpImplied
+
+ tp <:< tpImpliedNormalizedToAny
+ }
+
final def implies(other: Const): Boolean = {
val r = (this, other) match {
case (_: ValueConst, _: ValueConst) => this == other // hashconsed
- case (_: ValueConst, _: TypeConst) => tp <:< other.tp
- case (_: TypeConst, _) => tp <:< other.tp
+ case (_: ValueConst, _: TypeConst) => instanceOfTpImplies(tp, other.tp)
+ case (_: TypeConst, _) => instanceOfTpImplies(tp, other.tp)
case _ => false
}
// if(r) patmatDebug("implies : "+(this, other))
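
The AnyVal/AnyRef normalisation exists because static subtyping under-approximates what isInstanceOf actually does: at run time a non-primitive value is an AnyRef regardless of whether its static type conforms to AnyRef. A small runnable illustration of the discrepancy SI-6022 describes:

object InstanceOfDemo extends App {
  val x: Any = (1, 2)                 // a Product (tuples are Products)
  println(x.isInstanceOf[Product])    // true
  println(x.isInstanceOf[AnyRef])     // also true: the runtime test does not
                                      // distinguish AnyRef from Any for
                                      // non-primitive values, which is what
                                      // instanceOfTpImplies compensates for
}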
@@ -2143,12 +2213,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// this causes false negative for unreachability, but that's ok:
// example: val X = 1; val Y = 1; (2: Int) match { case X => case Y => /* considered reachable */ }
case (_: ValueConst, _: ValueConst) => this != other
- case (_: ValueConst, _: TypeConst) => !((tp <:< other.tp) || (other.tp <:< wideTp))
- case (_: TypeConst, _: ValueConst) => !((other.tp <:< tp) || (tp <:< other.wideTp))
- case (_: TypeConst, _: TypeConst) => !((tp <:< other.tp) || (other.tp <:< tp))
+ case (_: ValueConst, _: TypeConst) => !(instanceOfTpImplies(tp, other.tp) || instanceOfTpImplies(other.tp, wideTp))
+ case (_: TypeConst, _: ValueConst) => !(instanceOfTpImplies(other.tp, tp) || instanceOfTpImplies(tp, other.wideTp))
+ case (_: TypeConst, _: TypeConst) => !(instanceOfTpImplies(tp, other.tp) || instanceOfTpImplies(other.tp, tp))
case _ => false
}
- // if(r) patmatDebug("excludes : "+(this, other))
+ // if(r) patmatDebug("excludes : "+(this, this.tp, other, other.tp))
// else patmatDebug("NOT excludes: "+(this, other))
r
}
@@ -2157,8 +2227,23 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// the equals inherited from AnyRef does just this
}
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type) = {
+ // getOrElse to err on the safe side -- all BTS should end in Any, right?
+ val wideTp = tp.widen
+ val clsTp =
+ if (wideTp.typeSymbol.isClass) wideTp
+ else wideTp.baseTypeSeq.toList.find(_.typeSymbol.isClass).getOrElse(AnyClass.tpe)
+ // patmatDebug("Widening to class: "+ (tp, clsTp, tp.widen, tp.widen.baseTypeSeq, tp.widen.baseTypeSeq.toList.find(_.typeSymbol.isClass)))
+ clsTp
+ }
- object TypeConst {
+ object TypeConst extends TypeConstExtractor {
def apply(tp: Type) = {
if (tp =:= NullTp) NullConst
else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
@@ -2172,7 +2257,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
assert(!(tp =:= NullTp))
private[this] val id: Int = Const.nextTypeId
- val wideTp = tp.widen
+ val wideTp = widenToClass(tp)
override def toString = tp.toString //+"#"+ id
}
@@ -2191,10 +2276,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val tp = p.tpe.normalize
if (tp =:= NullTp) NullConst
else {
- val wideTp = {
- if (p.hasSymbol && p.symbol.isStable) tp.asSeenFrom(tp.prefix, p.symbol.owner).widen
- else tp.widen
- }
+ val wideTp = widenToClass(tp)
val narrowTp =
if (tp.isInstanceOf[SingletonType]) tp
@@ -2207,7 +2289,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
singleType(tp.prefix, p.symbol)
case _ =>
- // patmatDebug("unique type for "+(p, Const.uniqueTpForTree(p)))
Const.uniqueTpForTree(p)
}
@@ -2227,7 +2308,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
lazy val NullTp = ConstantType(Constant(null))
case object NullConst extends Const {
- protected def tp = NullTp
+ def tp = NullTp
protected def wideTp = NullTp
def isValue = true
@@ -2241,8 +2322,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def symbolic(t: Cond): Prop = t match {
case AndCond(a, b) => And(symbolic(a), symbolic(b))
case OrCond(a, b) => Or(symbolic(a), symbolic(b))
- case Top => True
- case Havoc => False
+ case TrueCond => True
+ case FalseCond => False
case TypeCond(p, pt) => Eq(Var(p), TypeConst(checkableType(pt)))
case EqualityCond(p, q) => Eq(Var(p), ValueConst(q))
case NonNullCond(p) => if (!modelNull) True else Not(Eq(Var(p), NullConst))
@@ -2266,46 +2347,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// thus, the case is unreachable if there is no model for -(-P /\ C),
// or, equivalently, P \/ -C, or C => P
def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
- // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
- // when approximating the current case (which we hope is reachable), be optimistic about the unknowns
- object reachabilityApproximation extends TreeMakersToConds(prevBinder) {
- def makeCondOptimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
- // for unreachability, let's assume a guard always matches (unless we statically determined otherwise)
- // otherwise, a guarded case would be considered unreachable
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(false)) => Havoc // not the best name; however, symbolically, 'Havoc' becomes 'False'
- case _ => Top
- }
- // similar to a guard, user-defined extractors should not cause us to freak out
- // if we're not 100% sure it does not match (i.e., its result type is None or Constant(false) -- TODO),
- // let's stay optimistic and assume it does
- case ExtractorTreeMaker(_, _, _, _)
- | ProductExtractorTreeMaker(_, Some(_), _) => Top
- // TODO: consider length-checks
- case _ =>
- makeCond(tm)(recurse)
- }
-
- // be pessimistic when approximating the prefix of the current case
- // we hope the prefix fails so that we might get to the current case, which we hope is not dead
- def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
- }
-
val start = Statistics.startTimer(patmatAnaReach)
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
- val testCasesOk = reachabilityApproximation.approximateMatch(cases, reachabilityApproximation.makeCondOptimistic)
- val testCasesFail = reachabilityApproximation.approximateMatchAgain(cases, reachabilityApproximation.makeCondPessimistic)
+ val reachabilityApproximation = new TreeMakersToConds(prevBinder)
+ val testCasesOk = reachabilityApproximation.approximateMatch(cases, reachabilityApproximation.constTrue)
+ val testCasesFail = reachabilityApproximation.approximateMatchAgain(cases, reachabilityApproximation.constFalse)
reachabilityApproximation.discard()
prepareNewAnalysis()
val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, considerNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, considerNull = true)
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
try {
// most of the time eqAxiomsFail == eqAxiomsOk, but the different approximations might cause different variables to disappear in general
@@ -2319,8 +2375,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var reachable = true
var caseIndex = 0
- // patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
- // patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsCNF))
+ // patmatDebug ("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
+ // patmatDebug ("equality axioms:\n"+ cnfString(eqAxiomsCNF))
// invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
// termination: prefixRest.length decreases by 1
@@ -2354,16 +2410,20 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// exhaustivity
- // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- // TODO: domain of feasibly enumerable built-in types (enums, char?)
+ // TODO: domain of other feasibly enumerable built-in types (char?)
def enumerateSubtypes(tp: Type): Option[List[Type]] =
tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
case BooleanClass =>
- // patmatDebug("enum bool "+ tp)
Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
// TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- // patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ // patmatDebug ("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
None
case sym =>
val subclasses = (
@@ -2371,7 +2431,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- // patmatDebug("subclasses "+ (sym, subclasses))
+ // patmatDebug ("enum sealed -- subclasses: "+ (sym, subclasses))
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
@@ -2387,7 +2447,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
})
- // patmatDebug("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ // patmatDebug ("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
Some(validSubTypes)
}
@@ -2407,7 +2467,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
val res = toCheckable(tp)
- // patmatDebug("checkable "+(tp, res))
+ // patmatDebug ("checkable "+(tp, res))
res
}
@@ -2431,37 +2491,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - there are extractor calls (that we can't secretly/soundly) rewrite
val start = Statistics.startTimer(patmatAnaExhaust)
var backoff = false
- object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
- def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
- case p @ ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
- p.checkedLength match {
- // pattern: `List()` (interpret as `Nil`)
- // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil
- case Some(0) if testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
- EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
- case _ =>
- backoff = true
- makeCond(tm)(recurse)
- }
- case ExtractorTreeMaker(_, _, _, _) =>
-// patmatDebug("backing off due to "+ tm)
- backoff = true
- makeCond(tm)(recurse)
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(true)) => Top
- case ConstantType(Constant(false)) => Havoc
- case _ =>
-// patmatDebug("can't statically interpret guard: "+(guard, guard.tpe))
- backoff = true
- Havoc
- }
- case _ =>
- makeCond(tm)(recurse)
- }
- }
- val tests = exhaustivityApproximation.approximateMatch(cases, exhaustivityApproximation.makeCondExhaustivity)
+ val exhaustivityApproximation = new TreeMakersToConds(prevBinder)
+ val tests = exhaustivityApproximation.approximateMatch(cases, {
+ case BodyTreeMaker(_, _) => TrueCond // will be discarded by symbolicCase later
+ case tm =>
+ // patmatDebug("backing off due to "+ tm)
+ backoff = true
+ FalseCond
+ }, rewriteNil = true)
if (backoff) Nil else {
val prevBinderTree = exhaustivityApproximation.binderToUniqueTree(prevBinder)
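
Exhaustivity now uses the shared TreeMakersToConds directly: a BodyTreeMaker becomes TrueCond, anything else that cannot be modelled sets backoff and the analysis is skipped for that match. The rewriteNil flag keeps one common idiom analysable by treating the length-0 List() pattern as equality with Nil; for example:

object RewriteNilDemo {
  // `case List()` is approximated as `xs == Nil`, so the analysis does not
  // have to back off on the Seq extractor and can still check this match.
  def describe(xs: List[Int]): String = xs match {
    case List()  => "empty"
    case x :: _  => "starts with " + x
  }
}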
@@ -2484,15 +2522,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// when does the match fail?
val matchFails = Not(\/(symbolicCases))
+ val vars = gatherVariables(matchFails)
// debug output:
- // patmatDebug("analysing:")
- // showTreeMakers(cases)
- // showTests(tests)
- //
- // val vars = gatherVariables(matchFails)
+ // patmatDebug ("analysing:")
+ showTreeMakers(cases)
+ showTests(tests)
+
// patmatDebug("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
- //
// patmatDebug("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
try {
@@ -2508,7 +2545,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
pruned
} catch {
case e : CNFBudgetExceeded =>
- // patmatDebug(util.Position.formatMessage(prevBinder.pos, "Cannot check match for exhaustivity", false))
+ // patmatDebug (util.Position.formatMessage(prevBinder.pos, "Cannot check match for exhaustivity", false))
// e.printStackTrace()
Nil // CNF budget exceeded
}
@@ -2529,13 +2566,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
- case class NegativeExample(nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
// require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
override def toString = {
val negation =
if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
- else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("in (", ", ", ")")
- "<not "+ negation +">"
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
+ "(x: "+ eqTo +" forSome x not in "+ negation +")"
}
}
case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
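
NegativeExample now carries the constant the variable is equal to, so the rendered counter-example names a witness type instead of only listing exclusions. A compiler-independent restatement of the new formatting (object and method names are mine):

object NegativeExampleFormat {
  // e.g. negativeExample("Color", List("Red", "Green"))
  //        ==> "(x: Color forSome x not in (Green, Red))"
  def negativeExample(eqTo: String, nonTrivialNonEqualTo: List[String]): String = {
    val negation =
      if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head
      else nonTrivialNonEqualTo.sorted.mkString("(", ", ", ")")
    "(x: " + eqTo + " forSome x not in " + negation + ")"
  }
}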
@@ -2581,7 +2618,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
- v +"(="+ v.path +": "+ v.domainTp +") "+ assignment
+ v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
}.mkString("\n")
def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model))
@@ -2598,13 +2635,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// ...
val varAssignment = modelToVarAssignment(model)
- // patmatDebug("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+ // patmatDebug ("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
// chop a path into a list of symbols
def chop(path: Tree): List[Symbol] = path match {
case Ident(_) => List(path.symbol)
case Select(pre, name) => chop(pre) :+ path.symbol
- case _ => // patmatDebug("don't know how to chop "+ path)
+ case _ =>
+ // patmatDebug("don't know how to chop "+ path)
Nil
}
@@ -2645,8 +2683,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// node in the tree that describes how to construct a counter-example
case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
- private lazy val prunedEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
- private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.domainTp }).typeSymbol.primaryConstructor
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
+ private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
@@ -2663,7 +2702,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def toCounterExample(beBrief: Boolean = false): CounterExample =
if (!allFieldAssignmentsLegal) NoExample
else {
- // patmatDebug("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ // patmatDebug ("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
val res = prunedEqualTo match {
// a definite assignment to a value
case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
@@ -2671,9 +2710,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// constructor call
// or we did not gather any information about equality but we have information about the fields
// --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
- case _ if cls != NoSymbol &&
- ( prunedEqualTo.nonEmpty
- || (fields.nonEmpty && !isPrimitiveValueClass(cls) && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+ case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
+ ( uniqueEqualTo.nonEmpty
+ || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
@inline def args(brevity: Boolean = beBrief) = {
// figure out the constructor arguments from the field assignment
@@ -2694,13 +2733,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// negative information
case Nil if nonTrivialNonEqualTo.nonEmpty =>
// negation tends to get pretty verbose
- if (beBrief) WildcardExample else NegativeExample(nonTrivialNonEqualTo)
+ if (beBrief) WildcardExample
+ else {
+ val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
+ NegativeExample(eqTo, nonTrivialNonEqualTo)
+ }
// not a valid counter-example, possibly since we have a definite type but there was a field mismatch
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
case _ => NoExample
}
- // patmatDebug("described as: "+ res)
+ // patmatDebug ("described as: "+ res)
res
}
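
The VariableAssignment machinery above is what turns a solver model into the counter-examples reported by the exhaustivity checker. A minimal illustration (the warning wording is a sketch based on the message built further down in this file):

    def f(x: Option[Int]) = x match {
      case Some(i) => i
    }
    // warning: match may not be exhaustive.
    // It would fail on the following input: None
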
@@ -2735,16 +2778,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val cond = test.cond
def simplify(c: Cond): Set[Cond] = c match {
- case AndCond(a, b) => simplify(a) ++ simplify(b)
- case OrCond(_, _) => Set(Havoc) // TODO: supremum?
- case NonNullCond(_) => Set(Top) // not worth remembering
- case _ => Set(c)
+ case AndCond(a, b) => simplify(a) ++ simplify(b)
+ case OrCond(_, _) => Set(FalseCond) // TODO: make more precise
+ case NonNullCond(_) => Set(TrueCond) // not worth remembering
+ case _ => Set(c)
}
val conds = simplify(cond)
- if (conds(Havoc)) false // stop when we encounter a havoc
+ if (conds(FalseCond)) false // stop when we encounter a definite "no" or a "not sure"
else {
- val nonTrivial = conds filterNot (_ == Top)
+ val nonTrivial = conds filterNot (_ == TrueCond)
if (nonTrivial nonEmpty) {
tested ++= nonTrivial
@@ -2753,7 +2796,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case (priorTest, deps) =>
((simplify(priorTest.cond) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
(nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
- ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
} foreach {
case (priorTest, _) =>
// if so, note the dependency in both tests
@@ -2771,7 +2814,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
tested.clear()
tests dropWhile storeDependencies
}
- // patmatDebug("dependencies: "+ dependencies)
+ // patmatDebug ("dependencies: "+ dependencies)
// find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
// then, collapse these contiguous sequences of reusing tests
@@ -2789,7 +2832,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// if there's no sharing, simply map to the tree makers corresponding to the tests
var currDeps = Set[Cond]()
val (sharedPrefix, suffix) = tests span { test =>
- (test.cond eq Top) || (for(
+ (test.cond == TrueCond) || (for(
reusedTest <- test.reuses;
nextDeps <- dependencies.get(reusedTest);
diff <- (nextDeps -- currDeps).headOption;
@@ -2806,23 +2849,23 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
// patmatDebug("sharedPrefix: "+ sharedPrefix)
- // if the shared prefix contains interesting conditions (!= Top)
+ // if the shared prefix contains interesting conditions (!= TrueCond)
// and the last of such interesting shared conditions reuses another treemaker's test
// replace the whole sharedPrefix by a ReusingCondTreeMaker
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond == TrueCond).headOption;
lastReused <- lastShared.reuses)
yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
}
- collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains TrueCond-tests, which are dropped above)
}
okToCall = true // TODO: remove (debugging)
// replace original treemakers that are reused (as determined when computing collapsed),
// by ReusedCondTreeMakers
val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
-// patmatDebug("after CSE:")
-// showTreeMakers(reusedMakers)
+ // patmatDebug ("after CSE:")
+ showTreeMakers(reusedMakers)
reusedMakers
}
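
To see what the dependency tracking in doCSE buys, consider two cases that open with the same type test; a source-level sketch (the actual reuse happens on the generated tree makers, not on source):

    xs match {
      case ys: List[_] if ys.isEmpty => 0
      case ys: List[_]               => ys.length   // reuses the List[_] test of the previous case
      case _                         => -1
    }
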
@@ -2905,55 +2948,197 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def alternativesSupported: Boolean
+ // when collapsing guarded switch cases we may sometimes need to jump to the default case
+ // however, that's not supported in exception handlers, so when we can't jump when we need it, don't emit a switch
+ // TODO: make more fine-grained, as we don't always need to jump
+ def canJump: Boolean
+
+ def unchecked: Boolean
+
+
def isDefault(x: CaseDef): Boolean
def defaultSym: Symbol
def defaultBody: Tree
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+ object GuardAndBodyTreeMakers {
+ def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
+ tms match {
+ case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
+ case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
+ case _ => None
+ }
+ }
+ }
+
+ private val defaultLabel: Symbol = NoSymbol.newLabel(freshName("default"), NoPosition) setFlag SYNTH_CASE
+
+ /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
+ *
+ * `{case C if(G_i) => B_i | case C'_j if G'_j => B'_j}*` is rewritten to
+ * `case C => {if(G_i) B_i}*` ++ rewrite({case C'_j if G'_j => B'_j}*)
+ *
+ */
+ private def collapseGuardedCases(cases: List[CaseDef]) = {
+ // requires(switchesOnSameConst.forall(caseChecksSameConst(switchesOnSameConst.head)))
+ def collapse(switchesOnSameConst: List[CaseDef]): List[CaseDef] =
+ if (switchesOnSameConst.tail.isEmpty && (switchesOnSameConst.head.guard == EmptyTree)) switchesOnSameConst
+ else {
+ val commonPattern = switchesOnSameConst.head.pat
+
+ // jump to default case (either the user-supplied one or the synthetic one)
+ // unless we're collapsing the default case, then re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
+ val jumpToDefault: Tree =
+ if (!canJump || isDefault(CaseDef(commonPattern, EmptyTree, EmptyTree))) defaultBody
+ else Apply(Ident(defaultLabel), Nil)
+
+ val guardedBody = switchesOnSameConst.foldRight(jumpToDefault){
+ // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
+ // --> replace jumpToDefault by the un-guarded case's body
+ case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
+ case (CaseDef(_, g, b), els) if g != EmptyTree => If(g, b, els)
+ // error: the un-guarded case did not come last
+ case _ =>
+ return switchesOnSameConst
+ }
+
+ // if the cases that we're going to collapse bind variables,
+ // must replace them by the single binder introduced by the collapsed case
+ val binders = switchesOnSameConst.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
+ val (pat, guardedBodySubst) =
+ if (binders.isEmpty) (commonPattern, guardedBody)
+ else {
+ // create a single fresh binder to subsume the old binders (and their types)
+ // TODO: I don't think the binder's types can actually be different (due to checks in caseChecksSameConst)
+ // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
+ val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)))
+
+ // the patterns in switchesOnSameConst are equal (according to caseChecksSameConst) and modulo variable-binding
+ // we can thus safely pick the first one arbitrarily, provided we correct binding
+ val origPatWithoutBind = commonPattern match {
+ case Bind(b, orig) => orig
+ case o => o
+ }
+ // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
+ val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
+ (Bind(binder, origPatWithoutBind), unifiedBody)
+ }
+
+ List(CaseDef(pat, EmptyTree, guardedBodySubst))
+ }
+
+ @annotation.tailrec def partitionAndCollapse(cases: List[CaseDef], accum: List[CaseDef] = Nil): List[CaseDef] =
+ if (cases.isEmpty) accum
+ else {
+ val (same, others) = cases.tail partition (caseChecksSameConst(cases.head))
+ partitionAndCollapse(others, accum ++ collapse(cases.head :: same))
+ }
+
+ // common case: no rewrite needed when there are no guards
+ if (cases.forall(c => c.guard == EmptyTree)) cases
+ else partitionAndCollapse(cases)
+ }
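
A source-level sketch of the rewrite collapseGuardedCases performs (conceptual only; the real transformation rewrites the emitted switch cases and jumps to the default label instead of duplicating its body):

    n match {
      case 1 if p(n) => "a"
      case 1 if q(n) => "b"
      case 2         => "c"
      case _         => "d"
    }

    // becomes, conceptually:

    n match {
      case 1 => if (p(n)) "a" else if (q(n)) "b" else "d"   // the final "d" stands for the jump to default
      case 2 => "c"
      case _ => "d"
    }
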
+
+ private def caseChecksSameConst(x: CaseDef)(y: CaseDef) = (x, y) match {
+ // regular switch
+ case (CaseDef(Literal(Constant(cx)), _, _), CaseDef(Literal(Constant(cy)), _, _)) => cx == cy
+ case (CaseDef(Ident(nme.WILDCARD), _, _), CaseDef(Ident(nme.WILDCARD), _, _)) => true
+ // type-switch for catch
+ case (CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpX)), _, _), CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpY)), _, _)) => tpX.tpe =:= tpY.tpe
+ case _ => false
+ }
+
+ private def checkNoGuards(cs: List[CaseDef]) =
+ if (cs.exists{case CaseDef(_, g, _) => g != EmptyTree case _ => false}) None
+ else Some(cs)
+
+ // requires(cs.forall(_.guard == EmptyTree))
+ private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
+ var cases = cs
+ var unreachable: Option[CaseDef] = None
+
+ while (cases.nonEmpty && unreachable.isEmpty) {
+ if (isDefault(cases.head) && cases.tail.nonEmpty) unreachable = Some(cases.tail.head)
+ else unreachable = cases.tail.find(caseChecksSameConst(cases.head))
+
+ cases = cases.tail
+ }
+
+ unreachable
+ }
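
The unreachableCase check above backs warnings such as the following minimal illustration (the position reported is the duplicated case's body):

    (n: Int) match {
      case 1 => "one"
      case 1 => "uno"     // reported as unreachable: same constant as the case above
      case _ => "other"
    }
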
+
// empty list ==> failure
def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] = {
val caseDefs = cases map { case (scrutSym, makers) =>
makers match {
// default case
- case (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(defaultCase(scrutSym, btm.substitution(body)))
+ case GuardAndBodyTreeMakers(guard, body) =>
+ Some(defaultCase(scrutSym, guard, body))
// constant (or typetest for typeSwitch)
- case SwitchableTreeMaker(pattern) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
- Some(CaseDef(pattern, EmptyTree, btm.substitution(body)))
+ case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
+ Some(CaseDef(pattern, guard, body))
// alternatives
- case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil if alternativesSupported =>
- val casePatterns = altss map {
+ case AlternativesTreeMaker(_, altss, _) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
+ val switchableAlts = altss map {
case SwitchableTreeMaker(pattern) :: Nil =>
Some(pattern)
case _ =>
None
}
- sequence(casePatterns) map { patterns =>
- val substedBody = btm.substitution(body)
- CaseDef(Alternative(patterns), EmptyTree, substedBody)
+ // succeed if they were all switchable
+ sequence(switchableAlts) map { switchableAlts =>
+ CaseDef(Alternative(switchableAlts), guard, body)
}
- case _ => // patmatDebug("can't emit switch for "+ makers)
+ case _ =>
+ // patmatDebug("can't emit switch for "+ makers)
None //failure (can't translate pattern to a switch)
}
}
(for(
- caseDefs <- sequence(caseDefs)) yield
- if (caseDefs exists isDefault) caseDefs
- else {
- caseDefs :+ defaultCase()
+ caseDefsWithGuards <- sequence(caseDefs);
+ collapsed = collapseGuardedCases(caseDefsWithGuards);
+ caseDefs <- checkNoGuards(collapsed)) yield {
+ if (!unchecked)
+ unreachableCase(caseDefs) foreach (cd => reportUnreachable(cd.body.pos))
+
+ // if we rewrote, we may need the default label to jump there (but then again, we may not)
+ // TODO: make more precise; we don't need the default label if
+ // - all collapsed cases included an un-guarded case (some of the guards of each case will always be true)
+ // - or: there was no default case (if all the guards of a case fail, it's a matcherror for sure)
+ val needDefaultLabel = (collapsed != caseDefsWithGuards)
+
+ def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
+ if (needDefaultLabel && canJump) deriveCaseDef(cd){ b =>
+ // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
+ defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
+ LabelDef(defaultLabel, Nil, b)
+ } else cd
+
+ (caseDefs partition isDefault) match {
+ case (Nil, caseDefs) => caseDefs :+ wrapInDefaultLabelDef(defaultCase())
+ case (default :: Nil, caseDefs) if canJump || !needDefaultLabel =>
+ // we either didn't collapse (and thus definitely didn't have to emit a jump),
+ // or we canJump (and then the potential jumps in collapsed are ok)
+ caseDefs :+ wrapInDefaultLabelDef(default)
+ case _ => Nil
+ // TODO: if (canJump) error message (but multiple defaults should be caught by unreachability)
+ // if (!canJump) we got ourselves in the situation where we might need to emit a jump when we can't (in exception handler)
+ // --> TODO: refine the condition to detect whether we actually really needed to jump, but this seems relatively rare
}
+ }
) getOrElse Nil
}
}
- class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
val alternativesSupported = true
+ val canJump = true
object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
@@ -2975,13 +3160,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def defaultSym: Symbol = scrutSym
def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- DEFAULT ==> body
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (DEFAULT IF guard) ==> body
}}
}
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
@@ -3001,8 +3186,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// for the catch-cases in a try/catch
private object typeSwitchMaker extends SwitchMaker {
+ val unchecked = false
def switchableTpe(tp: Type) = true
val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
+ val canJump = false
// TODO: there are more treemaker-sequences that can be handled by type tests
// analyze the result of approximateTreeMaker rather than the TreeMaker itself
@@ -3024,8 +3211,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
def defaultBody: Tree = Throw(CODE.REF(defaultSym))
- def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) ==> body
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
}}
}
@@ -3069,34 +3256,34 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
matchEnd setInfo MethodType(List(matchRes), restpe)
def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag SYNTH_CASE
- var nextCase = newCaseSym
- def caseDef(mkCase: Casegen => Tree): Tree = {
- val currCase = nextCase
- nextCase = newCaseSym
- val casegen = new OptimizedCasegen(matchEnd, nextCase, restpe)
- LabelDef(currCase, Nil, mkCase(casegen))
+ var _currCase = newCaseSym
+
+ val caseDefs = cases map { (mkCase: Casegen => Tree) =>
+ val currCase = _currCase
+ val nextCase = newCaseSym
+ _currCase = nextCase
+
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase, restpe)))
}
- def catchAll = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
- // must jump to matchEnd, use result generated by matchFailGen (could be `FALSE` for isDefinedAt)
- LabelDef(nextCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
- // don't cast the arg to matchEnd when using PartialFun synth in uncurry, since it won't detect the throw (see gen.withDefaultCase)
- // the cast is necessary when using typedMatchAnonFun-style PartialFun synth:
- // (_asInstanceOf(matchFailGen(scrutRef), restpe))
- } toList
+ // must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+ val catchAllDef = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+
+ LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ } toList // at most 1 element
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
// the assumption is once we encounter a case, the remainder of the block will consist of cases
// the prologue may be empty, usually it is the valdef that stores the scrut
// val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
-
- // scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
Block(
- scrutDef ++ (cases map caseDef) ++ catchAll,
+ scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
)
}
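
The Block assembled above, one LabelDef per case chained to the next plus an optional trailing catch-all, corresponds roughly to the following lowering (a pseudocode sketch, not the exact trees the casegen emits):

    // x1 is the scrutinee val (scrutDef); case1..caseN and matchEnd are label defs
    val x1 = scrut
    case1(){ if (<pattern 1 matches x1>) matchEnd(<body 1>) else case2() }
    case2(){ if (<pattern 2 matches x1>) matchEnd(<body 2>) else case3() }
    case3(){ matchEnd(matchFail(x1)) }     // catchAllDef, present only if matchFailGen is defined
    matchEnd(matchRes){ matchRes }
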
@@ -3166,16 +3353,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
if (!unchecked) {
unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- typer.context.unit.warning(cases(caseIndex).last.pos, "unreachable code")
+ reportUnreachable(cases(caseIndex).last.pos)
}
- }
- val counterExamples = if (unchecked) Nil else exhaustive(prevBinder, cases, pt)
- if (counterExamples.nonEmpty) {
- val ceString =
- if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
- else "inputs: " + counterExamples.mkString(", ")
-
- typer.context.unit.warning(prevBinder.pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ val counterExamples = exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty)
+ reportMissingCases(prevBinder.pos, counterExamples)
}
val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 119bb0852c..7318538de7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -536,7 +536,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred && other.isJavaDefined && {
+ !other.isDeferred && other.isJavaDefined && !sym.enclClass.isSubClass(other.enclClass) && {
// #3622: erasure operates on uncurried types --
// note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
// !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erroneous or inaccessible type - check whether that's still the case!
@@ -1150,7 +1150,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
nonSensiblyNew()
else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
nonSensiblyNew()
- else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y
+ else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
if (isEitherNullable)
nonSensible("non-null ", false)
else
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index f01e095856..5465a3b47f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -196,14 +196,14 @@ trait SyntheticMethods extends ast.TreeDSL {
* (this.underlying == that.underlying
*/
def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
- equalsCore(m, List(clazz.firstParamAccessor))
+ equalsCore(m, List(clazz.derivedValueClassUnbox))
}
/** The hashcode method for value classes
* def hashCode(): Int = this.underlying.hashCode
*/
def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
- Select(mkThisSelect(clazz.firstParamAccessor), nme.hashCode_)
+ Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
}
/** The _1, _2, etc. methods to implement ProductN.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 69d3fd7f47..5241974793 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -47,7 +47,6 @@ trait Typers extends Modes with Adaptations with Tags {
def resetTyper() {
//println("resetTyper called")
resetContexts()
- resetNamer()
resetImplicits()
transformed.clear()
}
@@ -1380,7 +1379,7 @@ trait Typers extends Modes with Adaptations with Tags {
for (stat <- body)
if (!treeInfo.isAllowedInUniversalTrait(stat) && !isUnderlyingAcc(stat.symbol))
unit.error(stat.pos,
- if (stat.symbol hasFlag PARAMACCESSOR) "illegal parameter for value class"
+ if (stat.symbol != null && (stat.symbol hasFlag PARAMACCESSOR)) "illegal parameter for value class"
else "this statement is not allowed in value class: " + stat)
case x =>
unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
@@ -1753,6 +1752,12 @@ trait Typers extends Modes with Adaptations with Tags {
if (clazz.info.firstParent.typeSymbol == AnyValClass)
validateDerivedValueClass(clazz, body1)
+ if (clazz.isTrait) {
+ for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
+ unit.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
+ }
+ }
+
treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
}
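
A minimal example of code that now triggers the warning added above (the early definition lives in a trait, so it cannot be initialized before the superclass):

    trait Greeting { val name: String; val msg = "hello, " + name }
    trait English extends { val name = "Bob" } with Greeting
    // warning: Implementation restriction: early definitions in traits are not
    // initialized before the super class is initialized.
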
@@ -1833,7 +1838,7 @@ trait Typers extends Modes with Adaptations with Tags {
val params = fn.tpe.params
val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
+ assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -2036,7 +2041,7 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
// At this point in AnyVal there is no supercall, which will blow up
// in computeParamAliases; there's nothing to be computed for Anyval anyway.
if (meth.isPrimaryConstructor)
@@ -3103,66 +3108,67 @@ trait Typers extends Modes with Adaptations with Tags {
val otpe = fun.tpe
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ //
+ def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
+ case MethodType(param :: _, _) =>
+ (Nil, param.tpe)
+ case PolyType(tparams, restpe) =>
+ createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
+ // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
+ case OverloadedType(_, _) =>
+ OverloadedUnapplyError(fun)
+ (Nil, ErrorType)
+ case _ =>
+ UnapplyWithSingleArgError(fun)
+ (Nil, ErrorType)
+ }
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
+ val unapp = unapplyMember(otpe)
+ val unappType = otpe.memberType(unapp)
+ val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
+ val arg = Ident(argDummy) setType pt
val uncheckedTypeExtractor =
if (unappType.paramTypes.nonEmpty)
extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
else None
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
+ if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
+ //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
- val typer1 = newTyper(unapplyContext)
+ val typer1 = newTyper(unapplyContext)
val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ arg.tpe = pattp.substSym(freeVars, skolems)
+ argDummy setInfo arg.tpe
+ }
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ // setType null is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
- if (fun1.tpe.isErroneous) {
- duplErrTree
- } else {
- val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
- val formals1 = formalTypes(formals0, args.length)
- if (sameLength(formals1, args)) {
- val args1 = typedArgs(args, mode, formals0, formals1)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
- val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ if (fun1.tpe.isErroneous) duplErrTree
+ else {
+ val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
+ val formals1 = formalTypes(formals0, args.length)
+
+ if (!sameLength(formals1, args)) duplErrorTree(WrongNumberArgsPatternError(tree, fun))
+ else {
+ val args1 = typedArgs(args, mode, formals0, formals1)
+ // This used to be the following (failing) assert:
+ // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
+ // I modified as follows. See SI-1048.
+ val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+
+ val itype = glb(List(pt1, arg.tpe))
+ arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
// if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
@@ -3170,9 +3176,8 @@ trait Typers extends Modes with Adaptations with Tags {
// also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- } else
- duplErrorTree(WrongNumberArgsPatternError(tree, fun))
- }
+ }
+ }
}
def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
@@ -4024,8 +4029,7 @@ trait Typers extends Modes with Adaptations with Tags {
ReturnWithoutTypeError(tree, enclMethod.owner)
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
-
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4426,7 +4430,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!qual.tpe.widen.isErroneous) {
if ((mode & QUALmode) != 0) {
- val lastTry = missingHook(qual.tpe.typeSymbol, name)
+ val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
}
NotAMemberError(tree, qual, name)
@@ -4615,10 +4619,33 @@ trait Typers extends Modes with Adaptations with Tags {
if (impSym.exists) {
var impSym1: Symbol = NoSymbol
var imports1 = imports.tail
+
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
def ambiguousImport() = {
- if (!(imports.head.qual.tpe =:= imports1.head.qual.tpe && impSym == impSym1))
- ambiguousError(
- "it is imported twice in the same scope by\n"+imports.head + "\nand "+imports1.head)
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imports.head.qual.tpe
+ def t2 = imports1.head.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType impSym
+ def mt2 = t2 memberType impSym1
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ if (t1 =:= t2 && impSym == impSym1)
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
+ else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
+ else {
+ log(s"Import is genuinely ambiguous: !($t1 =:= $t2)")
+ ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
+ }
}
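
A minimal example of the situation the relaxed ambiguity check above accepts, the same one sketched in the comment: both imports resolve to types that normalize to the same type, so the reference is no longer rejected as ambiguous:

    package object foo { type InputStream = java.io.InputStream }

    object Test {
      import foo._, java.io._
      def read(in: InputStream) = ()   // previously rejected as an ambiguous reference
    }
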
while (errorContainer == null && !imports1.isEmpty &&
(!imports.head.isExplicitImport(name) ||
@@ -4645,7 +4672,7 @@ trait Typers extends Modes with Adaptations with Tags {
log("Allowing empty package member " + name + " due to settings.")
else {
if ((mode & QUALmode) != 0) {
- val lastTry = missingHook(rootMirror.RootClass, name)
+ val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
}
if (settings.debug.value) {
@@ -4687,7 +4714,10 @@ trait Typers extends Modes with Adaptations with Tags {
)
val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
// assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- stabilize(tree2, pre2, mode, pt)
+ val tree3 = stabilize(tree2, pre2, mode, pt)
+ // SI-5967 Important to replace param type A* with Seq[A] when seen from a reference, to avoid
+ // inference errors in pattern matching.
+ tree3 setType dropRepeatedParamType(tree3.tpe)
}
}
}
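
A minimal illustration of SI-5967, which the dropRepeatedParamType call above addresses: a reference to a repeated parameter used as a pattern-match scrutinee must be seen as Seq[A] rather than A* for the patterns to typecheck:

    def first(xs: Int*): Int = xs match {
      case Seq(x, _*) => x
      case _          => 0
    }
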
@@ -4866,9 +4896,10 @@ trait Typers extends Modes with Adaptations with Tags {
for (cdef <- catches1 if cdef.guard.isEmpty) {
def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
cdef.pat match {
- case Bind(name, Ident(_)) => warn(name)
- case Ident(name) => warn(name)
+ case Bind(name, i@Ident(_)) if unbound(i) => warn(name)
+ case i@Ident(name) if unbound(i) => warn(name)
case _ =>
}
}
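
With the unbound check added above, the catch-all warning only fires when the pattern variable does not refer to an existing value; a minimal illustration (body and recover() are placeholders):

    try body catch { case e => recover() }             // still warns: catches all Throwables
    try body catch { case e: Throwable => recover() }  // explicit type, no warning

    val Timeout = new Exception
    try body catch { case Timeout => recover() }       // stable identifier pattern bound to an
                                                        // existing value: no longer warned about
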
@@ -4978,7 +5009,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedTypeApply(tree, mode, fun1, args1)
case Apply(Block(stats, expr), args) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt)
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
case Apply(fun, args) =>
typedApply(fun, args) match {
@@ -5082,7 +5113,7 @@ trait Typers extends Modes with Adaptations with Tags {
case SelectFromTypeTree(qual, selector) =>
val qual1 = typedType(qual, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual)
+ if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(qual1, selector)
case CompoundTypeTree(templ) =>
@@ -5131,7 +5162,7 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
- val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass)
+ val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass))
Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
@@ -5187,7 +5218,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
- Statistics.popTimerClass(byTypeNanos, startByType)
+ Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -5375,10 +5406,11 @@ object TypersStats {
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
- val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos)
- val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos)
- val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos)
- val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos)
- val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter(""))
- val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos)
+ val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
+ val byTypeStack = Statistics.newTimerStack()
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 4c20d14406..ad936ac39d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -23,6 +23,14 @@ trait Unapplies extends ast.TreeDSL
private val unapplyParamName = nme.x_0
+
+ // In the typeCompleter (templateSig) of a case class (resp. its module),
+ // synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
+ // their signatures, the corresponding ClassDef is needed. During naming (in
+ // `enterClassDef`), the case class ClassDef is added as an attachment to the
+ // moduleClass symbol of the companion module.
+ class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
+
/** returns type list for return type of the extraction */
def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
assert(ufn.isMethod, ufn)
@@ -197,42 +205,61 @@ trait Unapplies extends ast.TreeDSL
)
}
+ /**
+ * Generates copy methods for case classes. Copy only has defaults on the first
+ * parameter list, as of SI-5009.
+ *
+ * The parameter types of the copy method need to be exactly the same as the parameter
+ * types of the primary constructor. Just copying the TypeTree is not enough: a type `C`
+ * might refer to something else *inside* the class (i.e. as parameter type of `copy`)
+ * than *outside* the class (i.e. in the class parameter list).
+ *
+ * One such example is t0054.scala:
+ * class A {
+ * case class B(x: C) extends A { def copy(x: C = x) = ... }
+ * class C {} ^ ^
+ * } (1) (2)
+ *
+ * The reference (1) to C is `A.this.C`. The reference (2) is `B.this.C` - not the same.
+ *
+ * This is fixed with a hack currently. `Unapplies.caseClassCopyMeth`, which creates the
+ * copy method, uses empty `TypeTree()` nodes for parameter types.
+ *
+ * In `Namers.enterDefDef`, the copy method gets a special type completer (`enterCopyMethod`).
+ * Before computing the body type of `copy`, the class parameter types are assigned the copy
+ * method parameters.
+ *
+ * This attachment class stores the copy method parameter ValDefs as an attachment in the
+ * ClassDef of the case class.
+ */
def caseClassCopyMeth(cdef: ClassDef): Option[DefDef] = {
def isDisallowed(vd: ValDef) = isRepeatedParamType(vd.tpt) || isByNameParamType(vd.tpt)
- val cparamss = constrParamss(cdef)
- val flat = cparamss.flatten
+ val classParamss = constrParamss(cdef)
- if (cdef.symbol.hasAbstractFlag || (flat exists isDisallowed)) None
+ if (cdef.symbol.hasAbstractFlag || mexists(classParamss)(isDisallowed)) None
else {
+ def makeCopyParam(vd: ValDef, putDefault: Boolean) = {
+ val rhs = if (putDefault) toIdent(vd) else EmptyTree
+ val flags = PARAM | (vd.mods.flags & IMPLICIT) | (if (putDefault) DEFAULTPARAM else 0)
+ // empty tpt: see comment above
+ val tpt = atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt)
+ treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
+ }
+
val tparams = cdef.tparams map copyUntypedInvariant
- // the parameter types have to be exactly the same as the constructor's parameter types; so it's
- // not good enough to just duplicated the (untyped) tpt tree; the parameter types are removed here
- // and re-added in ``finishWith'' in the namer.
- def paramWithDefault(vd: ValDef) =
- treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
-
- val (copyParamss, funParamss) = cparamss match {
- case Nil => (Nil, Nil)
+ val paramss = classParamss match {
+ case Nil => Nil
case ps :: pss =>
- (List(ps.map(paramWithDefault)), mmap(pss)(p => copyUntyped[ValDef](p).copy(rhs = EmptyTree)))
+ ps.map(makeCopyParam(_, putDefault = true)) :: mmap(pss)(makeCopyParam(_, putDefault = false))
}
val classTpe = classType(cdef, tparams)
- val bodyTpe = funParamss.foldRight(classTpe)((params, restp) => gen.scalaFunctionConstr(params.map(_.tpt), restp))
-
- val argss = copyParamss match {
- case Nil => Nil
- case ps :: _ => mmap(ps :: funParamss)(toIdent)
- }
- def mkFunction(vparams: List[ValDef], body: Tree) = Function(vparams, body)
- val body = funParamss.foldRight(New(classTpe, argss): Tree)(mkFunction)
- // [Eugene++] no longer compiles after I moved the `Function` case class into scala.reflect.internal
- // val body = funParamss.foldRight(New(classTpe, argss): Tree)(Function)
-
- Some(atPos(cdef.pos.focus)(
- DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, copyParamss, bodyTpe,
- body)
- ))
+ val argss = mmap(paramss)(toIdent)
+ val body: Tree = New(classTpe, argss)
+ val copyDefDef = atPos(cdef.pos.focus)(
+ DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, TypeTree(), body)
+ )
+ Some(copyDefDef)
}
}
}
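
A sketch of what caseClassCopyMeth now produces for a hypothetical case class with two parameter lists (defaults only on the first list, per SI-5009; the parameter types and the result type are left as empty TypeTrees and filled in later by the namer, as described in the comment above):

    case class Person(name: String, age: Int)(implicit tag: String)
    // roughly generates:
    // def copy(name: String = name, age: Int = age)(implicit tag: String) =
    //   new Person(name, age)(tag)
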
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index 59160f87d0..8ea66979bc 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -15,8 +15,9 @@ trait FastTrack {
import definitions._
import language.implicitConversions
- private implicit def context2taggers(c0: MacroContext) : Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
- private implicit def context2contextreifiers(c0: MacroContext) : ContextReifiers { val c: c0.type } = new { val c: c0.type = c0 } with ContextReifiers
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
+ private implicit def context2contextreifiers(c0: MacroContext): ContextReifiers { val c: c0.type } = new { val c: c0.type = c0 } with ContextReifiers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args)
type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
@@ -42,6 +43,7 @@ trait FastTrack {
ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
MacroContextReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExprForMacroContext(c.prefix.tree, expr) }
ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
+ StringContext_f bindTo { case (c, Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args) }
registry
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
new file mode 100644
index 0000000000..a5f7928f55
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -0,0 +1,147 @@
+package scala.tools.reflect
+
+import scala.reflect.makro.{ReificationError, UnexpectedReificationError}
+import scala.reflect.makro.runtime.Context
+import scala.collection.mutable.ListBuffer
+import scala.collection.mutable.Stack
+
+abstract class MacroImplementations {
+ val c: Context
+
+ import c.universe._
+
+ def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree]): Tree = {
+ // the parts all have the same position information (as the expression is generated by the compiler)
+ // the args have correct position information
+
+ // the following conditions can only be violated if invoked directly
+ if (parts.length != args.length + 1) {
+ if(parts.length == 0)
+ c.abort(c.prefix.tree.pos, "too few parts")
+ else if(args.length + 1 < parts.length)
+ c.abort(if(args.length==0) c.enclosingPosition else args.last.pos,
+ "too few arguments for interpolated string")
+ else
+ c.abort(args(parts.length-1).pos,
+ "too many arguments for interpolated string")
+ }
+
+ val stringParts = parts map {
+ case Literal(Constant(s: String)) => s;
+ case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
+ }
+
+ val pi = stringParts.iterator
+ val bldr = new java.lang.StringBuilder
+ val evals = ListBuffer[ValDef]()
+ val ids = ListBuffer[Ident]()
+ val argsStack = Stack(args : _*)
+
+ def defval(value: Tree, tpe: Type): Unit = {
+ val freshName = newTermName(c.fresh("arg$"))
+ evals += ValDef(Modifiers(), freshName, TypeTree(tpe), value)
+ ids += Ident(freshName)
+ }
+
+ def isFlag(ch: Char): Boolean = {
+ ch match {
+ case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
+ case _ => false
+ }
+ }
+
+ def checkType(arg: Tree, variants: Type*): Option[Type] = {
+ variants.find(arg.tpe <:< _).orElse(
+ variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse(
+ Some(variants(0))
+ )
+ )
+ }
+
+ def conversionType(ch: Char, arg: Tree): Option[Type] = {
+ ch match {
+ case 'b' | 'B' =>
+ if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
+ case 'h' | 'H' =>
+ Some(AnyTpe)
+ case 's' | 'S' =>
+ Some(AnyTpe)
+ case 'c' | 'C' =>
+ checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
+ case 'd' | 'o' | 'x' | 'X' =>
+ checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, typeOf[BigInt])
+ case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' =>
+ checkType(arg, DoubleTpe, FloatTpe, typeOf[BigDecimal])
+ case 't' | 'T' =>
+ checkType(arg, LongTpe, typeOf[java.util.Calendar], typeOf[java.util.Date])
+ case _ => None
+ }
+ }
+
+ def copyString(first: Boolean): Unit = {
+ val str = StringContext.treatEscapes(pi.next())
+ val strLen = str.length
+ val strIsEmpty = strLen == 0
+ var start = 0
+ var idx = 0
+
+ if (!first) {
+ val arg = argsStack.pop
+ if (strIsEmpty || (str charAt 0) != '%') {
+ bldr append "%s"
+ defval(arg, AnyTpe)
+ } else {
+ // PRE str is not empty and str(0) == '%'
+ // argument index parameter is not allowed, thus parse
+ // [flags][width][.precision]conversion
+ var pos = 1
+ while(pos < strLen && isFlag(str charAt pos)) pos += 1
+ while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ if(pos < strLen && str.charAt(pos) == '.') { pos += 1
+ while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ }
+ if(pos < strLen) {
+ conversionType(str charAt pos, arg) match {
+ case Some(tpe) => defval(arg, tpe)
+ case None => c.error(arg.pos, "illegal conversion character")
+ }
+ } else {
+ // TODO: place error message on conversion string
+ c.error(arg.pos, "wrong conversion string")
+ }
+ }
+ idx = 1
+ }
+ if (!strIsEmpty) {
+ val len = str.length
+ while (idx < len) {
+ if (str(idx) == '%') {
+ bldr append (str substring (start, idx)) append "%%"
+ start = idx + 1
+ }
+ idx += 1
+ }
+ bldr append (str substring (start, idx))
+ }
+ }
+
+ copyString(first = true)
+ while (pi.hasNext) {
+ copyString(first = false)
+ }
+
+ val fstring = bldr.toString
+// val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*))
+// https://issues.scala-lang.org/browse/SI-5824, therefore
+ val expr =
+ Apply(
+ Select(
+ Literal(Constant(fstring)),
+ newTermName("format")),
+ List(ids: _* )
+ );
+
+ Block(evals.toList, expr)
+ }
+
+}
\ No newline at end of file
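
The new macro_StringInterpolation_f above backs the f string interpolator. A rough sketch of the expansion it builds (hypothetical values; the fresh arg$ names and exact typing are compiler-generated):

    val name = "Ada"; val height = 1.63
    f"$name%s is $height%.2f meters tall"
    // expands, roughly, to:
    // { val arg$1: Any = name
    //   val arg$2: Double = height
    //   "%s is %.2f meters tall".format(arg$1, arg$2) }
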
diff --git a/src/compiler/scala/tools/nsc/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index 4f4db83339..f8ded56ec6 100644
--- a/src/compiler/scala/tools/nsc/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -1,12 +1,15 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
-import reporters.Reporter
+import scala.tools.nsc.Global
+import scala.tools.nsc.reporters.Reporter
+import scala.tools.nsc.Settings
/** A version of Global that uses reflection to get class
* infos, instead of reading class or source files.
*/
class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader)
- extends Global(currentSettings, reporter) with scala.tools.nsc.ReflectSetup with scala.reflect.runtime.SymbolTable {
+ extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
override def transformedType(sym: Symbol) =
erasure.transformInfo(sym,
diff --git a/src/compiler/scala/tools/nsc/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 161391fc2c..116ae24cdd 100644
--- a/src/compiler/scala/tools/nsc/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -1,8 +1,12 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
-import tools.util.PathResolver
-import util.ClassPath.DefaultJavaContext
-import util.ScalaClassLoader
+import scala.tools.nsc.Driver
+import scala.tools.nsc.Global
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.util.PathResolver
object ReflectMain extends Driver {
diff --git a/src/compiler/scala/tools/nsc/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala
index 26c720a10f..f18c114d62 100644
--- a/src/compiler/scala/tools/nsc/ReflectSetup.scala
+++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala
@@ -1,7 +1,10 @@
-package scala.tools.nsc
+package scala.tools
+package reflect
+
+import scala.tools.nsc.Global
/** A helper trait to initialize things that need to be set before JavaMirrors and other
* reflect specific traits are initialized */
-private[nsc] trait ReflectSetup { this: Global =>
+private[reflect] trait ReflectSetup { this: Global =>
phase = new Run().typerPhase
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index c782181f21..18cbf9c4b7 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -12,7 +12,14 @@ import scala.reflect.runtime.{universe => ru}
object StdTags {
// root mirror is fine for these guys, since scala-library.jar is guaranteed to be reachable from the root mirror
- lazy val tagOfString = ru.TypeTag.String
+ lazy val tagOfString = ru.TypeTag[String](
+ ru.rootMirror,
+ new TypeCreator {
+ def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Type = {
+ val u = m.universe
+ u.definitions.StringClass.asTypeConstructor
+ }
+ })
lazy val tagOfListOfString = ru.TypeTag[List[String]](
ru.rootMirror,
new TypeCreator {
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 278f4e3ff7..589c5c7eb0 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -2,7 +2,6 @@ package scala.tools
package reflect
import scala.tools.nsc.reporters._
-import scala.tools.nsc.ReflectGlobal
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.Global
import scala.tools.nsc.typechecker.Modes
@@ -217,7 +216,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
(singleton, jmeth)
}
- def runExpr(expr: Tree, freeTypes: Map[TypeName, Type] = Map[TypeName, Type]()): Any = {
+ def runExpr(expr: Tree): Any = {
val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 464ffc6fab..a20ff1667b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -171,9 +171,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
vprintln("yes we can!! (byval)")
return true
}
- } else if ((mode & global.analyzer.RETmode) != 0) {
- vprintln("yes we can!! (return)")
- return true
}
}
false
@@ -187,7 +184,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val patMode = (mode & global.analyzer.PATTERNmode) != 0
val exprMode = (mode & global.analyzer.EXPRmode) != 0
val byValMode = (mode & global.analyzer.BYVALmode) != 0
- val retMode = (mode & global.analyzer.RETmode) != 0
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
@@ -214,12 +210,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
- } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having no annotation
- val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
- vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
- res
} else tree
}
@@ -476,11 +466,6 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
}
tpe
- case ret @ Return(expr) =>
- if (hasPlusMarker(expr.tpe))
- ret setType expr.tpe
- ret.tpe
-
case _ =>
tpe
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 765cde5a81..3a1dc87a6a 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -3,7 +3,6 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
-import scala.collection.mutable.ListBuffer
trait CPSUtils {
val global: Global
@@ -136,43 +135,4 @@ trait CPSUtils {
case _ => None
}
}
-
- def isTailReturn(retExpr: Tree, body: Tree): Boolean = {
- val removed = ListBuffer[Tree]()
- removeTailReturn(body, removed)
- removed contains retExpr
- }
-
- def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree = tree match {
- case Block(stms, r @ Return(expr)) =>
- removed += r
- treeCopy.Block(tree, stms, expr)
-
- case Block(stms, expr) =>
- treeCopy.Block(tree, stms, removeTailReturn(expr, removed))
-
- case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
- removed ++= Seq(r1, r2)
- treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
-
- case If(cond, thenExpr, elseExpr) =>
- treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
-
- case Try(block, catches, finalizer) =>
- treeCopy.Try(tree,
- removeTailReturn(block, removed),
- (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]],
- removeTailReturn(finalizer, removed))
-
- case CaseDef(pat, guard, r @ Return(expr)) =>
- removed += r
- treeCopy.CaseDef(tree, pat, guard, expr)
-
- case CaseDef(pat, guard, body) =>
- treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed))
-
- case _ =>
- tree
- }
-
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index fe465aad0d..017c8d24fd 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -9,8 +9,6 @@ import scala.tools.nsc.plugins._
import scala.tools.nsc.ast._
-import scala.collection.mutable.ListBuffer
-
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -48,20 +46,10 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
- val tailReturns = ListBuffer[Tree]()
- val rhs = removeTailReturn(rhs0, tailReturns)
- // throw an error if there is a Return tree which is not in tail position
- rhs0 foreach {
- case r @ Return(_) =>
- if (!tailReturns.contains(r))
- unit.error(r.pos, "return expressions in CPS code must be in tail position")
- case _ => /* do nothing */
- }
-
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
debuglog("result "+rhs1)
@@ -165,6 +153,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
+
def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
transTailValue(tree, cpsA, cpsR) match {
case (Nil, b) => b
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
new file mode 100644
index 0000000000..7ef775218b
--- /dev/null
+++ b/src/eclipse/README.md
@@ -0,0 +1,52 @@
+Eclipse project files
+=====================
+
+Import all projects inside Eclipse by choosing File/Import Existing Projects
+and navigating to src/eclipse. Check all projects and click OK.
+
+IMPORTANT
+=========
+
+1. You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
+Preferences/General/Workspace/Linked Resources. The value should be the absolute
+path to your scala checkout. All paths in project files are relative to this one,
+so nothing will work before you do so.
+
+2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
+JDK. The Scala library uses such APIs, so you'd see this error:
+
+ Access restriction: The method compareAndSwapObject(Object, long, Object, Object)
+ from the type Unsafe is not accessible due to restriction on required library.
+You can *fix* it by allowing calls to restricted APIs in `Java=>Compiler=>Errors/Warnings=>Deprecated and Restricted API`
+settings.
+
+3. The IDE guesses the Scala library version by looking for `library.properties` inside
+the library jar. The `scala-library` project does not have such a file, so you will see
+an error about incompatible libraries. You can work around it by adding a `library.properties`
+inside `src/library` with the following contents:
+
+ #Mon, 04 Jun 2012 02:08:56 +0200
+ version.number=2.10.0-20120603-141530-b34313db72
+ maven.version.number=2.10.0-SNAPSHOT
+ osgi.version.number=2.10.0.v20120603-141530-b34313db72
+ copyright.string=Copyright 2002-2011, LAMP/EPFL
+
+4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
+from being shown as dirty in `git status`. You can still ignore them by telling Git to
+consider them unchanged:
+
+ git update-index --assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+If you want to go back to normal (for instance, to commit your changes to project files), run:
+
+ git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+DETAILS
+=======
+
+The compiler project depends on the library, reflect, asm and fjbg projects. The
+builder will take care of the correct ordering, and changes in one project will
+be picked up by the dependent projects.
+
+The output directory is set to build/quick, so the runner scripts in quick
+work as they are (run an ant build to have them generated once). \ No newline at end of file
diff --git a/src/eclipse/asm/.classpath b/src/eclipse/asm/.classpath
new file mode 100644
index 0000000000..03d9e9788d
--- /dev/null
+++ b/src/eclipse/asm/.classpath
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="asm-quick-bin"/>
+</classpath>
diff --git a/src/eclipse/asm/.project b/src/eclipse/asm/.project
new file mode 100644
index 0000000000..c9051389af
--- /dev/null
+++ b/src/eclipse/asm/.project
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>asm</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>src</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/asm</locationURI>
+ </link>
+ <link>
+ <name>asm-quick-bin</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/asm/classes</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
new file mode 100644
index 0000000000..3e2f55f48a
--- /dev/null
+++ b/src/eclipse/fjbg/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="fjbg"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="libs-classes-fjbg"/>
+</classpath>
diff --git a/project.SAMPLE b/src/eclipse/fjbg/.project
index 0034c397ed..8acea9f5fe 100644
--- a/project.SAMPLE
+++ b/src/eclipse/fjbg/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>scala</name>
+ <name>fjbg</name>
<comment></comment>
<projects>
</projects>
@@ -15,4 +15,16 @@
<nature>org.scala-ide.sdt.core.scalanature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
+ <linkedResources>
+ <link>
+ <name>fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ </link>
+ <link>
+ <name>libs-classes-fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ </link>
+ </linkedResources>
</projectDescription>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
new file mode 100644
index 0000000000..3fb1d08d4d
--- /dev/null
+++ b/src/eclipse/reflect/.classpath
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-reflect"/>
+</classpath>
diff --git a/src/eclipse/reflect/.project b/src/eclipse/reflect/.project
new file mode 100644
index 0000000000..1e5cbb4ed9
--- /dev/null
+++ b/src/eclipse/reflect/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>reflect</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/reflect</locationURI>
+ </link>
+ <link>
+ <name>reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/reflect</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
new file mode 100644
index 0000000000..e0264b9856
--- /dev/null
+++ b/src/eclipse/scala-compiler/.classpath
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry kind="output" path="build-quick-compiler"/>
+</classpath>
diff --git a/src/eclipse/scala-compiler/.project b/src/eclipse/scala-compiler/.project
new file mode 100644
index 0000000000..cf8a68c8b6
--- /dev/null
+++ b/src/eclipse/scala-compiler/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-compiler</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/compiler</locationURI>
+ </link>
+ <link>
+ <name>compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/compiler</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
new file mode 100644
index 0000000000..a3a4933d34
--- /dev/null
+++ b/src/eclipse/scala-library/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-lib"/>
+</classpath>
diff --git a/src/eclipse/scala-library/.project b/src/eclipse/scala-library/.project
new file mode 100644
index 0000000000..049cf75e0b
--- /dev/null
+++ b/src/eclipse/scala-library/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-library</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/library</locationURI>
+ </link>
+ <link>
+ <name>library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/library</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/library/scala/Cloneable.scala b/src/library/scala/Cloneable.scala
new file mode 100644
index 0000000000..5ba76ddde5
--- /dev/null
+++ b/src/library/scala/Cloneable.scala
@@ -0,0 +1,14 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/**
+ * Classes extending this trait are cloneable across platforms (Java, .NET).
+ */
+trait Cloneable extends java.lang.Cloneable
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 99bd7f0736..44025d5358 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -100,11 +100,19 @@ object Predef extends LowPriorityImplicits {
// def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
+ @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
+ @annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
+ @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
val NoManifest = scala.reflect.NoManifest
def manifest[T](implicit m: Manifest[T]) = m
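
The deprecations above steer users from `Manifest`/`ClassManifest` toward `ClassTag` and `TypeTag`. A minimal migration sketch, assuming Scala 2.10 with `scala-reflect` on the classpath (the helper names are illustrative, not part of this change):

    import scala.reflect.ClassTag
    import scala.reflect.runtime.universe.{TypeTag, typeOf}

    // ClassTag captures just the erasure, which is enough to build arrays generically.
    def fillArray[T: ClassTag](n: Int, x: T): Array[T] = Array.fill(n)(x)

    // TypeTag captures the full static type, replacing Manifest-based type inspection.
    def describe[T: TypeTag](x: T): String = typeOf[T].toString
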
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index f400f18dab..f11dfb72ae 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -8,7 +8,7 @@
package scala
-import collection.mutable.ArrayBuffer
+import language.experimental.macros
/** A class to support string interpolation.
* This class supports string interpolation as outlined in Scala SIP-11.
@@ -42,7 +42,7 @@ case class StringContext(parts: String*) {
   * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
- def s(args: Any*) = {
+ def s(args: Any*): String = {
checkLengths(args: _*)
val pi = parts.iterator
val ai = args.iterator
@@ -82,38 +82,8 @@ case class StringContext(parts: String*) {
* string literally. This is achieved by replacing each such occurrence by the
* format specifier `%%`.
*/
- def f(args: Any*) = {
- checkLengths(args: _*)
- val pi = parts.iterator
- val bldr = new java.lang.StringBuilder
- def copyString(first: Boolean): Unit = {
- val str = treatEscapes(pi.next())
- val strIsEmpty = str.length == 0
- var start = 0
- var idx = 0
- if (!first) {
- if (strIsEmpty || (str charAt 0) != '%')
- bldr append "%s"
- idx = 1
- }
- if (!strIsEmpty) {
- val len = str.length
- while (idx < len) {
- if (str(idx) == '%') {
- bldr append (str substring (start, idx)) append "%%"
- start = idx + 1
- }
- idx += 1
- }
- bldr append (str substring (start, idx))
- }
- }
- copyString(first = true)
- while (pi.hasNext) {
- copyString(first = false)
- }
- bldr.toString format (args: _*)
- }
+ // The implementation is magically hardwired into `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ def f(args: Any*): String = macro ???
}
object StringContext {
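
With `f` now expanded by the compiler as a macro, format strings are checked at compile time. A brief usage sketch of the standard `f` interpolator (not part of this diff):

    val name = "James"
    val height = 1.9
    println(f"$name%s is $height%2.2f meters tall")  // James is 1.90 meters tall
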
diff --git a/src/library/scala/cloneable.scala b/src/library/scala/annotation/cloneable.scala
index 32a1ea640d..aa45e8325f 100644
--- a/src/library/scala/cloneable.scala
+++ b/src/library/scala/annotation/cloneable.scala
@@ -6,11 +6,10 @@
** |/ **
\* */
-
-
-package scala
+package scala.annotation
/**
* An annotation that designates the class to which it is applied as cloneable
*/
+@deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
class cloneable extends annotation.StaticAnnotation
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index d00414751a..d961bd84bb 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -27,7 +27,7 @@ import generic._
* @since 2.8
*/
trait DefaultMap[A, +B] extends Map[A, B] { self =>
-
+
/** A default implementation which creates a new immutable map.
*/
override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
@@ -41,7 +41,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
*/
override def - (key: A): Map[A, B] = {
val b = newBuilder
- b ++= this filter (key != _)
+ b ++= this filter (key != _._1)
b.result
}
}
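
The small fix above matters because `key != _` compared the key against the whole `(key, value)` pair, so `-` never removed anything. A minimal sketch exercising the corrected method (the `TwoEntries` class is hypothetical):

    import scala.collection.DefaultMap

    class TwoEntries extends DefaultMap[String, Int] {
      def get(key: String) = iterator.find(_._1 == key).map(_._2)
      def iterator = Iterator("a" -> 1, "b" -> 2)
    }

    println(new TwoEntries - "a")  // Map(b -> 2)
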
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index eaec7a2a76..0d51230623 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -411,12 +411,3 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
def stringPrefix: String
}
-
-object GenTraversableLike {
- /** Manufacture a conversion from collection representation type `Repr` to
- * its corresponding `GenTraversableLike` given an implicitly available
- * instance of `FromRepr[Repr]`.
- * @see [[scala.collection.generic.FromRepr]]
- */
- implicit def fromRepr[Repr](implicit fr : FromRepr[Repr]) = fr.hasElem
-}
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index e475865391..25edcfe19c 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -565,10 +565,10 @@ trait GenTraversableOnce[+A] extends Any {
* @tparam Col The collection type to build.
* @return a new collection containing all elements of this $coll.
*
- * @usecase def convertTo[Col[_]]: Col[A]
+ * @usecase def to[Col[_]]: Col[A]
* @inheritdoc
* $willNotTerminateInf
* @return a new collection containing all elements of this $coll.
*/
- def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
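
Renaming `convertTo` to `to` gives one uniform way to convert between collection types via `CanBuildFrom`. A short usage sketch, assuming the 2.10 collections API:

    import scala.collection.immutable.SortedSet

    val xs = List(3, 1, 2)
    val v: Vector[Int]    = xs.to[Vector]     // Vector(3, 1, 2)
    val s: SortedSet[Int] = xs.to[SortedSet]  // SortedSet(1, 2, 3)
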
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index c842475590..e0c8b21d09 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -44,7 +44,7 @@ trait IterableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[TraversableViewLike].Transformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 42a56a9c5a..19c24c9791 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -51,6 +51,7 @@ object Map extends MapFactory[Map] {
def iterator = underlying.iterator
override def default(key: A): B = d(key)
}
+
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 75f9ff93db..ed2a877631 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -226,7 +226,7 @@ self =>
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
-
+
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
@@ -240,7 +240,7 @@ self =>
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
-
+
protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
@@ -301,11 +301,11 @@ self =>
def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
((repr: Map[A, B1]) /: xs.seq) (_ + _)
- /** Returns a new map with all key/value pairs for which the predicate
+ /** Returns a new map obtained by removing all key/value pairs for which the predicate
* `p` returns `true`.
*
- * '''Note:''' This method works by successively removing elements fro which the
- * predicate is false from this set.
+ * '''Note:''' This method works by successively removing elements for which the
+ * predicate is true from this set.
* If removal is slow, or you expect that most elements of the set
* will be removed, you might consider using `filter`
* with a negated predicate instead.
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index f64045c9f6..73f5dda11c 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -40,7 +40,7 @@ trait SeqViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[IterableViewLike].AbstractTransformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 3f92908848..e32e0977df 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -30,9 +30,26 @@ trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B
* @since 2.8
*/
object SortedMap extends SortedMapFactory[SortedMap] {
- def empty[A, B](implicit ord: Ordering[A]): immutable.SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
+ def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
+
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 4dc0820a62..f9e95230ea 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -72,4 +72,27 @@ self =>
for (e <- elems) m = m + e
m
}
+
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
+ /** Adds a number of elements provided by a traversable object
+ * and returns a new collection with the added elements.
+ *
+ * @param xs the traversable object.
+ */
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
}
+
+
+
+
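
These overrides keep `filterKeys`, `mapValues`, and `++` within the sorted-map hierarchy instead of widening the result to a plain `Map`. A short usage sketch:

    import scala.collection.SortedMap

    val m = SortedMap(3 -> "c", 1 -> "a", 2 -> "b")
    val odd: SortedMap[Int, String]   = m.filterKeys(_ % 2 == 1)    // SortedMap(1 -> a, 3 -> c)
    val upper: SortedMap[Int, String] = m.mapValues(_.toUpperCase)  // SortedMap(1 -> A, 2 -> B, 3 -> C)
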
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index e5861f5760..9356832afd 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -617,7 +617,7 @@ trait TraversableLike[+A, +Repr] extends Any
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
// Override to provide size hint.
- override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b.sizeHint(this)
b ++= thisCollection
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 8dc6184d88..fb73805cc5 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -240,21 +240,21 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = convertTo[List]
+ def toList: List[A] = to[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
+ def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
+ def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+ def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]]
- def toVector: Vector[A] = convertTo[Vector]
+ def toVector: Vector[A] = to[Vector]
- def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b ++= seq
b.result
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index eb2091a5f3..bf4f8205d6 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -117,7 +117,7 @@ trait TraversableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super.EmptyView
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 8c603dc91b..75707b69b0 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -467,4 +467,5 @@ private[collection] trait Wrappers {
}
}
-object Wrappers extends Wrappers
+@SerialVersionUID(0 - 5857859809262781311L)
+object Wrappers extends Wrappers with Serializable
diff --git a/src/library/scala/collection/generic/FromRepr.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index c08761332c..7288322903 100644
--- a/src/library/scala/collection/generic/FromRepr.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -18,14 +18,12 @@ package generic
*
* Example usage,
* {{{
- * import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem }
- *
- * class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) {
- * def filterMap[B, That](f : A => Option[B])
- * (implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq)
+ * class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ * r.flatMap(f(_).toSeq)
* }
- *
- * implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r)
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl(fr.conversion(r))
*
* val l = List(1, 2, 3, 4, 5)
* List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
@@ -33,24 +31,28 @@ package generic
* }}}
*
* @author Miles Sabin
+ * @author J. Suereth
* @since 2.10
*/
-trait FromRepr[Repr] {
+trait IsTraversableLike[Repr] {
+ /** The type of elements we can traverse over. */
type A
- val hasElem: HasElem[Repr, A]
+ /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. */
+ val conversion: Repr => GenTraversableLike[A, Repr]
}
-object FromRepr {
+object IsTraversableLike {
import language.higherKinds
- implicit val stringFromRepr : FromRepr[String] { type A = Char } = new FromRepr[String] {
- type A = Char
- val hasElem = implicitly[HasElem[String, Char]]
- }
+ implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
+ new IsTraversableLike[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableLike[Char, String]]
+ }
- implicit def genTraversableLikeFromRepr[C[_], A0]
- (implicit hasElem0: HasElem[C[A0], A0]) : FromRepr[C[A0]] { type A = A0 } = new FromRepr[C[A0]] {
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
+ new IsTraversableLike[C[A0]] {
type A = A0
- val hasElem = hasElem0
+ val conversion = conv
}
}
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
new file mode 100644
index 0000000000..b336553231
--- /dev/null
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -0,0 +1,62 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `GenTraversableOnce[A]`.
+ *
+ * This type enables simple enrichment of `GenTraversableOnce`s with extension
+ * methods which can make full use of the mechanics of the Scala collections
+ * framework in their implementation.
+ *
+ * Example usage,
+ * {{{
+ * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ * val b = cbf()
+ * for(e <- r.seq) f(e) foreach (b +=)
+ * b.result
+ * }
+ * }
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @author Miles Sabin
+ * @author J. Suereth
+ * @since 2.10
+ */
+trait IsTraversableOnce[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. */
+ val conversion: Repr => GenTraversableOnce[A]
+}
+
+object IsTraversableOnce {
+ import language.higherKinds
+
+ implicit val stringRepr: IsTraversableOnce[String] { type A = Char } =
+ new IsTraversableOnce[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableOnce[Char]]
+ }
+
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
+ new IsTraversableOnce[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
+
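
A hedged usage sketch built on the `FilterMapImpl`/`filterMap` definitions from the doc comment above; since `stringRepr` is provided, the same enrichment also applies to a `String`:

    // Assuming the FilterMapImpl class and implicit filterMap from the example are in scope:
    List(1, 2, 3, 4, 5) filterMap (i => if (i % 2 == 0) Some(i) else None)  // List(2, 4)
    "hello" filterMap (c => if (c != 'l') Some(c.toUpper) else None)        // "HEO"
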
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 85b9995f2e..6eecb5e3ff 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -6,12 +6,6 @@ import language.higherKinds
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
- /** The type of conversions from a collection representation type
- * `Repr` to its corresponding GenTraversableLike.
- * @see [[scala.collection.generic.FromRepr]]
- */
- type HasElem[Repr, A] = Repr => GenTraversableLike[A, Repr]
-
@deprecated("use ClassTagTraversableFactory instead", "2.10.0")
type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ClassTagTraversableFactory[CC]
@@ -20,4 +14,4 @@ package object generic {
@deprecated("use GenericClassTagTraversableTemplate instead", "2.10.0")
type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericClassTagTraversableTemplate[A, CC]
-} \ No newline at end of file
+}
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 6ae2d78188..2dc08fff24 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -48,7 +48,8 @@ import parallel.immutable.ParMap
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends scala.collection.MapLike[A, B, This]
with Parallelizable[(A, B), ParMap[A, B]]
-{ self =>
+{
+self =>
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
@@ -84,31 +85,20 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] {
- override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
- def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
- def get(key: A) = if (!p(key)) None else self.get(key)
- }
-
+ override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- override def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] {
- override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
- def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
- override def size = self.size
- override def contains(key: A) = self.contains(key)
- def get(key: A) = self.get(key).map(f)
- }
+ override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C]
/** Collects all keys of this map in a set.
* @return a set containing all keys of this map.
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0f28c4997b..4b573511d1 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -43,7 +43,7 @@ object RedBlackTree {
}
def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
- def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v))
+ def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
case (Some(from), Some(until)) => this.range(tree, from, until)
@@ -122,17 +122,18 @@ object RedBlackTree {
else
mkTree(isBlack, x, xv, a, r)
}
- private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
RedTree(k, v, null, null)
} else {
val cmp = ordering.compare(k, tree.key)
- if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right)
- else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v))
- else mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right)
+ else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite))
+ else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
}
- // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
- // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
+ /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ * http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
if (isRedTree(tr)) {
@@ -216,7 +217,7 @@ object RedBlackTree {
if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
val newLeft = doFrom(tree.left, from)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -224,7 +225,7 @@ object RedBlackTree {
if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
val newRight = doTo(tree.right, to)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -232,7 +233,7 @@ object RedBlackTree {
if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
val newRight = doUntil(tree.right, until)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -242,8 +243,8 @@ object RedBlackTree {
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
else rebalance(tree, newLeft, newRight)
}
@@ -254,7 +255,7 @@ object RedBlackTree {
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -264,7 +265,7 @@ object RedBlackTree {
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -275,8 +276,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value)
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 526f7a1ffe..f147b673f7 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -30,7 +30,9 @@ import annotation.unchecked.uncheckedVariance
trait SortedMap[A, +B] extends Map[A, B]
with scala.collection.SortedMap[A, B]
with MapLike[A, B, SortedMap[A, B]]
- with SortedMapLike[A, B, SortedMap[A, B]] { self =>
+ with SortedMapLike[A, B, SortedMap[A, B]]
+{
+self =>
override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
SortedMap.newBuilder[A, B]
@@ -76,6 +78,17 @@ trait SortedMap[A, +B] extends Map[A, B]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
}
/** $factoryInfo
@@ -86,4 +99,20 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] with collection.SortedMap.Default[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
}
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 4c1a5f2e03..51bc76efc3 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -131,7 +131,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value))
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -171,7 +171,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(!RB.contains(tree, key))
- new TreeMap(RB.update(tree, key, value))
+ new TreeMap(RB.update(tree, key, value, true))
}
def - (key:A): TreeMap[A, B] =
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 882e828c5b..697da2bc4b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -112,7 +112,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ()))
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +122,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
*/
def insert(elem: A): TreeSet[A] = {
assert(!RB.contains(tree, elem))
- newSet(RB.update(tree, elem, ()))
+ newSet(RB.update(tree, elem, (), false))
}
/** Creates a new `TreeSet` with the entry removed.
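
Taken together, the new `overwrite` flag separates map-style updates from set-style inserts: `TreeMap.updated` passes `true` so the stored value is replaced, while `TreeSet.+` passes `false` so an already-present, equal element leaves the underlying tree node untouched. A brief illustration at the collection level:

    import scala.collection.immutable.{TreeMap, TreeSet}

    TreeMap(1 -> "a").updated(1, "b")  // TreeMap(1 -> b): overwrite = true replaces the value
    TreeSet(1) + 1                     // TreeSet(1): overwrite = false reuses the existing node
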
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 040a0e2aa7..8f834d265b 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -59,7 +59,7 @@ object ArrayStack extends SeqFactory[ArrayStack] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@cloneable @SerialVersionUID(8565219180626620510L)
+@SerialVersionUID(8565219180626620510L)
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int)
extends AbstractSeq[T]
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index dd225cfab9..fd5dc66292 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -28,10 +28,10 @@ import generic._
* @define Coll `Buffer`
* @define coll buffer
*/
-@cloneable
trait Buffer[A] extends Seq[A]
with GenericTraversableTemplate[A, Buffer]
- with BufferLike[A, Buffer[A]] {
+ with BufferLike[A, Buffer[A]]
+ with scala.Cloneable {
override def companion: GenericCompanion[Buffer] = Buffer
}
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index f82a596b32..3274fe6194 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -58,13 +58,13 @@ import annotation.{migration, bridge}
* mutates the collection in place, unlike similar but
* undeprecated methods throughout the collections hierarchy.
*/
-@cloneable
trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
extends Growable[A]
with Shrinkable[A]
with Scriptable[A]
with Subtractable[A, This]
with SeqLike[A, This]
+ with scala.Cloneable
{ self : This =>
// Abstract methods from Seq:
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index e6fbce415a..6daac3094a 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -17,9 +17,6 @@ package mutable
*
* @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound.
*/
-@cloneable
-trait Cloneable[+A <: AnyRef] {
- // !!! why doesn't this extend java.lang.Cloneable?
- // because neither did @serializable, then we changed it to Serializable
+trait Cloneable[+A <: AnyRef] extends scala.Cloneable {
override def clone: A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index af55a01ed6..e37cbdc712 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -31,7 +31,6 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@cloneable
class PriorityQueue[A](implicit val ord: Ordering[A])
extends AbstractIterable[A]
with Iterable[A]
@@ -40,6 +39,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
with Growable[A]
with Builder[A, PriorityQueue[A]]
with Serializable
+ with scala.Cloneable
{
import ord._
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 605d37aec6..2aa19d6cb0 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -30,7 +30,6 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@cloneable
class Queue[A]
extends MutableList[A]
with GenericTraversableTemplate[A, Queue]
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 042eac517a..db9e48d1cf 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -53,7 +53,6 @@ object Stack extends SeqFactory[Stack] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@cloneable
class Stack[A] private (var elems: List[A])
extends AbstractSeq[A]
with Seq[A]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a7ec833193..d4f1c2f39f 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -72,6 +72,10 @@ import language.implicitConversions
* very fast operation which simply creates wrappers around the receiver collection.
* This can be repeated recursively.
*
+ * Tasks are scheduled for execution through a
+ * [[scala.collection.parallel.TaskSupport]] object, which can be changed
+ * through the `tasksupport` setter of the collection.
+ *
* Method `newCombiner` produces a new combiner. Combiners are an extension of builders.
* They provide a method `combine` which combines two combiners and returns a combiner
* containing elements of both combiners.
@@ -165,6 +169,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
_tasksupport = defaultTaskSupport
}
+ /** The task support object which is responsible for scheduling and
+ * load-balancing tasks to processors.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]]
+ */
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
@@ -173,6 +182,24 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else ts
}
+ /** Changes the task support object which is responsible for scheduling and
+ * load-balancing tasks to processors.
+ *
+ * A task support object can be changed in a parallel collection after it
+ * has been created, but only during a quiescent period, i.e. while there
+ * are no concurrent invocations to parallel collection methods.
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
+ *
+ * @see [[scala.collection.parallel.TaskSupport]]
+ */
def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
def seq: Sequential
@@ -841,7 +868,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -850,13 +877,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-
- // TODO(@alex22): make these better
- override def toVector: Vector[T] = seq.toVector
-
- override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col]
+ override def toVector: Vector[T] = to[Vector]
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
+ toParCollection[T, Col[T]](() => cbf().asCombiner)
+ } else seq.to(cbf)
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 2eaa861429..b2ff5c9e44 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -19,36 +19,73 @@ import scala.concurrent.ExecutionContext
/** A trait implementing the scheduling of
* a parallel collection operation.
+ *
+ * Parallel collections are modular in the way operations are scheduled. Each
+ * parallel collection is parametrized with a task support object which is
+ * responsible for scheduling and load-balancing tasks to processors.
*
- * Task support objects handle how a task is split and
- * distributed across processors. A task support object can be
- * changed in a parallel collection after it has been created,
- * but only during a quiescent period, i.e. while there are no
+ * A task support object can be changed in a parallel collection after it has
+ * been created, but only during a quiescent period, i.e. while there are no
* concurrent invocations to parallel collection methods.
+ *
+ * There are currently a few task support implementations available for
+ * parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]]
+ * uses a fork-join pool
+ * internally and is used by default on JVM 1.6 or greater. The less efficient
+ * [[scala.collection.parallel.ThreadPoolTaskSupport]] is a fallback for JVM
+ * 1.5 and JVMs that do not support the fork join pools. The
+ * [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
+ * default execution context implementation found in scala.concurrent, and it
+ * reuses the thread pool used in scala.concurrent (this is either a fork join
+ * pool or a thread pool executor, depending on the JVM version). The
+ * execution context task support is set to each parallel collection by
+ * default, so parallel collections reuse the same fork-join pool as the
+ * future API.
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
+ *
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section
+ * of the parallel collections guide for more information.
*/
trait TaskSupport extends Tasks
-/** A task support that uses a fork join pool to schedule tasks */
+/** A task support that uses a fork join pool to schedule tasks.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
+ */
class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool)
extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
-
-/** A task support that uses a thread pool executor to schedule tasks */
+/** A task support that uses a thread pool executor to schedule tasks.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
+ */
class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
/** A task support that uses an execution context to schedule tasks.
*
- * It can be used with the default execution context implementation in the `scala.concurrent` package.
- * It internally forwards the call to either a forkjoin based task support or a thread pool executor one,
- * depending on what the execution context uses.
+ * It can be used with the default execution context implementation in the
+ * `scala.concurrent` package. It internally forwards the call to either a
+ * forkjoin based task support or a thread pool executor one, depending on
+ * what the execution context uses.
*
- * By default, parallel collections are parametrized with this task support object, so parallel collections
- * share the same execution context backend as the rest of the `scala.concurrent` package.
+ * By default, parallel collections are parametrized with this task support
+ * object, so parallel collections share the same execution context backend
+ * as the rest of the `scala.concurrent` package.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
-class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.defaultExecutionContext)
+class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global)
extends TaskSupport with ExecutionContextTasks
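For instance, a minimal sketch (names and values illustrative, assuming the 2.10 API shown in this hunk) of pointing a parallel collection at the execution-context-backed scheduler described above:

  import scala.collection.parallel._
  import scala.concurrent.ExecutionContext

  // Schedule this collection's operations on the global execution context
  // instead of a dedicated fork-join pool.
  val pc = mutable.ParArray(1, 2, 3, 4)
  pc.tasksupport = new ExecutionContextTaskSupport(ExecutionContext.global)
  pc map (_ * 2)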
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index d8c42d74b0..349f4fa44c 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -34,6 +34,7 @@ extends collection/*.immutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 700d21d0bb..b5747a31cf 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -29,7 +29,8 @@ import scala.collection.GenIterable
trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+ with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with Mutable {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
//protected[this] override def newBuilder = ParIterable.newBuilder[T]
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
new file mode 100644
index 0000000000..a5b878c546
--- /dev/null
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -0,0 +1,81 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import java.lang.Thread
+import scala.concurrent.util.Duration
+
+/**
+ * A context to be notified by `scala.concurrent.blocking()` when
+ * a thread is about to block. In effect this trait provides
+ * the implementation for `scala.concurrent.blocking()`, which
+ * locates an instance of `BlockContext` by first looking for one
+ * provided through `BlockContext.withBlockContext()` and, failing that,
+ * checking whether `Thread.currentThread` is an instance of `BlockContext`.
+ * So a thread pool can have its `java.lang.Thread` instances implement
+ * `BlockContext`. There's a default `BlockContext` used if the thread
+ * doesn't implement `BlockContext`.
+ *
+ * Typically, you'll want to chain to the previous `BlockContext`,
+ * like this:
+ * {{{
+ * val oldContext = BlockContext.current
+ * val myContext = new BlockContext {
+ * override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
+ * // you'd have code here doing whatever you need to do
+ * // when the thread is about to block.
+ * // Then you'd chain to the previous context:
+ * oldContext.internalBlockingCall(awaitable, atMost)
+ * }
+ * }
+ * BlockContext.withBlockContext(myContext) {
+ * // then this block runs with myContext as the handler
+ * // for scala.concurrent.blocking
+ * }
+ * }}}
+ */
+trait BlockContext {
+
+ /** Used internally by the framework; blocks execution for at most
+ * `atMost` time while waiting for an `awaitable` object to become ready.
+ *
+ * Clients should use `scala.concurrent.blocking` instead; this is
+ * the implementation of `scala.concurrent.blocking`, generally
+ * provided by a `scala.concurrent.ExecutionContext` or `java.util.concurrent.Executor`.
+ */
+ def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T
+}
+
+object BlockContext {
+ private object DefaultBlockContext extends BlockContext {
+ override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ awaitable.result(atMost)(Await.canAwaitEvidence)
+ }
+
+ private val contextLocal = new ThreadLocal[BlockContext]() {
+ override def initialValue = Thread.currentThread match {
+ case ctx: BlockContext => ctx
+ case _ => DefaultBlockContext
+ }
+ }
+
+ /** Obtain the current thread's current `BlockContext`. */
+ def current: BlockContext = contextLocal.get
+
+ /** Pushes a current `BlockContext` while executing `body`. */
+ def withBlockContext[T](blockContext: BlockContext)(body: => T): T = {
+ val old = contextLocal.get
+ try {
+ contextLocal.set(blockContext)
+ body
+ } finally {
+ contextLocal.set(old)
+ }
+ }
+}
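A short sketch of how the new hook composes with `scala.concurrent.blocking` (the logging context is purely illustrative):

  import scala.concurrent.{ Awaitable, BlockContext, blocking }
  import scala.concurrent.util.Duration

  // Wrap the previously installed context so every blocking call is logged
  // before being delegated to it.
  val previous = BlockContext.current
  val loggingContext = new BlockContext {
    override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
      println("about to block for at most " + atMost)
      previous.internalBlockingCall(awaitable, atMost)
    }
  }
  BlockContext.withBlockContext(loggingContext) {
    blocking { Thread.sleep(10) }   // routed through loggingContext
  }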
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index 330a2f0e25..86a86966ef 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -17,23 +17,6 @@ import language.implicitConversions
/** This package object contains primitives for concurrent and parallel programming.
*/
abstract class ConcurrentPackageObject {
- /** A global execution environment for executing lightweight tasks.
- */
- lazy val defaultExecutionContext: ExecutionContext with Executor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
-
- val currentExecutionContext = new ThreadLocal[ExecutionContext]
-
- val handledFutureException: PartialFunction[Throwable, Throwable] = {
- case t: Throwable if isFutureThrowable(t) => t
- }
-
- // TODO rename appropriately and make public
- private[concurrent] def isFutureThrowable(t: Throwable) = t match {
- case e: Error => false
- case t: scala.util.control.ControlThrowable => false
- case i: InterruptedException => false
- case _ => true
- }
/* concurrency constructs */
@@ -46,8 +29,7 @@ abstract class ConcurrentPackageObject {
* @param execctx the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- def future[T](body: =>T)(implicit execctx: ExecutionContext = defaultExecutionContext): Future[T] =
- Future[T](body)
+ def future[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = Future[T](body)
/** Creates a promise object which can be completed with a value.
*
@@ -55,8 +37,7 @@ abstract class ConcurrentPackageObject {
* @param execctx the execution context on which the promise is created on
* @return the newly created `Promise` object
*/
- def promise[T]()(implicit execctx: ExecutionContext = defaultExecutionContext): Promise[T] =
- Promise[T]()
+ def promise[T]()(implicit execctx: ExecutionContext): Promise[T] = Promise[T]()
/** Used to block on a piece of code which potentially blocks.
*
@@ -67,8 +48,7 @@ abstract class ConcurrentPackageObject {
* - InterruptedException - in the case that a wait within the blockable object was interrupted
* - TimeoutException - in the case that the blockable object timed out
*/
- def blocking[T](body: =>T): T =
- blocking(impl.Future.body2awaitable(body), Duration.Inf)
+ def blocking[T](body: =>T): T = blocking(impl.Future.body2awaitable(body), Duration.Inf)
/** Blocks on an awaitable object.
*
@@ -79,12 +59,8 @@ abstract class ConcurrentPackageObject {
* - InterruptedException - in the case that a wait within the blockable object was interrupted
* - TimeoutException - in the case that the blockable object timed out
*/
- def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
- currentExecutionContext.get match {
- case null => awaitable.result(atMost)(Await.canAwaitEvidence)
- case ec => ec.internalBlockingCall(awaitable, atMost)
- }
- }
+ def blocking[T](awaitable: Awaitable[T], atMost: Duration): T =
+ BlockContext.current.internalBlockingCall(awaitable, atMost)
@inline implicit final def int2durationops(x: Int): DurationOps = new DurationOps(x)
}
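Since the default `defaultExecutionContext` argument is gone, callers now have to bring an `ExecutionContext` into implicit scope themselves; a minimal sketch:

  import scala.concurrent._
  import scala.concurrent.ExecutionContext.Implicits.global

  // The global context imported above satisfies the implicit parameter
  // that future() and onComplete now require.
  val f = future { 21 * 2 }
  f onComplete {
    case Right(v) => println("result: " + v)
    case Left(t)  => println("failed: " + t)
  }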
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index 96a66d83b6..91e41748f5 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -23,7 +23,7 @@ package scala.concurrent
* @author Paul Phillips
* @version 2.8
*/
-class DelayedLazyVal[T](f: () => T, body: => Unit) {
+class DelayedLazyVal[T](f: () => T, body: => Unit){
@volatile private[this] var _isDone = false
private[this] lazy val complete = f()
@@ -39,7 +39,8 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
*/
def apply(): T = if (isDone) complete else f()
- // TODO replace with scala.concurrent.future { ... }
+ // FIXME need to take ExecutionContext in constructor
+ import ExecutionContext.Implicits.global
future {
body
_isDone = true
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 436a17a33b..b486e5269e 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -9,58 +9,80 @@
package scala.concurrent
-
-import java.util.concurrent.atomic.{ AtomicInteger }
-import java.util.concurrent.{ Executors, Future => JFuture, Callable, ExecutorService, Executor }
+import java.util.concurrent.{ ExecutorService, Executor }
import scala.concurrent.util.Duration
-import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveTask => FJTask, RecursiveAction, ForkJoinWorkerThread }
-import scala.collection.generic.CanBuildFrom
-import collection._
-
-
+import scala.annotation.implicitNotFound
+/**
+ * An `ExecutionContext` is an abstraction over an entity that can execute program logic.
+ */
+@implicitNotFound("Cannot find an implicit ExecutionContext, either require one yourself or import ExecutionContext.Implicits.global")
trait ExecutionContext {
/** Runs a block of code on this execution context.
*/
def execute(runnable: Runnable): Unit
- /** Used internally by the framework - blocks execution for at most `atMost` time while waiting
- * for an `awaitable` object to become ready.
- *
- * Clients should use `scala.concurrent.blocking` instead.
- */
- def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T
-
/** Reports that an asynchronous computation failed.
*/
def reportFailure(t: Throwable): Unit
}
+/**
+ * Union interface since Java does not support union types
+ */
+trait ExecutionContextExecutor extends ExecutionContext with Executor
+
+/**
+ * Union interface since Java does not support union types
+ */
+trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService
+
/** Contains factory methods for creating execution contexts.
*/
object ExecutionContext {
-
- implicit def defaultExecutionContext: ExecutionContext = scala.concurrent.defaultExecutionContext
-
+ /**
+ * The `ExecutionContext` associated with the current `Thread`
+ */
+ val currentExecutionContext: ThreadLocal[ExecutionContext] = new ThreadLocal //FIXME might want to set the initial value to an executionContext that throws an exception on execute and warns that it's not set
+
+ /**
+ * This is the explicit global ExecutionContext,
+ * call this when you want to provide the global ExecutionContext explicitly
+ */
+ def global: ExecutionContextExecutor = Implicits.global
+
+ object Implicits {
+ /**
+ * This is the implicit global ExecutionContext,
+ * import this when you want to provide the global ExecutionContext implicitly
+ */
+ implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
+ }
+
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*/
- def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit = defaultReporter): ExecutionContext with ExecutorService =
+ def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService =
impl.ExecutionContextImpl.fromExecutorService(e, reporter)
+
+ /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter.
+ */
+ def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
/** Creates an `ExecutionContext` from the given `Executor`.
*/
- def fromExecutor(e: Executor, reporter: Throwable => Unit = defaultReporter): ExecutionContext with Executor =
+ def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor =
impl.ExecutionContextImpl.fromExecutor(e, reporter)
+
+ /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter.
+ */
+ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
- def defaultReporter: Throwable => Unit = {
- // re-throwing `Error`s here causes an exception handling test to fail.
- //case e: Error => throw e
- case t => t.printStackTrace()
- }
-
+ /** The default reporter simply prints the stack trace of the `Throwable` to System.err.
+ */
+ def defaultReporter: Throwable => Unit = { case t => t.printStackTrace() }
}
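A sketch of the new factory overloads, wrapping a JDK thread pool (pool size chosen arbitrarily); the one-argument variants fall back to `defaultReporter`:

  import java.util.concurrent.Executors
  import scala.concurrent.ExecutionContext

  val pool = Executors.newFixedThreadPool(2)
  val ec = ExecutionContext.fromExecutorService(pool)   // ExecutionContextExecutorService
  ec.execute(new Runnable { def run() = println("running on the wrapped pool") })
  ec.shutdown()   // the ExecutorService side of the union interface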
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 2a52d0ac0b..75a83d6ef8 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-
+import language.higherKinds
import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
@@ -18,15 +18,14 @@ import java.{ lang => jl }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
import scala.concurrent.util.Duration
-import scala.concurrent.impl.NonFatal
+import scala.util.control.NonFatal
import scala.Option
+import scala.util.{Try, Success, Failure}
import scala.annotation.tailrec
-import scala.collection.mutable.Stack
import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
import scala.reflect.ClassTag
-import language.higherKinds
@@ -137,7 +136,7 @@ trait Future[+T] extends Awaitable[T] {
* $callbackInContext
*/
def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Left(t) if (isFutureThrowable(t) && callback.isDefinedAt(t)) => callback(t)
+ case Left(t) if (impl.Future.isFutureThrowable(t) && callback.isDefinedAt(t)) => callback(t)
case _ =>
}(executor)
@@ -579,6 +578,20 @@ object Future {
classOf[Double] -> classOf[jl.Double],
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
+
+ /** Creates an already completed Future with the specified exception.
+ *
+ * @tparam T the type of the value in the future
+ * @return the newly created `Future` object
+ */
+ def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
+
+ /** Creates an already completed Future with the specified result.
+ *
+ * @tparam T the type of the value in the future
+ * @return the newly created `Future` object
+ */
+ def successful[T](result: T): Future[T] = Promise.successful(result).future
/** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
*
@@ -709,5 +722,12 @@ object Future {
}
}
-
+/** A marker indicating that a `java.lang.Runnable` provided to `scala.concurrent.ExecutionContext`
+ * wraps a callback provided to `Future.onComplete`.
+ * All callbacks provided to a `Future` end up going through `onComplete`, so this allows an
+ * `ExecutionContext` to special-case callbacks that were executed by `Future` if desired.
+ */
+trait OnCompleteRunnable {
+ self: Runnable =>
+}
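The pre-completed constructors make it cheap to return a `Future` without scheduling anything; a minimal sketch:

  import scala.concurrent.Future

  val done: Future[Int]   = Future.successful(42)
  val broken: Future[Int] = Future.failed(new IllegalStateException("boom"))
  println(done.isCompleted)   // true, no ExecutionContext involved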
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 578642966f..5d1b2c00b6 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -34,6 +34,15 @@ trait Promise[T] {
*/
def future: Future[T]
+ /** Returns whether the promise has already been completed with
+ * a value or an exception.
+ *
+ * $nonDeterministic
+ *
+ * @return `true` if the promise is already completed, `false` otherwise
+ */
+ def isCompleted: Boolean
+
/** Completes the promise with either an exception or a value.
*
* @param result Either the value or the exception to complete the promise with.
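A small sketch of the new query (completion still takes an `Either` at this point in the API):

  import scala.concurrent._
  import scala.concurrent.ExecutionContext.Implicits.global

  val p = promise[Int]()
  println(p.isCompleted)   // false
  p.complete(Right(1))
  println(p.isCompleted)   // true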
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 5a6d95c2ed..292014706d 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -53,6 +53,8 @@ class SyncVar[A] {
value
}
+ /** Waits for this SyncVar to become defined and returns
+ * the result */
def take(): A = synchronized {
try get
finally unsetVal()
@@ -64,7 +66,8 @@ class SyncVar[A] {
* the SyncVar.
*
* @param timeout the amount of milliseconds to wait, 0 means forever
- * @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
+ * @return the value, or throws an exception if the timeout occurs
+ * @throws NoSuchElementException on timeout
*/
def take(timeout: Long): A = synchronized {
try get(timeout).get
@@ -72,25 +75,28 @@ class SyncVar[A] {
}
// TODO: this method should be private
- // [Heather] the reason why: it doesn't take into consideration
+ // [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
@deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
def set(x: A): Unit = setVal(x)
+ /** Places a value in the SyncVar. If the SyncVar already has a stored value,
+ * it waits until another thread takes it */
def put(x: A): Unit = synchronized {
while (isDefined) wait()
setVal(x)
}
+ /** Checks whether a value is stored in the synchronized variable */
def isSet: Boolean = synchronized {
isDefined
}
// TODO: this method should be private
- // [Heather] the reason why: it doesn't take into consideration
+ // [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, unset has been
- // deprecated in order to eventually be able to make "unsetting" private
+ // deprecated in order to eventually be able to make "unsetting" private
@deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
def unset(): Unit = synchronized {
isDefined = false
@@ -98,7 +104,7 @@ class SyncVar[A] {
notifyAll()
}
- // `setVal` exists so as to retroactively deprecate `set` without
+ // `setVal` exists so as to retroactively deprecate `set` without
// deprecation warnings where we use `set` internally. The
// implementation of `set` was moved to `setVal` to achieve this
private def setVal(x: A): Unit = synchronized {
@@ -107,13 +113,13 @@ class SyncVar[A] {
notifyAll()
}
- // `unsetVal` exists so as to retroactively deprecate `unset` without
+ // `unsetVal` exists so as to retroactively deprecate `unset` without
// deprecation warnings where we use `unset` internally. The
// implementation of `unset` was moved to `unsetVal` to achieve this
private def unsetVal(): Unit = synchronized {
isDefined = false
value = None
- notifyAll()
+ notifyAll()
}
}
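A sketch of the put/take handshake the new doc comments describe: `put` blocks while a value is stored, `take` blocks until one is available.

  import scala.concurrent.SyncVar

  val box = new SyncVar[String]
  new Thread(new Runnable { def run() = box.put("hello") }).start()
  println(box.take())   // waits for the producer thread, then prints "hello"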
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 7549bf8314..551a444425 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -13,33 +13,34 @@ package scala.concurrent.impl
import java.util.concurrent.{ Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit }
import java.util.Collection
import scala.concurrent.forkjoin._
-import scala.concurrent.{ ExecutionContext, Awaitable }
+import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, ExecutionContextExecutor, ExecutionContextExecutorService }
import scala.concurrent.util.Duration
+import scala.util.control.NonFatal
-private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContext with Executor {
+private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor {
val executor: Executor = es match {
case null => createExecutorService
case some => some
}
-
- // to ensure that the current execution context thread local is properly set
- def executorsThreadFactory = new ThreadFactory {
- def newThread(r: Runnable) = new Thread(new Runnable {
- override def run() {
- scala.concurrent.currentExecutionContext.set(ExecutionContextImpl.this)
- r.run()
- }
- })
- }
-
- // to ensure that the current execution context thread local is properly set
+
+ // Implement BlockContext on FJP threads
def forkJoinPoolThreadFactory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
- def newThread(fjp: ForkJoinPool) = new ForkJoinWorkerThread(fjp) {
- override def onStart() {
- scala.concurrent.currentExecutionContext.set(ExecutionContextImpl.this)
+ def newThread(fjp: ForkJoinPool) = new ForkJoinWorkerThread(fjp) with BlockContext {
+ override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ def block(): Boolean = {
+ result = awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence) // FIXME what happens if there's an exception thrown here?
+ isdone = true
+ true
+ }
+ def isReleasable = isdone
+ })
+ result
}
}
}
@@ -67,7 +68,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
case NonFatal(t) =>
System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to Executors.newCachedThreadPool")
t.printStackTrace(System.err)
- Executors.newCachedThreadPool(executorsThreadFactory) //FIXME use the same desired parallelism here too?
+ Executors.newCachedThreadPool() //FIXME use the same desired parallelism here too?
}
def execute(runnable: Runnable): Unit = executor match {
@@ -83,27 +84,6 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
case generic => generic execute runnable
}
- def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
- Future.releaseStack(this)
-
- executor match {
- case fj: ForkJoinPool =>
- var result: T = null.asInstanceOf[T]
- ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
- @volatile var isdone = false
- def block(): Boolean = {
- result = awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence) // FIXME what happens if there's an exception thrown here?
- isdone = true
- true
- }
- def isReleasable = isdone
- })
- result
- case _ =>
- awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
- }
- }
-
def reportFailure(t: Throwable) = reporter(t)
}
@@ -111,8 +91,8 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
private[concurrent] object ExecutionContextImpl {
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
- def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutorService =
- new ExecutionContextImpl(es, reporter) with ExecutorService {
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
+ new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
override def execute(command: Runnable) = executor.execute(command)
override def shutdown() { asExecutorService.shutdown() }
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 6a3487adde..073e6c4c9f 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -13,6 +13,7 @@ package scala.concurrent.impl
import scala.concurrent.util.Duration
import scala.concurrent.{Awaitable, ExecutionContext, CanAwait}
import scala.collection.mutable.Stack
+import scala.util.control.NonFatal
private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
@@ -45,11 +46,19 @@ private[concurrent] object Future {
def boxedType(c: Class[_]): Class[_] = if (c.isPrimitive) toBoxed(c) else c
- private[impl] class PromiseCompletingTask[T](override val executor: ExecutionContext, body: => T)
- extends Task {
+ // TODO rename appropriately and make public
+ private[concurrent] def isFutureThrowable(t: Throwable) = t match {
+ case e: Error => false
+ case t: scala.util.control.ControlThrowable => false
+ case i: InterruptedException => false
+ case _ => true
+ }
+
+ private[impl] class PromiseCompletingRunnable[T](body: => T)
+ extends Runnable {
val promise = new Promise.DefaultPromise[T]()
- protected override def task() = {
+ override def run() = {
promise complete {
try Right(body) catch {
case NonFatal(e) =>
@@ -62,90 +71,8 @@ private[concurrent] object Future {
}
def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
- val task = new PromiseCompletingTask(executor, body)
- task.dispatch()
-
- task.promise.future
- }
-
- private[impl] val throwableId: Throwable => Throwable = identity _
-
- // an optimization for batching futures
- // TODO we should replace this with a public queue,
- // so that it can be stolen from
- // OR: a push to the local task queue should be so cheap that this is
- // not even needed, but stealing is still possible
-
- private[impl] case class TaskStack(stack: Stack[Task], executor: ExecutionContext)
-
- private val _taskStack = new ThreadLocal[TaskStack]()
-
- private[impl] trait Task extends Runnable {
- def executor: ExecutionContext
-
- // run the original callback (no dispatch)
- protected def task(): Unit
-
- // we implement Runnable to avoid creating
- // an extra object. run() runs ourselves with
- // a TaskStack pushed, and then runs any
- // other tasks that show up in the stack.
- final override def run() = {
- try {
- val taskStack = TaskStack(Stack[Task](this), executor)
- _taskStack set taskStack
- while (taskStack.stack.nonEmpty) {
- val next = taskStack.stack.pop()
- require(next.executor eq executor)
- try next.task() catch { case NonFatal(e) => executor reportFailure e }
- }
- } finally {
- _taskStack.remove()
- }
- }
-
- // send the task to the running executor.execute() via
- // _taskStack, or start a new executor.execute()
- def dispatch(force: Boolean = false): Unit =
- _taskStack.get match {
- case stack if (stack ne null) && (executor eq stack.executor) && !force => stack.stack push this
- case _ => executor.execute(this)
- }
- }
-
- private[impl] class ReleaseTask(override val executor: ExecutionContext, val elems: List[Task])
- extends Task {
- protected override def task() = {
- _taskStack.get.stack.elems = elems
- }
- }
-
- private[impl] def releaseStack(executor: ExecutionContext): Unit =
- _taskStack.get match {
- case stack if (stack ne null) && stack.stack.nonEmpty =>
- val tasks = stack.stack.elems
- stack.stack.clear()
- _taskStack.remove()
- val release = new ReleaseTask(executor, tasks)
- release.dispatch(force=true)
- case null =>
- // do nothing - there is no local batching stack anymore
- case _ =>
- _taskStack.remove()
- }
-
- private[impl] class OnCompleteTask[T](override val executor: ExecutionContext, val onComplete: (Either[Throwable, T]) => Any)
- extends Task {
- private var value: Either[Throwable, T] = null
-
- protected override def task() = {
- require(value ne null) // dispatch(value) must be called before dispatch()
- onComplete(value)
- }
-
- def dispatch(value: Either[Throwable, T]): Unit = {
- this.value = value
- dispatch()
- }
+ val runnable = new PromiseCompletingRunnable(body)
+ executor.execute(runnable)
+ runnable.promise.future
}
}
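A user-level sketch of the same pattern the simplified `apply` now uses (the helper name `runOn` is made up for illustration): run the body on the executor inside a `Runnable` that completes a promise with the outcome.

  import scala.concurrent._
  import scala.util.control.NonFatal

  def runOn[T](body: => T)(implicit ec: ExecutionContext): Future[T] = {
    val p = promise[T]()
    ec.execute(new Runnable {
      def run() = p complete (try Right(body) catch { case NonFatal(t) => Left(t) })
    })
    p.future
  }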
diff --git a/src/library/scala/concurrent/impl/NonFatal.scala b/src/library/scala/concurrent/impl/NonFatal.scala
deleted file mode 100644
index bc509e664c..0000000000
--- a/src/library/scala/concurrent/impl/NonFatal.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-package impl
-
-/**
- * Extractor of non-fatal Throwables. Will not match fatal errors
- * like VirtualMachineError (OutOfMemoryError)
- * ThreadDeath, LinkageError and InterruptedException.
- * StackOverflowError is matched, i.e. considered non-fatal.
- *
- * Usage to catch all harmless throwables:
- * {{{
- * try {
- * // dangerous stuff
- * } catch {
- * case NonFatal(e) => log.error(e, "Something not that bad")
- * }
- * }}}
- */
-private[concurrent] object NonFatal {
-
- def unapply(t: Throwable): Option[Throwable] = t match {
- case e: StackOverflowError ⇒ Some(e) // StackOverflowError ok even though it is a VirtualMachineError
- // VirtualMachineError includes OutOfMemoryError and other fatal errors
- case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError ⇒ None
- case e ⇒ Some(e)
- }
-
-}
-
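The private extractor is superseded by the public `scala.util.control.NonFatal`; a minimal sketch of its use (the `parsePort` helper is illustrative):

  import scala.util.control.NonFatal

  def parsePort(s: String): Int =
    try s.toInt catch { case NonFatal(_) => 0 }   // fatal errors still propagate

  println(parsePort("8080"))   // 8080
  println(parsePort("oops"))   // 0, NumberFormatException is non-fatal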
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index c5060a2368..3ac34bef8a 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -11,11 +11,12 @@ package scala.concurrent.impl
import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
-import scala.concurrent.{ Awaitable, ExecutionContext, blocking, CanAwait, TimeoutException, ExecutionException }
+import scala.concurrent.{ Awaitable, ExecutionContext, blocking, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
//import scala.util.continuations._
import scala.concurrent.util.Duration
import scala.util
import scala.annotation.tailrec
+import scala.util.control.NonFatal
//import scala.concurrent.NonDeterministic
@@ -24,6 +25,21 @@ private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with Fu
def future: this.type = this
}
+private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: (Either[Throwable, T]) => Any) extends Runnable with OnCompleteRunnable {
+ // must be filled in before running it
+ var value: Either[Throwable, T] = null
+
+ override def run() = {
+ require(value ne null) // must set value to non-null before running!
+ try onComplete(value) catch { case NonFatal(e) => executor reportFailure e }
+ }
+
+ def executeWithValue(v: Either[Throwable, T]): Unit = {
+ require(value eq null) // can't complete it twice
+ value = v
+ executor.execute(this)
+ }
+}
object Promise {
@@ -94,10 +110,10 @@ object Promise {
val resolved = resolveEither(value)
(try {
@tailrec
- def tryComplete(v: Either[Throwable, T]): List[Future.OnCompleteTask[T]] = {
+ def tryComplete(v: Either[Throwable, T]): List[CallbackRunnable[T]] = {
getState match {
case raw: List[_] =>
- val cur = raw.asInstanceOf[List[Future.OnCompleteTask[T]]]
+ val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
if (updateState(cur, v)) cur else tryComplete(v)
case _ => null
}
@@ -107,19 +123,19 @@ object Promise {
synchronized { notifyAll() } //Notify any evil blockers
}) match {
case null => false
- case cs if cs.isEmpty => true
- case cs => cs.foreach(c => c.dispatch(resolved)); true
+ case rs if rs.isEmpty => true
+ case rs => rs.foreach(r => r.executeWithValue(resolved)); true
}
}
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val bound = new Future.OnCompleteTask[T](executor, func)
+ val runnable = new CallbackRunnable[T](executor, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
def dispatchOrAddCallback(): Unit =
getState match {
- case r: Either[_, _] => bound.dispatch(r.asInstanceOf[Either[Throwable, T]])
- case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
+ case r: Either[_, _] => runnable.executeWithValue(r.asInstanceOf[Either[Throwable, T]])
+ case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback()
}
dispatchOrAddCallback()
}
@@ -139,7 +155,7 @@ object Promise {
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
val completedAs = value.get
- (new Future.OnCompleteTask(executor, func)).dispatch(completedAs)
+ (new CallbackRunnable(executor, func)).executeWithValue(completedAs)
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index af2ab04576..4471e417d9 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -87,6 +87,11 @@ object BigInt {
def apply(x: String, radix: Int): BigInt =
new BigInt(new BigInteger(x, radix))
+ /** Translates a `java.math.BigInteger` into a BigInt.
+ */
+ def apply(x: BigInteger): BigInt =
+ new BigInt(x)
+
/** Returns a positive BigInt that is probably prime, with the specified bitLength.
*/
def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt =
@@ -96,9 +101,13 @@ object BigInt {
*/
implicit def int2bigInt(i: Int): BigInt = apply(i)
- /** Implicit conversion from long to BigInt
+ /** Implicit conversion from `Long` to `BigInt`.
*/
implicit def long2bigInt(l: Long): BigInt = apply(l)
+
+ /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
+ */
+ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x)
}
/**
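A short sketch of the new factory and implicit conversion (the literal is arbitrary):

  import java.math.BigInteger

  val j = new BigInteger("12345678901234567890")
  val viaFactory: BigInt  = BigInt(j)
  val viaImplicit: BigInt = j            // javaBigInteger2bigInt kicks in
  println(viaFactory + viaImplicit)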
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index e3890d7a9d..a41cdedfa9 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -9,6 +9,7 @@
/**
* Core Scala types. They are always available without an explicit import.
+ * @contentDiagram hideNodes "scala.Serializable"
*/
package object scala {
type Throwable = java.lang.Throwable
@@ -36,6 +37,9 @@ package object scala {
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
type serializable = annotation.serializable
+ @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
+ type cloneable = annotation.cloneable
+
type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
type Traversable[+A] = scala.collection.Traversable[A]
@@ -105,6 +109,15 @@ package object scala {
type PartialOrdering[T] = scala.math.PartialOrdering[T]
type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T]
+ type Either[+A, +B] = scala.util.Either[A, B]
+ val Either = scala.util.Either
+
+ type Left[+A, +B] = scala.util.Left[A, B]
+ val Left = scala.util.Left
+
+ type Right[+A, +B] = scala.util.Right[A, B]
+ val Right = scala.util.Right
+
// Annotations which we might move to annotation.*
/*
type SerialVersionUID = annotation.SerialVersionUID
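With the new aliases, `Either` and its cases resolve from the `scala` package without an import; a minimal sketch (the `half` helper is illustrative):

  def half(n: Int): Either[String, Int] =
    if (n % 2 == 0) Right(n / 2) else Left(n + " is odd")

  half(10) match {
    case Right(v)  => println("half = " + v)
    case Left(msg) => println(msg)
  }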
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index d89d31f689..f143bf8712 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -11,23 +11,12 @@ package scala.reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
-/** A `ClassManifest[T]` is an opaque descriptor for type `T`.
- * It is used by the compiler to preserve information necessary
- * for instantiating `Arrays` in those cases where the element type
- * is unknown at compile time.
- *
- * The type-relation operators make an effort to present a more accurate
- * picture than can be realized with erased types, but they should not be
- * relied upon to give correct answers. In particular they are likely to
- * be wrong when variance is involved or when a subtype has a different
- * number of type arguments than a supertype.
- */
-@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
-trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with Serializable {
- /** A class representing the type `U` to which `T` would be erased. Note
- * that there is no subtyping relationship between `T` and `U`. */
- def erasure: jClass[_]
- override def runtimeClass: jClass[_] = erasure
+@deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
+ self: ClassManifest[T] =>
+
+ @deprecated("Use runtimeClass instead", "2.10.0")
+ def erasure: jClass[_] = runtimeClass
private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
@@ -53,6 +42,7 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def <:<(that: ClassManifest[_]): Boolean = {
// All types which could conform to these types will override <:<.
def cannotMatch = {
@@ -86,6 +76,7 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
@@ -94,49 +85,47 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
case _ => false
}
- /** Tests whether the type represented by this manifest is equal to
- * the type represented by `that` manifest, subject to the limitations
- * described in the header.
- */
- override def equals(that: Any): Boolean = that match {
- case m: ClassManifest[_] => (m canEqual this) && (this.erasure == m.erasure)
- case _ => false
- }
- override def hashCode = this.erasure.##
-
protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
+ @deprecated("Use wrap instead", "2.10.0")
def arrayManifest: ClassManifest[Array[T]] =
ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
override def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+ @deprecated("Use wrap.newArray instead", "2.10.0")
def newArray2(len: Int): Array[Array[T]] =
java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
.asInstanceOf[Array[Array[T]]]
+ @deprecated("Use wrap.wrap.newArray instead", "2.10.0")
def newArray3(len: Int): Array[Array[Array[T]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
.asInstanceOf[Array[Array[Array[T]]]]
+ @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
.asInstanceOf[Array[Array[Array[Array[T]]]]]
+ @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
.asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
+ @deprecated("Create WrappedArray directly instead", "2.10.0")
def newWrappedArray(len: Int): WrappedArray[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
+ @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0")
def newArrayBuilder(): ArrayBuilder[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
def typeArguments: List[OptManifest[_]] = List()
protected def argString =
@@ -145,25 +134,33 @@ trait ClassManifest[T] extends OptManifest[T] with ClassTag[T] with Equals with
else ""
}
-/** The object `ClassManifest` defines factory methods for manifests.
+/** `ClassManifestFactory` defines factory methods for manifests.
* It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ *
+ * In a perfect world, we would just remove the @deprecated annotation from the `ClassManifest` object
+ * and then delete it in 2.11. After all, that object is explicitly marked as internal, so no one should use it.
+ * However, a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`,
+ * so we need to somehow nudge them into migrating before removing anything out of the blue.
+ * Hence we've introduced this design decision as the lesser of two evils.
*/
-@deprecated("Use `@scala.reflect.ClassTag` instead", "2.10.0")
-object ClassManifest {
- val Byte = Manifest.Byte
- val Short = Manifest.Short
- val Char = Manifest.Char
- val Int = Manifest.Int
- val Long = Manifest.Long
- val Float = Manifest.Float
- val Double = Manifest.Double
- val Boolean = Manifest.Boolean
- val Unit = Manifest.Unit
- val Any = Manifest.Any
- val Object = Manifest.Object
- val AnyVal = Manifest.AnyVal
- val Nothing = Manifest.Nothing
- val Null = Manifest.Null
+object ClassManifestFactory {
+ val Byte = ManifestFactory.Byte
+ val Short = ManifestFactory.Short
+ val Char = ManifestFactory.Char
+ val Int = ManifestFactory.Int
+ val Long = ManifestFactory.Long
+ val Float = ManifestFactory.Float
+ val Double = ManifestFactory.Double
+ val Boolean = ManifestFactory.Boolean
+ val Unit = ManifestFactory.Unit
+ val Any = ManifestFactory.Any
+ val Object = ManifestFactory.Object
+ val AnyVal = ManifestFactory.AnyVal
+ val Nothing = ManifestFactory.Nothing
+ val Null = ManifestFactory.Null
def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match {
case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
@@ -211,7 +208,7 @@ object ClassManifest {
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassManifest[T] {
- def erasure = clazz
+ override def runtimeClass = clazz
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -223,7 +220,7 @@ object ClassManifest {
*/
def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassManifest[T] {
- def erasure = upperbound.erasure
+ override def runtimeClass = upperbound.erasure
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -233,7 +230,7 @@ object ClassManifest {
* a top-level or static class */
private class ClassTypeManifest[T <: AnyRef](
prefix: Option[OptManifest[_]],
- val erasure: jClass[_],
+ val runtimeClass: jClass[_],
override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
{
override def toString =
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index f753dfbcbb..5255c44f10 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -19,18 +19,20 @@ import scala.runtime.ScalaRunTime.arrayClass
* @see [[scala.reflect.base.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
-trait ClassTag[T] extends Equals with Serializable {
+trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
- /** Returns a runtime class of type `T` */
+ /** A class representing the type `U` to which `T` would be erased.
+ * Note that there is no subtyping relationship between `T` and `U`.
+ */
def runtimeClass: jClass[_]
/** Produces a `ClassTag` that knows how to build `Array[Array[T]]` */
def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass))
/** Produces a new array with element type `T` and length `len` */
- def newArray(len: Int): Array[T] =
+ override def newArray(len: Int): Array[T] =
runtimeClass match {
case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]]
case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]]
@@ -52,7 +54,7 @@ trait ClassTag[T] extends Equals with Serializable {
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = if (runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
+ def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
/** case class accessories */
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
@@ -65,7 +67,6 @@ object ClassTag {
private val NothingTYPE = classOf[scala.runtime.Nothing$]
private val NullTYPE = classOf[scala.runtime.Null$]
private val ObjectTYPE = classOf[java.lang.Object]
- private val StringTYPE = classOf[java.lang.String]
val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte]{ def runtimeClass = java.lang.Byte.TYPE; private def readResolve() = ClassTag.Byte }
val Short : ClassTag[scala.Short] = new ClassTag[scala.Short]{ def runtimeClass = java.lang.Short.TYPE; private def readResolve() = ClassTag.Short }
@@ -78,11 +79,10 @@ object ClassTag {
val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit]{ def runtimeClass = java.lang.Void.TYPE; private def readResolve() = ClassTag.Unit }
val Any : ClassTag[scala.Any] = new ClassTag[scala.Any]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Any }
val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Object }
- val AnyVal : ClassTag[scala.AnyVal] = new ClassTag[scala.AnyVal]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.AnyVal }
- val AnyRef : ClassTag[scala.AnyRef] = new ClassTag[scala.AnyRef]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.AnyRef }
+ val AnyVal : ClassTag[scala.AnyVal] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyVal]]
+ val AnyRef : ClassTag[scala.AnyRef] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyRef]]
val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing]{ def runtimeClass = NothingTYPE; private def readResolve() = ClassTag.Nothing }
val Null : ClassTag[scala.Null] = new ClassTag[scala.Null]{ def runtimeClass = NullTYPE; private def readResolve() = ClassTag.Null }
- val String : ClassTag[java.lang.String] = new ClassTag[java.lang.String]{ def runtimeClass = StringTYPE; private def readResolve() = ClassTag.String }
def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
runtimeClass1 match {
@@ -96,7 +96,6 @@ object ClassTag {
case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
- case StringTYPE => ClassTag.String.asInstanceOf[ClassTag[T]]
case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
}
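A brief sketch of working against the slimmed-down `ClassTag` (the `pairArray` helper is illustrative): `runtimeClass` replaces the deprecated `erasure`, and `newArray` builds arrays of the right element type.

  import scala.reflect.ClassTag

  def pairArray[T](x: T, y: T)(implicit tag: ClassTag[T]): Array[T] = {
    val arr = tag.newArray(2)
    arr(0) = x; arr(1) = y
    arr
  }
  println(pairArray(1, 2).getClass.getComponentType)   // int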
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 7e320b42eb..9347f5b6bb 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -39,7 +39,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
*
*/
@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
-@deprecated("Use TypeTag instead", "2.10.0")
+@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -72,17 +72,19 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend
override val hashCode = System.identityHashCode(this)
}
-/** The object `Manifest` defines factory methods for manifests.
- * It is intended for use by the compiler and should not be used
- * in client code.
+/** `ManifestFactory` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `Manifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ * Why so complicated? See the comments on `ClassManifestFactory`.
*/
-@deprecated("Use TypeTag instead", "2.10.0")
-object Manifest {
+object ManifestFactory {
def valueManifests: List[AnyValManifest[_]] =
List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
- def erasure = java.lang.Byte.TYPE
+ def runtimeClass = java.lang.Byte.TYPE
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
@@ -90,7 +92,7 @@ object Manifest {
}
val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
- def erasure = java.lang.Short.TYPE
+ def runtimeClass = java.lang.Short.TYPE
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
@@ -98,7 +100,7 @@ object Manifest {
}
val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
- def erasure = java.lang.Character.TYPE
+ def runtimeClass = java.lang.Character.TYPE
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
@@ -106,7 +108,7 @@ object Manifest {
}
val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
- def erasure = java.lang.Integer.TYPE
+ def runtimeClass = java.lang.Integer.TYPE
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
@@ -114,7 +116,7 @@ object Manifest {
}
val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
- def erasure = java.lang.Long.TYPE
+ def runtimeClass = java.lang.Long.TYPE
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
@@ -122,7 +124,7 @@ object Manifest {
}
val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
- def erasure = java.lang.Float.TYPE
+ def runtimeClass = java.lang.Float.TYPE
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
@@ -130,7 +132,7 @@ object Manifest {
}
val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
- def erasure = java.lang.Double.TYPE
+ def runtimeClass = java.lang.Double.TYPE
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
@@ -138,7 +140,7 @@ object Manifest {
}
val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
- def erasure = java.lang.Boolean.TYPE
+ def runtimeClass = java.lang.Boolean.TYPE
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
@@ -146,7 +148,7 @@ object Manifest {
}
val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
- def erasure = java.lang.Void.TYPE
+ def runtimeClass = java.lang.Void.TYPE
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
@@ -180,7 +182,7 @@ object Manifest {
}
private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
- lazy val erasure = value.getClass
+ lazy val runtimeClass = value.getClass
override lazy val toString = value.toString + ".type"
}
@@ -217,7 +219,7 @@ object Manifest {
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
- val erasure: Predef.Class[_],
+ val runtimeClass: Predef.Class[_],
override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
override def toString =
(if (prefix.isEmpty) "" else prefix.get.toString+"#") +
@@ -233,7 +235,7 @@ object Manifest {
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = upperBound
+ def runtimeClass = upperBound
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
@@ -242,7 +244,7 @@ object Manifest {
*/
def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
new Manifest[T] {
- def erasure = upperBound.erasure
+ def runtimeClass = upperBound.erasure
override def toString =
"_" +
(if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
@@ -252,7 +254,7 @@ object Manifest {
/** Manifest for the intersection type `parents_0 with ... with parents_n`. */
def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = parents.head.erasure
+ def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
} \ No newline at end of file
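For illustration (not part of this patch): with `erasure` renamed to `runtimeClass` throughout the Manifest hierarchy, client code reads the underlying `Class` as in this minimal sketch against the 2.10 API.

    val mi = manifest[Int]                 // Predef.manifest, an AnyValManifest[Int]
    val ci: Class[_] = mi.runtimeClass     // java.lang.Integer.TYPE, formerly `mi.erasure`

    val ml = manifest[List[String]]
    val cl: Class[_] = ml.runtimeClass     // classOf[List[_]], type arguments erased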
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 7b8037272c..95b4ddca1c 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -10,7 +10,7 @@ package scala.reflect
/** One of the branches of an [[scala.reflect.OptManifest]].
*/
-@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+@deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need for opt manifests.", "2.10.0")
object NoManifest extends OptManifest[Nothing] with Serializable {
override def toString = "<?>"
} \ No newline at end of file
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 46f23c4e22..0ea66cb53d 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -14,5 +14,5 @@ package scala.reflect
*
* @author Martin Odersky
*/
-@deprecated("Use `@scala.reflect.TypeTag` instead", "2.10.0")
+@deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need for opt manifests.", "2.10.0")
trait OptManifest[+T] extends Serializable \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 490a9e8c03..a4e6256f4d 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -23,7 +23,7 @@ class Base extends Universe { self =>
new TermSymbol(this, name, flags)
def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = {
- val c = newClassSymbol(name.toTypeName, pos, flags)
+ val c = new ModuleClassSymbol(this, name.toTypeName, flags)
val m = new ModuleSymbol(this, name.toTermName, flags, c)
(m, c)
}
@@ -77,6 +77,8 @@ class Base extends Universe { self =>
class ClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
extends TypeSymbol(owner, name, flags) with ClassSymbolBase
+ class ModuleClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
+ extends ClassSymbol(owner, name, flags) { override def isModuleClass = true }
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
class FreeTermSymbol(owner: Symbol, name: TermName, flags: FlagSet)
@@ -93,11 +95,14 @@ class Base extends Universe { self =>
}
// todo. write a decent toString that doesn't crash on recursive types
- class Type extends TypeBase { def typeSymbol: Symbol = NoSymbol }
+ class Type extends TypeBase {
+ def typeSymbol: Symbol = NoSymbol
+ def termSymbol: Symbol = NoSymbol
+ }
implicit val TypeTagg = ClassTag[Type](classOf[Type])
- val NoType = new Type
- val NoPrefix = new Type
+ val NoType = new Type { override def toString = "NoType" }
+ val NoPrefix = new Type { override def toString = "NoPrefix" }
class SingletonType extends Type
implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
@@ -106,7 +111,7 @@ class Base extends Universe { self =>
object ThisType extends ThisTypeExtractor
implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
- case class SingleType(pre: Type, sym: Symbol) extends SingletonType
+ case class SingleType(pre: Type, sym: Symbol) extends SingletonType { override val termSymbol = sym }
object SingleType extends SingleTypeExtractor
implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
@@ -341,9 +346,9 @@ class Base extends Universe { self =>
class Mirror extends MirrorOf[self.type] {
val universe: self.type = self
- lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags)
+ lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags) { override def isModuleClass = true }
lazy val RootPackage = new ModuleSymbol(NoSymbol, nme.ROOT, NoFlags, RootClass)
- lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags)
+ lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags) { override def isModuleClass = true }
lazy val EmptyPackage = new ModuleSymbol(RootClass, nme.EMPTY_PACKAGE_NAME, NoFlags, EmptyPackageClass)
def staticClass(fullName: String): ClassSymbol =
@@ -420,7 +425,6 @@ class Base extends Universe { self =>
lazy val NullTpe = TypeRef(ScalaPrefix, NullClass, Nil)
lazy val ObjectTpe = TypeRef(JavaLangPrefix, ObjectClass, Nil)
lazy val AnyRefTpe = ObjectTpe
- lazy val StringTpe = TypeRef(JavaLangPrefix, StringClass, Nil)
private var nodeCount = 0 // not synchronized
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
index eff23b539e..2f270a5911 100644
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ b/src/library/scala/reflect/base/StandardDefinitions.scala
@@ -27,7 +27,6 @@ trait StandardTypes {
val NothingTpe: Type
val NullTpe: Type
- val StringTpe: Type
}
trait StandardDefinitions extends StandardTypes {
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
index 9404520073..ced1f33395 100644
--- a/src/library/scala/reflect/base/Symbols.scala
+++ b/src/library/scala/reflect/base/Symbols.scala
@@ -174,6 +174,12 @@ trait Symbols { self: Universe =>
*/
def isClass: Boolean = false
+ /** Does this symbol represent the definition of a class implicitly associated
+ * with an object definition (module class in Scala compiler parlance)?
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isModuleClass: Boolean = false
+
/** This symbol cast to a ClassSymbol representing a class or trait.
* Throws ClassCastException if `isClass` is false.
*/
@@ -244,6 +250,8 @@ trait Symbols { self: Universe =>
/** The base API that all module symbols support */
trait ModuleSymbolBase extends TermSymbolBase { this: ModuleSymbol =>
/** The class implicitly associated with the object definition.
+ * One can go back from a module class to the associated module symbol
+ * by inspecting its `selfType.termSymbol`.
*/
def moduleClass: Symbol // needed for tree traversals
// [Eugene++] when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
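For orientation (an editorial sketch, not part of this patch): the new `isModuleClass` test and the existing `moduleClass` link compose as below, assuming some universe `u` of this API and an already-resolved `ModuleSymbol` (how that symbol is looked up is left out).

    def moduleClassOf(u: scala.reflect.api.Universe)(m: u.ModuleSymbol): u.Symbol = {
      val cls = m.moduleClass     // the class implicitly associated with the object definition
      assert(cls.isModuleClass)   // distinguishes it from an ordinary class or trait
      assert(cls.isType)          // guaranteed by the contract documented above
      cls
    }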
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
index 158d1979e5..a9f0b60fd2 100644
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ b/src/library/scala/reflect/base/TagInterop.scala
@@ -4,17 +4,6 @@ package base
import scala.runtime.ScalaRunTime._
trait TagInterop { self: Universe =>
- def classTagToClassManifest[T](tag: ClassTag[T]): ClassManifest[T] = {
- val runtimeClass = tag.runtimeClass
- if (runtimeClass.isArray) {
- val elementClass = arrayElementClass(runtimeClass)
- val elementManifest = classTagToClassManifest(ClassTag(elementClass))
- ClassManifest.arrayType(elementManifest).asInstanceOf[ClassManifest[T]]
- } else {
- ClassManifest.fromClass(runtimeClass.asInstanceOf[Class[T]])
- }
- }
-
// [Eugene++] `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
// if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
index 774bc6ebea..05b1a079d7 100644
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ b/src/library/scala/reflect/base/TypeTags.scala
@@ -134,10 +134,11 @@ trait TypeTags { self: Universe =>
val Boolean : AbsTypeTag[scala.Boolean] = TypeTag.Boolean
val Unit : AbsTypeTag[scala.Unit] = TypeTag.Unit
val Any : AbsTypeTag[scala.Any] = TypeTag.Any
+ val AnyVal : AbsTypeTag[scala.AnyVal] = TypeTag.AnyVal
+ val AnyRef : AbsTypeTag[scala.AnyRef] = TypeTag.AnyRef
val Object : AbsTypeTag[java.lang.Object] = TypeTag.Object
val Nothing : AbsTypeTag[scala.Nothing] = TypeTag.Nothing
val Null : AbsTypeTag[scala.Null] = TypeTag.Null
- val String : AbsTypeTag[java.lang.String] = TypeTag.String
def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): AbsTypeTag[T] =
tpec1(mirror1) match {
@@ -151,10 +152,11 @@ trait TypeTags { self: Universe =>
case BooleanTpe => AbsTypeTag.Boolean.asInstanceOf[AbsTypeTag[T]]
case UnitTpe => AbsTypeTag.Unit.asInstanceOf[AbsTypeTag[T]]
case AnyTpe => AbsTypeTag.Any.asInstanceOf[AbsTypeTag[T]]
+ case AnyValTpe => AbsTypeTag.AnyVal.asInstanceOf[AbsTypeTag[T]]
+ case AnyRefTpe => AbsTypeTag.AnyRef.asInstanceOf[AbsTypeTag[T]]
case ObjectTpe => AbsTypeTag.Object.asInstanceOf[AbsTypeTag[T]]
case NothingTpe => AbsTypeTag.Nothing.asInstanceOf[AbsTypeTag[T]]
case NullTpe => AbsTypeTag.Null.asInstanceOf[AbsTypeTag[T]]
- case StringTpe => AbsTypeTag.String.asInstanceOf[AbsTypeTag[T]]
case _ => new AbsTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
}
@@ -197,10 +199,11 @@ trait TypeTags { self: Universe =>
val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
+ val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
+ val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
- val String: TypeTag[java.lang.String] = new PredefTypeTag[java.lang.String] (StringTpe, _.TypeTag.String)
def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): TypeTag[T] =
tpec1(mirror1) match {
@@ -214,10 +217,11 @@ trait TypeTags { self: Universe =>
case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
+ case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
+ case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case StringTpe => TypeTag.String.asInstanceOf[TypeTag[T]]
case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
}
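For illustration (not part of this patch): with `AnyVal` and `AnyRef` added to the predefined tags and `String` dropped, tag requests behave as sketched below against the runtime universe.

    import scala.reflect.runtime.{universe => ru}

    val tv: ru.TypeTag[AnyVal] = ru.typeTag[AnyVal]   // resolves to the shared TypeTag.AnyVal
    val tr: ru.TypeTag[AnyRef] = ru.typeTag[AnyRef]   // resolves to the shared TypeTag.AnyRef
    val ts: ru.TypeTag[String] = ru.typeTag[String]   // no longer special-cased; materialized like any other type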
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
index 6106e3fde7..6e8ffc7984 100644
--- a/src/library/scala/reflect/base/Types.scala
+++ b/src/library/scala/reflect/base/Types.scala
@@ -6,6 +6,11 @@ trait Types { self: Universe =>
/** The base API that all types support */
abstract class TypeBase {
+ /** The term symbol associated with the type, or `NoSymbol` for types
+ * that do not refer to a term symbol.
+ */
+ def termSymbol: Symbol
+
/** The type symbol associated with the type, or `NoSymbol` for types
* that do not refer to a type symbol.
*/
diff --git a/src/library/scala/reflect/compat.scala b/src/library/scala/reflect/compat.scala
deleted file mode 100644
index fc0e5fbf9c..0000000000
--- a/src/library/scala/reflect/compat.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// [Eugene++] delete this once we merge with trunk and have a working IDE
-
-package scala.reflect {
- trait ArrayTag[T]
- trait ErasureTag[T]
- trait ConcreteTypeTag[T]
-}
-
-package scala.reflect.api {
- trait TypeTags {
- trait TypeTag[T]
- trait ConcreteTypeTag[T]
- }
-}
-
-package scala {
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait reflect_compat {
- lazy val mirror: BaseUniverse = ???
- }
-}
-
-package scala.reflect {
- import language.experimental.macros
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait internal_compat {
- private[scala] def materializeArrayTag[T](u: BaseUniverse): ArrayTag[T] = ???
- private[scala] def materializeErasureTag[T](u: BaseUniverse): ErasureTag[T] = ???
- private[scala] def materializeConcreteTypeTag[T](u: BaseUniverse): ConcreteTypeTag[T] = ???
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/makro/internal/package.scala b/src/library/scala/reflect/makro/internal/package.scala
index d31a0f0d14..78cb0ffb10 100644
--- a/src/library/scala/reflect/makro/internal/package.scala
+++ b/src/library/scala/reflect/makro/internal/package.scala
@@ -9,7 +9,7 @@ import scala.reflect.base.{Universe => BaseUniverse}
//
// todo. once we have implicit macros for tag generation, we can remove these anchors
// [Eugene++] how do I hide this from scaladoc?
-package object internal extends scala.reflect.internal_compat {
+package object internal {
private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = macro ???
private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = macro ???
private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = macro ???
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 0ee58df2cd..9f9d4089c4 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,17 +1,54 @@
package scala
-package object reflect extends reflect_compat {
+package object reflect {
lazy val basis: base.Universe = new base.Base
+ // in the new scheme of things ClassManifests are aliased to ClassTags
+ // this is done because we want `toArray` in collections work with ClassTags
+ // but changing it to use the ClassTag context bound without aliasing ClassManifest
+ // will break everyone who subclasses and overrides `toArray`
+ // luckily for us, aliasing doesn't hamper backward compatibility, so it's ideal in this situation
+ // I wish we could do the same for Manifests and TypeTags though
+
+ // note, by the way, that we don't touch ClassManifest the object
+ // because its Byte, Short and so on factory fields are incompatible with ClassTag's
+
+ /** A `ClassManifest[T]` is an opaque descriptor for type `T`.
+ * It is used by the compiler to preserve information necessary
+ * for instantiating `Arrays` in those cases where the element type
+ * is unknown at compile time.
+ *
+ * The type-relation operators make an effort to present a more accurate
+ * picture than can be realized with erased types, but they should not be
+ * relied upon to give correct answers. In particular they are likely to
+ * be wrong when variance is involved or when a subtype has a different
+ * number of type arguments than a supertype.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ type ClassManifest[T] = scala.reflect.ClassTag[T]
+
+ /** The object `ClassManifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ val ClassManifest = ClassManifestFactory
+
+ /** The object `Manifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ @deprecated("Use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ val Manifest = ManifestFactory
+
def classTag[T](implicit ctag: ClassTag[T]) = ctag
// typeTag incantation is defined inside scala.reflect.basis and scala.reflect.runtime.universe
// ClassTag class is defined in ClassTag.scala
- type TypeTag[T] = scala.reflect.basis.TypeTag[T]
+ type TypeTag[T] = scala.reflect.basis.TypeTag[T]
// ClassTag object is defined in ClassTag.scala
- lazy val TypeTag = scala.reflect.basis.TypeTag
+ lazy val TypeTag = scala.reflect.basis.TypeTag
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
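For illustration (not part of this patch): because `ClassManifest` is now an alias for `ClassTag`, pre-2.10 generic code keeps compiling, and the context bound supplies exactly what array creation and `toArray` need (modulo a deprecation warning).

    import scala.reflect.ClassManifest    // deprecated alias for scala.reflect.ClassTag

    def firstAndLast[T: ClassManifest](xs: List[T]): Array[T] =
      if (xs.isEmpty) Array.empty[T]      // Array.empty needs a ClassTag; the alias provides it
      else Array(xs.head, xs.last)

    firstAndLast(List(1, 2, 3))           // Array(1, 3)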
diff --git a/src/library/scala/Either.scala b/src/library/scala/util/Either.scala
index b35d8a7c8a..1a2e2d48d5 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -8,7 +8,7 @@
-package scala
+package scala.util
import language.implicitConversions
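For illustration (not part of this patch): with `Either`, `Left` and `Right` (all defined in this file) now living under `scala.util`, explicit references pick up the new package.

    import scala.util.{Either, Left, Right}

    def parsePort(s: String): Either[String, Int] =
      try Right(s.toInt)
      catch { case _: NumberFormatException => Left("not an Int: " + s) }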
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 8faba236f0..988f68bc18 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -11,36 +11,77 @@ package scala.util
import collection.Seq
-
+import scala.util.control.NonFatal
/**
- * The `Try` type represents a computation that may either result in an exception,
- * or return a success value. It's analagous to the `Either` type.
+ * The `Try` type represents a computation that may either result in an exception, or return a
+ * successfully computed value. It's similar to, but semantically different from the [[scala.Either]] type.
+ *
+ * Instances of `Try[T]` are either an instance of [[scala.util.Success]][T] or [[scala.util.Failure]][T].
+ *
+ * For example, `Try` can be used to perform division on a user-defined input, without the need to do explicit
+ * exception-handling in all of the places that an exception might occur.
+ *
+ * Example:
+ * {{{
+ * import scala.util.{Try, Success, Failure}
+ *
+ * def divide: Try[Int] = {
+ * val dividend = Try(Console.readLine("Enter an Int that you'd like to divide:\n").toInt)
+ * val divisor = Try(Console.readLine("Enter an Int that you'd like to divide by:\n").toInt)
+ * val problem = dividend.flatMap(x => divisor.map(y => x/y))
+ * problem match {
+ * case Success(v) =>
+ * println("Result of " + dividend.get + "/"+ divisor.get +" is: " + v)
+ * Success(v)
+ * case Failure(e) =>
+ * println("You must've divided by zero or entered something that's not an Int. Try again!")
+ * println("Info from the exception: " + e.getMessage)
+ * divide
+ * }
+ * }
+ *
+ * }}}
+ *
+ * An important property of `Try` shown in the above example is its ability to ''pipeline'', or chain, operations,
+ * catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially
+ * pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated
+ * upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply
+ * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of
+ * default behavior in the case of failure.
+ *
+ * ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
+ * Serious system errors, on the other hand, will be thrown.
+ *
+ * `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack.
+ *
+ * @since 2.10
*/
sealed abstract class Try[+T] {
- /**
- * Returns true if the `Try` is a `Failure`, false otherwise.
+
+ /** Returns `true` if the `Try` is a `Failure`, `false` otherwise.
*/
def isFailure: Boolean
- /**
- * Returns true if the `Try` is a `Success`, false otherwise.
+ /** Returns `true` if the `Try` is a `Success`, `false` otherwise.
*/
def isSuccess: Boolean
- /**
- * Returns the value from this `Success` or the given argument if this is a `Failure`.
+ /** Returns the value from this `Success` or the given `default` argument if this is a `Failure`.
*/
def getOrElse[U >: T](default: => U) = if (isSuccess) get else default
- /**
- * Returns the value from this `Success` or throws the exception if this is a `Failure`.
+ /** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
+ */
+ def orElse[U >: T](default: => Try[U]) = if (isSuccess) this else default
+
+ /** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
def get: T
/**
- * Applies the given function f if this is a Result.
+ * Applies the given function `f` if this is a `Success`, otherwise returns `Unit` if this is a `Failure`.
*/
def foreach[U](f: T => U): Unit
@@ -54,39 +95,35 @@ sealed abstract class Try[+T] {
*/
def map[U](f: T => U): Try[U]
- def collect[U](pf: PartialFunction[T, U]): Try[U]
-
- def exists(p: T => Boolean): Boolean
-
/**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
def filter(p: T => Boolean): Try[T]
/**
- * Converts this to a `Failure` if the predicate is not satisfied.
- */
- def filterNot(p: T => Boolean): Try[T] = filter(x => !p(x))
-
- /**
- * Calls the exceptionHandler with the exception if this is a `Failure`. This is like `flatMap` for the exception.
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like `flatMap` for the exception.
*/
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U]
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
/**
- * Calls the exceptionHandler with the exception if this is a `Failure`. This is like map for the exception.
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like map for the exception.
*/
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U]
+ def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
def toOption = if (isSuccess) Some(get) else None
+ /**
+ * Returns an empty `Seq` (usually a `List`) if this is a `Failure` or a `Seq` containing the value if this is a `Success`.
+ */
def toSeq = if (isSuccess) Seq(get) else Seq()
/**
- * Returns the given function applied to the value from this Success or returns this if this is a `Failure`.
+ * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`.
* Alias for `flatMap`.
*/
def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
@@ -97,42 +134,76 @@ sealed abstract class Try[+T] {
*/
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
+ /**
+ * Completes this `Try` with an exception wrapped in a `Success`. The exception is either the exception that the
+ * `Try` failed with (if a `Failure`) or an `UnsupportedOperationException`.
+ */
def failed: Try[Throwable]
+
+ /** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
+ * `s` if this is a `Success`.
+ */
+ def transform[U](f: Throwable => Try[U], s: T => Try[U]): Try[U] = this match {
+ case Success(v) => s(v)
+ case Failure(e) => f(e)
+ }
+
}
+object Try {
+
+ implicit def try2either[T](tr: Try[T]): Either[Throwable, T] = {
+ tr match {
+ case Success(v) => Right(v)
+ case Failure(t) => Left(t)
+ }
+ }
-final class Failure[+T](val exception: Throwable) extends Try[T] {
- def isFailure: Boolean = true
- def isSuccess: Boolean = false
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = {
- try {
- if (rescueException.isDefinedAt(exception)) rescueException(exception) else this
- } catch {
- case e2 => Failure(e2)
+ implicit def either2try[T](ei: Either[Throwable, T]): Try[T] = {
+ ei match {
+ case Right(v) => Success(v)
+ case Left(t) => Failure(t)
+ }
+ }
+
+ def apply[T](r: => T): Try[T] = {
+ try { Success(r) } catch {
+ case NonFatal(e) => Failure(e)
}
}
+
+}
+
+final case class Failure[+T](val exception: Throwable) extends Try[T] {
+ def isFailure: Boolean = true
+ def isSuccess: Boolean = false
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
+ if (f.isDefinedAt(exception)) f(exception) else this
def get: T = throw exception
def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = Failure[U](exception)
def foreach[U](f: T => U): Unit = {}
def map[U](f: T => U): Try[U] = Failure[U](exception)
- def collect[U](pf: PartialFunction[T, U]): Try[U] = Failure[U](exception)
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
- if (rescueException.isDefinedAt(exception)) {
- Try(rescueException(exception))
- } else {
- this
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = {
+ try {
+ if (rescueException.isDefinedAt(exception)) {
+ Try(rescueException(exception))
+ } else {
+ this
+ }
+ } catch {
+ case NonFatal(e) => Failure(e)
}
- def exists(p: T => Boolean): Boolean = false
+ }
def failed: Try[Throwable] = Success(exception)
}
-final class Success[+T](value: T) extends Try[T] {
+final case class Success[+T](value: T) extends Try[T] {
def isFailure: Boolean = false
def isSuccess: Boolean = true
- def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
+ def rescue[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
def get = value
def flatMap[U](f: T => Try[U]): Try[U] =
try f(value)
@@ -142,43 +213,14 @@ final class Success[+T](value: T) extends Try[T] {
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
def foreach[U](f: T => U): Unit = f(value)
def map[U](f: T => U): Try[U] = Try[U](f(value))
- def collect[U](pf: PartialFunction[T, U]): Try[U] =
- if (pf isDefinedAt value) Success(pf(value))
- else Failure[U](new NoSuchElementException("Partial function not defined at " + value))
- def filter(p: T => Boolean): Try[T] =
- if (p(value)) this
- else Failure(new NoSuchElementException("Predicate does not hold for " + value))
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
- def exists(p: T => Boolean): Boolean = p(value)
- def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
-}
-
-object Failure {
- def apply[T](e: Throwable): Failure[T] = new Failure(e)
- def unapply(scrutinizee: Any): Option[Throwable] = scrutinizee match {
- case Right(_) => None
- case Left(e) => Some(e.asInstanceOf[Throwable])
- case s: Success[_] => None
- case f: Failure[_] => Some(f.exception)
- }
-}
-
-object Success {
- def apply[T](value: T): Success[T] = new Success(value)
- def unapply[T](scrutinizee: Any): Option[T] = scrutinizee match {
- case Right(v) => Some(v.asInstanceOf[T])
- case Left(_) => None
- case s: Success[_] => Some(s.get.asInstanceOf[T])
- case f: Failure[Throwable] => None
- }
-}
-
-object Try {
-
- def apply[T](r: => T): Try[T] = {
- try { Success(r) } catch {
- case e => Failure(e)
+ def filter(p: T => Boolean): Try[T] = {
+ try {
+ if (p(value)) this
+ else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch {
+ case NonFatal(e) => Failure(e)
}
}
-
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
+ def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
}
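For illustration (not part of this patch): besides the `divide` example in the Scaladoc above, the combinators and the new case-class extractors compose as in this small sketch.

    import scala.util.{Try, Success, Failure}

    def parsePort(s: String): Try[Int] = Try(s.trim.toInt)

    val port: Int = parsePort(sys.props.getOrElse("http.port", "not set"))
      .recover { case _: NumberFormatException => 8080 }   // map over the failure case
      .getOrElse(8080)                                     // unwrap, with a last-resort default

    parsePort("80a") match {                               // Success/Failure are now case classes
      case Success(p) => println("parsed port " + p)
      case Failure(e) => println("bad port: " + e.getMessage)
    }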
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
new file mode 100644
index 0000000000..5137f0f2f5
--- /dev/null
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -0,0 +1,45 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.control
+
+/**
+ * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
+ * (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
+ * However, `StackOverflowError` is matched, i.e. considered non-fatal.
+ *
+ * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
+ * `NonFatal` (and would therefore be thrown).
+ *
+ * For example, all harmless Throwables can be caught by:
+ * {{{
+ * try {
+ * // dangerous stuff
+ * } catch {
+ * case NonFatal(e) => log.error(e, "Something not that bad.")
+ * // or
+ * case e if NonFatal(e) => log.error(e, "Something not that bad.")
+ * }
+ * }}}
+ */
+object NonFatal {
+ /**
+ * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal
+ */
+ def apply(t: Throwable): Boolean = t match {
+ case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError
+ // VirtualMachineError includes OutOfMemoryError and other fatal errors
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false
+ case _ => true
+ }
+ /**
+ * Returns Some(t) if NonFatal(t) == true, otherwise None
+ */
+ def unapply(t: Throwable): Option[Throwable] = if (apply(t)) Some(t) else None
+}
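For illustration (not part of this patch): the split encoded in `apply` above plays out like this in a handler and as a plain predicate.

    import scala.util.control.NonFatal

    def readIntOr(default: Int)(s: String): Int =
      try s.trim.toInt
      catch { case NonFatal(_) => default }       // fatal errors still propagate

    assert(NonFatal(new StackOverflowError))      // explicitly treated as non-fatal here
    assert(!NonFatal(new OutOfMemoryError))       // a VirtualMachineError: fatal
    assert(!NonFatal(new InterruptedException))   // fatal by this definition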
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index 2223a6db0f..2aa9a99054 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -336,7 +336,6 @@ import ILGenerator._
emitSpecialLabel(Label.Try)
val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
excStack.push(Label.Try, endExc)
- return endExc
}
/** Begins a catch block. */
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
index 142f2baea5..de5354d4a0 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -81,9 +81,9 @@ abstract class ScaladocModelTest extends DirectTest {
private[this] var settings: Settings = null
// create a new scaladoc compiler
- def newDocFactory: DocFactory = {
+ private[this] def newDocFactory: DocFactory = {
settings = new Settings(_ => ())
- settings.reportModel = false // yaay, no more "model contains X documentable templates"!
+ settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 7f4ff8a7fb..27d3b8ba7d 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -10,12 +10,15 @@ trait Printers { self: Universe =>
protected var printTypes = false
protected var printIds = false
protected var printKinds = false
+ protected var printMirrors = false
def withTypes: this.type = { printTypes = true; this }
def withoutTypes: this.type = { printTypes = false; this }
def withIds: this.type = { printIds = true; this }
def withoutIds: this.type = { printIds = false; this }
def withKinds: this.type = { printKinds = true; this }
def withoutKinds: this.type = { printKinds = false; this }
+ def withMirrors: this.type = { printMirrors = true; this }
+ def withoutMirrors: this.type = { printMirrors = false; this }
}
case class BooleanFlag(val value: Option[Boolean])
@@ -25,13 +28,14 @@ trait Printers { self: Universe =>
implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
}
- protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String = {
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
val buffer = new StringWriter()
val writer = new PrintWriter(buffer)
var printer = mkPrinter(writer)
printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
printer.print(what)
writer.flush()
buffer.toString
@@ -40,42 +44,25 @@ trait Printers { self: Universe =>
/** By default trees are printed with `show` */
override protected def treeToString(tree: Tree) = show(tree)
- /** Renders a prettified representation of a tree.
+ /** Renders a prettified representation of a reflection artifact.
* Typically it looks very close to the Scala code it represents.
- * This function is used in Tree.toString.
*/
- def show(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
- render(tree, newTreePrinter(_), printTypes, printIds, printKinds)
+ def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
- /** Hook to define what `show(tree)` means.
+ /** Hook to define what `show(...)` means.
*/
def newTreePrinter(out: PrintWriter): TreePrinter
- /** Renders internal structure of a tree.
+ /** Renders internal structure of a reflection artifact.
*/
- def showRaw(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
- render(tree, newRawTreePrinter(_), printTypes, printIds, printKinds)
+ def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
- /** Hook to define what `showRaw(tree)` means.
+ /** Hook to define what `showRaw(...)` means.
*/
def newRawTreePrinter(out: PrintWriter): TreePrinter
- /** Renders a prettified representation of a symbol.
- */
- def show(sym: Symbol): String = sym.toString
-
- /** Renders internal structure of a symbol.
- */
- def showRaw(sym: Symbol): String = render(sym, newRawTreePrinter(_))
-
- /** Renders a prettified representation of a type.
- */
- def show(tpe: Type): String = tpe.toString
-
- /** Renders internal structure of a type.
- */
- def showRaw(tpe: Type): String = render(tpe, newRawTreePrinter(_))
-
/** Renders a prettified representation of a name.
*/
def show(name: Name): String
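For illustration (not part of this patch): since `show`/`showRaw` now accept any reflection artifact and grow a `printMirrors` flag, the same entry points cover trees, types and symbols; passing `Some(true)` goes through the `BooleanFlag` conversion shown above.

    import scala.reflect.runtime.{universe => ru}
    import ru._

    def dump(tree: Tree): String =
      showRaw(tree, printIds = Some(true), printKinds = Some(true))

    val rawType   = showRaw(typeOf[List[Int]])           // previously a dedicated overload
    val prettySym = show(typeOf[List[Int]].typeSymbol)   // likewise for symbols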
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index eb9921a31a..1d2888961b 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -232,7 +232,31 @@ trait Symbols extends base.Symbols { self: Universe =>
/** The overloaded alternatives of this symbol */
def alternatives: List[Symbol]
- def resolveOverloaded(pre: Type = NoPrefix, targs: Seq[Type] = List(), actuals: Seq[Type]): Symbol
+ /** Performs method overloading resolution. More precisely, resolves an overloaded TermSymbol
+ * to a single, non-overloaded TermSymbol that accepts the specified argument types.
+ * @param pre The prefix type, i.e. the type of the value the method is dispatched on.
+ * This is required when resolving references to type parameters of the type
+ * the method is declared in. For example if the method is declared in class `List[A]`,
+ * providing the prefix as `List[Int]` allows the overloading resolution to use
+ * `Int` instead of `A`.
+ * @param targs Type arguments that a candidate alternative must be able to accept. Candidates
+ * will be considered with these arguments substituted for their corresponding
+ * type parameters.
+ * @param posVargs Positional argument types that a candidate alternative must be able to accept.
+ * @param nameVargs Named argument types that a candidate alternative must be able to accept.
+ * Each element in the sequence should be a pair of a parameter name and an
+ * argument type.
+ * @param expected Return type that a candidate alternative has to be compatible with.
+ * @return Either a single, non-overloaded Symbol referring to the selected alternative
+ * or NoSymbol if no single member could be selected given the passed arguments.
+ */
+ def resolveOverloaded(
+ pre: Type = NoPrefix,
+ targs: Seq[Type] = List(),
+ posVargs: Seq[Type] = List(),
+ nameVargs: Seq[(TermName, Type)] = List(),
+ expected: Type = NoType
+ ): Symbol
}
/** The API of type symbols */
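For illustration (not part of this patch): a hedged sketch of calling the signature documented above, using ordinary runtime-universe lookups to obtain the overloaded symbol.

    import scala.reflect.runtime.{universe => ru}
    import ru._

    // Select the `+(x: Int): Int` alternative among Int's overloaded `+` methods.
    val plus   = typeOf[Int].member(newTermName("+")).asTermSymbol
    val chosen = plus.resolveOverloaded(pre = typeOf[Int], posVargs = List(typeOf[Int]))
    // `chosen` is a single non-overloaded symbol, or NoSymbol if nothing matches.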
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index b797c71f6d..231b9b248b 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -62,6 +62,10 @@ trait Types extends base.Types { self: Universe =>
* the empty list for all other types */
def typeParams: List[Symbol]
+ /** For a (nullary) method or poly type, its direct result type,
+ * the type itself for all other types. */
+ def resultType: Type
+
/** Is this type a type constructor that is missing its type arguments?
*/
def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
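For illustration (not part of this patch): a short sketch of the new `resultType`, again via standard runtime-universe lookups.

    import scala.reflect.runtime.{universe => ru}
    import ru._

    val toStringType = typeOf[Any].member(newTermName("toString")).typeSignature
    val res  = toStringType.resultType   // String, with the empty parameter list stripped
    val self = typeOf[Int].resultType    // non-method types: the type itself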
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 3bde57ded8..ad59605760 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -7,19 +7,21 @@ trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
class BuildImpl extends BuildBase {
- def selectType(owner: Symbol, name: String): TypeSymbol = {
- val result = owner.info.decl(newTypeName(name))
- if (result ne NoSymbol) result.asTypeSymbol
- else MissingRequirementError.notFound("type %s in %s".format(name, owner.fullName))
- }
+ def selectType(owner: Symbol, name: String): TypeSymbol =
+ select(owner, newTypeName(name)).asTypeSymbol
def selectTerm(owner: Symbol, name: String): TermSymbol = {
- val sym = owner.info.decl(newTermName(name))
- val result =
- if (sym.isOverloaded) sym.suchThat(!_.isMethod)
- else sym
- if (result ne NoSymbol) result.asTermSymbol
- else MissingRequirementError.notFound("term %s in %s".format(name, owner.fullName))
+ val result = select(owner, newTermName(name)).asTermSymbol
+ if (result.isOverloaded) result.suchThat(!_.isMethod).asTermSymbol
+ else result
+ }
+
+ private def select(owner: Symbol, name: Name): Symbol = {
+ val result = owner.info decl name
+ if (result ne NoSymbol) result
+ else
+ mirrorThatLoaded(owner).missingHook(owner, name) orElse
+ MissingRequirementError.notFound("%s %s in %s".format(if (name.isTermName) "term" else "type", name, owner.fullName))
}
def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 320cd3ddae..cd243b9df0 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -35,7 +35,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val AnyRefTpe = definitions.AnyRefClass.asType
lazy val NothingTpe = definitions.NothingClass.asType
lazy val NullTpe = definitions.NullClass.asType
- lazy val StringTpe = definitions.StringClass.asType
/** Since both the value parameter types and the result type may
* require access to the type parameter symbols, we model polymorphic
@@ -369,6 +368,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val SerializableClass = requiredClass[scala.Serializable]
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
+ lazy val CloneableClass = requiredClass[scala.Cloneable]
lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable]
lazy val JavaNumberClass = requiredClass[java.lang.Number]
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
@@ -454,10 +454,10 @@ trait Definitions extends api.StandardDefinitions {
def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
- lazy val PartialManifestClass = requiredClass[scala.reflect.ClassManifest[_]]
- lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifest.type]
+ lazy val PartialManifestClass = getMemberType(ReflectPackage, tpnme.ClassManifest)
+ lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
lazy val FullManifestClass = requiredClass[scala.reflect.Manifest[_]]
- lazy val FullManifestModule = requiredModule[scala.reflect.Manifest.type]
+ lazy val FullManifestModule = requiredModule[scala.reflect.ManifestFactory.type]
lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
@@ -497,6 +497,9 @@ trait Definitions extends api.StandardDefinitions {
def MacroInternal_materializeAbsTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeAbsTypeTag)
def MacroInternal_materializeTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeTypeTag)
+ lazy val StringContextClass = requiredClass[scala.StringContext]
+ def StringContext_f = getMemberMethod(StringContextClass, nme.f)
+
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
@@ -901,7 +904,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CloneableAttr = requiredClass[scala.cloneable]
+ lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val NativeAttr = requiredClass[scala.native]
@@ -1126,6 +1129,39 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a phantom class for which no runtime representation exists? */
lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
+ lazy val magicSymbols = List(
+ AnnotationDefaultAttr, // #2264
+ RepeatedParamClass,
+ JavaRepeatedParamClass,
+ ByNameParamClass,
+ AnyClass,
+ AnyRefClass,
+ AnyValClass,
+ NullClass,
+ NothingClass,
+ SingletonClass,
+ EqualsPatternClass,
+ Any_==,
+ Any_!=,
+ Any_equals,
+ Any_hashCode,
+ Any_toString,
+ Any_getClass,
+ Any_isInstanceOf,
+ Any_asInstanceOf,
+ Any_##,
+ Object_eq,
+ Object_ne,
+ Object_==,
+ Object_!=,
+ Object_##,
+ Object_synchronized,
+ Object_isInstanceOf,
+ Object_asInstanceOf,
+ String_+,
+ ComparableClass,
+ JavaSerializableClass
+ )
/** Is the symbol that of a parent which is added during parsing? */
lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
@@ -1189,41 +1225,7 @@ trait Definitions extends api.StandardDefinitions {
def init() {
if (isInitialized) return
-
- val forced = List( // force initialization of every symbol that is entered as a side effect
- AnnotationDefaultAttr, // #2264
- RepeatedParamClass,
- JavaRepeatedParamClass,
- ByNameParamClass,
- AnyClass,
- AnyRefClass,
- AnyValClass,
- NullClass,
- NothingClass,
- SingletonClass,
- EqualsPatternClass,
- Any_==,
- Any_!=,
- Any_equals,
- Any_hashCode,
- Any_toString,
- Any_getClass,
- Any_isInstanceOf,
- Any_asInstanceOf,
- Any_##,
- Object_eq,
- Object_ne,
- Object_==,
- Object_!=,
- Object_##,
- Object_synchronized,
- Object_isInstanceOf,
- Object_asInstanceOf,
- String_+,
- ComparableClass,
- JavaSerializableClass
- )
-
+ val forced = magicSymbols // force initialization of every symbol that is entered as a side effect
isInitialized = true
} //init
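For orientation (not part of this patch): the newly registered `StringContext_f` is the definitions-side hook for the `f` string interpolator macro; the user-facing feature it supports looks like this in standard 2.10 usage.

    val name   = "James"
    val height = 1.9d
    val msg    = f"$name%s is $height%2.2f meters tall"   // "James is 1.90 meters tall"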
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 37e5a23819..55fa00dd4d 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -135,7 +135,7 @@ class Flags extends ModifierFlags {
final val CAPTURED = 1 << 16 // variable is accessed from nested function. Set by LambdaLift.
final val LABEL = 1 << 17 // method symbol is a label. Set by TailCall
final val INCONSTRUCTOR = 1 << 17 // class symbol is defined in this/superclass constructor.
- final val SYNTHETIC = 1 << 21 // symbol is compiler-generated
+ final val SYNTHETIC = 1 << 21 // symbol is compiler-generated (compare with HIDDEN)
final val STABLE = 1 << 22 // functions that are assumed to be stable
// (typically, access methods for valdefs)
// or classes that do not contain abstract types.
@@ -165,6 +165,8 @@ class Flags extends ModifierFlags {
// A Java method's type is ``cooked'' by transforming raw types to existentials
final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
+ final val HIDDEN = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+
// ------- shift definitions -------------------------------------------------------
final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
@@ -174,6 +176,11 @@ class Flags extends ModifierFlags {
final val AntiShift = 56L
// Flags which sketchily share the same slot
+ // 16: BYNAMEPARAM/M CAPTURED COVARIANT/M
+ // 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
+ // 25: DEFAULTPARAM/M TRAIT/M
+ // 35: EXISTENTIAL MIXEDIN
+ // 37: IMPLCLASS PRESUPER/M
val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL | IMPLCLASS
// ------- late flags (set by a transformer phase) ---------------------------------
@@ -211,7 +218,7 @@ class Flags extends ModifierFlags {
/** To be a little clearer to people who aren't habitual bit twiddlers.
*/
final val AllFlags = -1L
-
+
/** These flags can be set when class or module symbol is first created.
* They are the only flags to survive a call to resetFlags().
*/
@@ -279,6 +286,16 @@ class Flags extends ModifierFlags {
/** Module flags inherited by their module-class */
final val ModuleToClassFlags = AccessFlags | TopLevelCreationFlags | CASE | SYNTHETIC
+ /** These flags are not pickled */
+ final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING
+
+ // A precaution against future additions to FlagsNotPickled turning out
+ // to be overloaded flags thus not-pickling more than intended.
+ assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled))
+
+ /** These flags are pickled */
+ final val PickledFlags = InitialFlags & ~FlagsNotPickled
+
def getterFlags(fieldFlags: Long): Long = ACCESSOR + (
if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
else fieldFlags & ~PRESUPER | STABLE
@@ -307,47 +324,45 @@ class Flags extends ModifierFlags {
private final val PKL_MASK = 0x00000FFF
- final val PickledFlags = 0xFFFFFFFFL
-
- private def rawPickledCorrespondence = Array(
- (IMPLICIT, IMPLICIT_PKL),
- (FINAL, FINAL_PKL),
+ /** Pickler correspondence, ordered roughly by frequency of occurrence */
+ private def rawPickledCorrespondence = Array[(Long, Long)](
+ (METHOD, METHOD_PKL),
(PRIVATE, PRIVATE_PKL),
+ (FINAL, FINAL_PKL),
(PROTECTED, PROTECTED_PKL),
- (SEALED, SEALED_PKL),
- (OVERRIDE, OVERRIDE_PKL),
(CASE, CASE_PKL),
- (ABSTRACT, ABSTRACT_PKL),
(DEFERRED, DEFERRED_PKL),
- (METHOD, METHOD_PKL),
(MODULE, MODULE_PKL),
- (INTERFACE, INTERFACE_PKL)
+ (OVERRIDE, OVERRIDE_PKL),
+ (INTERFACE, INTERFACE_PKL),
+ (IMPLICIT, IMPLICIT_PKL),
+ (SEALED, SEALED_PKL),
+ (ABSTRACT, ABSTRACT_PKL)
)
- private val rawFlags: Array[Int] = rawPickledCorrespondence map (_._1)
- private val pickledFlags: Array[Int] = rawPickledCorrespondence map (_._2)
-
- private def r2p(flags: Int): Int = {
- var result = 0
- var i = 0
- while (i < rawFlags.length) {
- if ((flags & rawFlags(i)) != 0)
- result |= pickledFlags(i)
-
- i += 1
- }
- result
- }
- private def p2r(flags: Int): Int = {
- var result = 0
- var i = 0
- while (i < rawFlags.length) {
- if ((flags & pickledFlags(i)) != 0)
- result |= rawFlags(i)
-
- i += 1
+
+ private val mappedRawFlags = rawPickledCorrespondence map (_._1)
+ private val mappedPickledFlags = rawPickledCorrespondence map (_._2)
+
+ private class MapFlags(from: Array[Long], to: Array[Long]) extends (Long => Long) {
+ val fromSet = (0L /: from) (_ | _)
+
+ def apply(flags: Long): Long = {
+ var result = flags & ~fromSet
+ var tobeMapped = flags & fromSet
+ var i = 0
+ while (tobeMapped != 0) {
+ if ((tobeMapped & from(i)) != 0) {
+ result |= to(i)
+ tobeMapped &= ~from(i)
+ }
+ i += 1
+ }
+ result
}
- result
}
+
+ val rawToPickledFlags: Long => Long = new MapFlags(mappedRawFlags, mappedPickledFlags)
+ val pickledToRawFlags: Long => Long = new MapFlags(mappedPickledFlags, mappedRawFlags)
// ------ displaying flags --------------------------------------------------------
@@ -462,18 +477,12 @@ class Flags extends ModifierFlags {
}
}
- def rawFlagsToPickled(flags: Long): Long =
- (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
-
- def pickledToRawFlags(pflags: Long): Long =
- (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
-
// List of the raw flags, in pickled order
final val MaxBitPosition = 62
final val pickledListOrder: List[Long] = {
val all = 0 to MaxBitPosition map (1L << _)
- val front = rawFlags map (_.toLong)
+ val front = mappedRawFlags map (_.toLong)
front.toList ++ (all filterNot (front contains _))
}
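For illustration (not part of this patch): a standalone, runnable restatement of the `MapFlags` bit-remapping technique above, with made-up flag positions; unmapped bits pass through untouched and each mapped bit is translated exactly once.

    object FlagRemapDemo {
      class MapFlags(from: Array[Long], to: Array[Long]) extends (Long => Long) {
        val fromSet = from.foldLeft(0L)(_ | _)
        def apply(flags: Long): Long = {
          var result     = flags & ~fromSet   // bits outside the mapped set are kept as-is
          var toBeMapped = flags & fromSet
          var i = 0
          while (toBeMapped != 0) {
            if ((toBeMapped & from(i)) != 0) { result |= to(i); toBeMapped &= ~from(i) }
            i += 1
          }
          result
        }
      }
      def main(args: Array[String]): Unit = {
        val raw     = Array(1L << 6, 1L << 2)   // hypothetical METHOD-like and PRIVATE-like bits
        val pickled = Array(1L << 0, 1L << 1)   // their pickled positions
        val r2p     = new MapFlags(raw, pickled)
        println(r2p((1L << 6) | (1L << 2)))     // prints 3: both bits land in pickled positions
      }
    }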
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index c7c0882209..7ead9d6a1b 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -92,6 +92,7 @@ trait HasFlags {
def isCaseAccessor = hasFlag(CASEACCESSOR)
def isDeferred = hasFlag(DEFERRED)
def isFinal = hasFlag(FINAL)
+ def isHidden = hasFlag(HIDDEN)
def isImplicit = hasFlag(IMPLICIT)
def isInterface = hasFlag(INTERFACE)
def isJavaDefined = hasFlag(JAVA)
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index e3680b14d5..210af661ee 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -41,7 +41,7 @@ trait Mirrors extends api.Mirrors {
if (result != NoSymbol) result
else {
if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug
- mirrorMissingHook(owner, name) orElse symbolTableMissingHook(owner, name) orElse {
+ thisMirror.missingHook(owner, name) orElse {
MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror)
}
}
@@ -51,6 +51,8 @@ trait Mirrors extends api.Mirrors {
protected def symbolTableMissingHook(owner: Symbol, name: Name): Symbol = self.missingHook(owner, name)
+ private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse symbolTableMissingHook(owner, name)
+
/** If you're looking for a class, pass a type name.
* If a module, a term name.
*/
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 82a8c42f7c..c018ddc88e 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -10,6 +10,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
+import compat.Platform.EOL
trait Printers extends api.Printers { self: SymbolTable =>
@@ -65,6 +66,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
printTypes = settings.printtypes.value
printIds = settings.uniqid.value
printKinds = settings.Yshowsymkinds.value
+ printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there
protected def doPrintPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
@@ -477,22 +479,61 @@ trait Printers extends api.Printers { self: SymbolTable =>
def flush = { /* do nothing */ }
}
- // provides footnotes for types
- private var typeCounter = 0
- private val typeMap = collection.mutable.WeakHashMap[Type, Int]()
-
def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+ // provides footnotes for types and mirrors
+ import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
+ private val footnoteIndex = new FootnoteIndex
+ private class FootnoteIndex {
+ private val index = Map[Class[_], WeakHashMap[Any, Int]]()
+ private def classIndex[T: ClassTag] = index.getOrElseUpdate(classTag[T].runtimeClass, WeakHashMap[Any, Int]())
+ private val counters = Map[Class[_], Int]()
+ private def nextCounter[T: ClassTag] = {
+ val clazz = classTag[T].runtimeClass
+ counters.getOrElseUpdate(clazz, 0)
+ counters(clazz) = counters(clazz) + 1
+ counters(clazz)
+ }
+
+ def mkFootnotes() = new Footnotes
+ class Footnotes {
+ private val footnotes = Map[Class[_], SortedSet[Int]]()
+ private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]())
+
+ def put[T: ClassTag](any: T): Int = {
+ val index = classIndex[T].getOrElseUpdate(any, nextCounter[T])
+ classFootnotes[T] += index
+ index
+ }
+
+ def get[T: ClassTag]: List[(Int, Any)] =
+ classFootnotes[T].toList map (fi => (fi, classIndex[T].find{ case (any, ii) => ii == fi }.get._1))
+
+ def print[T: ClassTag](printer: Printers.super.TreePrinter): Unit = {
+ val footnotes = get[T]
+ if (footnotes.nonEmpty) {
+ printer.print(EOL)
+ footnotes.zipWithIndex foreach {
+ case ((fi, any), ii) =>
+ printer.print("[", fi, "] ", any)
+ if (ii < footnotes.length - 1) printer.print(EOL)
+ }
+ }
+ }
+ }
+ }
+
// emits more or less verbatim representation of the provided tree
class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
private var depth = 0
- private var footnotes = collection.mutable.Map[Int, Type]()
- private var printingFootnotes = false
private var printTypesInFootnotes = true
+ private var printingFootnotes = false
+ private var footnotes = footnoteIndex.mkFootnotes()
def print(args: Any*): Unit = {
+ // don't print type footnotes if the argument is a mere type
if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
printTypesInFootnotes = false
@@ -544,14 +585,15 @@ trait Printers extends api.Printers { self: SymbolTable =>
else print(sym.name)
if (printIds) print("#", sym.id)
if (printKinds) print("#", sym.abbreviatedKindString)
+ if (printMirrors) print("%M", footnotes.put[MirrorOf[_]](mirrorThatLoaded(sym)))
case NoType =>
print("NoType")
case NoPrefix =>
print("NoPrefix")
- case tpe: Type if printTypesInFootnotes && !printingFootnotes =>
- val index = typeMap.getOrElseUpdate(tpe, { typeCounter += 1; typeCounter })
- footnotes(index) = tpe
- print("[", index, "]")
+ case tpe: Type =>
+ val defer = printTypesInFootnotes && !printingFootnotes
+ if (defer) print("[", footnotes.put(tpe), "]")
+ else printProduct(tpe.asInstanceOf[Product])
case mods: Modifiers =>
print("Modifiers(")
if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
@@ -569,16 +611,11 @@ trait Printers extends api.Printers { self: SymbolTable =>
out.print(arg)
}
depth -= 1
- if (depth == 0 && footnotes.nonEmpty && !printingFootnotes) {
+ if (depth == 0 && !printingFootnotes) {
printingFootnotes = true
- out.println()
- val typeIndices = footnotes.keys.toList.sorted
- typeIndices.zipWithIndex foreach {
- case (typeIndex, i) =>
- print("[" + typeIndex + "] ")
- print(footnotes(typeIndex))
- if (i < typeIndices.length - 1) out.println()
- }
+ footnotes.print[Type](this)
+ footnotes.print[MirrorOf[_]](this)
+ printingFootnotes = false
}
}
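
A minimal, self-contained model of the per-class footnote indexing introduced above (illustrative names, not this patch's code): each value gets a stable index the first time it is seen, and a printing run collects only the indices it actually referenced.

import scala.collection.mutable

object FootnoteSketch {
  private val index   = mutable.Map[Any, Int]()
  private var counter = 0
  private def indexOf(any: Any): Int =
    index.getOrElseUpdate(any, { counter += 1; counter })

  def main(args: Array[String]): Unit = {
    val referenced = mutable.SortedSet[Int]()
    def ref(any: Any): String = { val i = indexOf(any); referenced += i; s"[$i]" }

    val body = s"List${ref("List[Int]")} of Int${ref("Int")}, again${ref("Int")}"
    println(body)                                   // List[1] of Int[2], again[2]
    for (i <- referenced)
      println(s"[$i] " + index.find(_._2 == i).get._1)
  }
}
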
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 4ea9b27da9..60b3a6f436 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -4,9 +4,24 @@ package internal
trait StdAttachments {
self: SymbolTable =>
+ /**
+ * Common code between reflect-internal Symbol and Tree related to Attachments.
+ */
+ trait Attachable {
+ protected var rawatt: base.Attachments { type Pos = Position } = NoPosition
+ def attachments = rawatt
+ def addAttachment(attachment: Any): this.type = { rawatt = rawatt.add(attachment); this }
+ def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
+
+ // cannot be final due to SynchronizedSymbols
+ def pos: Position = rawatt.pos
+ def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
+ def setPos(newpos: Position): this.type = { pos = newpos; this }
+ }
+
case object BackquotedIdentifierAttachment
case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
case class MacroExpansionAttachment(original: Tree)
-} \ No newline at end of file
+}
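
A self-contained toy counterpart of the Attachable pattern factored out above (names are illustrative): a node carries an untyped bag of attachments that can be added and then removed by class.

import scala.reflect.{classTag, ClassTag}

// Toy version of the Attachable trait; not the reflect-internal implementation.
trait ToyAttachable {
  private var atts: List[Any] = Nil
  def attachments: List[Any] = atts
  def addAttachment(a: Any): this.type = { atts ::= a; this }
  def removeAttachment[T: ClassTag]: this.type = {
    atts = atts.filterNot(classTag[T].runtimeClass.isInstance(_)); this
  }
}

case class DocComment(text: String)

object AttachSketch {
  def main(args: Array[String]): Unit = {
    val node = new ToyAttachable {}
    node.addAttachment(DocComment("/** example */"))
    println(node.attachments)            // List(DocComment(/** example */))
    node.removeAttachment[DocComment]
    println(node.attachments)            // List()
  }
}
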
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index bd02013037..22b0908cab 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -232,6 +232,7 @@ trait StdNames {
final val Annotation: NameType = "Annotation"
final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
+ final val ClassManifest: NameType = "ClassManifest"
final val Enum: NameType = "Enum"
final val Group: NameType = "Group"
final val Tree: NameType = "Tree"
@@ -639,8 +640,8 @@ trait StdNames {
val bytes: NameType = "bytes"
val canEqual_ : NameType = "canEqual"
val checkInitialized: NameType = "checkInitialized"
+ val ClassManifestFactory: NameType = "ClassManifestFactory"
val classOf: NameType = "classOf"
- val classTagToClassManifest: NameType = "classTagToClassManifest"
val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
val conforms: NameType = "conforms"
val copy: NameType = "copy"
@@ -661,6 +662,7 @@ trait StdNames {
val eval: NameType = "eval"
val ex: NameType = "ex"
val experimental: NameType = "experimental"
+ val f: NameType = "f"
val false_ : NameType = "false"
val filter: NameType = "filter"
val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
@@ -696,6 +698,7 @@ trait StdNames {
val macroContext : NameType = "c"
val main: NameType = "main"
val manifest: NameType = "manifest"
+ val ManifestFactory: NameType = "ManifestFactory"
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 18adab7c68..5ae8f22c64 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -129,11 +129,15 @@ abstract class SymbolTable extends makro.Universe
// sigh, this has to be public or atPhase doesn't inline.
var phStack: List[Phase] = Nil
- private var ph: Phase = NoPhase
- private var per = NoPeriod
+ private[this] var ph: Phase = NoPhase
+ private[this] var per = NoPeriod
final def atPhaseStack: List[Phase] = phStack
- final def phase: Phase = ph
+ final def phase: Phase = {
+ if (Statistics.hotEnabled)
+ Statistics.incCounter(SymbolTableStats.phaseCounter)
+ ph
+ }
def atPhaseStackMessage = atPhaseStack match {
case Nil => ""
@@ -330,3 +334,7 @@ abstract class SymbolTable extends makro.Universe
*/
def isCompilerUniverse = false
}
+
+object SymbolTableStats {
+ val phaseCounter = Statistics.newCounter("#phase calls")
+}
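
A sketch of the hot-path counter pattern used for `phase` above, with illustrative names rather than the real Statistics API: the guard is a compile-time constant, so the counter costs nothing unless the flag is flipped and the library rebuilt.

object HotStats {
  final val hotEnabled = false                     // flip and rebuild to measure hot paths
  final class Counter(val name: String) { var value = 0L }
  @inline final def incCounter(c: Counter): Unit = if (hotEnabled) c.value += 1
}

object PhaseStats {
  val phaseCounter = new HotStats.Counter("#phase calls")
}

class PhaseHolder {
  private[this] var ph: Int = 0
  // the branch folds away when hotEnabled is the constant false
  def phase: Int = {
    if (HotStats.hotEnabled) HotStats.incCounter(PhaseStats.phaseCounter)
    ph
  }
}
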
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 4b0ceeb86b..04fa01c6f3 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -10,6 +10,7 @@ import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import util.Statistics
import Flags._
+import base.Attachments
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
@@ -82,71 +83,280 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def getAnnotations: List[AnnotationInfo] = { initialize; annotations }
def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
- private def lastElemType(ts: Seq[Type]): Type = ts.last.normalize.typeArgs.head
+ def resolveOverloaded(
+ pre: Type,
+ targs: Seq[Type],
+ posVargTypes: Seq[Type],
+ nameVargTypes: Seq[(TermName, Type)],
+ expected: Type
+ ): Symbol = {
+
+ // Begin Correlation Helpers
+
+ def isCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
+ case TypeRef(_, ByNameParamClass, List(res)) if !definitions.isByNameParamType(tp) =>
+ isCompatible(tp, res)
+ case _ =>
+ false
+ }
+ (tp <:< pt) || isCompatibleByName(tp, pt)
+ }
- private def formalTypes(formals: List[Type], nargs: Int): List[Type] = {
- val formals1 = formals mapConserve {
- case TypeRef(_, ByNameParamClass, List(arg)) => arg
- case formal => formal
+ def signatureAsSpecific(method1: MethodSymbol, method2: MethodSymbol): Boolean = {
+ (substituteTypeParams(method1), substituteTypeParams(method2)) match {
+ case (NullaryMethodType(r1), NullaryMethodType(r2)) =>
+ r1 <:< r2
+ case (NullaryMethodType(_), MethodType(_, _)) =>
+ true
+ case (MethodType(_, _), NullaryMethodType(_)) =>
+ false
+ case (MethodType(p1, _), MethodType(p2, _)) =>
+ val len = p1.length max p2.length
+ val sub = extend(p1 map (_.typeSignature), len)
+ val sup = extend(p2 map (_.typeSignature), len)
+ (sub corresponds sup)(isCompatible)
+ }
}
- if (isVarArgTypes(formals1)) {
- val ft = lastElemType(formals)
- formals1.init ::: List.fill(nargs - (formals1.length - 1))(ft)
- } else formals1
- }
-
- def resolveOverloaded(pre: Type, targs: Seq[Type], actuals: Seq[Type]): Symbol = {
- def firstParams(tpe: Type): (List[Symbol], List[Type]) = tpe match {
- case PolyType(tparams, restpe) =>
- val (Nil, formals) = firstParams(restpe)
- (tparams, formals)
- case MethodType(params, _) =>
- (Nil, params map (_.tpe))
- case _ =>
- (Nil, Nil)
+
+ def scopeMoreSpecific(method1: MethodSymbol, method2: MethodSymbol): Boolean = {
+ val o1 = method1.owner.asClassSymbol
+ val o2 = method2.owner.asClassSymbol
+ val c1 = if (o1.hasFlag(Flag.MODULE)) o1.companionSymbol else o1
+ val c2 = if (o2.hasFlag(Flag.MODULE)) o2.companionSymbol else o2
+ c1.typeSignature <:< c2.typeSignature
}
- def isApplicable(alt: Symbol, targs: List[Type], actuals: Seq[Type]) = {
- def isApplicableType(tparams: List[Symbol], tpe: Type): Boolean = {
- val (tparams, formals) = firstParams(pre memberType alt)
- val formals1 = formalTypes(formals, actuals.length)
- val actuals1 =
- if (isVarArgTypes(actuals)) {
- if (!isVarArgTypes(formals)) return false
- actuals.init :+ lastElemType(actuals)
- } else actuals
- if (formals1.length != actuals1.length) return false
-
- if (tparams.isEmpty) return (actuals1 corresponds formals1)(_ <:< _)
-
- if (targs.length == tparams.length)
- isApplicableType(List(), tpe.instantiateTypeParams(tparams, targs))
- else if (targs.nonEmpty)
- false
- else {
- val tvars = tparams map (TypeVar(_))
- (actuals1 corresponds formals1) { (actual, formal) =>
- val tp1 = actual.deconst.instantiateTypeParams(tparams, tvars)
- val pt1 = actual.instantiateTypeParams(tparams, tvars)
- tp1 <:< pt1
- } &&
- solve(tvars, tparams, List.fill(tparams.length)(COVARIANT), upper = false)
+
+ def moreSpecific(method1: MethodSymbol, method2: MethodSymbol): Boolean = {
+ def points(m1: MethodSymbol, m2: MethodSymbol) = {
+ val p1 = if (signatureAsSpecific(m1, m2)) 1 else 0
+ val p2 = if (scopeMoreSpecific(m1, m2)) 1 else 0
+ p1 + p2
+ }
+ points(method1, method2) > points(method2, method1)
+ }
+
+ def combineInto (
+ variadic: Boolean
+ )(
+ positional: Seq[Type],
+ named: Seq[(TermName, Type)]
+ )(
+ target: Seq[TermName],
+ defaults: Map[Int, Type]
+ ): Option[Seq[Type]] = {
+
+ val offset = positional.length
+ val unfilled = target.zipWithIndex drop offset
+ val canAcceptAllNameVargs = named forall { case (argName, _) =>
+ unfilled exists (_._1 == argName)
+ }
+
+ val paramNamesUnique = {
+ named.length == named.map(_._1).distinct.length
+ }
+
+ if (canAcceptAllNameVargs && paramNamesUnique) {
+
+ val rest = unfilled map { case (paramName, paramIndex) =>
+ val passedIn = named.collect {
+ case (argName, argType) if argName == paramName => argType
+ }.headOption
+ if (passedIn isDefined) passedIn
+ else defaults.get(paramIndex).map(_.asInstanceOf[Type])
}
+
+ val rest1 = {
+ if (variadic && !rest.isEmpty && !rest.last.isDefined) rest.init
+ else rest
+ }
+
+
+ if (rest1 forall (_.isDefined)) {
+ val joined = positional ++ rest1.map(_.get)
+ val repeatedCollapsed = {
+ if (variadic) {
+ val (normal, repeated) = joined.splitAt(target.length - 1)
+ if (repeated.forall(_ =:= repeated.head)) Some(normal ++ repeated.headOption)
+ else None
+ }
+ else Some(joined)
+ }
+ if (repeatedCollapsed.exists(_.length == target.length))
+ repeatedCollapsed
+ else if (variadic && repeatedCollapsed.exists(_.length == target.length - 1))
+ repeatedCollapsed
+ else None
+ } else None
+
+ } else None
+ }
+
+ // Begin Reflection Helpers
+
+ // Replaces a repeated parameter type at the end of the parameter list
+ // with a number of non-repeated parameter types in order to pad the
+ // list to be nargs in length
+ def extend(types: Seq[Type], nargs: Int): Seq[Type] = {
+ if (isVarArgTypes(types)) {
+ val repeatedType = types.last.normalize.typeArgs.head
+ types.init ++ Seq.fill(nargs - (types.length - 1))(repeatedType)
+ } else types
+ }
+
+ // Replaces by-name parameter types with their result type and normalizes
+ // primitive TypeRefs to the corresponding standard types
+ def unwrap(paramType: Type): Type = paramType match {
+ case TypeRef(_, IntClass, _) => typeOf[Int]
+ case TypeRef(_, LongClass, _) => typeOf[Long]
+ case TypeRef(_, ShortClass, _) => typeOf[Short]
+ case TypeRef(_, ByteClass, _) => typeOf[Byte]
+ case TypeRef(_, CharClass, _) => typeOf[Char]
+ case TypeRef(_, FloatClass, _) => typeOf[Float]
+ case TypeRef(_, DoubleClass, _) => typeOf[Double]
+ case TypeRef(_, BooleanClass, _) => typeOf[Boolean]
+ case TypeRef(_, UnitClass, _) => typeOf[Unit]
+ case TypeRef(_, NullClass, _) => typeOf[Null]
+ case TypeRef(_, AnyClass, _) => typeOf[Any]
+ case TypeRef(_, NothingClass, _) => typeOf[Nothing]
+ case TypeRef(_, AnyRefClass, _) => typeOf[AnyRef]
+ case TypeRef(_, ByNameParamClass, List(resultType)) => unwrap(resultType)
+ case t: Type => t
+ }
+
+ // Gives the names of the parameters to a method
+ def paramNames(signature: Type): Seq[TermName] = signature match {
+ case PolyType(_, resultType) => paramNames(resultType)
+ case MethodType(params, _) => params.map(_.name.asInstanceOf[TermName])
+ case NullaryMethodType(_) => Seq.empty
+ }
+
+ def valParams(signature: Type): Seq[TermSymbol] = signature match {
+ case PolyType(_, resultType) => valParams(resultType)
+ case MethodType(params, _) => params.map(_.asTermSymbol)
+ case NullaryMethodType(_) => Seq.empty
+ }
+
+ // Returns a map from parameter index to default argument type
+ def defaultTypes(method: MethodSymbol): Map[Int, Type] = {
+ val typeSig = substituteTypeParams(method)
+ val owner = method.owner
+ valParams(typeSig).zipWithIndex.filter(_._1.hasFlag(Flag.DEFAULTPARAM)).map { case(_, index) =>
+ val name = nme.defaultGetterName(method.name.decodedName, index + 1)
+ val default = owner.asType member name
+ index -> default.typeSignature.asInstanceOf[NullaryMethodType].resultType
+ }.toMap
+ }
+
+ // True if applying the method to the given arguments would fall back to at least one default value. False otherwise.
+ def usesDefault(method: MethodSymbol): Boolean = valParams(method.typeSignature) drop(posVargTypes).length exists { param =>
+ (param hasFlag Flag.DEFAULTPARAM) && nameVargTypes.forall { case (argName, _) =>
+ param.name != argName
+ }
+ }
+
+ // The number of type parameters that the method takes
+ def numTypeParams(x: MethodSymbol): Int = {
+ x.typeSignature.typeParams.length
+ }
+
+ def substituteTypeParams(m: MethodSymbol): Type = {
+ (pre memberType m) match {
+ case m: MethodType => m
+ case n: NullaryMethodType => n
+ case PolyType(tparams, rest) => rest.substituteTypes(tparams, targs.toList)
}
- isApplicableType(List(), pre.memberType(alt))
}
- def isAsGood(alt1: Symbol, alt2: Symbol): Boolean = {
- alt1 == alt2 ||
- alt2 == NoSymbol || {
- val (tparams, formals) = firstParams(pre memberType alt1)
- isApplicable(alt2, tparams map (_.tpe), formals)
+
+ // Begin Selection Helpers
+
+ def select(
+ alternatives: Seq[MethodSymbol],
+ filters: Seq[Seq[MethodSymbol] => Seq[MethodSymbol]]
+ ): Seq[MethodSymbol] =
+ filters.foldLeft(alternatives)((a, f) => {
+ if (a.size > 1) f(a) else a
+ })
+
+ // Drop alternatives that take the wrong number of type
+ // arguments.
+ val posTargLength: Seq[MethodSymbol] => Seq[MethodSymbol] = _.filter { alt =>
+ numTypeParams(alt) == targs.length
+ }
+
+ // Drop methods that are not applicable to the arguments
+ val applicable: Seq[MethodSymbol] => Seq[MethodSymbol] = _.filter { alt =>
+ // Note: combineInto returns None if the alternative is not applicable, and
+ // None.exists(_ => true) == false
+ val paramTypes =
+ valParams(substituteTypeParams(alt)).map(p => unwrap(p.typeSignature))
+ val variadic = isVarArgTypes(paramTypes)
+ val maybeArgTypes =
+ combineInto(variadic)(posVargTypes, nameVargTypes)(paramNames(alt.typeSignature), defaultTypes(alt))
+ maybeArgTypes exists { argTypes =>
+ if (isVarArgTypes(argTypes) && !isVarArgTypes(paramTypes)) false
+ else {
+ val a = argTypes
+ val p = extend(paramTypes, argTypes.length)
+ (a corresponds p)(_ <:< _)
}
+ }
}
- assert(isOverloaded)
- val applicables = alternatives filter (isApplicable(_, targs.toList, actuals))
- def winner(alts: List[Symbol]) =
- ((NoSymbol: Symbol) /: alts)((best, alt) => if (isAsGood(alt, best)) alt else best)
- val best = winner(applicables)
- if (best == winner(applicables.reverse)) best else NoSymbol
+
+ // Always prefer methods that don't need to use default
+ // arguments over those that do.
+ // e.g. when resolving foo(1), prefer def foo(x: Int) over
+ // def foo(x: Int, y: Int = 4)
+ val noDefaults: Seq[MethodSymbol] => Seq[MethodSymbol] =
+ _ filterNot usesDefault
+
+ // Try to select the most specific method. If that's not possible,
+ // return all of the candidates (this will likely cause an error
+ // higher up in the call stack)
+ val mostSpecific: Seq[MethodSymbol] => Seq[MethodSymbol] = { alts =>
+ val sorted = alts.sortWith(moreSpecific)
+ val mostSpecific = sorted.head
+ val agreeTest: MethodSymbol => Boolean =
+ moreSpecific(mostSpecific, _)
+ val disagreeTest: MethodSymbol => Boolean =
+ moreSpecific(_, mostSpecific)
+ if (!sorted.tail.forall(agreeTest)) {
+ mostSpecific +: sorted.tail.filterNot(agreeTest)
+ } else if (sorted.tail.exists(disagreeTest)) {
+ mostSpecific +: sorted.tail.filter(disagreeTest)
+ } else {
+ Seq(mostSpecific)
+ }
+ }
+
+ def finalResult(t: Type): Type = t match {
+ case PolyType(_, rest) => finalResult(rest)
+ case MethodType(_, result) => finalResult(result)
+ case NullaryMethodType(result) => finalResult(result)
+ case t: Type => t
+ }
+
+ // If a result type is given, drop alternatives that don't meet it
+ val resultType: Seq[MethodSymbol] => Seq[MethodSymbol] =
+ if (expected == NoType) identity
+ else _.filter { alt =>
+ finalResult(substituteTypeParams(alt)) <:< expected
+ }
+
+ def defaultFilteringOps =
+ Seq(posTargLength, resultType, applicable, noDefaults, mostSpecific)
+
+ // Begin Method Proper
+
+
+ val alts = alternatives.map(_.asMethodSymbol)
+
+ val selection = select(alts, defaultFilteringOps)
+
+ val knownApplicable = applicable(selection)
+
+ if (knownApplicable.size == 1) knownApplicable.head
+ else NoSymbol
}
}
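
The selection pipeline above follows a reusable shape; a minimal sketch (illustrative, not the compiler's code): apply each filter in turn, but only while more than one candidate survives.

object SelectSketch {
  def select[A](candidates: Seq[A], filters: Seq[Seq[A] => Seq[A]]): Seq[A] =
    filters.foldLeft(candidates)((remaining, f) =>
      if (remaining.size > 1) f(remaining) else remaining)

  def main(args: Array[String]): Unit = {
    val even: Seq[Int] => Seq[Int]  = _.filter(_ % 2 == 0)
    val large: Seq[Int] => Seq[Int] = _.filter(_ > 10)
    println(select(Seq(3, 12, 7, 20), Seq(even, large)))  // List(12, 20): still ambiguous
    println(select(Seq(3, 12, 7),     Seq(even, large)))  // List(12): unique winner
  }
}
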
@@ -154,7 +364,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
extends SymbolContextApiImpl
with HasFlags
- with Annotatable[Symbol] {
+ with Annotatable[Symbol]
+ with Attachable {
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
@@ -176,7 +387,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def rawowner = _rawowner
def rawflags = _rawflags
- private var rawpos = initPos
+ rawatt = initPos
val id = nextId() // identity displayed when -uniqid
//assert(id != 3390, initName)
@@ -189,8 +400,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def validTo = _validTo
def validTo_=(x: Period) { _validTo = x}
- def pos = rawpos
- def setPos(pos: Position): this.type = { this.rawpos = pos; this }
def setName(name: Name): this.type = { this.name = asNameType(name) ; this }
// Update the surrounding scopes
@@ -539,7 +748,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isConcreteClass = false
def isImplClass = false // the implementation class of a trait
def isJavaInterface = false
- def isModuleClass = false
def isNumericValueClass = false
def isPrimitiveValueClass = false
def isRefinementClass = false
@@ -645,6 +853,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def flags: Long = {
+ if (Statistics.hotEnabled) Statistics.incCounter(flagsCount)
val fs = _rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
@@ -748,13 +957,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this symbol an accessor method for outer? */
final def isOuterAccessor = {
- hasFlag(STABLE | SYNTHETIC) &&
+ hasFlag(STABLE | HIDDEN) &&
originalName == nme.OUTER
}
   /** Is this symbol the field holding the outer instance? */
final def isOuterField = {
- hasFlag(SYNTHETIC) &&
+ hasFlag(HIDDEN) &&
originalName == nme.OUTER_LOCAL
}
@@ -899,15 +1108,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (owner.isTerm) return false
if (isLocalDummy) return false
+ if (isAliasType) return true
if (isType && isNonClassType) return false
if (isRefinementClass) return false
return true
}
- // [Eugene] is it a good idea to add ``dealias'' to Symbol?
- /** Expands type aliases */
- def dealias: Symbol = this
-
/** The variance of this symbol as an integer */
final def variance: Int =
if (isCovariant) 1
@@ -936,7 +1142,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
- def owner: Symbol = rawowner
+ def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
+ rawowner
+ }
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
@@ -1433,24 +1643,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def classBound: Type = {
val tp = refinedType(info.parents, owner)
- val thistp = tp.typeSymbol.thisType
- val oldsymbuf = new ListBuffer[Symbol]
- val newsymbuf = new ListBuffer[Symbol]
- for (sym <- info.decls) {
- // todo: what about public references to private symbols?
- if (sym.isPublic && !sym.isConstructor) {
- oldsymbuf += sym
- newsymbuf += (
- if (sym.isClass)
- tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound)
- else
- sym.cloneSymbol(tp.typeSymbol))
+ // SI-4589 refinedType only creates a new refinement class symbol before erasure; afterwards
+ // the first parent class is returned, to which we must not add members.
+ if (!phase.erasedTypes) {
+ val thistp = tp.typeSymbol.thisType
+ val oldsymbuf = new ListBuffer[Symbol]
+ val newsymbuf = new ListBuffer[Symbol]
+ for (sym <- info.decls) {
+ // todo: what about public references to private symbols?
+ if (sym.isPublic && !sym.isConstructor) {
+ oldsymbuf += sym
+ newsymbuf += (
+ if (sym.isClass)
+ tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound)
+ else
+ sym.cloneSymbol(tp.typeSymbol))
+ }
+ }
+ val oldsyms = oldsymbuf.toList
+ val newsyms = newsymbuf.toList
+ for (sym <- newsyms) {
+ addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms)))
}
- }
- val oldsyms = oldsymbuf.toList
- val newsyms = newsymbuf.toList
- for (sym <- newsyms) {
- addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms)))
}
tp
}
@@ -1607,6 +1821,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
+ this.attachments.all.foreach(clone.addAttachment)
if (clone.thisSym != clone)
clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
@@ -2024,10 +2239,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
base.info.decl(sname) filter (_.hasAccessorFlag)
}
- /** Return the accessor method of the first parameter of this class.
+ /** If this is a derived value class, return its unbox method
* or NoSymbol if it does not exist.
*/
- def firstParamAccessor: Symbol = NoSymbol
+ def derivedValueClassUnbox: Symbol = NoSymbol
/** The case module corresponding to this case class
* @pre case class is a member of some other class or package
@@ -2324,7 +2539,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawname: TermName = initName
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
def name_=(name: Name) {
if (name != rawname) {
log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
@@ -2493,11 +2711,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def companionClass =
flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
- override def owner = (
+ override def owner = {
+ Statistics.incCounter(ownerCount)
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
- )
- override def name: TermName = (
+ }
+ override def name: TermName = {
+ Statistics.incCounter(nameCount)
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
@@ -2505,7 +2725,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
}
implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
@@ -2550,7 +2770,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
final override def isAliasType = true
- final override def dealias = info.typeSymbol.dealias
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
owner.newNonClassSymbol(name, pos, newFlags)
}
@@ -2576,7 +2795,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// cloneSymbolImpl still abstract in TypeSymbol.
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
@@ -2888,10 +3110,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thisTypeCache
}
- override def owner: Symbol =
+ override def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
if (needsFlatClasses) rawowner.owner else rawowner
+ }
- override def name: TypeName = (
+ override def name: TypeName = {
+ Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
@@ -2899,7 +3124,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2921,8 +3146,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
clone
}
- override def firstParamAccessor =
- info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse NoSymbol
+ override def derivedValueClassUnbox =
+ (info.decl(nme.unbox)) orElse
+ (info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse
+ NoSymbol)
private[this] var childSet: Set[Symbol] = Set()
override def children = childSet
@@ -3194,4 +3421,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
object SymbolsStats {
val typeSymbolCount = Statistics.newCounter("#type symbols")
val classSymbolCount = Statistics.newCounter("#class symbols")
+ val flagsCount = Statistics.newCounter("#flags ops")
+ val ownerCount = Statistics.newCounter("#owner ops")
+ val nameCount = Statistics.newCounter("#name ops")
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index dd13dd4c4c..e92d644f4a 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -15,20 +15,13 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[scala] var nodeCount = 0
- abstract class Tree extends TreeContextApiImpl with Product {
+ abstract class Tree extends TreeContextApiImpl with Attachable with Product {
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
Statistics.incCounter(TreesStats.nodeByType, getClass)
- @inline final def pos: Position = rawatt.pos
- def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
- def setPos(newpos: Position): this.type = { pos = newpos; this }
-
- private var rawatt: Attachments { type Pos = Position } = NoPosition
- def attachments = rawatt
- def addAttachment(attachment: Any): this.type = { rawatt = rawatt.add(attachment); this }
- def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
+ @inline final override def pos: Position = rawatt.pos
private[this] var rawtpe: Type = _
@inline final def tpe = rawtpe
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index d4b895bcb4..f3dd1f03ad 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -66,7 +66,7 @@ import util.Statistics
// inst is the instantiation and constr is a list of bounds.
case DeBruijnIndex(level, index)
// for dependent method types: a type referring to a method parameter.
- case ErasedValueType(tp)
+ case ErasedValueType(clazz, underlying)
// only used during erasure of derived value classes.
*/
@@ -568,6 +568,24 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
+ def etaExpand: Type = this
+
+ /** Performs a single step of beta-reduction on types.
+ * Given:
+ *
+ * type C[T] = B[T]
+ * type B[T] = A
+ * class A
+ *
+ * The following will happen after `betaReduce` is invoked:
+ * TypeRef(pre, <C>, List(Int)) is replaced by
+ * TypeRef(pre, <B>, List(Int))
+ *
+ * Unlike `dealias`, which recursively applies beta reduction until it's stuck,
+ * `betaReduce` performs exactly one step and then returns.
+ */
+ def betaReduce: Type = this
+
/** For a classtype or refined type, its defined or declared members;
* inherited by subtypes and typerefs.
* The empty scope for all other types.
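
A toy model of the one-step versus full expansion contrast documented for `betaReduce` above (illustrative, not the real Type hierarchy):

object BetaReduceSketch {
  sealed trait Tpe
  case class Alias(name: String, rhs: Tpe) extends Tpe
  case class Concrete(name: String) extends Tpe

  def betaReduce(t: Tpe): Tpe = t match {
    case Alias(_, rhs) => rhs            // a single expansion step
    case other         => other
  }

  @annotation.tailrec
  def dealias(t: Tpe): Tpe = t match {
    case Alias(_, rhs) => dealias(rhs)   // expand until no alias remains
    case other         => other
  }

  def main(args: Array[String]): Unit = {
    val a = Concrete("A")
    val b = Alias("B", a)                // type B = A
    val c = Alias("C", b)                // type C = B (type args elided here)
    assert(betaReduce(c) == b)           // C -> B
    assert(dealias(c) == a)              // C -> B -> A
  }
}
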
@@ -675,20 +693,21 @@ trait Types extends api.Types { self: SymbolTable =>
* = Int
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type = {
- if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
- else {
-// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
- Statistics.incCounter(asSeenFromCount)
- val start = Statistics.startTimer(asSeenFromNanos)
- val m = new AsSeenFromMap(pre.normalize, clazz)
- val tp = m apply this
- val tp1 = existentialAbstraction(m.capturedParams, tp)
- val result: Type =
+ TypesStats.timedTypeOp(asSeenFromNanos) {
+ val trivial = (
+ this.isTrivial
+ || phase.erasedTypes && pre.typeSymbol != ArrayClass
+ || pre.normalize.isTrivial && !isPossiblePrefix(clazz)
+ )
+ if (trivial) this
+ else {
+ val m = new AsSeenFromMap(pre.normalize, clazz)
+ val tp = m(this)
+ val tp1 = existentialAbstraction(m.capturedParams, tp)
+
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
-
- Statistics.stopTimer(asSeenFromNanos, start)
- result
+ }
}
}
@@ -826,12 +845,12 @@ trait Types extends api.Types { self: SymbolTable =>
def stat_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -839,12 +858,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def weak_<:<(that: Type): Boolean = {
Statistics.incCounter(subtypeCount)
- val start = Statistics.startTimer(subtypeNanos)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- Statistics.stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -1018,7 +1037,7 @@ trait Types extends api.Types { self: SymbolTable =>
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
Statistics.incCounter(findMemberCount)
- val start = Statistics.startTimer(findMemberNanos)
+ val start = Statistics.pushTimer(typeOpsStack, findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1045,7 +1064,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1091,7 +1110,7 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- Statistics.stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
if (member == NoSymbol) Statistics.incCounter(noMemberCount)
@@ -1271,7 +1290,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** A class for this-types of the form <sym>.this.type
*/
abstract case class ThisType(sym: Symbol) extends SingletonType with ThisTypeApi {
- assert(sym.isClass)
+ assert(sym.isClass, sym)
//assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
override def isTrivial: Boolean = sym.isPackageClass
override def isNotNull = true
@@ -1534,11 +1553,17 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
Statistics.incCounter(compoundBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
- else
- compoundBaseTypeSeq(tpe)
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache =
+ if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
// [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
// when compiling with
// scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
@@ -2104,7 +2129,7 @@ trait Types extends api.Types { self: SymbolTable =>
//
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
- def betaReduce = transform(sym.info.resultType)
+ override def betaReduce = transform(sym.info.resultType)
// #3731: return sym1 for which holds: pre bound sym.name to sym and
// pre1 now binds sym.name to sym1, conceptually exactly the same
@@ -2183,6 +2208,8 @@ trait Types extends api.Types { self: SymbolTable =>
* @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty
*/
abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type with TypeRefApi {
+ override val isTrivial: Boolean = !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
+
private[reflect] var parentsCache: List[Type] = _
private[reflect] var parentsPeriod = NoPeriod
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
@@ -2215,7 +2242,7 @@ trait Types extends api.Types { self: SymbolTable =>
|| pre.isGround && args.forall(_.isGround)
)
- def etaExpand: Type = {
+ override def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
if (tpars.isEmpty) this
@@ -2245,9 +2272,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeSymbol = sym
override def typeSymbolDirect = sym
- override lazy val isTrivial: Boolean =
- !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
-
override def isNotNull =
sym.isModuleClass || sym == NothingClass || (sym isNonBottomSubClass NotNullClass) || super.isNotNull
@@ -2390,8 +2414,13 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
Statistics.incCounter(typerefBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
}
}
if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
@@ -2406,11 +2435,15 @@ trait Types extends api.Types { self: SymbolTable =>
*/
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type with MethodTypeApi {
- override def isTrivial: Boolean = isTrivial0 && (resultType eq resultType.withoutAnnotations)
- private lazy val isTrivial0 =
- resultType.isTrivial && params.forall{p => p.tpe.isTrivial && (
- !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
- }
+
+ override lazy val isTrivial: Boolean =
+ isTrivialResult && (params forall isTrivialParam)
+
+ private def isTrivialResult =
+ resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
+
+ private def isTrivialParam(p: Symbol) =
+ p.tpe.isTrivial && !(params.exists(_.tpe contains p) || (resultType contains p))
def isImplicit = params.nonEmpty && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
@@ -3182,8 +3215,7 @@ trait Types extends api.Types { self: SymbolTable =>
override protected def rewrap(tp: Type) = copy(underlying = tp)
- override def isTrivial: Boolean = isTrivial0
- private lazy val isTrivial0 = underlying.isTrivial && annotations.forall(_.isTrivial)
+ override def isTrivial: Boolean = underlying.isTrivial && annotations.forall(_.isTrivial)
override def safeToString = annotations.mkString(underlying + " @", " @", "")
@@ -3276,16 +3308,21 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- abstract case class ErasedValueType(sym: Symbol) extends Type {
- override def safeToString = sym.name+"$unboxed"
+ /** A temporary type representing the erasure of a user-defined value type.
+ * Created during phase erasure, eliminated again in posterasure.
+ * @param sym The value class symbol
+ * @param underlying The underlying type before erasure
+ */
+ abstract case class ErasedValueType(original: TypeRef) extends Type {
+ override def safeToString = "ErasedValueType("+original+")"
}
- final class UniqueErasedValueType(sym: Symbol) extends ErasedValueType(sym) with UniqueType
+ final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original) with UniqueType
object ErasedValueType {
- def apply(sym: Symbol): Type = {
- assert(sym ne NoSymbol, "ErasedValueType cannot be NoSymbol")
- unique(new UniqueErasedValueType(sym))
+ def apply(original: TypeRef): Type = {
+ assert(original.sym ne NoSymbol, "ErasedValueType over NoSymbol")
+ unique(new UniqueErasedValueType(original))
}
}
@@ -4221,67 +4258,58 @@ trait Types extends api.Types { self: SymbolTable =>
def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
+ /** Might the given symbol be important when calculating the prefix
+ * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+ * the result will be `tp` unchanged if `pre` is trivial and `clazz`
+ * is a symbol such that isPossiblePrefix(clazz) == false.
+ */
+ def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
/** A map to compute the asSeenFrom method */
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
- var capturedPre = emptySymMap
+ @inline private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
+ private def canRewriteThis(sym: Symbol) = (
+ (sym isNonBottomSubClass clazz)
+ && (pre.widen.typeSymbol isNonBottomSubClass sym)
+ && (pre.isStable || giveup())
+ )
+ // what symbol should really be used?
+ private def newTermSym() = {
+ val p = pre.typeSymbol
+ p.owner.newValue(p.name.toTermName, p.pos) setInfo pre
+ }
/** Rewrite `This` trees in annotation argument trees */
- def rewriteThis(tree: Tree): Tree =
- tree match {
- case This(_)
- if (tree.symbol isNonBottomSubClass clazz) &&
- (pre.widen.typeSymbol isNonBottomSubClass tree.symbol) =>
- if (pre.isStable) { // XXX why is this in this method? pull it out and guard the call `annotationArgRewriter.transform(tree)`?
- val termSym = (
- pre.typeSymbol.owner.newValue(pre.typeSymbol.name.toTermName, pre.typeSymbol.pos) // what symbol should really be used?
- setInfo pre
- )
- gen.mkAttributedQualifier(pre, termSym)
- } else
- giveup()
-
- case tree => tree
- }
-
- override def transform(tree: Tree): Tree = {
- val tree1 = rewriteThis(super.transform(tree))
- tree1
+ override def transform(tree: Tree): Tree = super.transform(tree) match {
+ case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym())
+ case tree => tree
}
}
-
annotationArgRewriter.transform(tree)
}
- def stabilize(pre: Type, clazz: Symbol): Type =
- capturedPre.getOrElse(clazz, {
- val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
- capturedPre += (clazz -> qvar)
- capturedParams = qvar :: capturedParams
- qvar
- }).tpe
-
- /** Return `pre.baseType(clazz)`, or if that's `NoType` and `clazz` is a refinement, `pre` itself.
- * See bug397.scala for an example where the second alternative is needed.
- * The problem is that when forming the base type sequence of an abstract type,
- * any refinements in the base type list might be regenerated, and thus acquire
- * new class symbols. However, since refinements always have non-interesting prefixes
- * it looks OK to me to just take the prefix directly. */
- def base(pre: Type, clazz: Symbol) = {
- val b = pre.baseType(clazz)
- if (b == NoType && clazz.isRefinementClass) pre
- else b
+ def stabilize(pre: Type, clazz: Symbol): Type = {
+ capturedParams find (_.owner == clazz) match {
+ case Some(qvar) => qvar.tpe
+ case _ =>
+ val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ capturedParams ::= qvar
+ qvar.tpe
+ }
}
def apply(tp: Type): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
+ if (skipPrefixOf(pre, clazz)) tp
else tp match {
case ThisType(sym) =>
def toPrefix(pre: Type, clazz: Symbol): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
+ if (skipPrefixOf(pre, clazz)) tp
else if ((sym isNonBottomSubClass clazz) &&
(pre.widen.typeSymbol isNonBottomSubClass sym)) {
val pre1 = pre match {
@@ -4296,7 +4324,7 @@ trait Types extends api.Types { self: SymbolTable =>
pre1
}
} else {
- toPrefix(base(pre, clazz).prefix, clazz.owner)
+ toPrefix(pre.baseType(clazz).prefix, clazz.owner)
}
toPrefix(pre, clazz)
case SingleType(pre, sym) =>
@@ -4317,7 +4345,7 @@ trait Types extends api.Types { self: SymbolTable =>
// (skolems also aren't affected: they are ruled out by the isTypeParameter check)
case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
def toInstance(pre: Type, clazz: Symbol): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) mapOver(tp)
+ if (skipPrefixOf(pre, clazz)) mapOver(tp)
//@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
else {
def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
@@ -4376,7 +4404,7 @@ trait Types extends api.Types { self: SymbolTable =>
case t =>
throwError
}
- } else toInstance(base(pre, clazz).prefix, clazz.owner)
+ } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
}
toInstance(pre, clazz)
case _ =>
@@ -4587,7 +4615,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (existentials(pid) eq null) {
val param = params(pid)
existentials(pid) = (
- param.owner.newExistential(newTypeName(param.name + ".type"), param.pos, param.flags)
+ param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
setInfo singletonBounds(actuals(pid))
)
}
@@ -5096,7 +5124,7 @@ trait Types extends api.Types { self: SymbolTable =>
false
private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
/** Do `tp1` and `tp2` denote equivalent types? */
@@ -5583,7 +5611,7 @@ trait Types extends api.Types { self: SymbolTable =>
val sym2 = tr2.sym
val pre1 = tr1.pre
val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || isSubType(pre1, pre2, depth)
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
(isUnifiable(pre1, pre2) ||
isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
@@ -6306,13 +6334,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case _ =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6454,13 +6482,13 @@ trait Types extends api.Types { self: SymbolTable =>
case List(t) => t
case ts0 =>
Statistics.incCounter(lubCount)
- val start = Statistics.startTimer(lubNanos)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6884,11 +6912,19 @@ object TypesStats {
val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
- val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos)
- val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos)
- val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos)
- val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos)
+ val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
+ val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val typeOpsStack = Statistics.newTimerStack()
+
+ @inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
+ val start = Statistics.pushTimer(typeOpsStack, c)
+ try op
+ finally Statistics.popTimer(typeOpsStack, start)
+ }
}
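
The generic shape of the `timedTypeOp` helper above, as an illustrative stand-in for the Statistics API: run a by-name operation between a start and a stop, with the stop in a finally so nested timings unwind correctly even on exceptions.

object TimedSketch {
  def timed[T](label: String)(op: => T): T = {
    val start = System.nanoTime()
    try op
    finally println(s"$label took ${(System.nanoTime() - start) / 1000000} ms")
  }

  def main(args: Array[String]): Unit = {
    val sorted = timed("sort")((1 to 100000).toVector.sortBy(-_))
    println(sorted.take(3))              // Vector(100000, 99999, 99998)
  }
}
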
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 757163a074..4411b79b97 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -373,12 +373,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
NullaryMethodType(restpe)
case EXISTENTIALtpe =>
val restpe = readTypeRef()
- // @PP: Where is the flag setting supposed to happen? I infer
- // from the lack of flag setting in the rest of the unpickler
- // that it isn't right here. See #4757 for the immediate
- // motivation to fix it.
- val tparams = until(end, readSymbolRef) map (_ setFlag EXISTENTIAL)
- newExistentialType(tparams, restpe)
+ newExistentialType(until(end, readSymbolRef), restpe)
case ANNOTATEDtpe =>
var typeRef = readNat()
@@ -818,7 +813,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol =
- missingHook(owner, name) orElse MissingRequirementError.signal(
+ mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal(
s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}"
)
@@ -832,8 +827,10 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
* Similar in intent to what SymbolLoader does (but here we don't have access to
* error reporting, so we rely on the typechecker to report the error).
*/
- def toTypeError(e: MissingRequirementError) =
+ def toTypeError(e: MissingRequirementError) = {
+ // e.printStackTrace()
new TypeError(e.msg)
+ }
/** A lazy type which when completed returns type at index `i`. */
private class LazyTypeRef(i: Int) extends LazyType {
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 5beec70d62..368d55a59c 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -2,7 +2,7 @@ package scala.reflect
package internal
package transform
-import Flags.PARAMACCESSOR
+import Flags.{PARAMACCESSOR, METHOD}
trait Erasure {
@@ -72,8 +72,37 @@ trait Erasure {
if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
}
+ def unboxDerivedValueClassMethod(clazz: Symbol): Symbol =
+ (clazz.info.decl(nme.unbox)) orElse
+ (clazz.info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse
+ NoSymbol)
+
def underlyingOfValueClass(clazz: Symbol): Type =
- clazz.firstParamAccessor.tpe.resultType
+ clazz.derivedValueClassUnbox.tpe.resultType
+
+ /** The type of the argument of a value class reference after erasure.
+ * This method needs to be called at a phase no later than the erasure phase.
+ */
+ def erasedValueClassArg(tref: TypeRef): Type = {
+ assert(!phase.erasedTypes)
+ val clazz = tref.sym
+ if (valueClassIsParametric(clazz)) {
+ val underlying = tref.memberType(clazz.derivedValueClassUnbox).resultType
+ boxingErasure(underlying)
+ } else {
+ scalaErasure(underlyingOfValueClass(clazz))
+ }
+ }
+
+ /** Does this value class have an underlying type that's a type parameter of
+ * the class itself?
+ * This method needs to be called at a phase no later than the erasure phase.
+ */
+ def valueClassIsParametric(clazz: Symbol): Boolean = {
+ assert(!phase.erasedTypes)
+ clazz.typeParams contains
+ clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+ }
abstract class ErasureMap extends TypeMap {
private lazy val ObjectArray = arrayType(ObjectClass.tpe)
@@ -84,15 +113,14 @@ trait Erasure {
def eraseNormalClassRef(pre: Type, clazz: Symbol): Type =
typeRef(apply(rebindInnerClass(pre, clazz)), clazz, List()) // #2585
- protected def eraseDerivedValueClassRef(clazz: Symbol): Type =
- scalaErasure(underlyingOfValueClass(clazz))
+ protected def eraseDerivedValueClassRef(tref: TypeRef): Type = erasedValueClassArg(tref)
def apply(tp: Type): Type = tp match {
case ConstantType(_) =>
tp
case st: SubType =>
apply(st.supertype)
- case TypeRef(pre, sym, args) =>
+ case tref @ TypeRef(pre, sym, args) =>
if (sym == ArrayClass)
if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
else if (args.head.typeSymbol.isBottomClass) ObjectArray
@@ -100,7 +128,7 @@ trait Erasure {
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) ErasedObject
else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
- else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(sym)
+ else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
else if (sym.isClass) eraseNormalClassRef(pre, sym)
else apply(sym.info) // alias type or abstract type
case PolyType(tparams, restpe) =>
@@ -236,7 +264,8 @@ trait Erasure {
* are then later converted to the underlying parameter type in phase posterasure.
*/
object specialScalaErasure extends ScalaErasureMap {
- override def eraseDerivedValueClassRef(clazz: Symbol): Type = ErasedValueType(clazz)
+ override def eraseDerivedValueClassRef(tref: TypeRef): Type =
+ ErasedValueType(tref)
}
object javaErasure extends JavaErasureMap
@@ -251,6 +280,14 @@ trait Erasure {
}
}
+ object boxingErasure extends ScalaErasureMap {
+ override def eraseNormalClassRef(pre: Type, clazz: Symbol) =
+ if (isPrimitiveValueClass(clazz)) boxedClass(clazz).tpe
+ else super.eraseNormalClassRef(pre, clazz)
+ override def eraseDerivedValueClassRef(tref: TypeRef) =
+ super.eraseNormalClassRef(tref.pre, tref.sym)
+ }
+
/** The intersection dominator (SLS 3.7) of a list of types is computed as follows.
*
* - If the list contains one or more occurrences of scala.Array with
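
An illustration of the distinction the new erasure code above draws, in plain Scala rather than compiler internals: a value class with a concrete underlying type erases to that type, while one parameterized by its own type parameter must keep the boxed, erased-to-Object form.

class Meters(val underlying: Int) extends AnyVal         // underlying type is concrete: erases to Int
class Box[T](val underlying: T) extends AnyVal           // parametric underlying type: erases to Object

object ValueClassSketch {
  def len(m: Meters): Int = m.underlying                  // compiles to a method over the unboxed Int
  def unbox[T](b: Box[T]): T = b.underlying

  def main(args: Array[String]): Unit = {
    println(len(new Meters(3)))                           // 3
    println(unbox(new Box("hi")))                         // hi
  }
}
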
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 57c9e98174..e503d812e6 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -4,6 +4,8 @@ import collection.mutable
object Statistics {
+ type TimerSnapshot = (Long, Long)
+
/** If enabled, increment counter by one */
@inline final def incCounter(c: Counter) {
if (_enabled && c != null) c.value += 1
@@ -30,20 +32,20 @@ object Statistics {
}
/** If enabled, start timer */
- @inline final def startTimer(tm: Timer): (Long, Long) =
+ @inline final def startTimer(tm: Timer): TimerSnapshot =
if (_enabled && tm != null) tm.start() else null
/** If enabled, stop timer */
- @inline final def stopTimer(tm: Timer, start: (Long, Long)) {
+ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) {
if (_enabled && tm != null) tm.stop(start)
}
/** If enabled, push and start a new timer in timer stack */
- @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) =
- if (_enabled && timers != null) timers.push(cls) else null
+ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot =
+ if (_enabled && timers != null) timers.push(timer) else null
/** If enabled, stop and pop timer from timer stack */
- @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) {
+ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) {
if (_enabled && timers != null) timers.pop(prev)
}
@@ -73,6 +75,13 @@ object Statistics {
*/
def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+ /** Create a new stackable timer that shows as `prefix` and is active
+ * in the same phases as its base timer. Stackable timers are subtimers
+ * that can be stacked in a timer stack, and that print aggregate as well as
+ * specific durations.
+ */
+ def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
+
/** Create a new view that shows as `prefix` and is active in given phases.
* The view always reflects the current value of `quant` as a quantity.
*/
@@ -86,20 +95,27 @@ quant)
/** Same as newQuantMap, where the key type is fixed to be Class[_] */
def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
- /** Create a new timer stack map, indexed by Class[_]. */
- def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying)
+ /** Create a new timer stack */
+ def newTimerStack() = new TimerStack()
def allQuantities: Iterable[Quantity] =
- for ((q, _) <- qs if !q.isInstanceOf[SubQuantity];
+ for ((_, q) <- qs if q.underlying == q;
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
private def showPercent(x: Double, base: Double) =
if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ /** The base trait for quantities.
+ * Quantities with non-empty prefix are printed in the statistics info.
+ */
trait Quantity {
- qs += (this -> ())
+ if (prefix.nonEmpty) {
+ val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+ qs(key) = this
+ }
val prefix: String
val phases: Seq[String]
+ def underlying: Quantity = this
def showAt(phase: String) = phases.isEmpty || (phases contains phase)
def line = f"$prefix%-30s: ${this}"
val children = new mutable.ListBuffer[Quantity]
@@ -123,7 +139,7 @@ quant)
override def toString = quant.toString
}
- private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
override def toString =
if (value == 0) "0"
else {
@@ -142,33 +158,39 @@ quant)
value + showPercent(value, underlying.value)
}
- class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] {
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
var nanos: Long = 0
var timings = 0
- def compare(that: Timer): Int =
- if (this.nanos < that.nanos) -1
- else if (this.nanos > that.nanos) 1
- else 0
def start() = {
(nanos, System.nanoTime())
}
- def stop(prev: (Long, Long)) {
+ def stop(prev: TimerSnapshot) {
val (nanos0, start) = prev
nanos = nanos0 + System.nanoTime() - start
timings += 1
}
- override def toString = s"$timings spans, ${nanos/1000}ms"
+ protected def show(ns: Long) = s"${ns/1000000}ms"
+ override def toString = s"$timings spans, ${show(nanos)}"
}
- private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
- override def toString: String = super.toString + showPercent(nanos, underlying.nanos)
+ class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos)
+ }
+
+ class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] {
+ var specificNanos: Long = 0
+ def compare(that: StackableTimer): Int =
+ if (this.specificNanos < that.specificNanos) -1
+ else if (this.specificNanos > that.specificNanos) 1
+ else 0
+ override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
}
/** A mutable map quantity where missing elements are automatically inserted
* on access by executing `initValue`.
*/
class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
- extends scala.collection.mutable.HashMap[K, V] with Quantity {
+ extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity {
override def default(key: K) = {
val elem = initValue
this(key) = elem
@@ -183,23 +205,25 @@ quant)
}.mkString(", ")
}
- /** A mutable map quantity that takes class keys to subtimer values, relative to
- * some `underlying` timer. In addition, class timers can be pushed and popped.
- * Pushing the timer for a class means stopping the currently active timer.
+ /** A stack of timers, all active, where a timer's specific "clock"
+ * is stopped as long as it is buried by some other timer in the stack, but
+ * its aggregate clock keeps on ticking.
*/
- class ByClassTimerStack(prefix: String, val underlying: Timer)
- extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity {
- private var elems: List[(Timer, Long)] = Nil
- def push(cls: Class[_]): (Long, Long) = {
- val topTimer = this(cls)
- elems = (topTimer, 0L) :: elems
- topTimer.start()
+ class TimerStack {
+ private var elems: List[(StackableTimer, Long)] = Nil
+ /** Start given timer and push it onto the stack */
+ def push(t: StackableTimer): TimerSnapshot = {
+ elems = (t, 0L) :: elems
+ t.start()
}
- def pop(prev: (Long, Long)) = {
+ /** Stop and pop top timer in stack
+ */
+ def pop(prev: TimerSnapshot) = {
val (nanos0, start) = prev
val duration = System.nanoTime() - start
val (topTimer, nestedNanos) :: rest = elems
- topTimer.nanos = nanos0 + duration - nestedNanos
+ topTimer.nanos = nanos0 + duration
+ topTimer.specificNanos += duration - nestedNanos
topTimer.timings += 1
elems = rest match {
case (outerTimer, outerNested) :: elems1 =>
@@ -211,7 +235,7 @@ quant)
}
private var _enabled = false
- private val qs = new mutable.WeakHashMap[Quantity, Unit]
+ private val qs = new mutable.HashMap[String, Quantity]
def enabled = _enabled
def enabled_=(cond: Boolean) = {
@@ -229,4 +253,9 @@ quant)
_enabled = true
}
}
+
+ /** Replace the rhs with `enabled` and rebuild to also count tiny but super-hot methods
+ * such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
}
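
For readers of this hunk: stackable timers replace the old per-class timer stacks. Below is a minimal usage sketch, not part of the patch; the import path of the Statistics object and all timer names are assumptions, while newStackableTimer, newTimerStack, push and pop are the entry points added above.

// Sketch only. The import path and the timer names are assumed.
import scala.reflect.internal.util.Statistics

object StackableTimerSketch {
  Statistics.enabled = true

  // A base timer plus a stackable sub-timer; the sub-timer reports an aggregate
  // duration and a "specific" duration that excludes time spent buried in the stack.
  val typerTimer     = new Statistics.Timer("time spent in typer", Seq("typer"))
  val implicitsTimer = Statistics.newStackableTimer("implicit searches", typerTimer)
  val timerStack     = Statistics.newTimerStack()

  def timedSearch[T](body: => T): T = {
    val snapshot = timerStack.push(implicitsTimer)   // starts the timer, returns a TimerSnapshot
    try body
    finally timerStack.pop(snapshot)                 // records aggregate and specific nanos
  }
}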
diff --git a/src/reflect/scala/reflect/makro/Universe.scala b/src/reflect/scala/reflect/makro/Universe.scala
index 98046be555..a676f7f1de 100644
--- a/src/reflect/scala/reflect/makro/Universe.scala
+++ b/src/reflect/scala/reflect/makro/Universe.scala
@@ -5,13 +5,24 @@ abstract class Universe extends scala.reflect.api.Universe {
val treeBuild: TreeBuilder { val global: Universe.this.type }
+ trait AttachableApi {
+ /** ... */
+ def attachments: base.Attachments { type Pos = Position }
+
+ /** ... */
+ def addAttachment(attachment: Any): AttachableApi.this.type
+
+ /** ... */
+ def removeAttachment[T: ClassTag]: AttachableApi.this.type
+ }
+
// Symbol extensions ---------------------------------------------------------------
override type Symbol >: Null <: SymbolContextApi
/** The extended API of symbols that's supported in macro context universes
*/
- trait SymbolContextApi extends SymbolApi { this: Symbol =>
+ trait SymbolContextApi extends SymbolApi with AttachableApi { this: Symbol =>
// [Eugene++ to Martin] should we also add mutability methods here (similarly to what's done below for trees)?
// I'm talking about `setAnnotations` and friends
@@ -23,7 +34,7 @@ abstract class Universe extends scala.reflect.api.Universe {
/** The extended API of trees that's supported in macro context universes
*/
- trait TreeContextApi extends TreeApi { this: Tree =>
+ trait TreeContextApi extends TreeApi with AttachableApi { this: Tree =>
/** ... */
def pos_=(pos: Position): Unit
@@ -62,15 +73,6 @@ abstract class Universe extends scala.reflect.api.Universe {
/** ... */
def setSymbol(sym: Symbol): this.type
-
- /** ... */
- def attachments: base.Attachments { type Pos = Position }
-
- /** ... */
- def addAttachment(attachment: Any): this.type
-
- /** ... */
- def removeAttachment[T: ClassTag]: this.type
}
override type SymTree >: Null <: Tree with SymTreeContextApi
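
Since attachments, addAttachment and removeAttachment now come from the shared AttachableApi, macro code can use them uniformly on trees and symbols. A hedged sketch follows; the Context import, the MyMark class and the helper are illustrative and not part of this change.

// Illustrative only; MyMark and the helper are made up for the example.
import scala.reflect.makro.Context

object AttachmentSketch {
  case class MyMark(note: String)

  def markTree(c: Context)(tree: c.universe.Tree): c.universe.Tree = {
    tree.addAttachment(MyMark("visited"))   // now available on trees and symbols alike
    println(tree.attachments)               // positioned attachments, with Pos = Position
    tree.removeAttachment[MyMark]           // parameterless; the ClassTag picks what to drop
    tree
  }
}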
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 38d280ec73..eae6a3b297 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -972,6 +972,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClassSymbol)
+ case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
}
}
@@ -998,10 +999,10 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
mirrors(rootToLoader getOrElseUpdate(root, findLoader)).get.get
}
- private def byName(sym: Symbol): (Name, Symbol) = sym.name -> sym
-
- private lazy val phantomTypes: Map[Name, Symbol] =
- Map(byName(definitions.AnyRefClass)) ++ (definitions.isPhantomClass map byName)
+ private lazy val magicSymbols: Map[(String, Name), Symbol] = {
+ def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
+ Map() ++ (definitions.magicSymbols filter (_.isClass) map mapEntry)
+ }
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
* <owner>.<name>, otherwise return NoSymbol.
@@ -1019,13 +1020,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- if (owner.name.toTermName == nme.scala_ && owner.owner.isRoot)
- phantomTypes get name match {
- case Some(tsym) =>
- owner.info.decls enter tsym
- return tsym
- case None =>
- }
+ magicSymbols get (owner.fullName, name) match {
+ case Some(tsym) =>
+ owner.info.decls enter tsym
+ return tsym
+ case None =>
+ }
}
info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
super.missingHook(owner, name)
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 4e82fe8ad2..7839850529 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -37,12 +37,17 @@ object ReflectionUtils {
systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
)
- def show(cl: ClassLoader) = {
+ def show(cl: ClassLoader): String = {
+ def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
+ if (clazz == null) return false
+ if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
+ return isAbstractFileClassLoader(clazz.getSuperclass)
+ }
def inferClasspath(cl: ClassLoader): String = cl match {
case cl: java.net.URLClassLoader =>
- "[" + (cl.getURLs mkString ",") + "]"
- case cl if cl != null && cl.getClass.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader" =>
- "[" + cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root + "] and " + inferClasspath(cl.getParent)
+ (cl.getURLs mkString ",")
+ case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
+ cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root.canonicalPath
case null =>
inferBootClasspath
case _ =>
@@ -50,7 +55,7 @@ object ReflectionUtils {
}
cl match {
case cl if cl != null =>
- "%s of type %s with classpath %s".format(cl, cl.getClass, inferClasspath(cl))
+ "%s of type %s with classpath [%s] and parent being %s".format(cl, cl.getClass, inferClasspath(cl), show(cl.getParent))
case null =>
"primordial classloader with boot classpath [%s]".format(inferClasspath(cl))
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index c90665508b..5b9090dae5 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -4,7 +4,7 @@ package runtime
/**
* This symbol table trait fills in the definitions so that class information is obtained by refection.
* It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
- * a runtime compiler that uses reflection to get a class information (class scala.tools.nsc.ReflectGlobal)
+ * a runtime compiler that uses reflection to get class information (class scala.tools.reflect.ReflectGlobal)
*/
trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
diff --git a/test/files/run/syncchannel.check b/test/disabled/run/syncchannel.check
index d81cc0710e..d81cc0710e 100644
--- a/test/files/run/syncchannel.check
+++ b/test/disabled/run/syncchannel.check
diff --git a/test/files/run/syncchannel.scala b/test/disabled/run/syncchannel.scala
index 66ae47fd0a..66ae47fd0a 100644
--- a/test/files/run/syncchannel.scala
+++ b/test/disabled/run/syncchannel.scala
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
index d4bac196e9..2a60e3d806 100644
--- a/test/files/buildmanager/t2651_3/t2651_3.check
+++ b/test/files/buildmanager/t2651_3/t2651_3.check
@@ -3,4 +3,4 @@ compiling Set(A.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <deferred> <method>]))
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <method> <deferred>]))
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
index b182f31c09..74e5d8f99b 100644
--- a/test/files/buildmanager/t2651_4/t2651_4.check
+++ b/test/files/buildmanager/t2651_4/t2651_4.check
@@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method> <triedcooking>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <deferred> <method>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <deferred> <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method> <triedcooking>]]
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <method> <deferred>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <method> <deferred>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : Int(3)
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
index cd0357599c..0d6709e58b 100644
--- a/test/files/buildmanager/t2657/t2657.check
+++ b/test/files/buildmanager/t2657/t2657.check
@@ -4,8 +4,8 @@ warning: there were 1 feature warnings; re-run with -feature for details
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit <method> <triedcooking>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit <method> <triedcooking>]]
+Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : Int(3)
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
index a7c767cc45..066561ac44 100644
--- a/test/files/buildmanager/t2789/t2789.check
+++ b/test/files/buildmanager/t2789/t2789.check
@@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method> <triedcooking>]), class E -> List())
-invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method> <triedcooking>]]
+Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]), class E -> List())
+invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: could not find implicit value for parameter y: E
val y = x(3)
diff --git a/test/files/codelib/code.jar.desired.sha1 b/test/files/codelib/code.jar.desired.sha1
index d2b8d9add9..c4cc74c244 100644
--- a/test/files/codelib/code.jar.desired.sha1
+++ b/test/files/codelib/code.jar.desired.sha1
@@ -1 +1 @@
-e737b123d31eede5594ceda07caafed1673ec472 ?code.jar
+e737b123d31eede5594ceda07caafed1673ec472 *code.jar
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.check b/test/files/continuations-neg/ts-1681-nontail-return.check
deleted file mode 100644
index 8fe15f154b..0000000000
--- a/test/files/continuations-neg/ts-1681-nontail-return.check
+++ /dev/null
@@ -1,4 +0,0 @@
-ts-1681-nontail-return.scala:10: error: return expressions in CPS code must be in tail position
- return v
- ^
-one error found
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.scala b/test/files/continuations-neg/ts-1681-nontail-return.scala
deleted file mode 100644
index af86ad304f..0000000000
--- a/test/files/continuations-neg/ts-1681-nontail-return.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- if (v == 8)
- return v
- v + 1
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
-}
diff --git a/test/files/continuations-run/ts-1681-2.check b/test/files/continuations-run/ts-1681-2.check
deleted file mode 100644
index 35b3c93780..0000000000
--- a/test/files/continuations-run/ts-1681-2.check
+++ /dev/null
@@ -1,5 +0,0 @@
-8
-hi
-8
-from try
-8
diff --git a/test/files/continuations-run/ts-1681-2.scala b/test/files/continuations-run/ts-1681-2.scala
deleted file mode 100644
index 8a896dec2c..0000000000
--- a/test/files/continuations-run/ts-1681-2.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cps[Any] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
- def caller3 = reset { println(p3(3)) }
-
- def p(i: Int): Int @cps[Any] = {
- val v= s1 + 3
- return v
- }
-
- def p2(i: Int): Int @cps[Any] = {
- val v = s1 + 3
- if (v > 0) {
- println("hi")
- return v
- } else {
- println("hi")
- return 8
- }
- }
-
- def p3(i: Int): Int @cps[Any] = {
- val v = s1 + 3
- try {
- println("from try")
- return v
- } catch {
- case e: Exception =>
- println("from catch")
- return 7
- }
- }
-
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
- repro.caller3
-}
diff --git a/test/files/continuations-run/ts-1681-3.check b/test/files/continuations-run/ts-1681-3.check
deleted file mode 100644
index 71489f097c..0000000000
--- a/test/files/continuations-run/ts-1681-3.check
+++ /dev/null
@@ -1,4 +0,0 @@
-enter return expr
-8
-hi
-8
diff --git a/test/files/continuations-run/ts-1681-3.scala b/test/files/continuations-run/ts-1681-3.scala
deleted file mode 100644
index 62c547f5a2..0000000000
--- a/test/files/continuations-run/ts-1681-3.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- return { println("enter return expr"); v }
- }
-
- def p2(i: Int): Int @cpsParam[Unit, Any] = {
- val v = s1 + 3
- if (v > 0) {
- return { println("hi"); v }
- } else {
- return { println("hi"); 8 }
- }
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
-}
diff --git a/test/files/continuations-run/ts-1681.check b/test/files/continuations-run/ts-1681.check
deleted file mode 100644
index 85176d8e66..0000000000
--- a/test/files/continuations-run/ts-1681.check
+++ /dev/null
@@ -1,3 +0,0 @@
-8
-hi
-8
diff --git a/test/files/continuations-run/ts-1681.scala b/test/files/continuations-run/ts-1681.scala
deleted file mode 100644
index efb1abae15..0000000000
--- a/test/files/continuations-run/ts-1681.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import scala.util.continuations._
-
-class ReturnRepro {
- def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
- def caller = reset { println(p(3)) }
- def caller2 = reset { println(p2(3)) }
-
- def p(i: Int): Int @cpsParam[Unit, Any] = {
- val v= s1 + 3
- return v
- }
-
- def p2(i: Int): Int @cpsParam[Unit, Any] = {
- val v = s1 + 3
- if (v > 0) {
- println("hi")
- return v
- } else {
- println("hi")
- return 8
- }
- }
-}
-
-object Test extends App {
- val repro = new ReturnRepro
- repro.caller
- repro.caller2
-}
diff --git a/test/files/jvm/actmig-loop-react.scala b/test/files/jvm/actmig-loop-react.scala
deleted file mode 100644
index d714b26594..0000000000
--- a/test/files/jvm/actmig-loop-react.scala
+++ /dev/null
@@ -1,188 +0,0 @@
-import scala.actors.MigrationSystem._
-import scala.actors.Actor._
-import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
-import java.util.concurrent.{ TimeUnit, CountDownLatch }
-import scala.collection.mutable.ArrayBuffer
-import scala.concurrent.util.duration._
-import scala.concurrent.{ Promise, Await }
-
-
-object Test {
- val finishedLWCR, finishedTNR, finishedEH = Promise[Boolean]
- val finishedLWCR1, finishedTNR1, finishedEH1 = Promise[Boolean]
-
- def testLoopWithConditionReact() = {
- // Snippet showing composition of receives
- // Loop with Condition Snippet - before
- val myActor = actor {
- var c = true
- loopWhile(c) {
- react {
- case x: Int =>
- // do task
- println("do task")
- if (x == 42) {
- c = false
- finishedLWCR1.success(true)
- }
- }
- }
- }
-
- myActor.start()
- myActor ! 1
- myActor ! 42
-
- Await.ready(finishedLWCR1.future, 5 seconds)
-
- // Loop with Condition Snippet - migrated
- val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
-
- def receive = {
- case x: Int =>
- // do task
- println("do task")
- if (x == 42) {
- finishedLWCR.success(true)
- context.stop(self)
- }
- }
- }, "default-stashing-dispatcher"))
- myAkkaActor ! 1
- myAkkaActor ! 42
- }
-
- def testNestedReact() = {
- // Snippet showing composition of receives
- // Loop with Condition Snippet - before
- val myActor = actor {
- var c = true
- loopWhile(c) {
- react {
- case x: Int =>
- // do task
- println("do task " + x)
- if (x == 42) {
- c = false
- finishedTNR1.success(true)
- } else
- react {
- case y: String =>
- println("do string " + y)
- }
- println("after react")
- }
- }
- }
- myActor.start()
-
- myActor ! 1
- myActor ! "I am a String"
- myActor ! 42
-
- Await.ready(finishedTNR1.future, 5 seconds)
-
- // Loop with Condition Snippet - migrated
- val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
-
- def receive = {
- case x: Int =>
- // do task
- println("do task " + x)
- if (x == 42) {
- finishedTNR.success(true)
- context.stop(self)
- } else
- context.become(({
- case y: String =>
- println("do string " + y)
- }: Receive).andThen(x => {
- unstashAll()
- context.unbecome()
- }).orElse { case x => stash() })
- }
- }, "default-stashing-dispatcher"))
-
- myAkkaActor ! 1
- myAkkaActor ! "I am a String"
- myAkkaActor ! 42
-
- }
-
- def exceptionHandling() = {
- // Stashing actor with act and exception handler
- val myActor = MigrationSystem.actorOf(Props(() => new StashingActor {
-
- def receive = { case _ => println("Dummy method.") }
- override def act() = {
- loop {
- react {
- case "fail" =>
- throw new Exception("failed")
- case "work" =>
- println("working")
- case "die" =>
- finishedEH1.success(true)
- exit()
- }
- }
- }
-
- override def exceptionHandler = {
- case x: Exception => println("scala got exception")
- }
-
- }, "default-stashing-dispatcher"))
-
- myActor ! "work"
- myActor ! "fail"
- myActor ! "die"
-
- Await.ready(finishedEH1.future, 5 seconds)
- // Stashing actor in Akka style
- val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
- def receive = PFCatch({
- case "fail" =>
- throw new Exception("failed")
- case "work" =>
- println("working")
- case "die" =>
- finishedEH.success(true)
- context.stop(self)
- }, { case x: Exception => println("akka got exception") })
- }, "default-stashing-dispatcher"))
-
- myAkkaActor ! "work"
- myAkkaActor ! "fail"
- myAkkaActor ! "die"
- }
-
- def main(args: Array[String]) = {
- testLoopWithConditionReact()
- Await.ready(finishedLWCR.future, 5 seconds)
- exceptionHandling()
- Await.ready(finishedEH.future, 5 seconds)
- testNestedReact()
- Await.ready(finishedTNR.future, 5 seconds)
- }
-
-}
-
-// As per Jim Mcbeath's blog (http://jim-mcbeath.blogspot.com/2008/07/actor-exceptions.html)
-class PFCatch(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit])
- extends PartialFunction[Any, Unit] {
-
- def apply(x: Any) = {
- try {
- f(x)
- } catch {
- case e: Exception if handler.isDefinedAt(e) => handler(e)
- }
- }
-
- def isDefinedAt(x: Any) = f.isDefinedAt(x)
-}
-
-object PFCatch {
- def apply(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit]) = new PFCatch(f, handler)
-}
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index e5e01a5954..ca9ff5090f 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -10,21 +10,69 @@ import scala.runtime.NonLocalReturnControl
object FutureTests extends MinimalScalaTest {
-
+
/* some utils */
- def testAsync(s: String): Future[String] = s match {
+ def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = s match {
case "Hello" => future { "World" }
- case "Failure" => Promise.failed(new RuntimeException("Expected exception; to test fault-tolerance")).future
+ case "Failure" => Future.failed(new RuntimeException("Expected exception; to test fault-tolerance"))
case "NoReply" => Promise[String]().future
}
val defaultTimeout = 5 seconds
/* future specification */
+
+ "A future with custom ExecutionContext" should {
+ "shouldHandleThrowables" in {
+ val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable]
+ implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new scala.concurrent.forkjoin.ForkJoinPool(), {
+ t =>
+ ms += t
+ })
+
+ class ThrowableTest(m: String) extends Throwable(m)
+
+ val f1 = future[Any] {
+ throw new ThrowableTest("test")
+ }
+
+ intercept[ThrowableTest] {
+ Await.result(f1, defaultTimeout)
+ }
+
+ val latch = new TestLatch
+ val f2 = future {
+ Await.ready(latch, 5 seconds)
+ "success"
+ }
+ val f3 = f2 map { s => s.toUpperCase }
+
+ f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") }
+ f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") }
+
+ latch.open()
+
+ Await.result(f2, defaultTimeout) mustBe ("success")
+
+ f2 foreach { _ => throw new ThrowableTest("current thread foreach") }
+ f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") }
+
+ Await.result(f3, defaultTimeout) mustBe ("SUCCESS")
+
+ val waiting = future {
+ Thread.sleep(1000)
+ }
+ Await.ready(waiting, 2000 millis)
+
+ ms.size mustBe (4)
+ //FIXME should check
+ }
+ }
- "A future" should {
-
+ "A future with global ExecutionContext" should {
+ import ExecutionContext.Implicits._
+
"compose with for-comprehensions" in {
def async(x: Int) = future { (x * 2).toString }
val future0 = future[Any] {
@@ -122,20 +170,20 @@ object FutureTests extends MinimalScalaTest {
val r = new IllegalStateException("recovered")
intercept[IllegalStateException] {
- val failed = Promise.failed[String](o).future recoverWith {
- case _ if false == true => Promise.successful("yay!").future
+ val failed = Future.failed[String](o) recoverWith {
+ case _ if false == true => Future.successful("yay!")
}
Await.result(failed, defaultTimeout)
} mustBe (o)
- val recovered = Promise.failed[String](o).future recoverWith {
- case _ => Promise.successful("yay!").future
+ val recovered = Future.failed[String](o) recoverWith {
+ case _ => Future.successful("yay!")
}
Await.result(recovered, defaultTimeout) mustBe ("yay!")
intercept[IllegalStateException] {
- val refailed = Promise.failed[String](o).future recoverWith {
- case _ => Promise.failed[String](r).future
+ val refailed = Future.failed[String](o) recoverWith {
+ case _ => Future.failed[String](r)
}
Await.result(refailed, defaultTimeout)
} mustBe (r)
@@ -164,7 +212,7 @@ object FutureTests extends MinimalScalaTest {
"firstCompletedOf" in {
def futures = Vector.fill[Future[Int]](10) {
Promise[Int]().future
- } :+ Promise.successful[Int](5).future
+ } :+ Future.successful[Int](5)
Await.result(Future.firstCompletedOf(futures), defaultTimeout) mustBe (5)
Await.result(Future.firstCompletedOf(futures.iterator), defaultTimeout) mustBe (5)
@@ -186,21 +234,21 @@ object FutureTests extends MinimalScalaTest {
val timeout = 10000 millis
val f = new IllegalStateException("test")
intercept[IllegalStateException] {
- val failed = Promise.failed[String](f).future zip Promise.successful("foo").future
+ val failed = Future.failed[String](f) zip Future.successful("foo")
Await.result(failed, timeout)
} mustBe (f)
intercept[IllegalStateException] {
- val failed = Promise.successful("foo").future zip Promise.failed[String](f).future
+ val failed = Future.successful("foo") zip Future.failed[String](f)
Await.result(failed, timeout)
} mustBe (f)
intercept[IllegalStateException] {
- val failed = Promise.failed[String](f).future zip Promise.failed[String](f).future
+ val failed = Future.failed[String](f) zip Future.failed[String](f)
Await.result(failed, timeout)
} mustBe (f)
- val successful = Promise.successful("foo").future zip Promise.successful("foo").future
+ val successful = Future.successful("foo") zip Future.successful("foo")
Await.result(successful, timeout) mustBe (("foo", "foo"))
}
@@ -337,50 +385,6 @@ object FutureTests extends MinimalScalaTest {
Await.result(traversedIterator, defaultTimeout).sum mustBe (10000)
}
- "shouldHandleThrowables" in {
- val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable]
- implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new scala.concurrent.forkjoin.ForkJoinPool(), {
- t =>
- ms += t
- })
-
- class ThrowableTest(m: String) extends Throwable(m)
-
- val f1 = future[Any] {
- throw new ThrowableTest("test")
- }
-
- intercept[ThrowableTest] {
- Await.result(f1, defaultTimeout)
- }
-
- val latch = new TestLatch
- val f2 = future {
- Await.ready(latch, 5 seconds)
- "success"
- }
- val f3 = f2 map { s => s.toUpperCase }
-
- f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") }
- f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") }
-
- latch.open()
-
- Await.result(f2, defaultTimeout) mustBe ("success")
-
- f2 foreach { _ => throw new ThrowableTest("current thread foreach") }
- f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") }
-
- Await.result(f3, defaultTimeout) mustBe ("SUCCESS")
-
- val waiting = future {
- Thread.sleep(1000)
- }
- Await.ready(waiting, 2000 millis)
-
- ms.size mustBe (4)
- }
-
"shouldBlockUntilResult" in {
val latch = new TestLatch
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
index bf9d9b39d7..49bc642b57 100644
--- a/test/files/jvm/future-spec/PromiseTests.scala
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -10,7 +10,8 @@ import scala.runtime.NonLocalReturnControl
object PromiseTests extends MinimalScalaTest {
-
+ import ExecutionContext.Implicits._
+
val defaultTimeout = Inf
/* promise specification */
@@ -20,11 +21,13 @@ object PromiseTests extends MinimalScalaTest {
"not be completed" in {
val p = Promise()
p.future.isCompleted mustBe (false)
+ p.isCompleted mustBe (false)
}
"have no value" in {
val p = Promise()
p.future.value mustBe (None)
+ p.isCompleted mustBe (false)
}
"return supplied value on timeout" in {
@@ -45,14 +48,16 @@ object PromiseTests extends MinimalScalaTest {
"A successful Promise" should {
val result = "test value"
- val future = Promise[String]().complete(Right(result)).future
- futureWithResult(_(future, result))
+ val promise = Promise[String]().complete(Right(result))
+ promise.isCompleted mustBe (true)
+ futureWithResult(_(promise.future, result))
}
"A failed Promise" should {
val message = "Expected Exception"
- val future = Promise[String]().complete(Left(new RuntimeException(message))).future
- futureWithException[RuntimeException](_(future, message))
+ val promise = Promise[String]().complete(Left(new RuntimeException(message)))
+ promise.isCompleted mustBe (true)
+ futureWithException[RuntimeException](_(promise.future, message))
}
"An interrupted Promise" should {
diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check
index 9ff49ef8b4..1c0bf6a568 100644
--- a/test/files/jvm/manifests-new.check
+++ b/test/files/jvm/manifests-new.check
@@ -1,58 +1,58 @@
-x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit
-x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean
-x=a, t=TypeTag[Char], k=TypeRef, s=class Char
-x=1, t=TypeTag[Int], k=TypeRef, s=class Int
-x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String
-x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol
-
-x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List
-x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List
-x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List
-x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List
-x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List
-
-x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array
-x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array
-x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array
-x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array
-x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array
-
-x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2
-x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2
-x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2
-x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2
-x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2
-
-x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test
-x=scala.collection.immutable.List$, t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List
-
-x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
-
-x=Test1$$anon$1, t=TypeTag[Bar[java.lang.String]], k=RefinedType, s=<local Test1>
-x=Test1$$anon$2, t=TypeTag[Bar[java.lang.String]], k=RefinedType, s=<local Test1>
-
-()=()
-true=true
-a=a
-1=1
-'abc='abc
-
-List(())=List(())
-List(true)=List(true)
-List('abc)=List('abc)
-
-Array()=Array()
-Array(true)=Array(true)
-Array(a)=Array(a)
-Array(1)=Array(1)
-
-((),())=((),())
-(true,false)=(true,false)
-
-List(List(1), List(2))=List(List(1), List(2))
-
-Array(Array(1), Array(2))=Array(Array(1), Array(2))
-
+x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit
+x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean
+x=a, t=TypeTag[Char], k=TypeRef, s=class Char
+x=1, t=TypeTag[Int], k=TypeRef, s=class Int
+x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String
+x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol
+
+x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List
+x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List
+x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List
+x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List
+x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List
+
+x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array
+x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array
+x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array
+x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array
+x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array
+
+x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2
+x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2
+x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2
+x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2
+x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2
+
+x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test
+x=scala.collection.immutable.List$, t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List
+
+x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
+
+x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+
+()=()
+true=true
+a=a
+1=1
+'abc='abc
+
+List(())=List(())
+List(true)=List(true)
+List('abc)=List('abc)
+
+Array()=Array()
+Array(true)=Array(true)
+Array(a)=Array(a)
+Array(1)=Array(1)
+
+((),())=((),())
+(true,false)=(true,false)
+
+List(List(1), List(2))=List(List(1), List(2))
+
+Array(Array(1), Array(2))=Array(Array(1), Array(2))
+
diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala
new file mode 100644
index 0000000000..471a9d227a
--- /dev/null
+++ b/test/files/jvm/non-fatal-tests.scala
@@ -0,0 +1,47 @@
+import scala.util.control.NonFatal
+
+trait NonFatalTests {
+
+ //NonFatals
+ val nonFatals: Seq[Throwable] =
+ Seq(new StackOverflowError,
+ new RuntimeException,
+ new Exception,
+ new Throwable)
+
+ //Fatals
+ val fatals: Seq[Throwable] =
+ Seq(new InterruptedException,
+ new OutOfMemoryError,
+ new LinkageError,
+ new VirtualMachineError {},
+ new Throwable with scala.util.control.ControlThrowable,
+ new NotImplementedError)
+
+ def testFatalsUsingApply(): Unit = {
+ fatals foreach { t => assert(NonFatal(t) == false) }
+ }
+
+ def testNonFatalsUsingApply(): Unit = {
+ nonFatals foreach { t => assert(NonFatal(t) == true) }
+ }
+
+ def testFatalsUsingUnapply(): Unit = {
+ fatals foreach { t => assert(NonFatal.unapply(t).isEmpty) }
+ }
+
+ def testNonFatalsUsingUnapply(): Unit = {
+ nonFatals foreach { t => assert(NonFatal.unapply(t).isDefined) }
+ }
+
+ testFatalsUsingApply()
+ testNonFatalsUsingApply()
+ testFatalsUsingUnapply()
+ testNonFatalsUsingUnapply()
+}
+
+object Test
+extends App
+with NonFatalTests {
+ System.exit(0)
+} \ No newline at end of file
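
Outside the test harness, the extractor form is the usual way to consume NonFatal; a brief sketch (helper name and message are illustrative):

// Sketch: fatal throwables (OutOfMemoryError, ControlThrowable, ...) propagate,
// while everything the new test lists as non-fatal is caught here.
import scala.util.control.NonFatal

object NonFatalSketch {
  def guarded[T](body: => T): Option[T] =
    try Some(body)
    catch { case NonFatal(e) => println("recovered from " + e); None }
}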
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index 012460147a..5c9c71f3f8 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -3,22 +3,19 @@ import scala.concurrent.{
Promise,
TimeoutException,
SyncVar,
- ExecutionException
+ ExecutionException,
+ ExecutionContext
}
-import scala.concurrent.future
-import scala.concurrent.promise
-import scala.concurrent.blocking
+import scala.concurrent.{ future, promise, blocking }
import scala.util.{ Try, Success, Failure }
-
import scala.concurrent.util.Duration
-
trait TestBase {
def once(body: (() => Unit) => Unit) {
val sv = new SyncVar[Boolean]
body(() => sv put true)
- sv.take()
+ sv.take(2000)
}
// def assert(cond: => Boolean) {
@@ -33,7 +30,8 @@ trait TestBase {
trait FutureCallbacks extends TestBase {
-
+ import ExecutionContext.Implicits._
+
def testOnSuccess(): Unit = once {
done =>
var x = 0
@@ -138,7 +136,7 @@ trait FutureCallbacks extends TestBase {
testOnSuccessWhenFailed()
testOnFailure()
testOnFailureWhenSpecialThrowable(5, new Error)
- testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+ // testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
//TODO: this test is currently problematic, because NonFatal does not match InterruptedException
//testOnFailureWhenSpecialThrowable(7, new InterruptedException)
testOnFailureWhenTimeoutException()
@@ -147,6 +145,7 @@ trait FutureCallbacks extends TestBase {
trait FutureCombinators extends TestBase {
+ import ExecutionContext.Implicits._
def testMapSuccess(): Unit = once {
done =>
@@ -591,7 +590,8 @@ trait FutureCombinators extends TestBase {
trait FutureProjections extends TestBase {
-
+ import ExecutionContext.Implicits._
+
def testFailedFailureOnComplete(): Unit = once {
done =>
val cause = new RuntimeException
@@ -599,10 +599,10 @@ trait FutureProjections extends TestBase {
throw cause
}
f.failed onComplete {
- case Success(t) =>
+ case Right(t) =>
assert(t == cause)
done()
- case Failure(t) =>
+ case Left(t) =>
assert(false)
}
}
@@ -624,9 +624,9 @@ trait FutureProjections extends TestBase {
done =>
val f = future { 0 }
f.failed onComplete {
- case Success(t) =>
+ case Right(t) =>
assert(false)
- case Failure(t) =>
+ case Left(t) =>
assert(t.isInstanceOf[NoSuchElementException])
done()
}
@@ -673,7 +673,8 @@ trait FutureProjections extends TestBase {
trait Blocking extends TestBase {
-
+ import ExecutionContext.Implicits._
+
def testAwaitSuccess(): Unit = once {
done =>
val f = future { 0 }
@@ -702,8 +703,67 @@ trait Blocking extends TestBase {
}
+trait BlockContexts extends TestBase {
+ import ExecutionContext.Implicits._
+ import scala.concurrent.{ Await, Awaitable, BlockContext }
+
+ private def getBlockContext(body: => BlockContext): BlockContext = {
+ blocking(Future { body }, Duration(500, "ms"))
+ }
+
+ // test outside of an ExecutionContext
+ def testDefaultOutsideFuture(): Unit = {
+ val bc = BlockContext.current
+ assert(bc.getClass.getName.contains("DefaultBlockContext"))
+ }
+
+ // test BlockContext in our default ExecutionContext
+ def testDefaultFJP(): Unit = {
+ val bc = getBlockContext(BlockContext.current)
+ assert(bc.isInstanceOf[scala.concurrent.forkjoin.ForkJoinWorkerThread])
+ }
+
+ // test BlockContext inside BlockContext.withBlockContext
+ def testPushCustom(): Unit = {
+ val orig = BlockContext.current
+ val customBC = new BlockContext() {
+ override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ orig.internalBlockingCall(awaitable, atMost)
+ }
+
+ val bc = getBlockContext({
+ BlockContext.withBlockContext(customBC) {
+ BlockContext.current
+ }
+ })
+
+ assert(bc eq customBC)
+ }
+
+ // test BlockContext after a BlockContext.push
+ def testPopCustom(): Unit = {
+ val orig = BlockContext.current
+ val customBC = new BlockContext() {
+ override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ orig.internalBlockingCall(awaitable, atMost)
+ }
+
+ val bc = getBlockContext({
+ BlockContext.withBlockContext(customBC) {}
+ BlockContext.current
+ })
+
+ assert(bc ne customBC)
+ }
+
+ testDefaultOutsideFuture()
+ testDefaultFJP()
+ testPushCustom()
+ testPopCustom()
+}
trait Promises extends TestBase {
+ import ExecutionContext.Implicits._
def testSuccess(): Unit = once {
done =>
@@ -730,88 +790,89 @@ trait Promises extends TestBase {
trait Exceptions extends TestBase {
-
-}
+ import ExecutionContext.Implicits._
-trait TryEitherExtractor extends TestBase {
-
- import scala.util.{Try, Success, Failure}
-
- def testSuccessMatch(): Unit = once {
- done =>
- val thisIsASuccess = Success(42)
- thisIsASuccess match {
- case Success(v) =>
- done()
- assert(v == 42)
- case Failure(e) =>
- done()
- assert(false)
- case other =>
- done()
- assert(false)
- }
- }
-
- def testRightMatch(): Unit = once {
- done =>
- val thisIsNotASuccess: Right[Throwable, Int] = Right(43)
- thisIsNotASuccess match {
- case Success(v) =>
- done()
- assert(v == 43)
- case Failure(e) =>
- done()
- assert(false)
- case other =>
- done()
- assert(false)
- }
- }
-
- def testFailureMatch(): Unit = once {
- done =>
- val thisIsAFailure = Failure(new Exception("I'm an exception"))
- thisIsAFailure match {
- case Success(v) =>
- done()
- assert(false)
- case Failure(e) =>
- done()
- assert(e.getMessage == "I'm an exception")
- case other =>
- done()
- assert(false)
- }
- }
+}
- def testLeftMatch(): Unit = once {
- done =>
- val thisIsNotAFailure: Left[Throwable, Int] = Left(new Exception("I'm an exception"))
- thisIsNotAFailure match {
- case Success(v) =>
- done()
- assert(false)
- case Failure(e) =>
- done()
- assert(e.getMessage == "I'm an exception")
- case other =>
- done()
- assert(false)
- }
+// trait TryEitherExtractor extends TestBase {
+
+// import scala.util.{Try, Success, Failure}
+
+// def testSuccessMatch(): Unit = once {
+// done =>
+// val thisIsASuccess = Success(42)
+// thisIsASuccess match {
+// case Success(v) =>
+// done()
+// assert(v == 42)
+// case Failure(e) =>
+// done()
+// assert(false)
+// case other =>
+// done()
+// assert(false)
+// }
+// }
+
+// def testRightMatch(): Unit = once {
+// done =>
+// val thisIsNotASuccess: Right[Throwable, Int] = Right(43)
+// thisIsNotASuccess match {
+// case Success(v) =>
+// done()
+// assert(v == 43)
+// case Failure(e) =>
+// done()
+// assert(false)
+// case other =>
+// done()
+// assert(false)
+// }
+// }
+
+// def testFailureMatch(): Unit = once {
+// done =>
+// val thisIsAFailure = Failure(new Exception("I'm an exception"))
+// thisIsAFailure match {
+// case Success(v) =>
+// done()
+// assert(false)
+// case Failure(e) =>
+// done()
+// assert(e.getMessage == "I'm an exception")
+// case other =>
+// done()
+// assert(false)
+// }
+// }
+
+// def testLeftMatch(): Unit = once {
+// done =>
+// val thisIsNotAFailure: Left[Throwable, Int] = Left(new Exception("I'm an exception"))
+// thisIsNotAFailure match {
+// case Success(v) =>
+// done()
+// assert(false)
+// case Failure(e) =>
+// done()
+// assert(e.getMessage == "I'm an exception")
+// case other =>
+// done()
+// assert(false)
+// }
- }
+// }
- testSuccessMatch()
- testRightMatch()
- testFailureMatch()
- testLeftMatch()
-}
+// testSuccessMatch()
+// testRightMatch()
+// testFailureMatch()
+// testLeftMatch()
+// }
trait CustomExecutionContext extends TestBase {
import scala.concurrent.{ ExecutionContext, Awaitable }
- def defaultEC = ExecutionContext.defaultExecutionContext
+ def defaultEC = ExecutionContext.global
val inEC = new java.lang.ThreadLocal[Int]() {
override def initialValue = 0
@@ -826,7 +887,7 @@ trait CustomExecutionContext extends TestBase {
val _count = new java.util.concurrent.atomic.AtomicInteger(0)
def count = _count.get
- def delegate = ExecutionContext.defaultExecutionContext
+ def delegate = ExecutionContext.global
override def execute(runnable: Runnable) = {
_count.incrementAndGet()
@@ -843,9 +904,6 @@ trait CustomExecutionContext extends TestBase {
delegate.execute(wrapper)
}
- override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
- delegate.internalBlockingCall(awaitable, atMost)
-
override def reportFailure(t: Throwable): Unit = {
System.err.println("Failure: " + t.getClass.getSimpleName + ": " + t.getMessage)
delegate.reportFailure(t)
@@ -860,14 +918,16 @@ trait CustomExecutionContext extends TestBase {
def testOnSuccessCustomEC(): Unit = {
val count = countExecs { implicit ec =>
- once { done =>
- val f = future({ assertNoEC() })(defaultEC)
- f onSuccess {
- case _ =>
- assertEC()
+ blocking {
+ once { done =>
+ val f = future({ assertNoEC() })(defaultEC)
+ f onSuccess {
+ case _ =>
+ assertEC()
done()
+ }
+ assertNoEC()
}
- assertNoEC()
}
}
@@ -877,12 +937,14 @@ trait CustomExecutionContext extends TestBase {
def testKeptPromiseCustomEC(): Unit = {
val count = countExecs { implicit ec =>
- once { done =>
- val f = Promise.successful(10).future
- f onSuccess {
- case _ =>
- assertEC()
+ blocking {
+ once { done =>
+ val f = Promise.successful(10).future
+ f onSuccess {
+ case _ =>
+ assertEC()
done()
+ }
}
}
}
@@ -893,28 +955,30 @@ trait CustomExecutionContext extends TestBase {
def testCallbackChainCustomEC(): Unit = {
val count = countExecs { implicit ec =>
- once { done =>
- assertNoEC()
- val addOne = { x: Int => assertEC(); x + 1 }
- val f = Promise.successful(10).future
- f.map(addOne).filter { x =>
- assertEC()
- x == 11
- } flatMap { x =>
- Promise.successful(x + 1).future.map(addOne).map(addOne)
- } onComplete {
- case Left(t) =>
- try {
- throw new AssertionError("error in test: " + t.getMessage, t)
- } finally {
+ blocking {
+ once { done =>
+ assertNoEC()
+ val addOne = { x: Int => assertEC(); x + 1 }
+ val f = Promise.successful(10).future
+ f.map(addOne).filter { x =>
+ assertEC()
+ x == 11
+ } flatMap { x =>
+ Promise.successful(x + 1).future.map(addOne).map(addOne)
+ } onComplete {
+ case Left(t) =>
+ try {
+ throw new AssertionError("error in test: " + t.getMessage, t)
+ } finally {
+ done()
+ }
+ case Right(x) =>
+ assertEC()
+ assert(x == 14)
done()
- }
- case Right(x) =>
- assertEC()
- assert(x == 14)
- done()
+ }
+ assertNoEC()
}
- assertNoEC()
}
}
@@ -934,8 +998,9 @@ with FutureCallbacks
with FutureCombinators
with FutureProjections
with Promises
+with BlockContexts
with Exceptions
-with TryEitherExtractor
+// with TryEitherExtractor
with CustomExecutionContext
{
System.exit(0)
diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala
new file mode 100644
index 0000000000..eecbb0ae57
--- /dev/null
+++ b/test/files/jvm/try-type-tests.scala
@@ -0,0 +1,250 @@
+import scala.util.{Try, Success, Failure}
+
+// tests the basic combinators on Try
+trait TryStandard {
+
+ def testForeachSuccess(): Unit = {
+ val t = Success(1)
+ var res = 0
+ t.foreach(x => res = x * 10)
+ assert(res == 10)
+ }
+
+ def testForeachFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ t.foreach(x => assert(false))
+ }
+
+ def testFlatMapSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.flatMap(x => Try(x * 10))
+ assert(n.get == 10)
+ }
+
+ def testFlatMapFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.flatMap{ x => assert(false); Try() }
+ }
+
+ def testMapSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.map(x => x * 10)
+ assert(n.get == 10)
+ }
+
+ def testMapFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.map(x => assert(false))
+ }
+
+ def testFilterSuccessTrue(): Unit = {
+ val t = Success(1)
+ val n = t.filter(x => x > 0)
+ assert(n.get == 1)
+ }
+
+ def testFilterSuccessFalse(): Unit = {
+ val t = Success(1)
+ val n = t.filter(x => x < 0)
+ n match {
+ case Success(v) => assert(false)
+ case Failure(e: NoSuchElementException) => assert(true)
+ }
+ }
+
+ def testFilterFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.filter{ x => assert(false); true }
+ }
+
+ def testRescueSuccess(): Unit = {
+ val t = Success(1)
+ t.rescue{ case x => assert(false); Try() }
+ }
+
+ def testRescueFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.rescue{ case x => Try(1) }
+ assert(n.get == 1)
+ }
+
+ def testRecoverSuccess(): Unit = {
+ val t = Success(1)
+ t.recover{ case x => assert(false); 99 }
+ }
+
+ def testRecoverFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.recover{ case x => 1 }
+ assert(n.get == 1)
+ }
+
+ def testFlattenSuccess(): Unit = {
+ val f = Failure(new Exception("foo"))
+ val t = Success(f)
+ assert(t.flatten == f)
+ }
+
+ def testFailedSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.failed
+ n match {
+ case Failure(e: UnsupportedOperationException) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ def testFailedFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.failed
+ n match {
+ case Success(e: Exception) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ testForeachSuccess()
+ testForeachFailure()
+ testFlatMapSuccess()
+ testFlatMapFailure()
+ testMapSuccess()
+ testMapFailure()
+ testFilterSuccessTrue()
+ testFilterSuccessFalse()
+ testFilterFailure()
+ testRescueSuccess()
+ testRescueFailure()
+ testRecoverSuccess()
+ testRecoverFailure()
+ testFlattenSuccess()
+ testFailedSuccess()
+ testFailedFailure()
+}
+
+// tests that implicit conversions from Try to Either behave as expected
+trait TryImplicitConversionTry2Either {
+
+ def testTry2RightMap(): Unit = {
+ val t = Success(1)
+ val n = t.right.map(x => x * 100)
+ assert(n == Right(100))
+ }
+
+ def testTry2LeftMap(): Unit = {
+ val e = new Exception("foo")
+ val t = Failure(e)
+ val n = t.left.map(x => x)
+ assert(n == Left(e))
+ }
+
+ def testTry2FoldSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.fold(x => assert(false), y => y * 200)
+ assert(n == 200)
+ }
+
+ def testTry2FoldFailure(): Unit = {
+ val e = new Exception("foo")
+ val t = Failure(e)
+ val n = t.fold(x => x, y => assert(false))
+ assert(n == e)
+ }
+
+ def testTry2SwapSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.swap
+ assert(n == Left(1))
+ }
+
+ def testTry2SwapFailure(): Unit = {
+ val e = new Exception("foo")
+ val t = Failure(e)
+ val n = t.swap
+ assert(n == Right(e))
+ }
+
+ // def testTry2MergeSucccess(): Unit = {
+ // val t: Try[Int] = Success(1)
+ // val n = (t: Either[Any, Any]).t.merge // connecting two implicit conversions
+ // assert(n == 1)
+ // }
+
+ // def testTry2MergeFailure(): Unit = {
+ // val e = new Exception("foo")
+ // val t = Failure(e)
+ // val n = (t: Either[Any, Any]).merge // connecting two implicit conversions
+ // assert(n == e)
+ // }
+
+ testTry2RightMap()
+ testTry2LeftMap()
+ testTry2FoldSuccess()
+ testTry2FoldFailure()
+ testTry2SwapSuccess()
+ testTry2SwapFailure()
+ // testTry2MergeSucccess()
+ // testTry2MergeFailure()
+}
+
+// tests that implicit conversions from Either to Try behave as expected
+trait TryImplicitConversionEither2Try {
+
+ def testRight2FilterSuccessTrue(): Unit = {
+ def expectsTry[U <% Try[Int]](rght: U): Try[Int] = {
+ val n = rght.filter(x => x > 0) // this should be converted to a Try
+ n
+ }
+ val r = Right(1)
+ val n = expectsTry(r)
+ assert(n == Success(1))
+ }
+
+ def testRight2FilterSuccessFalse(): Unit = {
+ def expectsTry[U <% Try[Int]](rght: U): Try[Int] = {
+ val n = rght.filter(x => x < 0) // this should be converted to a Try
+ n
+ }
+ val r = Right(1)
+ val n = expectsTry(r)
+ n match {
+ case Failure(e: NoSuchElementException) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ def testLeft2FilterFailure(): Unit = {
+ def expectsTry[U <% Try[Int]](rght: U): Try[Int] = {
+ val n = rght.filter(x => x > 0) // this should be converted to a Try
+ n
+ }
+ val r = Left(new Exception("foo"))
+ val n = expectsTry(r)
+ n match {
+ case Failure(e: Exception) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ def testRight2GetSuccess(): Unit = {
+ def expectsTry[U <% Try[Int]](rght: U): Int = {
+ val n = rght.get // this should be converted to a Try
+ n
+ }
+ val r = Right(1)
+ val n = expectsTry(r)
+ assert(n == 1)
+ }
+
+ testRight2FilterSuccessTrue()
+ testRight2FilterSuccessFalse()
+ testLeft2FilterFailure()
+ testRight2GetSuccess()
+}
+
+object Test
+extends App
+with TryStandard
+with TryImplicitConversionTry2Either
+with TryImplicitConversionEither2Try {
+ System.exit(0)
+} \ No newline at end of file
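
For context, the combinators exercised above read naturally at a use site; a short sketch (the parse helper is illustrative):

// Sketch only; mirrors the map/recover/filter behaviour checked by the test.
import scala.util.Try

object TrySketch extends App {
  def parse(s: String): Try[Int] = Try(s.toInt)

  println(parse("41").map(_ + 1))                                        // Success(42)
  println(parse("oops").recover { case _: NumberFormatException => 0 })  // Success(0)
  println(parse("41").filter(_ > 100))                                   // Failure(NoSuchElementException)
  println(parse("oops").getOrElse(-1))                                   // -1
}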
diff --git a/test/files/lib/annotations.jar.desired.sha1 b/test/files/lib/annotations.jar.desired.sha1
index 2b4292d796..ff7bc9425e 100644
--- a/test/files/lib/annotations.jar.desired.sha1
+++ b/test/files/lib/annotations.jar.desired.sha1
@@ -1 +1 @@
-02fe2ed93766323a13f22c7a7e2ecdcd84259b6c ?annotations.jar
+02fe2ed93766323a13f22c7a7e2ecdcd84259b6c *annotations.jar
diff --git a/test/files/lib/enums.jar.desired.sha1 b/test/files/lib/enums.jar.desired.sha1
index 46cd8e92cf..040dff4487 100644
--- a/test/files/lib/enums.jar.desired.sha1
+++ b/test/files/lib/enums.jar.desired.sha1
@@ -1 +1 @@
-981392dbd1f727b152cd1c908c5fce60ad9d07f7 ?enums.jar
+981392dbd1f727b152cd1c908c5fce60ad9d07f7 *enums.jar
diff --git a/test/files/lib/genericNest.jar.desired.sha1 b/test/files/lib/genericNest.jar.desired.sha1
index e9321262f2..77e4fec408 100644
--- a/test/files/lib/genericNest.jar.desired.sha1
+++ b/test/files/lib/genericNest.jar.desired.sha1
@@ -1 +1 @@
-b1ec8a095cec4902b3609d74d274c04365c59c04 ?genericNest.jar
+b1ec8a095cec4902b3609d74d274c04365c59c04 *genericNest.jar
diff --git a/test/files/lib/methvsfield.jar.desired.sha1 b/test/files/lib/methvsfield.jar.desired.sha1
index 8c01532b88..6655f45ddb 100644
--- a/test/files/lib/methvsfield.jar.desired.sha1
+++ b/test/files/lib/methvsfield.jar.desired.sha1
@@ -1 +1 @@
-be8454d5e7751b063ade201c225dcedefd252775 ?methvsfield.jar
+be8454d5e7751b063ade201c225dcedefd252775 *methvsfield.jar
diff --git a/test/files/lib/nest.jar.desired.sha1 b/test/files/lib/nest.jar.desired.sha1
index 674ca79a5b..056e7ada90 100644
--- a/test/files/lib/nest.jar.desired.sha1
+++ b/test/files/lib/nest.jar.desired.sha1
@@ -1 +1 @@
-cd33e0a0ea249eb42363a2f8ba531186345ff68c ?nest.jar
+cd33e0a0ea249eb42363a2f8ba531186345ff68c *nest.jar
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
index e6ed543d73..2f15402d18 100644
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ b/test/files/lib/scalacheck.jar.desired.sha1
@@ -1 +1 @@
-b6f4dbb29f0c2ec1eba682414f60d52fea84f703 ?scalacheck.jar
+b6f4dbb29f0c2ec1eba682414f60d52fea84f703 *scalacheck.jar
diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check
index ab3d28777d..62f895cc7e 100644
--- a/test/files/neg/catch-all.check
+++ b/test/files/neg/catch-all.check
@@ -1,10 +1,10 @@
catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
- try { "warn" } catch { case _ => }
- ^
+ try { "warn" } catch { case _ => }
+ ^
catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
- try { "warn" } catch { case x => }
- ^
+ try { "warn" } catch { case x => }
+ ^
catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
- try { "warn" } catch { case _: RuntimeException => ; case x => }
- ^
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
+ ^
three errors found
diff --git a/test/files/neg/catch-all.scala b/test/files/neg/catch-all.scala
index 35a6d7af91..c05be77044 100644
--- a/test/files/neg/catch-all.scala
+++ b/test/files/neg/catch-all.scala
@@ -1,19 +1,31 @@
object CatchAll {
- try { "warn" } catch { case _ => }
+ try { "warn" } catch { case _ => }
- try { "warn" } catch { case x => }
+ try { "warn" } catch { case x => }
- try { "warn" } catch { case _: RuntimeException => ; case x => }
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
- try { "okay" } catch { case _: Throwable => }
+ val t = T
- try { "okay" } catch { case _: Exception => }
+ try { "okay" } catch { case T => }
- try { "okay" } catch { case okay: Throwable => }
+ try { "okay" } catch { case `t` => }
- try { "okay" } catch { case okay: Exception => }
+ try { "okay" } catch { case x @ T => }
- try { "okay" } catch { case _ if "".isEmpty => }
+ try { "okay" } catch { case x @ `t` => }
- "okay" match { case _ => "" }
+ try { "okay" } catch { case _: Throwable => }
+
+ try { "okay" } catch { case _: Exception => }
+
+ try { "okay" } catch { case okay: Throwable => }
+
+ try { "okay" } catch { case okay: Exception => }
+
+ try { "okay" } catch { case _ if "".isEmpty => }
+
+ "okay" match { case _ => "" }
}
+
+object T extends Throwable
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
new file mode 100644
index 0000000000..841e893249
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -0,0 +1,61 @@
+classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm1[T: ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ val cm3: ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ val cm3: ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm1[T: scala.reflect.ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ type CM[T] = ClassManifest[T]
+ ^
+classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ type RCM[T] = scala.reflect.ClassManifest[T]
+ ^
+classmanifests_new_deprecations.scala:20: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def m1[T: Manifest] = ???
+ ^
+classmanifests_new_deprecations.scala:21: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def m2[T](implicit evidence$1: Manifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val m3: Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val m3: Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:24: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def rm1[T: scala.reflect.Manifest] = ???
+ ^
+classmanifests_new_deprecations.scala:25: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val rm3: scala.reflect.Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val rm3: scala.reflect.Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:28: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ type M[T] = Manifest[T]
+ ^
+classmanifests_new_deprecations.scala:33: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ type RM[T] = scala.reflect.Manifest[T]
+ ^
+20 errors found
diff --git a/test/files/neg/classmanifests_new_deprecations.flags b/test/files/neg/classmanifests_new_deprecations.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/classmanifests_new_deprecations.scala b/test/files/neg/classmanifests_new_deprecations.scala
new file mode 100644
index 0000000000..563a0bc197
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.scala
@@ -0,0 +1,37 @@
+object Test extends App {
+ def cm1[T: ClassManifest] = ???
+ def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
+ val cm3: ClassManifest[Int] = null
+
+ def rcm1[T: scala.reflect.ClassManifest] = ???
+ def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+
+ type CM[T] = ClassManifest[T]
+ def acm1[T: CM] = ???
+ def acm2[T](implicit evidence$1: CM[T]) = ???
+ val acm3: CM[Int] = null
+
+ type RCM[T] = scala.reflect.ClassManifest[T]
+ def arcm1[T: RCM] = ???
+ def arcm2[T](implicit evidence$1: RCM[T]) = ???
+ val arcm3: RCM[Int] = null
+
+ def m1[T: Manifest] = ???
+ def m2[T](implicit evidence$1: Manifest[T]) = ???
+ val m3: Manifest[Int] = null
+
+ def rm1[T: scala.reflect.Manifest] = ???
+ def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ???
+ val rm3: scala.reflect.Manifest[Int] = null
+
+ type M[T] = Manifest[T]
+ def am1[T: M] = ???
+ def am2[T](implicit evidence$1: M[T]) = ???
+ val am3: M[Int] = null
+
+ type RM[T] = scala.reflect.Manifest[T]
+ def arm1[T: RM] = ???
+ def arm2[T](implicit evidence$1: RM[T]) = ???
+ val arm3: RM[Int] = null
+}
\ No newline at end of file
diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check
index 7140b99428..0f0d13cb33 100644
--- a/test/files/neg/exhausting.check
+++ b/test/files/neg/exhausting.check
@@ -7,7 +7,7 @@ It would fail on the following input: Nil
def fail2[T](xs: List[T]) = xs match {
^
exhausting.scala:32: error: match may not be exhaustive.
-It would fail on the following input: List(<not in (1, 2)>)
+It would fail on the following input: List((x: Int forSome x not in (1, 2)))
def fail3a(xs: List[Int]) = xs match {
^
exhausting.scala:39: error: match may not be exhaustive.
diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
index 3134aadb58..15bdcfc5b1 100644
--- a/test/files/neg/interop_abstypetags_arenot_classmanifests.check
+++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
@@ -1,4 +1,4 @@
-interop_abstypetags_arenot_classmanifests.scala:5: error: could not find implicit value for parameter m: ClassManifest[T]
+interop_abstypetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
println(classManifest[T])
^
one error found
diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check
index 403625b4da..b404f94d69 100644
--- a/test/files/neg/interop_typetags_arenot_classmanifests.check
+++ b/test/files/neg/interop_typetags_arenot_classmanifests.check
@@ -1,4 +1,4 @@
-interop_typetags_arenot_classmanifests.scala:5: error: could not find implicit value for parameter m: ClassManifest[T]
+interop_typetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
println(classManifest[T])
^
one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
index 4fc06b5ceb..1b9487982f 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree.check
+++ b/test/files/neg/macro-invalidret-nonuniversetree.check
@@ -1,7 +1,7 @@
Macros_Test_2.scala:2: error: macro implementation has wrong shape:
required: (c: scala.reflect.makro.Context): c.Expr[Any]
- found : (c: scala.reflect.makro.Context): reflect.mirror.Literal
-type mismatch for return type: reflect.mirror.Literal does not conform to c.Expr[Any]
+ found : (c: scala.reflect.makro.Context): reflect.basis.Literal
+type mismatch for return type: reflect.basis.Literal does not conform to c.Expr[Any]
def foo = macro Impls.foo
^
one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
index 86b7c8d8d0..da0eb0ac83 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.makro.{Context => Ctx}
object Impls {
- def foo(c: Ctx) = scala.reflect.mirror.Literal(scala.reflect.mirror.Constant(42))
+ def foo(c: Ctx) = scala.reflect.basis.Literal(scala.reflect.basis.Constant(42))
}
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
new file mode 100644
index 0000000000..8986b899a3
--- /dev/null
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -0,0 +1,70 @@
+stringinterpolation_macro-neg.scala:8: error: too few parts
+ new StringContext().f()
+ ^
+stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string
+ new StringContext("", " is ", "%2d years old").f(s)
+ ^
+stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string
+ new StringContext("", " is ", "%2d years old").f(s, d, d)
+ ^
+stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string
+ new StringContext("", "").f()
+ ^
+stringinterpolation_macro-neg.scala:14: error: type mismatch;
+ found : String
+ required: Boolean
+ f"$s%b"
+ ^
+stringinterpolation_macro-neg.scala:15: error: type mismatch;
+ found : String
+ required: Char
+ f"$s%c"
+ ^
+stringinterpolation_macro-neg.scala:16: error: type mismatch;
+ found : Double
+ required: Char
+ f"$f%c"
+ ^
+stringinterpolation_macro-neg.scala:17: error: type mismatch;
+ found : String
+ required: Int
+ f"$s%x"
+ ^
+stringinterpolation_macro-neg.scala:18: error: type mismatch;
+ found : Boolean
+ required: Int
+ f"$b%d"
+ ^
+stringinterpolation_macro-neg.scala:19: error: type mismatch;
+ found : String
+ required: Int
+ f"$s%d"
+ ^
+stringinterpolation_macro-neg.scala:20: error: type mismatch;
+ found : Double
+ required: Int
+ f"$f%o"
+ ^
+stringinterpolation_macro-neg.scala:21: error: type mismatch;
+ found : String
+ required: Double
+ f"$s%e"
+ ^
+stringinterpolation_macro-neg.scala:22: error: type mismatch;
+ found : Boolean
+ required: Double
+ f"$b%f"
+ ^
+stringinterpolation_macro-neg.scala:27: error: type mismatch;
+ found : String
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both value strToInt2 of type String => Int
+ and value strToInt1 of type String => Int
+ are possible conversion functions from String to Int
+ f"$s%d"
+ ^
+stringinterpolation_macro-neg.scala:30: error: illegal conversion character
+ f"$s%i"
+ ^
+15 errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala
new file mode 100644
index 0000000000..ac9d97d678
--- /dev/null
+++ b/test/files/neg/stringinterpolation_macro-neg.scala
@@ -0,0 +1,31 @@
+object Test extends App {
+ val s = "Scala"
+ val d = 8
+ val b = false
+ val f = 3.14159
+
+ // 1) number of arguments
+ new StringContext().f()
+ new StringContext("", " is ", "%2d years old").f(s)
+ new StringContext("", " is ", "%2d years old").f(s, d, d)
+ new StringContext("", "").f()
+
+ // 2) Interpolation mismatches
+ f"$s%b"
+ f"$s%c"
+ f"$f%c"
+ f"$s%x"
+ f"$b%d"
+ f"$s%d"
+ f"$f%o"
+ f"$s%e"
+ f"$b%f"
+
+ {
+ implicit val strToInt1 = (s: String) => 1
+ implicit val strToInt2 = (s: String) => 2
+ f"$s%d"
+ }
+
+ f"$s%i"
+}
diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check
index 8955c94b32..e4730b6459 100644
--- a/test/files/neg/switch.check
+++ b/test/files/neg/switch.check
@@ -1,10 +1,7 @@
-switch.scala:28: error: could not emit switch for @switch annotated match
- def fail1(c: Char) = (c: @switch) match {
- ^
switch.scala:38: error: could not emit switch for @switch annotated match
def fail2(c: Char) = (c: @switch @unchecked) match {
^
switch.scala:45: error: could not emit switch for @switch annotated match
def fail3(c: Char) = (c: @unchecked @switch) match {
^
-three errors found
+two errors found
diff --git a/test/files/neg/switch.scala b/test/files/neg/switch.scala
index a3dfd869d6..198583fe41 100644
--- a/test/files/neg/switch.scala
+++ b/test/files/neg/switch.scala
@@ -24,8 +24,8 @@ object Main {
case _ => false
}
- // has a guard
- def fail1(c: Char) = (c: @switch) match {
+ // has a guard, but since SI-5830 that's ok
+ def succ_guard(c: Char) = (c: @switch) match {
case 'A' | 'B' | 'C' => true
case x if x == 'A' => true
case _ => false
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
new file mode 100644
index 0000000000..714816fd62
--- /dev/null
+++ b/test/files/neg/t2442.check
@@ -0,0 +1,9 @@
+t2442.scala:4: error: match may not be exhaustive.
+It would fail on the following input: THREE
+ def f(e: MyEnum) = e match {
+ ^
+t2442.scala:11: error: match may not be exhaustive.
+It would fail on the following input: BLUE
+ def g(e: MySecondEnum) = e match {
+ ^
+two errors found
diff --git a/test/files/neg/t2442.flags b/test/files/neg/t2442.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t2442.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t2442/MyEnum.java b/test/files/neg/t2442/MyEnum.java
new file mode 100644
index 0000000000..3ffbbb31b8
--- /dev/null
+++ b/test/files/neg/t2442/MyEnum.java
@@ -0,0 +1,3 @@
+public enum MyEnum {
+ ONE, TWO, THREE;
+}
\ No newline at end of file
diff --git a/test/files/neg/t2442/MySecondEnum.java b/test/files/neg/t2442/MySecondEnum.java
new file mode 100644
index 0000000000..0f841286de
--- /dev/null
+++ b/test/files/neg/t2442/MySecondEnum.java
@@ -0,0 +1,6 @@
+public enum MySecondEnum {
+ RED(1), BLUE(2) { public void foo() {} };
+ MySecondEnum(int i) {}
+
+ public void foo() {}
+}
\ No newline at end of file
diff --git a/test/files/neg/t2442/t2442.scala b/test/files/neg/t2442/t2442.scala
new file mode 100644
index 0000000000..b0a0f3cd41
--- /dev/null
+++ b/test/files/neg/t2442/t2442.scala
@@ -0,0 +1,15 @@
+class Test {
+ import MyEnum._
+
+ def f(e: MyEnum) = e match {
+ case ONE => println("one")
+ case TWO => println("two")
+ // missing case --> exhaustivity warning!
+ }
+
+ import MySecondEnum._
+ def g(e: MySecondEnum) = e match {
+ case RED => println("red")
+ // missing case --> exhaustivity warning!
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
new file mode 100644
index 0000000000..aeb18497ed
--- /dev/null
+++ b/test/files/neg/t2796.check
@@ -0,0 +1,4 @@
+t2796.scala:7: error: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+ val abstractVal = "T1.abstractVal" // warn
+ ^
+one error found
diff --git a/test/files/neg/t2796.flags b/test/files/neg/t2796.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t2796.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala
new file mode 100644
index 0000000000..3bcc9df562
--- /dev/null
+++ b/test/files/neg/t2796.scala
@@ -0,0 +1,28 @@
+trait Base {
+ val abstractVal: String
+ final val useAbstractVal = abstractVal
+}
+
+trait T1 extends {
+ val abstractVal = "T1.abstractVal" // warn
+} with Base
+
+trait T2 extends {
+ type X = Int // okay
+} with Base
+
+
+class C1 extends {
+ val abstractVal = "C1.abstractVal" // okay
+} with Base
+
+object Test {
+ def main(args: Array[String]) {
+ assert(new C1 ().useAbstractVal == "C1.abstractVal")
+ // This currently fails. A more ambitious approach to this ticket would add $earlyinit$
+ // to traits and call it from the right places in the right order.
+ //
+ // For now, we'll just issue a warning.
+ assert(new T1 {}.useAbstractVal == "T1.abstractVal")
+ }
+}
diff --git a/test/files/neg/t3692-old.check b/test/files/neg/t3692-old.check
index a97a7d31ec..92d71f7e4e 100644
--- a/test/files/neg/t3692-old.check
+++ b/test/files/neg/t3692-old.check
@@ -1,8 +1,11 @@
-t3692-old.scala:6: warning: object Manifest in package reflect is deprecated: Use TypeTag instead
- new ManifestTester().toJavaMap(map)
- ^
+t3692-old.scala:11: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = {
+ ^
+t3692-old.scala:11: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = {
+ ^
t3692-old.scala:15: error: unreachable code
case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
^
-one warning found
+two warnings found
one error found
diff --git a/test/files/neg/t3836.check b/test/files/neg/t3836.check
new file mode 100644
index 0000000000..ff2fc36ae9
--- /dev/null
+++ b/test/files/neg/t3836.check
@@ -0,0 +1,13 @@
+t3836.scala:17: error: reference to IOException is ambiguous;
+it is imported twice in the same scope by
+import foo.bar._
+and import java.io._
+ def f = new IOException // genuinely different
+ ^
+t3836.scala:26: error: reference to Bippy is ambiguous;
+it is imported twice in the same scope by
+import baz._
+and import bar._
+ def f: Bippy[Int] = ???
+ ^
+two errors found
diff --git a/test/files/neg/t3836.scala b/test/files/neg/t3836.scala
new file mode 100644
index 0000000000..a68f6e172f
--- /dev/null
+++ b/test/files/neg/t3836.scala
@@ -0,0 +1,28 @@
+package foo
+
+package object bar {
+ type IOException = Object
+ type Bippy[T] = List[T]
+}
+
+package object baz {
+ type Bippy[+T] = List[T]
+}
+
+package baz {
+ import java.io._
+ import foo.bar._
+
+ object Test {
+ def f = new IOException // genuinely different
+ }
+}
+
+package baz2 {
+ import bar._
+ import baz._
+
+ object Test2 {
+ def f: Bippy[Int] = ???
+ }
+}
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
index c01226685f..7bd8ff78f9 100644
--- a/test/files/neg/t4541.check
+++ b/test/files/neg/t4541.check
@@ -1,7 +1,7 @@
-t4541.scala:11: error: scala.reflect.internal.Types$TypeError: variable data in class Sparse cannot be accessed in Sparse[Int]
+t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int]
Access to protected method data not permitted because
prefix type Sparse[Int] does not conform to
class Sparse$mcI$sp where the access take place
that.data
^
-one error found
\ No newline at end of file
+one error found
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
index 54d9c3d1ee..8a52fd97f4 100644
--- a/test/files/neg/t4541b.check
+++ b/test/files/neg/t4541b.check
@@ -1,7 +1,7 @@
-t4541b.scala:13: error: scala.reflect.internal.Types$TypeError: variable data in class SparseArray cannot be accessed in SparseArray[Int]
+t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int]
Access to protected method data not permitted because
prefix type SparseArray[Int] does not conform to
class SparseArray$mcI$sp where the access take place
use(that.data.clone)
^
-one error found
\ No newline at end of file
+one error found
diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check
new file mode 100644
index 0000000000..cd12e56f86
--- /dev/null
+++ b/test/files/neg/t4691_exhaust_extractor.check
@@ -0,0 +1,13 @@
+t4691_exhaust_extractor.scala:17: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f1(x: Foo) = x match {
+ ^
+t4691_exhaust_extractor.scala:23: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f2(x: Foo) = x match {
+ ^
+t4691_exhaust_extractor.scala:29: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f3(x: Foo) = x match {
+ ^
+three errors found
diff --git a/test/files/neg/t4691_exhaust_extractor.flags b/test/files/neg/t4691_exhaust_extractor.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t4691_exhaust_extractor.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t4691_exhaust_extractor.scala b/test/files/neg/t4691_exhaust_extractor.scala
new file mode 100644
index 0000000000..c68c33d654
--- /dev/null
+++ b/test/files/neg/t4691_exhaust_extractor.scala
@@ -0,0 +1,33 @@
+sealed trait Foo
+class Bar1 extends Foo
+class Bar2 extends Foo
+class Bar3 extends Foo
+
+// these extractors are known to always succeed as they return a Some
+object Baz1 {
+ def unapply(x: Bar1): Some[Int] = Some(1)
+}
+object Baz2 {
+ def unapply(x: Bar2): Some[Int] = Some(2)
+}
+
+
+object Test {
+ // warning: missing Bar3
+ def f1(x: Foo) = x match {
+ case _: Bar1 => 1
+ case _: Bar2 => 2
+ }
+
+ // warning: missing Bar3
+ def f2(x: Foo) = x match {
+ case _: Bar1 => 1
+ case Baz2(x) => x
+ }
+
+ // warning: missing Bar3
+ def f3(x: Foo) = x match {
+ case Baz1(x) => x
+ case Baz2(x) => x
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check
new file mode 100644
index 0000000000..b53bbdbd15
--- /dev/null
+++ b/test/files/neg/t4842.check
@@ -0,0 +1,7 @@
+t4842.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+two errors found
diff --git a/test/files/neg/t4842b.scala b/test/files/neg/t4842.scala
index a7996cc061..c6244efda7 100644
--- a/test/files/neg/t4842b.scala
+++ b/test/files/neg/t4842.scala
@@ -1,3 +1,7 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
+
class TypeArg[X](val x: X)(a: AnyRef) {
def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
}
diff --git a/test/files/neg/t4842a.check b/test/files/neg/t4842a.check
deleted file mode 100644
index 39d77bfc48..0000000000
--- a/test/files/neg/t4842a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842a.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this(x: Int) = this(new { println(Foo.this)}) // error
- ^
-one error found
diff --git a/test/files/neg/t4842a.scala b/test/files/neg/t4842a.scala
deleted file mode 100644
index 78360effb4..0000000000
--- a/test/files/neg/t4842a.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Foo (x: AnyRef) {
- def this(x: Int) = this(new { println(Foo.this)}) // error
-}
diff --git a/test/files/neg/t4842b.check b/test/files/neg/t4842b.check
deleted file mode 100644
index c7ccd5e059..0000000000
--- a/test/files/neg/t4842b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t4842b.scala:2: error: self constructor arguments cannot reference unconstructed `this`
- def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
- ^
-one error found
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
new file mode 100644
index 0000000000..96eb1fd364
--- /dev/null
+++ b/test/files/neg/t5148.check
@@ -0,0 +1,2 @@
+error: bad reference while unpickling Imports.class: term memberHandlers not found in scala.tools.nsc.interpreter.IMain
+one error found
diff --git a/test/files/neg/t5148.scala b/test/files/neg/t5148.scala
new file mode 100644
index 0000000000..fca64e57df
--- /dev/null
+++ b/test/files/neg/t5148.scala
@@ -0,0 +1,4 @@
+package scala.tools.nsc
+package interpreter
+
+class IMain extends Imports
diff --git a/test/files/neg/t5504.check b/test/files/neg/t5504.check
deleted file mode 100644
index 2827c02d10..0000000000
--- a/test/files/neg/t5504.check
+++ /dev/null
@@ -1,4 +0,0 @@
-error: type _$1 is defined twice
- conflicting symbols both originated in file 't5504/s_1.scala'
- Note: this may be due to a bug in the compiler involving wildcards in package objects
-one error found
diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check
index b3ff16d7e4..f1dad94df3 100644
--- a/test/files/neg/t5589neg.check
+++ b/test/files/neg/t5589neg.check
@@ -1,4 +1,4 @@
-t5589neg.scala:2: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead
+t5589neg.scala:2: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
^
t5589neg.scala:2: error: constructor cannot be instantiated to expected type;
@@ -6,7 +6,7 @@ t5589neg.scala:2: error: constructor cannot be instantiated to expected type;
required: String
def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
^
-t5589neg.scala:3: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead
+t5589neg.scala:3: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
^
t5589neg.scala:3: error: constructor cannot be instantiated to expected type;
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
new file mode 100644
index 0000000000..89d766fe34
--- /dev/null
+++ b/test/files/neg/t5761.check
@@ -0,0 +1,16 @@
+t5761.scala:4: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:8: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]()) // no crash
+ ^
+t5761.scala:9: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:13: error: not found: type Tread
+ new Tread("sth") { }.run()
+ ^
+four errors found
diff --git a/test/files/neg/t5761.scala b/test/files/neg/t5761.scala
new file mode 100644
index 0000000000..040c4eb75a
--- /dev/null
+++ b/test/files/neg/t5761.scala
@@ -0,0 +1,16 @@
+class D[-A](x: A) { }
+
+object Test1 {
+ println(new D[Int]{}) // crash
+}
+
+object Test2 {
+ println(new D[Int]()) // no crash
+ println(new D[Int]{}) // crash
+}
+
+object Test3 {
+ new Tread("sth") { }.run()
+}
+
+
diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check
new file mode 100644
index 0000000000..85cb84378f
--- /dev/null
+++ b/test/files/neg/t5830.check
@@ -0,0 +1,4 @@
+t5830.scala:6: error: unreachable code
+ case 'a' => println("b") // unreachable
+ ^
+one error found
diff --git a/test/files/neg/t5830.flags b/test/files/neg/t5830.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t5830.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t5830.scala b/test/files/neg/t5830.scala
new file mode 100644
index 0000000000..c2df3dec8b
--- /dev/null
+++ b/test/files/neg/t5830.scala
@@ -0,0 +1,9 @@
+import scala.annotation.switch
+
+class Test {
+ def unreachable(ch: Char) = (ch: @switch) match {
+ case 'a' => println("b") // ok
+ case 'a' => println("b") // unreachable
+ case 'c' =>
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5839.check b/test/files/neg/t5839.check
new file mode 100644
index 0000000000..d4b125bd1e
--- /dev/null
+++ b/test/files/neg/t5839.check
@@ -0,0 +1,6 @@
+t5839.scala:5: error: type mismatch;
+ found : (x: String => String)Int <and> (x: Int)Int
+ required: Int => String
+ val x: String = goo(foo _)
+ ^
+one error found
diff --git a/test/files/neg/t5839.scala b/test/files/neg/t5839.scala
new file mode 100644
index 0000000000..d3a5d4b25f
--- /dev/null
+++ b/test/files/neg/t5839.scala
@@ -0,0 +1,7 @@
+object Test {
+ def goo[T](x: Int => T): T = x(1)
+ implicit def f(x: Int): String = ""
+ def foo(x: Int): Int = x + 1
+ val x: String = goo(foo _)
+ def foo(x: String => String) = 1
+}
diff --git a/test/files/neg/t5969.check b/test/files/neg/t5969.check
new file mode 100644
index 0000000000..9d8ac9a3a5
--- /dev/null
+++ b/test/files/neg/t5969.check
@@ -0,0 +1,7 @@
+t5969.scala:9: error: overloaded method value g with alternatives:
+ (x: C2)String <and>
+ (x: C1)String
+ cannot be applied to (String)
+ if (false) List(g(x)) else List[C1]() map g
+ ^
+one error found
diff --git a/test/files/neg/t5969.scala b/test/files/neg/t5969.scala
new file mode 100644
index 0000000000..62f87fd7ab
--- /dev/null
+++ b/test/files/neg/t5969.scala
@@ -0,0 +1,11 @@
+class C1
+class C2
+class A {
+ def f(x: Any) = x
+ def g(x: C1): String = "A"
+ def g(x: C2): String = "B"
+
+ def crash() = f(List[String]() flatMap { x =>
+ if (false) List(g(x)) else List[C1]() map g
+ })
+}
diff --git a/test/files/neg/t6013.check b/test/files/neg/t6013.check
new file mode 100644
index 0000000000..502da999f5
--- /dev/null
+++ b/test/files/neg/t6013.check
@@ -0,0 +1,7 @@
+DerivedScala.scala:4: error: class C needs to be abstract, since there is a deferred declaration of method foo in class B of type => Int which is not implemented in a subclass
+class C extends B
+ ^
+DerivedScala.scala:7: error: class DerivedScala needs to be abstract, since there is a deferred declaration of method foo in class Abstract of type ()Boolean which is not implemented in a subclass
+class DerivedScala extends Abstract
+ ^
+two errors found
diff --git a/test/files/neg/t6013/Abstract.java b/test/files/neg/t6013/Abstract.java
new file mode 100644
index 0000000000..c0ef046bbd
--- /dev/null
+++ b/test/files/neg/t6013/Abstract.java
@@ -0,0 +1,7 @@
+public abstract class Abstract extends Base {
+ // overrides Base#bar under the erasure model
+ public void bar(java.util.List<java.lang.Integer> foo) { return; }
+
+ // must force re-implementation in derived classes
+ public abstract boolean foo();
+}
diff --git a/test/files/neg/t6013/Base.java b/test/files/neg/t6013/Base.java
new file mode 100644
index 0000000000..b73d7fd821
--- /dev/null
+++ b/test/files/neg/t6013/Base.java
@@ -0,0 +1,10 @@
+abstract public class Base {
+ // This must be considered to be overridden by Abstract#foo based
+ // on the erased signatures. This special case is handled by
+ // `javaErasedOverridingSym` in `RefChecks`.
+ public abstract void bar(java.util.List<java.lang.String> foo) { return; }
+
+ // But, a concrete method in a Java superclass must not excuse
+ // a deferred method in the Java subclass!
+ public boolean foo() { return true; }
+}
diff --git a/test/files/neg/t6013/DerivedScala.scala b/test/files/neg/t6013/DerivedScala.scala
new file mode 100644
index 0000000000..fc0c55d398
--- /dev/null
+++ b/test/files/neg/t6013/DerivedScala.scala
@@ -0,0 +1,7 @@
+// Scala extending Scala (this case was working fine before this bug.)
+class A { def foo: Int = 0 }
+abstract class B extends A { def foo: Int }
+class C extends B
+
+// Scala extending Java
+class DerivedScala extends Abstract
diff --git a/test/files/neg/t6042.check b/test/files/neg/t6042.check
new file mode 100644
index 0000000000..221f06e2c5
--- /dev/null
+++ b/test/files/neg/t6042.check
@@ -0,0 +1,4 @@
+t6042.scala:7: error: illegal type selection from volatile type a.OpSemExp (with upper bound LazyExp[a.OpSemExp] with _$1)
+ def foo[AA <: LazyExp[_]](a: AA): a.OpSemExp#Val = ??? // a.OpSemExp is volatile, because of `with This`
+ ^
+one error found
diff --git a/test/files/neg/t6042.scala b/test/files/neg/t6042.scala
new file mode 100644
index 0000000000..5a123d17ca
--- /dev/null
+++ b/test/files/neg/t6042.scala
@@ -0,0 +1,8 @@
+trait LazyExp[+This <: LazyExp[This]] { this: This =>
+ type OpSemExp <: LazyExp[OpSemExp] with This
+ type Val
+}
+
+object Test {
+ def foo[AA <: LazyExp[_]](a: AA): a.OpSemExp#Val = ??? // a.OpSemExp is volatile, because of `with This`
+}
diff --git a/test/files/neg/t649.check b/test/files/neg/t649.check
index a6670886b5..5a270d4751 100644
--- a/test/files/neg/t649.check
+++ b/test/files/neg/t649.check
@@ -1,4 +1,4 @@
t649.scala:3: error: overloaded method foo needs result type
def foo[A] = foo[A]
- ^
+ ^
one error found
diff --git a/test/files/pos/exponential-spec.scala b/test/files/pos/exponential-spec.scala
new file mode 100644
index 0000000000..83aef58f7e
--- /dev/null
+++ b/test/files/pos/exponential-spec.scala
@@ -0,0 +1,47 @@
+// a.scala
+// Sat Jun 30 19:51:17 PDT 2012
+
+trait Exp[T]
+
+object Test {
+ def f[T](exp: Exp[T]): Exp[T] = (
+ f[T] _
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T] // 4s
+ compose f[T] // 5s
+ compose f[T] // 5s
+ compose f[T] // 6s
+ compose f[T] // 7s
+ compose f[T] // 8s
+ compose f[T] // 11s
+ compose f[T] // 17s
+ compose f[T] // 29s
+ compose f[T] // 54s
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ )(exp)
+}
diff --git a/test/files/pos/rangepos-anonapply.flags b/test/files/pos/rangepos-anonapply.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/rangepos-anonapply.scala b/test/files/pos/rangepos-anonapply.scala
new file mode 100644
index 0000000000..2f3e4ad6cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.scala
@@ -0,0 +1,9 @@
+class Test {
+ trait PropTraverser {
+ def apply(x: Int): Unit = {}
+ }
+
+ def gather(x: Int) {
+ (new PropTraverser {})(x)
+ }
+}
diff --git a/test/files/pos/t3836.scala b/test/files/pos/t3836.scala
new file mode 100644
index 0000000000..840f171164
--- /dev/null
+++ b/test/files/pos/t3836.scala
@@ -0,0 +1,14 @@
+package foo
+
+package object bar {
+ type IOException = java.io.IOException
+}
+
+package baz {
+ import java.io._
+ import foo.bar._
+
+ object Test {
+ def f = new IOException
+ }
+}
diff --git a/test/files/pos/t4176b.scala b/test/files/pos/t4176b.scala
new file mode 100644
index 0000000000..11914c50c8
--- /dev/null
+++ b/test/files/pos/t4176b.scala
@@ -0,0 +1,5 @@
+object Test {
+ def foo(a: String*) = a
+ val fooEta = foo _
+ (foo: Seq[String] => Seq[String])
+}
diff --git a/test/files/neg/t5504/s_1.scala b/test/files/pos/t5504/s_1.scala
index 35cb2c8bae..35cb2c8bae 100644
--- a/test/files/neg/t5504/s_1.scala
+++ b/test/files/pos/t5504/s_1.scala
diff --git a/test/files/neg/t5504/s_2.scala b/test/files/pos/t5504/s_2.scala
index 03eecf6e19..03eecf6e19 100644
--- a/test/files/neg/t5504/s_2.scala
+++ b/test/files/pos/t5504/s_2.scala
diff --git a/test/files/pos/t5846.scala b/test/files/pos/t5846.scala
new file mode 100644
index 0000000000..b06f5ac39c
--- /dev/null
+++ b/test/files/pos/t5846.scala
@@ -0,0 +1,10 @@
+
+
+
+
+/** Return the most general sorted map type. */
+object Test extends App {
+
+ val empty: collection.SortedMap[String, String] = collection.SortedMap.empty[String, String]
+
+}
diff --git a/test/files/pos/t5899.flags b/test/files/pos/t5899.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5899.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala
new file mode 100644
index 0000000000..b16f1f84fe
--- /dev/null
+++ b/test/files/pos/t5899.scala
@@ -0,0 +1,19 @@
+import scala.tools.nsc._
+
+trait Foo {
+ val global: Global
+ import global.{Name, Symbol, nme}
+
+ case class Bippy(name: Name)
+
+ def f(x: Bippy, sym: Symbol): Int = {
+ // no warning (!) for
+ // val Stable = sym.name.toTermName
+
+ val Stable = sym.name
+ Bippy(Stable) match {
+ case Bippy(nme.WILDCARD) => 1
+ case Bippy(Stable) => 2 // should not be considered unreachable
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5932.flags b/test/files/pos/t5932.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5932.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t5932.scala b/test/files/pos/t5932.scala
new file mode 100644
index 0000000000..d824523d5b
--- /dev/null
+++ b/test/files/pos/t5932.scala
@@ -0,0 +1,15 @@
+class A
+
+case object B extends A
+
+object Test {
+ val x1 = (B: A)
+
+ println(x1 == B) // no warning
+ println(B == x1) // no warning
+
+ val x2 = (B: A with Product)
+
+ println(x2 == B) // no warning
+ println(B == x2) // spurious warning: "always returns false"
+}
diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala
index 90e7d84646..7ba035ec3b 100644
--- a/test/files/pos/t5953.scala
+++ b/test/files/pos/t5953.scala
@@ -10,7 +10,7 @@ package object foo {
package foo {
object Test {
- def f1[T](xs: Traversable[T]) = xs.convertTo[immutable.Vector]
+ def f1[T](xs: Traversable[T]) = xs.to[immutable.Vector]
def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector]
}
}
diff --git a/test/files/pos/t5967.scala b/test/files/pos/t5967.scala
new file mode 100644
index 0000000000..eb9bd6dfa7
--- /dev/null
+++ b/test/files/pos/t5967.scala
@@ -0,0 +1,6 @@
+object Test {
+ def f(a: Int*) = a match {
+ case 0 :: Nil => "List(0)! My favorite Seq!"
+ case _ => a.toString
+ }
+}
diff --git a/test/files/pos/t5968.flags b/test/files/pos/t5968.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5968.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala
new file mode 100644
index 0000000000..0093f84fc0
--- /dev/null
+++ b/test/files/pos/t5968.scala
@@ -0,0 +1,8 @@
+object X {
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ // SI-5986 spurious exhaustivity warning here
+ }
+}
+
diff --git a/test/files/pos/t6008.flags b/test/files/pos/t6008.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6008.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6008.scala b/test/files/pos/t6008.scala
new file mode 100644
index 0000000000..84ae19b211
--- /dev/null
+++ b/test/files/pos/t6008.scala
@@ -0,0 +1,12 @@
+// none of these should complain about exhaustivity
+class Test {
+ // It would fail on the following inputs: (_, false), (_, true)
+ def x(in: (Int, Boolean)) = in match { case (i: Int, b: Boolean) => 3 }
+
+ // There is no warning if the Int is ignored or bound without an explicit type:
+ def y(in: (Int, Boolean)) = in match { case (_, b: Boolean) => 3 }
+
+ // Keeping the explicit type for the Int but dropping the one for Boolean presents a spurious warning again:
+ // It would fail on the following input: (_, _)
+ def z(in: (Int, Boolean)) = in match { case (i: Int, b) => 3 }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6022.flags b/test/files/pos/t6022.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6022.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6022.scala b/test/files/pos/t6022.scala
new file mode 100644
index 0000000000..522c3352c7
--- /dev/null
+++ b/test/files/pos/t6022.scala
@@ -0,0 +1,7 @@
+class Test {
+ (null: Any) match {
+ case x: AnyRef if false =>
+ case list: Option[_] =>
+ case product: Product => // change Product to String and it's all good
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6033.scala b/test/files/pos/t6033.scala
new file mode 100644
index 0000000000..60142af6be
--- /dev/null
+++ b/test/files/pos/t6033.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val b = new java.math.BigInteger("123")
+ val big1 = BigInt(b)
+ val big2: BigInt = b
+}
diff --git a/test/files/presentation/hyperlinks.flags b/test/files/presentation/hyperlinks.flags
deleted file mode 100644
index dc13682c5e..0000000000
--- a/test/files/presentation/hyperlinks.flags
+++ /dev/null
@@ -1,2 +0,0 @@
-# This test will fail in the new pattern matcher because
-# it generates trees whose positions are not transparent
diff --git a/test/files/presentation/hyperlinks/Runner.scala b/test/files/presentation/hyperlinks/Runner.scala
index 3d19f2d948..61da49a3d7 100644
--- a/test/files/presentation/hyperlinks/Runner.scala
+++ b/test/files/presentation/hyperlinks/Runner.scala
@@ -1,11 +1,11 @@
import scala.tools.nsc.interactive.tests.InteractiveTest
object Test extends InteractiveTest {
- override def runTests() {
+ override def runDefaultTests() {
// make sure typer is done.. the virtual pattern matcher might translate
// some trees and mess up positions. But we'll catch it red handed!
sourceFiles foreach (src => askLoadedTyped(src).get)
- super.runTests()
+ super.runDefaultTests()
}
}
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000469/Runner.scala b/test/files/presentation/ide-bug-1000469/Runner.scala
index c53533fddd..1ef3cf9025 100644
--- a/test/files/presentation/ide-bug-1000469/Runner.scala
+++ b/test/files/presentation/ide-bug-1000469/Runner.scala
@@ -1,5 +1,3 @@
import scala.tools.nsc.interactive.tests._
-object Test extends InteractiveTest {
- override val runRandomTests = false
-}
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index 9d4674d7c7..4be98a6b21 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -25,7 +25,6 @@ retrieved 126 members
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
[accessible: true] `method contains(elem: Any)Boolean`
-[accessible: true] `method convertTo[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
@@ -111,6 +110,7 @@ retrieved 126 members
[accessible: true] `method toString()String`
[accessible: true] `method toTraversable=> Traversable[B]`
[accessible: true] `method toVector=> Vector[B]`
+[accessible: true] `method to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
diff --git a/test/files/presentation/ide-bug-1000531.flags b/test/files/presentation/ide-bug-1000531.flags
deleted file mode 100644
index 56d026a62d..0000000000
--- a/test/files/presentation/ide-bug-1000531.flags
+++ /dev/null
@@ -1,18 +0,0 @@
-# This file contains command line options that are passed to the presentation compiler
-# Lines starting with # are stripped, and you can split arguments on several lines.
-
-# The -bootclasspath option is treated specially by the test framework: if it's not specified
-# in this file, the presentation compiler will pick up the scala-library/compiler that's on the
-# java classpath used to run this test (usually build/pack)
-
-# Any option can be passed this way, like presentation debug
-# -Ypresentation-debug -Ypresentation-verbose
-
-# the classpath is relative to the current working directory. That means it depends where you're
-# running partest from. Run it from the root scala checkout for these files to resolve correctly
-# (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
-# framework translates them to the platform dependent representation.
-# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
-
-# the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
diff --git a/test/files/presentation/ide-t1000976.check b/test/files/presentation/ide-t1000976.check
new file mode 100644
index 0000000000..d58f86d6c6
--- /dev/null
+++ b/test/files/presentation/ide-t1000976.check
@@ -0,0 +1 @@
+Test OK
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000976.flags b/test/files/presentation/ide-t1000976.flags
new file mode 100644
index 0000000000..9a1a05a4f6
--- /dev/null
+++ b/test/files/presentation/ide-t1000976.flags
@@ -0,0 +1 @@
+-sourcepath src
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000976/Test.scala b/test/files/presentation/ide-t1000976/Test.scala
new file mode 100644
index 0000000000..722259d3a1
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/Test.scala
@@ -0,0 +1,30 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = {
+ loadSourceAndWaitUntilTypechecked("A.scala")
+ val sourceB = loadSourceAndWaitUntilTypechecked("B.scala")
+ checkErrors(sourceB)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ compiler.askToDoFirst(sourceFile)
+ val res = new Response[Unit]
+ compiler.askReload(List(sourceFile), res)
+ res.get
+ askLoadedTyped(sourceFile).get
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/ide-t1000976/src/a/A.scala b/test/files/presentation/ide-t1000976/src/a/A.scala
new file mode 100644
index 0000000000..fcfef8b525
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/a/A.scala
@@ -0,0 +1,7 @@
+package a
+
+import d.D._
+
+object A {
+ Seq.empty[Byte].toArray.toSeq
+}
diff --git a/test/files/presentation/ide-t1000976/src/b/B.scala b/test/files/presentation/ide-t1000976/src/b/B.scala
new file mode 100644
index 0000000000..628348cac1
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/b/B.scala
@@ -0,0 +1,7 @@
+package b
+
+import c.C
+
+class B {
+ new C("")
+}
diff --git a/test/files/presentation/ide-t1000976/src/c/C.scala b/test/files/presentation/ide-t1000976/src/c/C.scala
new file mode 100644
index 0000000000..cc23e3eef1
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/c/C.scala
@@ -0,0 +1,3 @@
+package c
+
+class C(key: String = "", componentStates: String = "")
diff --git a/test/files/presentation/ide-t1000976/src/d/D.scala b/test/files/presentation/ide-t1000976/src/d/D.scala
new file mode 100644
index 0000000000..d7a48f98d5
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/d/D.scala
@@ -0,0 +1,7 @@
+package d
+
+import c.C
+
+object D {
+ implicit def c2s(c: C): String = ""
+}
diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
index 857beac7df..a5533a623a 100644
--- a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
+++ b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
@@ -24,10 +24,7 @@ import scala.tools.nsc.io._
object Test extends InteractiveTest {
final val mega = 1024 * 1024
- override def main(args: Array[String]) {
- memoryConsumptionTest()
- compiler.askShutdown()
- }
+ override def execute(): Unit = memoryConsumptionTest()
def batchSource(name: String) =
new BatchSourceFile(AbstractFile.getFile(name))
diff --git a/test/files/presentation/t5708/Test.scala b/test/files/presentation/t5708/Test.scala
index 96e758d974..bec1131c4c 100644
--- a/test/files/presentation/t5708/Test.scala
+++ b/test/files/presentation/t5708/Test.scala
@@ -1,5 +1,3 @@
import scala.tools.nsc.interactive.tests.InteractiveTest
-object Test extends InteractiveTest {
-
-}
\ No newline at end of file
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/visibility/Test.scala b/test/files/presentation/visibility/Test.scala
index 96e758d974..bec1131c4c 100644
--- a/test/files/presentation/visibility/Test.scala
+++ b/test/files/presentation/visibility/Test.scala
@@ -1,5 +1,3 @@
import scala.tools.nsc.interactive.tests.InteractiveTest
-object Test extends InteractiveTest {
-
-}
\ No newline at end of file
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/res/t5489.check b/test/files/res/t5489.check
new file mode 100644
index 0000000000..6cf64f734b
--- /dev/null
+++ b/test/files/res/t5489.check
@@ -0,0 +1,4 @@
+
+nsc>
+nsc>
+nsc>
diff --git a/test/files/res/t5489.res b/test/files/res/t5489.res
new file mode 100644
index 0000000000..5b787b7765
--- /dev/null
+++ b/test/files/res/t5489.res
@@ -0,0 +1,2 @@
+t5489/t5489.scala
+t5489/t5489.scala
\ No newline at end of file
diff --git a/test/files/res/t5489/t5489.scala b/test/files/res/t5489/t5489.scala
new file mode 100644
index 0000000000..f821a1a9b6
--- /dev/null
+++ b/test/files/res/t5489/t5489.scala
@@ -0,0 +1,14 @@
+package repro
+
+trait HasString {
+ def blerg(): String
+}
+
+class CausesProblems {
+ def problems = (
+ if ("don't optimize me away!".length == 0)
+ new HasString { def blerg() = "wut" }
+ else
+ new HasString { def blerg() = "okay" }
+ ).blerg()
+}
diff --git a/test/files/run/abstypetags_core.check b/test/files/run/abstypetags_core.check
index 55c8cb7e89..8d20e099c4 100644
--- a/test/files/run/abstypetags_core.check
+++ b/test/files/run/abstypetags_core.check
@@ -1,28 +1,30 @@
-true
-TypeTag[Byte]
-true
-TypeTag[Short]
-true
-TypeTag[Char]
-true
-TypeTag[Int]
-true
-TypeTag[Long]
-true
-TypeTag[Float]
-true
-TypeTag[Double]
-true
-TypeTag[Boolean]
-true
-TypeTag[Unit]
-true
-TypeTag[Any]
-true
-TypeTag[java.lang.Object]
-true
-TypeTag[Null]
-true
-TypeTag[Nothing]
-true
-TypeTag[java.lang.String]
+true
+TypeTag[Byte]
+true
+TypeTag[Short]
+true
+TypeTag[Char]
+true
+TypeTag[Int]
+true
+TypeTag[Long]
+true
+TypeTag[Float]
+true
+TypeTag[Double]
+true
+TypeTag[Boolean]
+true
+TypeTag[Unit]
+true
+TypeTag[Any]
+true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
+TypeTag[java.lang.Object]
+true
+TypeTag[Null]
+true
+TypeTag[Nothing]
diff --git a/test/files/run/abstypetags_core.scala b/test/files/run/abstypetags_core.scala
index a49817f4d4..226de94055 100644
--- a/test/files/run/abstypetags_core.scala
+++ b/test/files/run/abstypetags_core.scala
@@ -21,12 +21,14 @@ object Test extends App {
println(implicitly[AbsTypeTag[Unit]])
println(implicitly[AbsTypeTag[Any]] eq AbsTypeTag.Any)
println(implicitly[AbsTypeTag[Any]])
+ println(implicitly[AbsTypeTag[AnyVal]] eq AbsTypeTag.AnyVal)
+ println(implicitly[AbsTypeTag[AnyVal]])
+ println(implicitly[AbsTypeTag[AnyRef]] eq AbsTypeTag.AnyRef)
+ println(implicitly[AbsTypeTag[AnyRef]])
println(implicitly[AbsTypeTag[Object]] eq AbsTypeTag.Object)
println(implicitly[AbsTypeTag[Object]])
println(implicitly[AbsTypeTag[Null]] eq AbsTypeTag.Null)
println(implicitly[AbsTypeTag[Null]])
println(implicitly[AbsTypeTag[Nothing]] eq AbsTypeTag.Nothing)
println(implicitly[AbsTypeTag[Nothing]])
- println(implicitly[AbsTypeTag[String]] eq AbsTypeTag.String)
- println(implicitly[AbsTypeTag[String]])
}
\ No newline at end of file
diff --git a/test/files/run/classmanifests_new_alias.check b/test/files/run/classmanifests_new_alias.check
new file mode 100644
index 0000000000..ffb8482f91
--- /dev/null
+++ b/test/files/run/classmanifests_new_alias.check
@@ -0,0 +1,2 @@
+Int
+true
diff --git a/test/files/run/classmanifests_new_alias.scala b/test/files/run/classmanifests_new_alias.scala
new file mode 100644
index 0000000000..12bd93bab6
--- /dev/null
+++ b/test/files/run/classmanifests_new_alias.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ type CM[T] = ClassManifest[T]
+ println(implicitly[CM[Int]])
+ println(implicitly[CM[Int]] eq Manifest.Int)
+}
\ No newline at end of file
diff --git a/test/files/run/classmanifests_new_core.check b/test/files/run/classmanifests_new_core.check
new file mode 100644
index 0000000000..ffb8482f91
--- /dev/null
+++ b/test/files/run/classmanifests_new_core.check
@@ -0,0 +1,2 @@
+Int
+true
diff --git a/test/files/run/classmanifests_new_core.scala b/test/files/run/classmanifests_new_core.scala
new file mode 100644
index 0000000000..63dbfab25c
--- /dev/null
+++ b/test/files/run/classmanifests_new_core.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(classManifest[Int])
+ println(classManifest[Int] eq Manifest.Int)
+}
\ No newline at end of file
diff --git a/test/files/run/classtags_core.check b/test/files/run/classtags_core.check
index 7beada3f1c..6519db2178 100644
--- a/test/files/run/classtags_core.check
+++ b/test/files/run/classtags_core.check
@@ -21,8 +21,10 @@ ClassTag[class java.lang.Object]
true
ClassTag[class java.lang.Object]
true
+ClassTag[class java.lang.Object]
+true
+ClassTag[class java.lang.Object]
+true
ClassTag[class scala.runtime.Null$]
true
ClassTag[class scala.runtime.Nothing$]
-true
-ClassTag[class java.lang.String]
diff --git a/test/files/run/classtags_core.scala b/test/files/run/classtags_core.scala
index 17c69d9c41..0e174d8243 100644
--- a/test/files/run/classtags_core.scala
+++ b/test/files/run/classtags_core.scala
@@ -21,12 +21,14 @@ object Test extends App {
println(implicitly[ClassTag[Unit]])
println(implicitly[ClassTag[Any]] eq ClassTag.Any)
println(implicitly[ClassTag[Any]])
+ println(implicitly[ClassTag[AnyVal]] eq ClassTag.AnyVal)
+ println(implicitly[ClassTag[AnyVal]])
+ println(implicitly[ClassTag[AnyRef]] eq ClassTag.AnyRef)
+ println(implicitly[ClassTag[AnyRef]])
println(implicitly[ClassTag[Object]] eq ClassTag.Object)
println(implicitly[ClassTag[Object]])
println(implicitly[ClassTag[Null]] eq ClassTag.Null)
println(implicitly[ClassTag[Null]])
println(implicitly[ClassTag[Nothing]] eq ClassTag.Nothing)
println(implicitly[ClassTag[Nothing]])
- println(implicitly[ClassTag[String]] eq ClassTag.String)
- println(implicitly[ClassTag[String]])
}
\ No newline at end of file
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
index 08d0fa32c5..5e43d25f7e 100644
--- a/test/files/run/collection-conversions.check
+++ b/test/files/run/collection-conversions.check
@@ -11,6 +11,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Vector ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -24,6 +25,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing List ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -37,6 +39,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Buffer ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -50,6 +53,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing ParVector ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -63,6 +67,21 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParArray ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing Set ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -76,6 +95,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing SetView ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -89,6 +109,7 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK
-- Testing BufferView ---
:[Direct] Vector : OK
:[Copy] Vector : OK
@@ -102,3 +123,4 @@
:[Direct] Array : OK
:[Copy] Array : OK
:[Copy] ParVector: OK
+ :[Copy] ParArray : OK \ No newline at end of file
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
index b5c4d8e261..d842742230 100644
--- a/test/files/run/collection-conversions.scala
+++ b/test/files/run/collection-conversions.scala
@@ -1,6 +1,7 @@
import collection._
import mutable.Buffer
import parallel.immutable.ParVector
+import parallel.mutable.ParArray
import reflect.ClassTag
object Test {
@@ -29,30 +30,33 @@ object Test {
val testStream = Stream(1,2,3)
val testArray = Array(1,2,3)
val testParVector = ParVector(1,2,3)
+ val testParArray = ParArray(1,2,3)
def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
val tmp = col
println("-- Testing " + name + " ---")
printResult("[Direct] Vector ", col.toVector, testVector)
- printResult("[Copy] Vector ", col.convertTo[Vector], testVector)
+ printResult("[Copy] Vector ", col.to[Vector], testVector)
printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
- printResult("[Copy] Buffer ", col.convertTo[Buffer], testBuffer)
+ printResult("[Copy] Buffer ", col.to[Buffer], testBuffer)
printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
- printResult("[Copy] GenSeq ", col.convertTo[GenSeq], testGenSeq)
- printResult("[Copy] Seq ", col.convertTo[Seq], testSeq)
+ printResult("[Copy] GenSeq ", col.to[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.to[Seq], testSeq)
printResult("[Direct] Stream ", col.toStream, testStream)
- printResult("[Copy] Stream ", col.convertTo[Stream], testStream)
+ printResult("[Copy] Stream ", col.to[Stream], testStream)
printResult("[Direct] Array ", col.toArray, testArray)
- printResult("[Copy] Array ", col.convertTo[Array], testArray)
- printResult("[Copy] ParVector", col.convertTo[ParVector], testParVector)
+ printResult("[Copy] Array ", col.to[Array], testArray)
+ printResult("[Copy] ParVector", col.to[ParVector], testParVector)
+ printResult("[Copy] ParArray ", col.to[ParArray], testParArray)
}
def main(args: Array[String]): Unit = {
- testConversion("iterator", (1 to 3).iterator)
+ testConversion("iterator", (1 to 3).iterator)
testConversion("Vector", Vector(1,2,3))
testConversion("List", List(1,2,3))
testConversion("Buffer", Buffer(1,2,3))
testConversion("ParVector", ParVector(1,2,3))
+ testConversion("ParArray", ParArray(1,2,3))
testConversion("Set", Set(1,2,3))
testConversion("SetView", Set(1,2,3).view)
testConversion("BufferView", Buffer(1,2,3).view)
diff --git a/test/files/run/enrich-gentraversable.check b/test/files/run/enrich-gentraversable.check
index 348b38d6a4..94c66e3692 100644
--- a/test/files/run/enrich-gentraversable.check
+++ b/test/files/run/enrich-gentraversable.check
@@ -2,3 +2,7 @@ List(2, 4)
Array(2, 4)
HW
Vector(72, 108, 108, 32, 114, 108, 100)
+List(2, 4)
+Array(2, 4)
+HW
+Vector(72, 108, 108, 32, 114, 108, 100)
diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala
index c9320ff985..52eded55fd 100644
--- a/test/files/run/enrich-gentraversable.scala
+++ b/test/files/run/enrich-gentraversable.scala
@@ -1,30 +1,67 @@
object Test extends App {
- import scala.collection.generic.{ CanBuildFrom, FromRepr, HasElem }
+ import scala.collection.{GenTraversableOnce,GenTraversableLike}
+ import scala.collection.generic._
def typed[T](t : => T) {}
-
- class FilterMapImpl[A, Repr](val r : Repr)(implicit hasElem : HasElem[Repr, A]) {
- def filterMap[B, That](f : A => Option[B])(implicit cbf : CanBuildFrom[Repr, B, That]) : That = r.flatMap(f(_).toSeq)
+ def testTraversableLike = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ r.flatMap(f(_).toSeq)
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
+
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
}
+ def testTraversableOnce = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ val b = cbf()
+ for(e <- r.seq) f(e) foreach (b +=)
- implicit def filterMap[Repr : FromRepr](r : Repr) = new FilterMapImpl(r)
-
- val l = List(1, 2, 3, 4, 5)
- val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
- typed[List[Int]](fml)
- println(fml)
-
- val a = Array(1, 2, 3, 4, 5)
- val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
- typed[Array[Int]](fma)
- println(fma.deep)
+ b.result
+ }
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
- val s = "Hello World"
- val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
- typed[String](fms1)
- println(fms1)
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
- val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
- typed[IndexedSeq[Int]](fms2)
- println(fms2)
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
+ }
+
+ testTraversableLike
+ testTraversableOnce
}
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 708fcc6985..7d96c447b0 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -20,14 +20,14 @@
409c408,417
< 103 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 17
>
> 17:
-> 101 LOAD_LOCAL(value ex5)
-> 101 STORE_LOCAL(value x3)
-> 101 SCOPE_ENTER value x3
-> 106 LOAD_LOCAL(value x3)
+> 101 LOAD_LOCAL(value ex6)
+> 101 STORE_LOCAL(value x4)
+> 101 SCOPE_ENTER value x4
+> 106 LOAD_LOCAL(value x4)
> 106 IS_INSTANCE REF(class MyException)
> 106 CZJUMP (BOOL)NE ? 5 : 11
422,424d429
@@ -125,29 +125,29 @@
737c786,793
< 172 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 32
>
> 32:
-> 170 LOAD_LOCAL(value ex5)
-> 170 STORE_LOCAL(value x3)
-> 170 SCOPE_ENTER value x3
+> 170 LOAD_LOCAL(value ex6)
+> 170 STORE_LOCAL(value x4)
+> 170 SCOPE_ENTER value x4
> 170 JUMP 18
793c849,850
< 177 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 33
797c854,861
< 170 THROW(Throwable)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 33
>
> 33:
-> 169 LOAD_LOCAL(value ex5)
-> 169 STORE_LOCAL(value x3)
-> 169 SCOPE_ENTER value x3
+> 169 LOAD_LOCAL(value ex6)
+> 169 STORE_LOCAL(value x4)
+> 169 SCOPE_ENTER value x4
> 169 JUMP 5
830c894,895
< 182 THROW(MyException)
@@ -188,13 +188,13 @@
909c988,995
< 124 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 20
>
> 20:
-> 122 LOAD_LOCAL(value ex5)
-> 122 STORE_LOCAL(value x3)
-> 122 SCOPE_ENTER value x3
+> 122 LOAD_LOCAL(value ex6)
+> 122 STORE_LOCAL(value x4)
+> 122 SCOPE_ENTER value x4
> 122 JUMP 7
969c1055
< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3
@@ -207,14 +207,14 @@
1019c1105,1114
< 148 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 20
>
> 20:
-> 145 LOAD_LOCAL(value ex5)
-> 145 STORE_LOCAL(value x3)
-> 145 SCOPE_ENTER value x3
-> 154 LOAD_LOCAL(value x3)
+> 145 LOAD_LOCAL(value ex6)
+> 145 STORE_LOCAL(value x4)
+> 145 SCOPE_ENTER value x4
+> 154 LOAD_LOCAL(value x4)
> 154 IS_INSTANCE REF(class MyException)
> 154 CZJUMP (BOOL)NE ? 5 : 11
1040,1042d1134
@@ -243,19 +243,19 @@
1372c1471,1472
< 203 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 20
1392c1492,1501
< 209 THROW(MyException)
---
-> ? STORE_LOCAL(value ex5)
+> ? STORE_LOCAL(value ex6)
> ? JUMP 20
>
> 20:
-> 200 LOAD_LOCAL(value ex5)
-> 200 STORE_LOCAL(value x3)
-> 200 SCOPE_ENTER value x3
-> 212 LOAD_LOCAL(value x3)
+> 200 LOAD_LOCAL(value ex6)
+> 200 STORE_LOCAL(value x4)
+> 200 SCOPE_ENTER value x4
+> 212 LOAD_LOCAL(value x4)
> 212 IS_INSTANCE REF(class MyException)
> 212 CZJUMP (BOOL)NE ? 5 : 11
1405,1407d1513
diff --git a/test/files/run/interop_classtags_are_classmanifests.check b/test/files/run/interop_classtags_are_classmanifests.check
index c59e92d4eb..7a0a829af2 100644
--- a/test/files/run/interop_classtags_are_classmanifests.check
+++ b/test/files/run/interop_classtags_are_classmanifests.check
@@ -1,3 +1,3 @@
-Int
-java.lang.String
-Array[Int]
+ClassTag[int]
+ClassTag[class java.lang.String]
+ClassTag[class [I]
diff --git a/test/files/run/macro-reify-splice-splice/Macros_1.scala b/test/files/run/macro-reify-splice-splice/Macros_1.scala
index 030a0a217e..4f1b600f63 100644
--- a/test/files/run/macro-reify-splice-splice/Macros_1.scala
+++ b/test/files/run/macro-reify-splice-splice/Macros_1.scala
@@ -1,5 +1,4 @@
import scala.reflect.makro.{Context => Ctx}
-import scala.reflect.{mirror => mr}
object Macros {
def foo = macro Impls.foo
diff --git a/test/files/run/newTags.check b/test/files/run/newTags.check
index d83740c40c..2cbc265d7a 100644
--- a/test/files/run/newTags.check
+++ b/test/files/run/newTags.check
@@ -1,5 +1,5 @@
-TypeRef(ThisType(class immutable),class List,List(TypeRef(ThisType(class scala),class Int,List())))
+TypeRef(SingleType(SingleType(SingleType(NoPrefix,class <root>),module scala),module package),class List,List(TypeRef(ThisType(class scala),class Int,List())))
List[Int]
-TypeRef(ThisType(class immutable),class Map,List(TypeRef(ThisType(class lang),class String,List()), TypeRef(ThisType(class lang),class String,List())))
-scala.collection.immutable.Map[java.lang.String,java.lang.String]
-TypeTag[TypeRef(ThisType(class immutable),class Map,List(TypeRef(ThisType(class lang),class String,List()), TypeRef(ThisType(class lang),class String,List())))]
+TypeRef(SingleType(ThisType(class scala),module Predef),class Map,List(TypeRef(SingleType(ThisType(class scala),module Predef),class String,List()), TypeRef(SingleType(ThisType(class scala),module Predef),class String,List())))
+Map[String,String]
+TypeTag[TypeRef(SingleType(ThisType(class scala),module Predef),class Map,List(TypeRef(SingleType(ThisType(class scala),module Predef),class String,List()), TypeRef(SingleType(ThisType(class scala),module Predef),class String,List())))]
diff --git a/test/files/run/patmat-finally.scala b/test/files/run/patmat-finally.scala
new file mode 100644
index 0000000000..6f769b30a0
--- /dev/null
+++ b/test/files/run/patmat-finally.scala
@@ -0,0 +1,25 @@
+/** Test pattern matching and finally, see SI-5929. */
+object Test extends App {
+ def bar(s1: Object, s2: Object) {
+ s1 match {
+ case _ =>
+ }
+
+ try {
+ ()
+ } finally {
+ s2 match {
+ case _ =>
+ }
+ }
+ }
+
+ def x = {
+ null match { case _ => }
+
+ try { 1 } finally { while(false) { } }
+ }
+
+ bar(null, null)
+ x
+}
diff --git a/test/files/run/reflect-resolveoverload-bynameparam.scala b/test/files/run/reflect-resolveoverload-bynameparam.scala
new file mode 100644
index 0000000000..7fb8c82ab8
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-bynameparam.scala
@@ -0,0 +1,32 @@
+
+class A
+class B extends A
+
+class C {
+ def foo(x: => Int)(y: String) = x
+ def foo(x: String)(y: List[_]) = x
+ def foo(x: => A)(y: Array[_]) = 1
+ def foo(x: A)(y: Seq[_]) = 2
+ def foo(x: B)(y: Map[_, _]) = 4
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val c = new C
+ val im = cm.reflect(c)
+ val t = u.typeOf[C] member u.newTermName("foo") asTermSymbol
+ val f1 = t.resolveOverloaded(posVargs = List(u.typeOf[Int])) asMethodSymbol
+ val f2 = t.resolveOverloaded(posVargs = List(u.typeOf[String])) asMethodSymbol
+ val f3 = t.resolveOverloaded(posVargs = List(u.typeOf[A])) asMethodSymbol
+ val f4 = t.resolveOverloaded(posVargs = List(u.typeOf[B])) asMethodSymbol
+ val m1 = im.reflectMethod(f1)
+ val m2 = im.reflectMethod(f2)
+ val m3 = im.reflectMethod(f3)
+ val m4 = im.reflectMethod(f4)
+ assert(m1(() => 1, null) == c.foo(1)(null))
+ assert(m2("a", null) == c.foo("a")(null))
+ assert(m3(new A, null) == c.foo(new A)(null))
+ assert(m4(new B, null) == c.foo(new B)(null))
+}
+
diff --git a/test/files/run/reflect-resolveoverload-expected.scala b/test/files/run/reflect-resolveoverload-expected.scala
new file mode 100644
index 0000000000..1378090309
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-expected.scala
@@ -0,0 +1,43 @@
+
+class A {
+ override def equals(x: Any) = {
+ x.isInstanceOf[A] && !x.isInstanceOf[B]
+ }
+}
+class B extends A {
+ override def equals(x: Any) = {
+ x.isInstanceOf[B]
+ }
+}
+
+class C {
+ def a(x: String) = 1
+ def a(x: Array[_]) = "a"
+ def b(x: String) = new A
+ def b(x: Array[_]) = new B
+ def c(x: String) = new B
+ def c(x: Array[_]) = "a"
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val c = new C
+ val im = cm.reflect(c)
+ def invoke(s: String, expectedType: u.Type, expectedResult: Any) {
+ val ol = (u.typeOf[C] member u.newTermName(s)).asTermSymbol
+ val methodSym = ol.resolveOverloaded(posVargs = List(u.typeOf[Null]), expected = expectedType).asMethodSymbol
+ val sig = methodSym.typeSignature.asInstanceOf[u.MethodType]
+ val method = im.reflectMethod(methodSym)
+ assert(method(null) == expectedResult)
+ }
+
+ invoke("a", u.typeOf[Int], c.a(null): Int)
+ invoke("a", u.typeOf[String], c.a(null): String)
+ invoke("b", u.typeOf[B], c.b(null): B)
+ invoke("c", u.typeOf[A], c.c(null): A)
+ invoke("c", u.typeOf[A], c.c(null): A)
+ invoke("c", u.typeOf[B], c.c(null): B)
+ invoke("c", u.typeOf[String], c.c(null): String)
+
+}
diff --git a/test/files/run/reflect-resolveoverload-invalid.scala b/test/files/run/reflect-resolveoverload-invalid.scala
new file mode 100644
index 0000000000..def28ccbb4
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-invalid.scala
@@ -0,0 +1,43 @@
+
+class A
+class B extends A
+
+class C {
+ def a(x: Int) = 1
+ def a(x: String) = 2
+ def b(x: B) = 3
+ def c(x: A, y: B) = 4
+ def c(x: B, y: A) = 5
+ def d[T](x: Int) = 6
+ def d(x: String) = 7
+ def e(x: A) = 8
+ def e(x: =>B) = 9
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+
+ val x = new C
+ val t = u.typeOf[C]
+
+ val a = t member u.newTermName("a") asTermSymbol
+ val b = t member u.newTermName("b") asTermSymbol
+ val c = t member u.newTermName("c") asTermSymbol
+ val d = t member u.newTermName("d") asTermSymbol
+ val e = t member u.newTermName("e") asTermSymbol
+
+ val n1 = a.resolveOverloaded(posVargs = List(u.typeOf[Char]))
+ val n2 = b.resolveOverloaded(posVargs = List(u.typeOf[A]))
+ val n3 = c.resolveOverloaded(posVargs = List(u.typeOf[B], u.typeOf[B]))
+ val n4 = d.resolveOverloaded(targs = List(u.typeOf[Int]))
+ val n5 = d.resolveOverloaded()
+ val n6 = e.resolveOverloaded(posVargs = List(u.typeOf[B]))
+
+ assert(n1 == u.NoSymbol)
+ assert(n2 == u.NoSymbol)
+ assert(n3 == u.NoSymbol)
+ assert(n4 == u.NoSymbol)
+ assert(n5 == u.NoSymbol)
+ assert(n6 == u.NoSymbol)
+}
diff --git a/test/files/run/reflect-resolveoverload-named.scala b/test/files/run/reflect-resolveoverload-named.scala
new file mode 100644
index 0000000000..017ec85c0d
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-named.scala
@@ -0,0 +1,26 @@
+
+class A {
+ def foo(x: String, y: Int) = 1
+ def foo(x: Int, y: String) = 2
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val a = new A
+ val im = cm.reflect(a)
+ val tpe = u.typeOf[A]
+ val overloaded = tpe member u.newTermName("foo") asTermSymbol
+ val ms1 =
+ overloaded resolveOverloaded(nameVargs = Seq((u.newTermName("x"), u.typeOf[String]), (u.newTermName("y"), u.typeOf[Int])))
+ val ms2 =
+ overloaded resolveOverloaded(nameVargs = Seq((u.newTermName("y"), u.typeOf[Int]), (u.newTermName("x"), u.typeOf[String])))
+ val ms3 =
+ overloaded resolveOverloaded(nameVargs = Seq((u.newTermName("x"), u.typeOf[Int]), (u.newTermName("y"), u.typeOf[String])))
+ val ms4 =
+ overloaded resolveOverloaded(nameVargs = Seq((u.newTermName("y"), u.typeOf[String]), (u.newTermName("x"), u.typeOf[Int])))
+ assert(im.reflectMethod(ms1 asMethodSymbol)("A", 1) == 1)
+ assert(im.reflectMethod(ms2 asMethodSymbol)("A", 1) == 1)
+ assert(im.reflectMethod(ms3 asMethodSymbol)(1, "A") == 2)
+ assert(im.reflectMethod(ms4 asMethodSymbol)(1, "A") == 2)
+}
diff --git a/test/files/run/reflect-resolveoverload-targs.scala b/test/files/run/reflect-resolveoverload-targs.scala
new file mode 100644
index 0000000000..888b2f0c15
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-targs.scala
@@ -0,0 +1,29 @@
+
+import reflect.runtime.{universe=>u}
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C {
+ def foo[T: u.TypeTag](x: String) = 1
+ def foo[T: u.TypeTag, S: u.TypeTag](x: String) = 2
+}
+
+object Test extends App {
+ val c = new C
+ val im = cm.reflect(c)
+ val foo = u.typeOf[C] member u.newTermName("foo") asTermSymbol
+ val f1 = foo.resolveOverloaded(
+ targs = Seq(u.typeOf[Int]),
+ posVargs = Seq(u.typeOf[String])
+ )
+
+ val f2 = foo.resolveOverloaded(
+ targs = Seq(u.typeOf[Int],
+ u.typeOf[Int]), posVargs = Seq(u.typeOf[String])
+ )
+
+ val m1 = im.reflectMethod(f1 asMethodSymbol)
+ val m2 = im.reflectMethod(f2 asMethodSymbol)
+
+ assert(m1("a", u.typeTag[Int]) == c.foo[Int]("a"))
+ assert(m2("a", u.typeTag[Int], u.typeTag[Int]) == c.foo[Int, Int]("a"))
+}
diff --git a/test/files/run/reflect-resolveoverload-tparm-substitute.scala b/test/files/run/reflect-resolveoverload-tparm-substitute.scala
new file mode 100644
index 0000000000..22e7bcd40a
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-tparm-substitute.scala
@@ -0,0 +1,77 @@
+
+class A
+class B extends A
+
+class C {
+ def foo[T](x: T) = x
+ def foo(x: Int) = "a"
+ def foo(x: A) = x
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val c = new C
+ val im = cm.reflect(c)
+ val term = u.typeOf[C] member u.newTermName("foo") asTermSymbol
+
+ val f1 = term.resolveOverloaded(
+ posVargs = List(u.typeOf[Int]),
+ expected = u.typeOf[String]
+ )
+
+ val f2 = term.resolveOverloaded(
+ targs = List(u.typeOf[String]),
+ posVargs = List(u.typeOf[String]),
+ expected = u.typeOf[String]
+ )
+
+ val f3 = term.resolveOverloaded(
+ posVargs = List(u.typeOf[A]),
+ expected = u.typeOf[A]
+ )
+
+ val f4 = term.resolveOverloaded(
+ targs = List(u.typeOf[A]),
+ posVargs = List(u.typeOf[A]),
+ expected = u.typeOf[A]
+ )
+
+ val f5 = term.resolveOverloaded(
+ targs = List(u.typeOf[B]),
+ posVargs = List(u.typeOf[B]),
+ expected = u.typeOf[B]
+ )
+
+ val f6 = term.resolveOverloaded(
+ targs = List(u.typeOf[B]),
+ posVargs = List(u.typeOf[B]),
+ expected = u.typeOf[A]
+ )
+
+ val f7 = term.resolveOverloaded(
+ targs = List(u.typeOf[A]),
+ posVargs = List(u.typeOf[B]),
+ expected = u.typeOf[A]
+ )
+
+ val m1 = im.reflectMethod(f1 asMethodSymbol)
+ val m2 = im.reflectMethod(f2 asMethodSymbol)
+ val m3 = im.reflectMethod(f3 asMethodSymbol)
+ val m4 = im.reflectMethod(f4 asMethodSymbol)
+ val m5 = im.reflectMethod(f5 asMethodSymbol)
+ val m6 = im.reflectMethod(f6 asMethodSymbol)
+ val m7 = im.reflectMethod(f7 asMethodSymbol)
+
+ val a = new A
+ val b = new B
+ assert(m1(2) == (c.foo(2): String))
+ assert(m2("xyz") == (c.foo[String]("xyz"): String))
+ assert(m3(a) == (c.foo(a): A))
+ assert(m4(a) == (c.foo[A](a): A))
+ assert(m5(b) == (c.foo[B](b): B))
+ assert(m6(b) == (c.foo[B](b): A))
+ assert(m7(b) == (c.foo[A](b): A))
+
+
+}
diff --git a/test/files/run/reflect-resolveoverload-variadic.scala b/test/files/run/reflect-resolveoverload-variadic.scala
new file mode 100644
index 0000000000..8e2e15600f
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload-variadic.scala
@@ -0,0 +1,27 @@
+
+class C {
+ def foo(x: Int*) = 1 + x.sum
+ def foo(x: String) = 2
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val c = new C
+ val im = cm.reflect(c)
+ val foo = u.typeOf[C] member u.newTermName("foo") asTermSymbol
+ val f0 = foo.resolveOverloaded()
+ val f1 = foo.resolveOverloaded(posVargs = Seq(u.typeOf[Int]))
+ val f2 = foo.resolveOverloaded(posVargs = Seq(u.typeOf[Int], u.typeOf[Int]))
+ val f3 = foo.resolveOverloaded(posVargs = Seq(u.typeOf[String]))
+
+ val m0 = im.reflectMethod(f0 asMethodSymbol)
+ val m1 = im.reflectMethod(f1 asMethodSymbol)
+ val m2 = im.reflectMethod(f2 asMethodSymbol)
+ val m3 = im.reflectMethod(f3 asMethodSymbol)
+
+ assert(m0(Seq()) == c.foo())
+ assert(m1(Seq(1)) == c.foo(1))
+ assert(m2(Seq(4, 9)) == c.foo(4, 9))
+ assert(m3("abc") == c.foo("abc"))
+}
diff --git a/test/files/run/reflect-overload.scala b/test/files/run/reflect-resolveoverload1.scala
index 870a200813..a859a0ec4e 100644
--- a/test/files/run/reflect-overload.scala
+++ b/test/files/run/reflect-resolveoverload1.scala
@@ -9,11 +9,11 @@ object Test extends App {
val st = sc.asType
val meth = (st member newTermName("indexOf")).asTermSymbol
val IntType = definitions.IntClass.asType
- val indexOf = (meth resolveOverloaded(actuals = List(IntType))).asMethodSymbol
+ val indexOf = (meth resolveOverloaded(posVargs = List(IntType))).asMethodSymbol
assert(m.reflectMethod(indexOf)('w') == 6)
assert((m.reflectMethod(indexOf)('w') match { case x: Int => x }) == 6)
val meth2 = (st member newTermName("substring")).asTermSymbol
- val substring = (meth2 resolveOverloaded(actuals = List(IntType, IntType))).asMethodSymbol
+ val substring = (meth2 resolveOverloaded(posVargs = List(IntType, IntType))).asMethodSymbol
assert(m.reflectMethod(substring)(2, 6) == "llo ")
}
diff --git a/test/files/run/reflect-resolveoverload2.scala b/test/files/run/reflect-resolveoverload2.scala
new file mode 100644
index 0000000000..b5f719814b
--- /dev/null
+++ b/test/files/run/reflect-resolveoverload2.scala
@@ -0,0 +1,40 @@
+class A
+class B extends A
+
+class C {
+ def a(x: Int) = 1
+ def a(x: String) = 2
+ //def b(x: => Int)(s: String) = 1
+ //def b(x: => String)(a: Array[_]) = 2
+ def c(x: A) = 1
+ def c(x: B) = 2
+ //def d(x: => A)(s: String) = 1
+ //def d(x: => B)(a: Array[_]) = 2
+ def e(x: A) = 1
+ def e(x: B = new B) = 2
+}
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val c = new C
+ val im = cm.reflect(c)
+ def invoke(s: String, arg: Any, argType: u.Type): Int = {
+ val ol = u.typeOf[C] member u.newTermName(s) asTermSymbol
+ val methodSym = ol.resolveOverloaded(posVargs = List(argType)) asMethodSymbol
+ val sig = methodSym.typeSignature.asInstanceOf[u.MethodType]
+ val method = im.reflectMethod(methodSym)
+ if (sig.resultType.kind == "MethodType") method(arg, null).asInstanceOf[Int]
+ else method(arg).asInstanceOf[Int]
+ }
+ assert(c.a(1) == invoke("a", 1, u.typeOf[Int]))
+ assert(c.a("a") == invoke("a", "a", u.typeOf[String]))
+ //assert(c.b(1)(null) == invoke("b", 1, u.typeOf[Int]))
+ //assert(c.b("a")(null) == invoke("b", "a", u.typeOf[String]))
+ assert(c.c(new A) == invoke("c", new A, u.typeOf[A]))
+ assert(c.c(new B) == invoke("c", new B, u.typeOf[B]))
+ //assert(c.d(new A)(null) == invoke("d", new A, u.typeOf[A]))
+ //assert(c.d(new B)(null) == invoke("d", new B, u.typeOf[B]))
+ assert(c.e(new A) == invoke("e", new A, u.typeOf[A]))
+ assert(c.e(new B) == invoke("e", new B, u.typeOf[B]))
+}
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
new file mode 100644
index 0000000000..feafb58d3b
--- /dev/null
+++ b/test/files/run/reflection-equality.check
@@ -0,0 +1,53 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class X {
+ def methodIntIntInt(x: Int, y: Int) = x+y
+}
+defined class X
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime.{ currentMirror => cm }
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> def im: InstanceMirror = cm.reflect(new X)
+im: reflect.runtime.universe.InstanceMirror
+
+scala> val cs: ClassSymbol = im.symbol
+cs: reflect.runtime.universe.ClassSymbol = class X
+
+scala> val ts: Type = cs.typeSignature
+ts: reflect.runtime.universe.Type =
+java.lang.Object {
+ def <init>: <?>
+ def methodIntIntInt: <?>
+}
+
+scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethodSymbol
+ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
+
+scala> val MethodType( _, t1 ) = ms.typeSignature
+t1: reflect.runtime.universe.Type = scala.Int
+
+scala> val t2 = typeOf[scala.Int]
+t2: reflect.runtime.universe.Type = Int
+
+scala> t1 == t2
+res0: Boolean = false
+
+scala> t1 =:= t2
+res1: Boolean = true
+
+scala> t1 <:< t2
+res2: Boolean = true
+
+scala> t2 <:< t1
+res3: Boolean = true
+
+scala>
diff --git a/test/files/run/reflection-equality.scala b/test/files/run/reflection-equality.scala
new file mode 100644
index 0000000000..35dc47a59f
--- /dev/null
+++ b/test/files/run/reflection-equality.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |class X {
+ | def methodIntIntInt(x: Int, y: Int) = x+y
+ |}
+ |
+ |import scala.reflect.runtime.universe._
+ |import scala.reflect.runtime.{ currentMirror => cm }
+ |def im: InstanceMirror = cm.reflect(new X)
+ |val cs: ClassSymbol = im.symbol
+ |val ts: Type = cs.typeSignature
+ |val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethodSymbol
+ |val MethodType( _, t1 ) = ms.typeSignature
+ |val t2 = typeOf[scala.Int]
+ |t1 == t2
+ |t1 =:= t2
+ |t1 <:< t2
+ |t2 <:< t1
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-magicsymbols.check b/test/files/run/reflection-magicsymbols.check
new file mode 100644
index 0000000000..2600847d99
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols.check
@@ -0,0 +1,22 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> class A { def foo(x: Int*) = 1 }
+defined class A
+
+scala> val sig = typeOf[A] member newTermName("foo") typeSignature
+warning: there were 1 feature warnings; re-run with -feature for details
+sig: reflect.runtime.universe.Type = (x: <?>)scala.Int
+
+scala> val x = sig.asInstanceOf[MethodType].params.head
+x: reflect.runtime.universe.Symbol = value x
+
+scala> println(x.typeSignature)
+scala.Int*
+
+scala>
diff --git a/test/files/run/reflection-magicsymbols.scala b/test/files/run/reflection-magicsymbols.scala
new file mode 100644
index 0000000000..a40845d6ac
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.reflect.runtime.universe._
+ |class A { def foo(x: Int*) = 1 }
+ |val sig = typeOf[A] member newTermName("foo") typeSignature
+ |val x = sig.asInstanceOf[MethodType].params.head
+ |println(x.typeSignature)
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-repl.check b/test/files/run/reflection-repl.check
index e93fb6d6e7..341dd10ab0 100644
--- a/test/files/run/reflection-repl.check
+++ b/test/files/run/reflection-repl.check
@@ -4,6 +4,6 @@ Type :help for more information.
scala>
scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
-res0: reflect.runtime.universe.Type = List[Nothing]
+res0: reflect.runtime.universe.Type = scala.List[Nothing]
scala>
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
new file mode 100644
index 0000000000..392dd59a1d
--- /dev/null
+++ b/test/files/run/reify-aliases.check
@@ -0,0 +1 @@
+TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
diff --git a/test/files/run/reify-aliases.scala b/test/files/run/reify-aliases.scala
new file mode 100644
index 0000000000..45b1a34e24
--- /dev/null
+++ b/test/files/run/reify-aliases.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(showRaw(typeOf[String]))
+} \ No newline at end of file
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
index 9df18b2362..9bbb0ab87a 100644
--- a/test/files/run/reify_ann1a.check
+++ b/test/files/run/reify_ann1a.check
@@ -1,5 +1,5 @@
{
- @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends Object {
+ @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
super.<init>();
@@ -14,15 +14,15 @@
()
}
{
- @ann(immutable.this.List.apply[java.lang.String]("1a")) @ann(immutable.this.List.apply[java.lang.String]("1b")) class C[@ann(immutable.this.List.apply[java.lang.String]("2a")) @ann(immutable.this.List.apply[java.lang.String]("2b")) T] extends Object {
- @ann(immutable.this.List.apply[java.lang.String]("3a")) @ann(immutable.this.List.apply[java.lang.String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[java.lang.String]("4b")) @ann(immutable.this.List.apply[java.lang.String]("4a")) = _;
- def <init>(@ann(immutable.this.List.apply[java.lang.String]("3a")) @ann(immutable.this.List.apply[java.lang.String]("3b")) x: T @ann(immutable.this.List.apply[java.lang.String]("4b")) @ann(immutable.this.List.apply[java.lang.String]("4a"))): C[T] = {
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
+ @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
+ def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
C.super.<init>();
()
};
- @ann(immutable.this.List.apply[java.lang.String]("5a")) @ann(immutable.this.List.apply[java.lang.String]("5b")) def f(x: Int @ann(immutable.this.List.apply[java.lang.String]("6b")) @ann(immutable.this.List.apply[java.lang.String]("6a"))): Int = {
- @ann(immutable.this.List.apply[java.lang.String]("7a")) @ann(immutable.this.List.apply[java.lang.String]("7b")) val r: Int @ann(immutable.this.List.apply[java.lang.String]("8b")) @ann(immutable.this.List.apply[java.lang.String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[java.lang.String]("8a"))): Int @ann(immutable.this.List.apply[java.lang.String]("8b")) @ann(immutable.this.List.apply[java.lang.String]("8a")));
- val s: Int @ann(immutable.this.List.apply[java.lang.String]("9b")) @ann(immutable.this.List.apply[java.lang.String]("9a")) = (4: Int @ann(immutable.this.List.apply[java.lang.String]("9b")) @ann(immutable.this.List.apply[java.lang.String]("9a")));
+ @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
r.+(s)
}
};
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
index bae838f15f..4a51c02701 100644
--- a/test/files/run/reify_ann1b.check
+++ b/test/files/run/reify_ann1b.check
@@ -1,5 +1,5 @@
{
- @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends Object {
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
@new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
super.<init>();
@@ -14,7 +14,7 @@
()
}
{
- @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends Object {
+ @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef {
@ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
C.super.<init>();
diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check
index bb30441f8c..4d67bbf786 100644
--- a/test/files/run/reify_ann2a.check
+++ b/test/files/run/reify_ann2a.check
@@ -6,7 +6,7 @@
()
}
};
- @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends Object {
+ @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
super.<init>();
@@ -22,21 +22,21 @@
}
{
class ann extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
- <paramaccessor> private[this] val bar: List[java.lang.String] = _;
- def <init>(bar: List[java.lang.String]): ann = {
+ <paramaccessor> private[this] val bar: List[String] = _;
+ def <init>(bar: List[String]): ann = {
ann.super.<init>();
()
}
};
- @ann(immutable.this.List.apply[java.lang.String]("1a")) @ann(immutable.this.List.apply[java.lang.String]("1b")) class C[@ann(immutable.this.List.apply[java.lang.String]("2a")) @ann(immutable.this.List.apply[java.lang.String]("2b")) T] extends Object {
- @ann(immutable.this.List.apply[java.lang.String]("3a")) @ann(immutable.this.List.apply[java.lang.String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[java.lang.String]("4b")) @ann(immutable.this.List.apply[java.lang.String]("4a")) = _;
- def <init>(@ann(immutable.this.List.apply[java.lang.String]("3a")) @ann(immutable.this.List.apply[java.lang.String]("3b")) x: T @ann(immutable.this.List.apply[java.lang.String]("4b")) @ann(immutable.this.List.apply[java.lang.String]("4a"))): C[T] = {
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
+ @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
+ def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
C.super.<init>();
()
};
- @ann(immutable.this.List.apply[java.lang.String]("5a")) @ann(immutable.this.List.apply[java.lang.String]("5b")) def f(x: Int @ann(immutable.this.List.apply[java.lang.String]("6b")) @ann(immutable.this.List.apply[java.lang.String]("6a"))): Int = {
- @ann(immutable.this.List.apply[java.lang.String]("7a")) @ann(immutable.this.List.apply[java.lang.String]("7b")) val r: Int @ann(immutable.this.List.apply[java.lang.String]("8b")) @ann(immutable.this.List.apply[java.lang.String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[java.lang.String]("8a"))): Int @ann(immutable.this.List.apply[java.lang.String]("8b")) @ann(immutable.this.List.apply[java.lang.String]("8a")));
- val s: Int @ann(immutable.this.List.apply[java.lang.String]("9b")) @ann(immutable.this.List.apply[java.lang.String]("9a")) = (4: Int @ann(immutable.this.List.apply[java.lang.String]("9b")) @ann(immutable.this.List.apply[java.lang.String]("9a")));
+ @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
r.+(s)
}
};
diff --git a/test/files/run/reify_ann3.check b/test/files/run/reify_ann3.check
index 9452a9701e..96b4cf9c4e 100644
--- a/test/files/run/reify_ann3.check
+++ b/test/files/run/reify_ann3.check
@@ -1,5 +1,5 @@
{
- class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends Object {
+ class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
@new inline @getter() final <paramaccessor> val key: A = _;
def <init>(key: A) = {
super.<init>();
@@ -9,7 +9,7 @@
()
}
{
- class Tree[A, B] extends Object {
+ class Tree[A, B] extends AnyRef {
final <paramaccessor> private[this] val key: A = _;
@inline @scala.annotation.meta.getter final <stable> <accessor> <paramaccessor> def key: A = Tree.this.key;
def <init>(key: A): Tree[A,B] = {
diff --git a/test/files/run/reify_ann4.check b/test/files/run/reify_ann4.check
index 406ee7bc08..6f853053d2 100644
--- a/test/files/run/reify_ann4.check
+++ b/test/files/run/reify_ann4.check
@@ -5,7 +5,7 @@
()
}
};
- class C extends Object {
+ class C extends AnyRef {
def <init>() = {
super.<init>();
()
@@ -21,7 +21,7 @@
()
}
};
- class C extends Object {
+ class C extends AnyRef {
def <init>(): C = {
C.super.<init>();
()
diff --git a/test/files/run/reify_ann5.check b/test/files/run/reify_ann5.check
index ecf08eebb2..d443c601a0 100644
--- a/test/files/run/reify_ann5.check
+++ b/test/files/run/reify_ann5.check
@@ -1,5 +1,5 @@
{
- class C extends Object {
+ class C extends AnyRef {
@new inline @beanGetter() @new BeanProperty() <paramaccessor> val x: Int = _;
def <init>(x: Int) = {
super.<init>();
@@ -9,7 +9,7 @@
()
}
{
- class C extends Object {
+ class C extends AnyRef {
@scala.beans.BeanProperty <paramaccessor> private[this] val x: Int = _;
<stable> <accessor> <paramaccessor> def x: Int = C.this.x;
def <init>(x: Int): C = {
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
index 685ecf5de6..6f17b26158 100644
--- a/test/files/run/reify_classfileann_a.check
+++ b/test/files/run/reify_classfileann_a.check
@@ -1,5 +1,5 @@
{
- @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends Object {
+ @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
def <init>() = {
super.<init>();
()
@@ -8,7 +8,7 @@
()
}
{
- @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends Object {
+ @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends AnyRef {
def <init>(): C = {
C.super.<init>();
()
diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check
index 0aac9aeb2a..33270e0035 100644
--- a/test/files/run/reify_classfileann_b.check
+++ b/test/files/run/reify_classfileann_b.check
@@ -1,5 +1,5 @@
{
- class C extends Object {
+ class C extends AnyRef {
def <init>() = {
super.<init>();
()
@@ -9,7 +9,7 @@
()
}
{
- class C extends Object {
+ class C extends AnyRef {
def <init>(): C = {
C.super.<init>();
()
diff --git a/test/files/run/reify_magicsymbols.check b/test/files/run/reify_magicsymbols.check
new file mode 100644
index 0000000000..e2aa46a364
--- /dev/null
+++ b/test/files/run/reify_magicsymbols.check
@@ -0,0 +1,13 @@
+Any
+AnyVal
+AnyRef
+Null
+Nothing
+List[Any]
+List[AnyVal]
+List[AnyRef]
+List[Null]
+List[Nothing]
+AnyRef{def foo(x: Int): Int}
+Int* => Unit
+=> Int => Unit
diff --git a/test/files/run/reify_magicsymbols.scala b/test/files/run/reify_magicsymbols.scala
new file mode 100644
index 0000000000..256ecbea33
--- /dev/null
+++ b/test/files/run/reify_magicsymbols.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(typeOf[Any])
+ println(typeOf[AnyVal])
+ println(typeOf[AnyRef])
+ println(typeOf[Null])
+ println(typeOf[Nothing])
+ println(typeOf[List[Any]])
+ println(typeOf[List[AnyVal]])
+ println(typeOf[List[AnyRef]])
+ println(typeOf[List[Null]])
+ println(typeOf[List[Nothing]])
+ println(typeOf[{def foo(x: Int): Int}])
+ println(typeOf[(Int*) => Unit])
+ println(typeOf[(=> Int) => Unit])
+} \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_09.check b/test/files/run/reify_newimpl_09.check
deleted file mode 100644
index 220bd6875a..0000000000
--- a/test/files/run/reify_newimpl_09.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2) \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_10.check b/test/files/run/reify_newimpl_10.check
deleted file mode 100644
index 220bd6875a..0000000000
--- a/test/files/run/reify_newimpl_10.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2) \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_16.check b/test/files/run/reify_newimpl_16.check
deleted file mode 100644
index 220bd6875a..0000000000
--- a/test/files/run/reify_newimpl_16.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2) \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_17.check b/test/files/run/reify_newimpl_17.check
deleted file mode 100644
index e592e0e94e..0000000000
--- a/test/files/run/reify_newimpl_17.check
+++ /dev/null
@@ -1,2 +0,0 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: U defined by C in reify_newimpl_17.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index 6c5f124960..c006cb7ffb 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -14,7 +14,7 @@ scala> def foo[T]{
foo: [T]=> Unit
scala> foo[Int]
-AbsTypeTag[List[T]]
+AbsTypeTag[scala.List[T]]
scala>
diff --git a/test/files/run/reify_newimpl_28.check b/test/files/run/reify_newimpl_28.check
deleted file mode 100644
index a7029974a4..0000000000
--- a/test/files/run/reify_newimpl_28.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2)
diff --git a/test/files/run/reify_newimpl_32.check b/test/files/run/reify_newimpl_32.check
deleted file mode 100644
index a7029974a4..0000000000
--- a/test/files/run/reify_newimpl_32.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2)
diff --git a/test/files/run/reify_newimpl_34.check b/test/files/run/reify_newimpl_34.check
deleted file mode 100644
index a7029974a4..0000000000
--- a/test/files/run/reify_newimpl_34.check
+++ /dev/null
@@ -1 +0,0 @@
-List(2)
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 1b1c6fdbb8..07e99781e3 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -44,8 +44,8 @@ object Test extends App {
val Literal(Constant(s_format: String)) = format
val paramsStack = scala.collection.mutable.Stack(params: _*)
val parsed = s_format.split("(?<=%[\\w%])|(?=%[\\w%])") map {
- case "%d" => createTempValDef( paramsStack.pop, IntTpe )
- case "%s" => createTempValDef( paramsStack.pop, StringTpe )
+ case "%d" => createTempValDef( paramsStack.pop, typeOf[Int] )
+ case "%s" => createTempValDef( paramsStack.pop, typeOf[String] )
case "%%" => {
(None:Option[Tree], Literal(Constant("%")))
}
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
index 83055f2b70..6fc3ffe051 100644
--- a/test/files/run/showraw_mods.check
+++ b/test/files/run/showraw_mods.check
@@ -1 +1 @@
-Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(java.lang.Object)), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
index 82724cae44..d3a1fddf48 100644
--- a/test/files/run/showraw_tree.check
+++ b/test/files/run/showraw_tree.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
index c6dbd6f1ce..0c189130cf 100644
--- a/test/files/run/showraw_tree_ids.check
+++ b/test/files/run/showraw_tree_ids.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#1903), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#1908), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.scala b/test/files/run/showraw_tree_ids.scala
index b56b8b4476..ea9a3cd4c2 100644
--- a/test/files/run/showraw_tree_ids.scala
+++ b/test/files/run/showraw_tree_ids.scala
@@ -3,6 +3,7 @@ import scala.reflect.runtime.universe._
object Test extends App {
val tree1 = reify(new collection.immutable.HashMap[String, String])
val tree2 = reify(new collection.mutable.HashMap[String, String])
- println(showRaw(tree1.tree, printIds = true))
- println(showRaw(tree2.tree, printIds = true))
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tree1.tree, printIds = true)))
+ println(stabilize(showRaw(tree2.tree, printIds = true)))
} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
index a12e21c611..7b4ab456b0 100644
--- a/test/files/run/showraw_tree_kinds.check
+++ b/test/files/run/showraw_tree_kinds.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
index 02e7aeed7c..39e68dc881 100644
--- a/test/files/run/showraw_tree_types_ids.check
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#1913), List())
-[1] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
-[3] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List())
-[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#2231), List())
-[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
-[5] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
-[7] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
+[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
+[5] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala
index cb2c2bfb0f..198729e705 100644
--- a/test/files/run/showraw_tree_types_ids.scala
+++ b/test/files/run/showraw_tree_types_ids.scala
@@ -5,6 +5,7 @@ object Test extends App {
val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
val tree1 = reify(new collection.immutable.HashMap[String, String])
val tree2 = reify(new collection.mutable.HashMap[String, String])
- println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true))
- println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true))
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true)))
+ println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true)))
} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
index 60176c7192..0a41f5185c 100644
--- a/test/files/run/showraw_tree_types_typed.check
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
-[4] TypeRef(ThisType(java.lang), java.lang.String, List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
-[4] TypeRef(ThisType(java.lang), java.lang.String, List())
-[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
+[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
[7] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
index 82724cae44..d3a1fddf48 100644
--- a/test/files/run/showraw_tree_types_untyped.check
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
index 0b409554a0..400b736704 100644
--- a/test/files/run/showraw_tree_ultimate.check
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#1913#PCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
-[3] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List())
-[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#2231#CTOR), List())
-[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
-[5] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
-[7] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
+[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
+[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
+[5] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala
index dfd7abde52..a8507623bc 100644
--- a/test/files/run/showraw_tree_ultimate.scala
+++ b/test/files/run/showraw_tree_ultimate.scala
@@ -5,6 +5,7 @@ object Test extends App {
val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
val tree1 = reify(new collection.immutable.HashMap[String, String])
val tree2 = reify(new collection.mutable.HashMap[String, String])
- println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true))
- println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true))
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true)))
+ println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true)))
} \ No newline at end of file
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
new file mode 100644
index 0000000000..be62c5780b
--- /dev/null
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -0,0 +1,62 @@
+false
+false
+true
+false
+true
+FALSE
+FALSE
+TRUE
+FALSE
+TRUE
+true
+false
+null
+0
+80000000
+4c01926
+NULL
+4C01926
+null
+NULL
+Scala
+SCALA
+5
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+S
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+42
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.000000e+00
+3.000000e+00
+05/26/12
+05/26/12
+05/26/12
+05/26/12
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
new file mode 100644
index 0000000000..9c59c334f8
--- /dev/null
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -0,0 +1,103 @@
+object Test extends App {
+
+// 'b' / 'B' (category: general)
+// -----------------------------
+println(f"${null}%b")
+println(f"${false}%b")
+println(f"${true}%b")
+println(f"${new java.lang.Boolean(false)}%b")
+println(f"${new java.lang.Boolean(true)}%b")
+
+println(f"${null}%B")
+println(f"${false}%B")
+println(f"${true}%B")
+println(f"${new java.lang.Boolean(false)}%B")
+println(f"${new java.lang.Boolean(true)}%B")
+
+implicit val stringToBoolean = java.lang.Boolean.parseBoolean(_: String)
+println(f"${"true"}%b")
+println(f"${"false"}%b")
+
+// 'h' | 'H' (category: general)
+// -----------------------------
+println(f"${null}%h")
+println(f"${0.0}%h")
+println(f"${-0.0}%h")
+println(f"${"Scala"}%h")
+
+println(f"${null}%H")
+println(f"${"Scala"}%H")
+
+// 's' | 'S' (category: general)
+// -----------------------------
+println(f"${null}%s")
+println(f"${null}%S")
+println(f"${"Scala"}%s")
+println(f"${"Scala"}%S")
+println(f"${5}")
+
+// 'c' | 'C' (category: character)
+// -------------------------------
+println(f"${120:Char}%c")
+println(f"${120:Byte}%c")
+println(f"${120:Short}%c")
+println(f"${120:Int}%c")
+println(f"${new java.lang.Character('x')}%c")
+println(f"${new java.lang.Byte(120:Byte)}%c")
+println(f"${new java.lang.Short(120:Short)}%c")
+println(f"${new java.lang.Integer(120)}%c")
+
+println(f"${'x' : java.lang.Character}%c")
+println(f"${(120:Byte) : java.lang.Byte}%c")
+println(f"${(120:Short) : java.lang.Short}%c")
+println(f"${120 : java.lang.Integer}%c")
+
+implicit val stringToChar = (x: String) => x(0)
+println(f"${"Scala"}%c")
+
+// 'd' | 'o' | 'x' | 'X' (category: integral)
+// ------------------------------------------
+println(f"${120:Byte}%d")
+println(f"${120:Short}%d")
+println(f"${120:Int}%d")
+println(f"${120:Long}%d")
+println(f"${new java.lang.Byte(120:Byte)}%d")
+println(f"${new java.lang.Short(120:Short)}%d")
+println(f"${new java.lang.Integer(120)}%d")
+println(f"${new java.lang.Long(120)}%d")
+println(f"${120 : java.lang.Integer}%d")
+println(f"${120 : java.lang.Long}%d")
+println(f"${BigInt(120)}%d")
+println(f"${new java.math.BigInteger("120")}%d")
+
+{
+ implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
+ println(f"${"120"}%d")
+ implicit val strToInt = (s: String) => 42
+ println(f"${"120"}%d")
+}
+
+// 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point)
+// ------------------------------------------------------------------
+println(f"${3.4f}%e")
+println(f"${3.4}%e")
+println(f"${3.4f : java.lang.Float}%e")
+println(f"${3.4 : java.lang.Double}%e")
+println(f"${BigDecimal(3.4)}%e")
+println(f"${new java.math.BigDecimal(3.4)}%e")
+println(f"${3}%e")
+println(f"${3L}%e")
+
+// 't' | 'T' (category: date/time)
+// -------------------------------
+import java.util.Calendar
+import java.util.Locale
+val c = Calendar.getInstance(Locale.US)
+c.set(2012, Calendar.MAY, 26)
+println(f"${c}%TD")
+println(f"${c.getTime}%TD")
+println(f"${c.getTime.getTime}%TD")
+
+implicit val strToDate = (x: String) => c
+println(f"""${"1234"}%TD""")
+}
diff --git a/test/files/run/t3326.check b/test/files/run/t3326.check
new file mode 100644
index 0000000000..d0e11cebf7
--- /dev/null
+++ b/test/files/run/t3326.check
@@ -0,0 +1,8 @@
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World) \ No newline at end of file
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
new file mode 100644
index 0000000000..f70cb01504
--- /dev/null
+++ b/test/files/run/t3326.scala
@@ -0,0 +1,74 @@
+
+
+
+import scala.math.Ordering
+
+
+
+/** The heart of the problem - we want to retain the ordering when
+ * using `++` on sorted maps.
+ *
+ * There are 2 `++` overloads - a generic one in traversables and
+ * a map-specific one in `MapLike` - which knows about the ordering.
+ *
+ * The problem here is that the expected return type for the expression
+ * in which `++` appears drives the decision of the overload that needs
+ * to be taken.
+ * The `collection.SortedMap` does not have `++` overridden to return
+ * `SortedMap`, but `immutable.Map` instead.
+ * This is why `collection.SortedMap` used to resort to the generic
+ * `TraversableLike.++` which knows nothing about the ordering.
+ *
+ * To avoid having `collection.SortedMap` resort to the more generic `TraversableLike.++`,
+ * we override the `MapLike.++` overload in `collection.SortedMap` to return
+ * the proper type `SortedMap`.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ testCollectionSorted()
+ testImmutableSorted()
+ }
+
+ def testCollectionSorted() {
+ import collection._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+
+ def testImmutableSorted() {
+ import collection.immutable._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+}
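
A minimal sketch, not part of this patch, of the behaviour t3326.scala exercises: once the `MapLike.++` overload in `collection.SortedMap` returns a `SortedMap`, concatenation keeps the custom ordering instead of falling back to the generic `TraversableLike.++`. The object name and sample values below are illustrative only.

import scala.collection.immutable.SortedMap

object SortedMapConcatSketch extends App {
  // Reverse ordering, as in the test above.
  val rev = implicitly[Ordering[Int]].reverse
  val m1 = SortedMap(1 -> "World", 2 -> "Hello")(rev)
  val m2 = SortedMap(4 -> "Bar", 5 -> "Foo")(rev)
  // With the SortedMap-specific ++ the static type stays SortedMap
  // and the reverse ordering is preserved in the result.
  val m3: SortedMap[Int, String] = m1 ++ m2
  println(m3) // keys remain in descending order: 5, 4, 2, 1
}
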
diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala
index c3b249571b..171a6a21aa 100644
--- a/test/files/run/t3613.scala
+++ b/test/files/run/t3613.scala
@@ -8,7 +8,7 @@ class Boopy {
case "Boopy" => fireIntervalAdded( model, 0, 1 )
}
def getSize = 0
- def getElementAt( idx: Int ) : AnyRef = "egal"
+ def getElementAt( idx: Int ) = ???
}
}
diff --git a/test/files/run/t4027.check b/test/files/run/t4027.check
new file mode 100644
index 0000000000..bdacfc1c06
--- /dev/null
+++ b/test/files/run/t4027.check
@@ -0,0 +1,12 @@
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!)
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!) \ No newline at end of file
diff --git a/test/files/run/t4027.scala b/test/files/run/t4027.scala
new file mode 100644
index 0000000000..d70ca0cc3a
--- /dev/null
+++ b/test/files/run/t4027.scala
@@ -0,0 +1,27 @@
+
+
+import collection._
+
+
+/** Sorted maps should have `filterKeys` and `mapValues` which return sorted maps.
+ * Mapping, filtering, etc. on these views should return sorted maps again.
+ */
+object Test extends App {
+
+ val sortedmap = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(sortedmap.filterKeys(_ % 2 == 0): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!"): SortedMap[Int, String])
+ println(sortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!").filter(t => t._1 < 2): SortedMap[Int, String])
+
+ val immsortedmap = immutable.SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(immsortedmap.filterKeys(_ % 2 == 0): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!"): immutable.SortedMap[Int, String])
+ println(immsortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!").filter(t => t._1 < 2): immutable.SortedMap[Int, String])
+
+}
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
new file mode 100644
index 0000000000..ef0493b275
--- /dev/null
+++ b/test/files/run/t4935.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
new file mode 100644
index 0000000000..ac14fe5dbd
--- /dev/null
+++ b/test/files/run/t4935.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
new file mode 100644
index 0000000000..18631e2041
--- /dev/null
+++ b/test/files/run/t4935.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t5009.check b/test/files/run/t5009.check
index cc9df54b34..6c567227b5 100644
--- a/test/files/run/t5009.check
+++ b/test/files/run/t5009.check
@@ -1,4 +1,5 @@
C(1,true)
10
C(7283,20)
+C(66,-3)
100
diff --git a/test/files/run/t5009.scala b/test/files/run/t5009.scala
index b4fe1bc894..db12c0d685 100644
--- a/test/files/run/t5009.scala
+++ b/test/files/run/t5009.scala
@@ -6,12 +6,9 @@ object Test extends App {
println(c)
println(c.l)
- val f1a = c.copy(y = 20, x = 7283)
+ println(c.copy(y = 20, x = 7283)("enwa", b = false)(l = -1, s = new Object))
- val f1b = c.copy[Int, String, Object](y = 20, x = 7283)
- val f2b = f1b("lkdjen", false)
- val res = f2b(new Object, 100)
+ val res = c.copy[Int, String, Object](y = -3, x = 66)("lkdjen", false)(new Object, 100)
println(res)
println(res.l)
-
}
diff --git a/test/files/run/t5224.check b/test/files/run/t5224.check
index c754f23551..ffeb7b43ce 100644
--- a/test/files/run/t5224.check
+++ b/test/files/run/t5224.check
@@ -1,5 +1,5 @@
{
- @new Foo(bar = "qwe") class C extends Object {
+ @new Foo(bar = "qwe") class C extends AnyRef {
def <init>() = {
super.<init>();
()
diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check
index 2b920a36a8..3cfedbacd7 100644
--- a/test/files/run/t5271_3.check
+++ b/test/files/run/t5271_3.check
@@ -1,5 +1,5 @@
{
- object C extends Object {
+ object C extends AnyRef {
def <init>() = {
super.<init>();
()
diff --git a/test/files/run/t5704.check b/test/files/run/t5704.check
index 74c4610988..0f6c84d2ec 100644
--- a/test/files/run/t5704.check
+++ b/test/files/run/t5704.check
@@ -1 +1 @@
-java.lang.String
+String
diff --git a/test/files/run/t5830.check b/test/files/run/t5830.check
new file mode 100644
index 0000000000..675387eb8e
--- /dev/null
+++ b/test/files/run/t5830.check
@@ -0,0 +1,6 @@
+a with oef
+a with oef
+a
+def with oef
+def
+default
diff --git a/test/files/run/t5830.flags b/test/files/run/t5830.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/run/t5830.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/run/t5830.scala b/test/files/run/t5830.scala
new file mode 100644
index 0000000000..5d808bfa28
--- /dev/null
+++ b/test/files/run/t5830.scala
@@ -0,0 +1,56 @@
+import scala.annotation.switch
+
+object Test extends App {
+ // TODO: should not emit a switch
+ // def noSwitch(ch: Char, eof: Boolean) = (ch: @switch) match {
+ // case 'a' if eof => println("a with oef") // then branch
+ // }
+
+ def onlyThen(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case 'a' if eof => println("a with oef") // then branch
+ case 'c' =>
+ }
+
+ def ifThenElse(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case 'a' if eof => println("a with oef") // then branch
+ case 'a' if eof => println("a with oef2") // unreachable, but the analysis is not that sophisticated
+ case 'a' => println("a") // else-branch
+ case 'c' =>
+ }
+
+ def defaultUnguarded(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case ' ' if eof => println("spacey oef")
+ case _ => println("default")
+ }
+
+ def defaults(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case _ if eof => println("def with oef") // then branch
+ case _ if eof => println("def with oef2") // unreachable, but the analysis is not that sophisticated
+ case _ => println("def") // else-branch
+ }
+
+ // test binders in collapsed cases (no need to run, it's "enough" to know it doesn't crash the compiler)
+ def guard(x: Any): Boolean = true
+ def testBinders =
+ try { println("") } // work around SI-6015
+ catch {
+ case _ if guard(null) =>
+ case x if guard(x) => throw x
+ }
+
+ // def unreachable(ch: Char) = (ch: @switch) match {
+ // case 'a' => println("b") // ok
+ // case 'a' => println("b") // unreachable
+ // case 'c' =>
+ // }
+
+ // noSwitch('a', true)
+ onlyThen('a', true) // 'a with oef'
+ ifThenElse('a', true) // 'a with oef'
+ ifThenElse('a', false) // 'a'
+ defaults('a', true) // 'def with oef'
+ defaults('a', false) // 'def'
+
+ // test that it jumps to default case, no match error
+ defaultUnguarded(' ', false) // default
+} \ No newline at end of file
diff --git a/test/files/run/t5907.check b/test/files/run/t5907.check
new file mode 100644
index 0000000000..bc23692679
--- /dev/null
+++ b/test/files/run/t5907.check
@@ -0,0 +1,31 @@
+c1: 2
+c1: 2873
+c2: 37
+c3: 1, 2, 27
+c3: 1, 22, 27
+c3: 11, 7, 27
+c4: 1
+c4: 23
+c5: 1, 2, 33, b
+c5: 1, 19, 33, b
+c5: 1, 2, 193, c
+c5: 1, 371, 193, c
+c5: -1, 2, -2, lken
+c6: 29, 18, -12
+c6: 1, 93, 2892
+c6: 1, 93, 761
+c7: 1, 22, 33, elkj
+c7: 1, 283, 29872, me
+c7: 37, 298, 899, ekjr
+c8: 172, 989, 77, eliurna
+c8: 1, 82, 2111, schtring
+c8: -1, 92, 29, lken
+c9: 1, 271, ehebab
+c9: 1, 299, enag
+c9: 1, 299, enag
+c9: 1, 299, enag
+c9: -42, 99, flae
+c9: 10, 298, 27
+c9: elkn, en, emn
+c9: ka, kb, kb
+c9: ka, kb, ka
diff --git a/test/files/run/t5907.scala b/test/files/run/t5907.scala
new file mode 100644
index 0000000000..a005e9fbd3
--- /dev/null
+++ b/test/files/run/t5907.scala
@@ -0,0 +1,118 @@
+object Test extends App {
+ t
+
+ def t {
+ val c1 = C1()(1)
+ println(c1.copy()(2))
+
+ {
+ implicit val i = 2873
+ println(c1.copy())
+ }
+
+ val c2 = C2()(1)
+ println(c2.copy()(37))
+
+ val c3 = C3(1,2)(3)
+ println(c3.copy()(27))
+ println(c3.copy(y = 22)(27))
+ println(c3.copy(y = 7, x = 11)(27))
+
+ val c4 = C4(1)
+ println(c4.copy())
+ println(c4.copy(x = 23))
+
+ val c5 = C5(1,2)(3,"a")
+ println(c5.copy()(33,"b"))
+ println(c5.copy(y = 19)(33,"b"))
+
+ {
+ implicit val i = 193
+ implicit val s = "c"
+ println(c5.copy())
+ println(c5.copy(y = 371))
+ println(c5.copy(x = -1)(-2, "lken"))
+ }
+
+ val c6 = C6(1)(2)(3)
+ println(c6.copy(29)(18)(-12))
+
+ {
+ implicit val i = 2892
+ println(c6.copy(x = 1)(93))
+ println(c6.copy(x = 1)(93)(761))
+ }
+
+ val c7 = C7(1)(2)(3)("h")
+ println(c7.copy()(22)(33)("elkj"))
+
+ {
+ implicit val s = "me"
+ println(c7.copy()(283)(29872))
+ println(c7.copy(37)(298)(899)("ekjr"))
+ }
+
+ val c8 = C8(1)(2,3)()("els")
+ println(c8.copy(x = 172)(989, 77)()("eliurna"))
+
+ {
+ implicit val s = "schtring"
+ println(c8.copy()(82,2111)())
+ println(c8.copy(x = -1)(92,29)()("lken"))
+ }
+
+ val c9 = C9(1)(2)()()("u")
+ println(c9.copy()(271)()()("ehebab"))
+
+ {
+ implicit val s = "enag"
+ println(c9.copy()(299))
+ println(c9.copy()(299)())
+ println(c9.copy()(299)()())
+ println(c9.copy(x = -42)(99)()()("flae"))
+ }
+
+ class KA { override def toString = "ka" }
+ class KB extends KA { override def toString = "kb" }
+ val c10 = C10(10)(3)(19)
+ println(c10.copy()(298)(27))
+ println(c10.copy("elkn")("en")("emn"))
+ println(c10.copy(new KA)(new KB)(new KB))
+
+ {
+ implicit val k = new KA
+ println(c10.copy(new KA)(new KB))
+ }
+ }
+}
+
+case class C1(implicit x: Int) {
+ override def toString = s"c1: $x"
+}
+case class C2()(y: Int) {
+ override def toString = s"c2: $y"
+}
+case class C3(x: Int, y: Int)(z: Int) {
+ override def toString = s"c3: $x, $y, $z"
+}
+case class C4(x: Int) {
+ override def toString = s"c4: $x"
+}
+case class C5(x: Int, y: Int)(implicit z: Int, s: String) {
+ override def toString = s"c5: $x, $y, $z, $s"
+}
+case class C6(x: Int)(y: Int)(implicit z: Int) {
+ override def toString = s"c6: $x, $y, $z"
+}
+case class C7(x: Int)(y: Int)(z: Int)(implicit s: String) {
+ override def toString = s"c7: $x, $y, $z, $s"
+}
+case class C8(x: Int)(y: Int, z: Int)()(implicit s: String) {
+ override def toString = s"c8: $x, $y, $z, $s"
+}
+case class C9(x: Int)(y: Int)()()(implicit s: String) {
+ override def toString = s"c9: $x, $y, $s"
+}
+case class C10[T,U <: T](x: T)(y: U)(implicit z: T) {
+ override def toString = s"c9: $x, $y, $z"
+}
diff --git a/test/files/run/t5914.check b/test/files/run/t5914.check
new file mode 100644
index 0000000000..818e321255
--- /dev/null
+++ b/test/files/run/t5914.check
@@ -0,0 +1 @@
+correct
diff --git a/test/files/run/t5914.scala b/test/files/run/t5914.scala
new file mode 100644
index 0000000000..45d8815738
--- /dev/null
+++ b/test/files/run/t5914.scala
@@ -0,0 +1,23 @@
+import scala.reflect.ClassTag
+
+trait Trees {
+ class Tree
+ implicit val ttTag: ClassTag[TypeTree]
+ type TypeTree <: Tree
+ val TypeTree: TypeTreeExtractor
+ abstract class TypeTreeExtractor {
+ def unapply(t: TypeTree): Option[String]
+ }
+ def test(tree: Tree) =
+ tree match {
+ case TypeTree(_) => println("lolwut")
+ case null => println("correct")
+ }
+}
+
+object Test extends App with Trees {
+ val ttTag = implicitly[ClassTag[TypeTree]]
+ case class TypeTree(meh: String) extends Tree
+ object TypeTree extends TypeTreeExtractor
+ test(null) // should not crash
+} \ No newline at end of file
diff --git a/test/files/run/t5966.check b/test/files/run/t5966.check
new file mode 100644
index 0000000000..bfe8358a77
--- /dev/null
+++ b/test/files/run/t5966.check
@@ -0,0 +1,3 @@
+(o()_)("") = List()
+(o("a1")_)("") = WrappedArray(a1)
+(o("a1", "a2")_)("") = WrappedArray(a1, a2)
diff --git a/test/files/run/t5966.scala b/test/files/run/t5966.scala
new file mode 100644
index 0000000000..bbe1a6e874
--- /dev/null
+++ b/test/files/run/t5966.scala
@@ -0,0 +1,9 @@
+object o { def apply(i: AnyRef*)(j: String) = i }
+
+object Test {
+ def main(args: Array[String]) {
+ println("(o()_)(\"\") = " + (o()_)(""))
+ println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
+ println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
+ }
+}
diff --git a/test/files/run/t5971.check b/test/files/run/t5971.check
new file mode 100644
index 0000000000..0c36a1ff02
--- /dev/null
+++ b/test/files/run/t5971.check
@@ -0,0 +1,4 @@
+r,b
+r
+a,b
+r,a,b \ No newline at end of file
diff --git a/test/files/run/t5971.scala b/test/files/run/t5971.scala
new file mode 100644
index 0000000000..dbd9beebb3
--- /dev/null
+++ b/test/files/run/t5971.scala
@@ -0,0 +1,23 @@
+
+
+
+
+
+/** When using the `AbstractTransformed` abstract inner class in views in order
+ * to force generating bridges, one must take care to push the corresponding
+ * collection trait (such as `Iterable` or `Seq`) as far as possible to the
+ * left in the linearization order -- otherwise, overridden methods from these
+ * traits can override the already overridden methods in view. This was the
+ * case with `takeWhile`.
+ * Mind blowing, I know.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ println("bar".view.reverse.filter(_ > 'a').mkString(","))
+ println("bar".view.reverse.take(1).mkString(","))
+ println("bar".view.reverse.dropWhile(_ > 'a').mkString(","))
+ println("bar".view.reverse.takeWhile(_ => true).mkString(","))
+ }
+
+}
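
The linearization caveat described in t5971.scala above can be illustrated with a self-contained sketch (not part of this patch): in a diamond of traits, the trait mixed in last sits earliest in the linearization, so its override is the one that gets picked up. The trait and object names below are made up for illustration.

trait Base { def render: String = "Base" }
trait GenericView extends Base { override def render = "GenericView" }
trait SpecificView extends Base { override def render = "SpecificView" }

object LinearizationSketch extends App {
  // Linearization: Good -> SpecificView -> GenericView -> Base,
  // so the more specific override wins.
  object Good extends GenericView with SpecificView
  // Linearization: Bad -> GenericView -> SpecificView -> Base,
  // so the generic override shadows the specific one -- the failure
  // mode t5971 guards against for takeWhile on views.
  object Bad extends SpecificView with GenericView

  println(Good.render) // SpecificView
  println(Bad.render)  // GenericView
}
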
diff --git a/test/files/run/t5974.check b/test/files/run/t5974.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t5974.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t5974.scala b/test/files/run/t5974.scala
new file mode 100644
index 0000000000..5b99e9f721
--- /dev/null
+++ b/test/files/run/t5974.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ import scala.collection.JavaConverters._
+
+ def ser(a: AnyRef) =
+ (new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream())).writeObject(a)
+
+ val l = java.util.Arrays.asList("pigdog").asScala
+ ser(l)
+ println("ok")
+}
diff --git a/test/files/run/t5986.check b/test/files/run/t5986.check
new file mode 100644
index 0000000000..4101770c6d
--- /dev/null
+++ b/test/files/run/t5986.check
@@ -0,0 +1,15 @@
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4) \ No newline at end of file
diff --git a/test/files/run/t5986.scala b/test/files/run/t5986.scala
new file mode 100644
index 0000000000..8cf7086f98
--- /dev/null
+++ b/test/files/run/t5986.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.collection._
+
+
+
+/** A sorted set should not replace the stored element when an equal
+ * element is added to the set.
+ */
+object Test {
+
+ class Foo(val name: String, val n: Int) {
+ override def equals(obj: Any): Boolean = obj match { case other: Foo => name == other.name; case _ => false }
+ override def hashCode = name.##
+ override def toString = "Foo(" + name + ", " + n + ")"
+ }
+
+ implicit val ordering: Ordering[Foo] = Ordering.fromLessThan[Foo] { (a, b) => a.name.compareTo(b.name) < 0 }
+
+ def check[S <: Set[Foo]](set: S) {
+ def output(s: Set[Foo]) = println(s.toList.sorted.mkString(","))
+ output(set + new Foo("bar", 2))
+ output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4)))
+ output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4)))
+ }
+
+ def main(args: Array[String]) {
+ check(Set(new Foo("bar", 1)))
+ check(immutable.Set(new Foo("bar", 1)))
+ check(mutable.Set(new Foo("bar", 1)))
+ check(immutable.SortedSet(new Foo("bar", 1)))
+ check(mutable.SortedSet(new Foo("bar", 1)))
+ }
+
+}
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
index 40a976119f..a7163edb5f 100644
--- a/test/files/run/test-cpp.check
+++ b/test/files/run/test-cpp.check
@@ -1,73 +1,65 @@
-37c37
-< locals: value args, value x, value y
----
-> locals: value args
-42,43d41
-< 52 CONSTANT(2)
-< 52 STORE_LOCAL(value x)
-45,46d42
-< 53 LOAD_LOCAL(value x)
-< 53 STORE_LOCAL(value y)
-49c45
-< 54 LOAD_LOCAL(value y)
----
-> 54 CONSTANT(2)
-92c88
-< locals: value args, value x, value y
----
-> locals: value args, value x
-101,102d96
-< 82 LOAD_LOCAL(value x)
-< 82 STORE_LOCAL(value y)
-105c99
-< 83 LOAD_LOCAL(value y)
----
-> 83 LOAD_LOCAL(value x)
-135c129
-< locals: value args, value x, value y
----
-> locals: value args
-140,141d133
-< 66 THIS(TestAliasChainDerefThis)
-< 66 STORE_LOCAL(value x)
-143,144d134
-< 67 LOAD_LOCAL(value x)
-< 67 STORE_LOCAL(value y)
-147c137
-< 68 LOAD_LOCAL(value y)
----
-> 68 THIS(Object)
-176c166
-< locals: value x, value y
----
-> locals: value x
-181,182d170
-< 29 LOAD_LOCAL(value x)
-< 29 STORE_LOCAL(value y)
-185c173
-< 30 LOAD_LOCAL(value y)
----
-> 30 LOAD_LOCAL(value x)
-223,224d210
-< 97 LOAD_LOCAL(variable x)
-< 97 STORE_LOCAL(variable y)
-227c213
-< 98 LOAD_LOCAL(variable y)
----
-> 98 LOAD_LOCAL(variable x)
-233,234d218
-< 101 LOAD_LOCAL(variable y)
-< 101 STORE_LOCAL(variable x)
-236c220
-< 102 LOAD_LOCAL(variable x)
----
-> 102 LOAD_LOCAL(variable y)
-345c329
-< 41 THIS(TestSetterInline)
----
-> 41 THIS(Object)
-347c331
-< 41 CALL_METHOD TestSetterInline._postSetHook_$eq (static-instance)
----
-> 41 STORE_FIELD variable _postSetHook (dynamic)
-
+37c37
+< locals: value args, value x, value y
+---
+> locals: value args
+42,43d41
+< 52 CONSTANT(2)
+< 52 STORE_LOCAL(value x)
+45,46d42
+< 53 LOAD_LOCAL(value x)
+< 53 STORE_LOCAL(value y)
+49c45
+< 54 LOAD_LOCAL(value y)
+---
+> 54 CONSTANT(2)
+92c88
+< locals: value args, value x, value y
+---
+> locals: value args, value x
+101,102d96
+< 82 LOAD_LOCAL(value x)
+< 82 STORE_LOCAL(value y)
+105c99
+< 83 LOAD_LOCAL(value y)
+---
+> 83 LOAD_LOCAL(value x)
+135c129
+< locals: value args, value x, value y
+---
+> locals: value args
+140,141d133
+< 66 THIS(TestAliasChainDerefThis)
+< 66 STORE_LOCAL(value x)
+143,144d134
+< 67 LOAD_LOCAL(value x)
+< 67 STORE_LOCAL(value y)
+147c137
+< 68 LOAD_LOCAL(value y)
+---
+> 68 THIS(Object)
+176c166
+< locals: value x, value y
+---
+> locals: value x
+181,182d170
+< 29 LOAD_LOCAL(value x)
+< 29 STORE_LOCAL(value y)
+185c173
+< 30 LOAD_LOCAL(value y)
+---
+> 30 LOAD_LOCAL(value x)
+223,224d210
+< 97 LOAD_LOCAL(variable x)
+< 97 STORE_LOCAL(variable y)
+227c213
+< 98 LOAD_LOCAL(variable y)
+---
+> 98 LOAD_LOCAL(variable x)
+233,234d218
+< 101 LOAD_LOCAL(variable y)
+< 101 STORE_LOCAL(variable x)
+236c220
+< 102 LOAD_LOCAL(variable x)
+---
+> 102 LOAD_LOCAL(variable y)
+
diff --git a/test/files/run/concretetypetags_core.check b/test/files/run/typetags_core.check
index d042e0900d..8d20e099c4 100644
--- a/test/files/run/concretetypetags_core.check
+++ b/test/files/run/typetags_core.check
@@ -19,10 +19,12 @@ TypeTag[Unit]
true
TypeTag[Any]
true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
TypeTag[java.lang.Object]
true
TypeTag[Null]
true
TypeTag[Nothing]
-true
-TypeTag[java.lang.String]
diff --git a/test/files/run/concretetypetags_core.scala b/test/files/run/typetags_core.scala
index a870f77c5f..5257d55118 100644
--- a/test/files/run/concretetypetags_core.scala
+++ b/test/files/run/typetags_core.scala
@@ -21,12 +21,14 @@ object Test extends App {
println(implicitly[TypeTag[Unit]])
println(implicitly[TypeTag[Any]] eq TypeTag.Any)
println(implicitly[TypeTag[Any]])
+ println(implicitly[TypeTag[AnyVal]] eq TypeTag.AnyVal)
+ println(implicitly[TypeTag[AnyVal]])
+ println(implicitly[TypeTag[AnyRef]] eq TypeTag.AnyRef)
+ println(implicitly[TypeTag[AnyRef]])
println(implicitly[TypeTag[Object]] eq TypeTag.Object)
println(implicitly[TypeTag[Object]])
println(implicitly[TypeTag[Null]] eq TypeTag.Null)
println(implicitly[TypeTag[Null]])
println(implicitly[TypeTag[Nothing]] eq TypeTag.Nothing)
println(implicitly[TypeTag[Nothing]])
- println(implicitly[TypeTag[String]] eq TypeTag.String)
- println(implicitly[TypeTag[String]])
} \ No newline at end of file
diff --git a/test/files/run/concretetypetags_multi.check b/test/files/run/typetags_multi.check
index 6110252c36..6110252c36 100644
--- a/test/files/run/concretetypetags_multi.check
+++ b/test/files/run/typetags_multi.check
diff --git a/test/files/run/concretetypetags_multi.scala b/test/files/run/typetags_multi.scala
index b30aac80a6..b30aac80a6 100644
--- a/test/files/run/concretetypetags_multi.scala
+++ b/test/files/run/typetags_multi.scala
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
index e4b356c889..e2609fa200 100644
--- a/test/files/scalacheck/redblacktree.scala
+++ b/test/files/scalacheck/redblacktree.scala
@@ -121,7 +121,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
case Some((key, _)) => key.init.mkString + "MN"
@@ -144,7 +144,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
- case (key, _) => update(tree, key, newValue)
+ case (key, _) => update(tree, key, newValue, true)
} getOrElse tree
property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
diff --git a/test/files/speclib/instrumented.jar.desired.sha1 b/test/files/speclib/instrumented.jar.desired.sha1
index 24856fe19a..0b8ee593da 100644
--- a/test/files/speclib/instrumented.jar.desired.sha1
+++ b/test/files/speclib/instrumented.jar.desired.sha1
@@ -1 +1 @@
-474d8c20ab31438d5d4a2ba6bc07ebdcdb530b50 ?instrumented.jar
+474d8c20ab31438d5d4a2ba6bc07ebdcdb530b50 *instrumented.jar
diff --git a/test/files/run/reify_implicits-new.check b/test/pending/run/reify_implicits-new.check
index e3aeb20f6b..e3aeb20f6b 100644
--- a/test/files/run/reify_implicits-new.check
+++ b/test/pending/run/reify_implicits-new.check
diff --git a/test/files/run/reify_implicits-new.scala b/test/pending/run/reify_implicits-new.scala
index 42a1deef26..42a1deef26 100644
--- a/test/files/run/reify_implicits-new.scala
+++ b/test/pending/run/reify_implicits-new.scala
diff --git a/test/files/run/reify_implicits-old.check b/test/pending/run/reify_implicits-old.check
index e3aeb20f6b..e3aeb20f6b 100644
--- a/test/files/run/reify_implicits-old.check
+++ b/test/pending/run/reify_implicits-old.check
diff --git a/test/files/run/reify_implicits-old.scala b/test/pending/run/reify_implicits-old.scala
index 8ff256d2d4..8ff256d2d4 100644
--- a/test/files/run/reify_implicits-old.scala
+++ b/test/pending/run/reify_implicits-old.scala
diff --git a/test/files/run/reify_newimpl_09.scala b/test/pending/run/reify_newimpl_09.scala
index 27fbd37b71..27fbd37b71 100644
--- a/test/files/run/reify_newimpl_09.scala
+++ b/test/pending/run/reify_newimpl_09.scala
diff --git a/test/pending/run/reify_newimpl_09a.scala b/test/pending/run/reify_newimpl_09a.scala
new file mode 100644
index 0000000000..27fbd37b71
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09a.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09b.scala b/test/pending/run/reify_newimpl_09b.scala
new file mode 100644
index 0000000000..9e86dd5d8d
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09b.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type U = Int
+ type T = U
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09c.scala b/test/pending/run/reify_newimpl_09c.scala
new file mode 100644
index 0000000000..e2f4a4923a
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09c.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ def foo[W] = {
+ type U = W
+ type T = U
+ reify {
+ List[T](2)
+ }
+ }
+ val code = foo[Int]
+ println(code.tree.freeTypes)
+ val W = code.tree.freeTypes(2)
+ cm.mkToolBox().runExpr(code.tree, Map(W -> definitions.IntTpe))
+ println(code.eval)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_10.scala b/test/pending/run/reify_newimpl_10.scala
index 791e52943a..791e52943a 100644
--- a/test/files/run/reify_newimpl_10.scala
+++ b/test/pending/run/reify_newimpl_10.scala
diff --git a/test/files/run/reify_newimpl_16.scala b/test/pending/run/reify_newimpl_16.scala
index a0cadf4d48..a0cadf4d48 100644
--- a/test/files/run/reify_newimpl_16.scala
+++ b/test/pending/run/reify_newimpl_16.scala
diff --git a/test/files/run/reify_newimpl_17.scala b/test/pending/run/reify_newimpl_17.scala
index 8fbcd52502..8fbcd52502 100644
--- a/test/files/run/reify_newimpl_17.scala
+++ b/test/pending/run/reify_newimpl_17.scala
diff --git a/test/files/run/reify_newimpl_28.scala b/test/pending/run/reify_newimpl_28.scala
index 524a110704..524a110704 100644
--- a/test/files/run/reify_newimpl_28.scala
+++ b/test/pending/run/reify_newimpl_28.scala
diff --git a/test/files/run/reify_newimpl_32.scala b/test/pending/run/reify_newimpl_32.scala
index 095e59d919..095e59d919 100644
--- a/test/files/run/reify_newimpl_32.scala
+++ b/test/pending/run/reify_newimpl_32.scala
diff --git a/test/files/run/reify_newimpl_34.scala b/test/pending/run/reify_newimpl_34.scala
index a0a575ed7d..a0a575ed7d 100644
--- a/test/files/run/reify_newimpl_34.scala
+++ b/test/pending/run/reify_newimpl_34.scala
diff --git a/test/scaladoc/resources/doc-root/Any.scala b/test/scaladoc/resources/doc-root/Any.scala
new file mode 100644
index 0000000000..031b7d9d8c
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Any.scala
@@ -0,0 +1,114 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala
+ * execution environment inherits directly or indirectly from this class.
+ */
+abstract class Any {
+ /** Compares the receiver object (`this`) with the argument object (`that`) for equivalence.
+ *
+ * Any implementation of this method should be an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]:
+ *
+ * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`.
+ * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and
+ * only if `y.equals(x)` returns `true`.
+ * - It is transitive: for any instances `x`, `y`, and `z` of type `AnyRef` if `x.equals(y)` returns `true` and
+ * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`.
+ *
+ * If you override this method, you should verify that your implementation remains an equivalence relation.
+ * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that
+ * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]].
+ * (`o1.hashCode.equals(o2.hashCode)`).
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ def equals(that: Any): Boolean
+
+ /** Calculate a hash code value for the object.
+ *
+ * The default hashing algorithm is platform dependent.
+ *
+ * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet
+ * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`.
+ * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have
+ * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure
+ * to verify that the behavior is consistent with the `equals` method.
+ *
+ * @return the hash code value for this object.
+ */
+ def hashCode(): Int
+
+ /** Returns a string representation of the object.
+ *
+ * The default representation is platform dependent.
+ *
+ * @return a string representation of the object.
+ */
+ def toString(): String
+
+ /** Returns the runtime class representation of the object.
+ *
+ * @return a class object corresponding to the runtime type of the receiver.
+ */
+ def getClass(): Class[_]
+
+ /** Test two objects for equality.
+ * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ final def ==(that: Any): Boolean = this equals that
+
+ /** Test two objects for inequality.
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if !(this == that), false otherwise.
+ */
+ final def != (that: Any): Boolean = !(this == that)
+
+ /** Equivalent to `x.hashCode` except for boxed numeric types and `null`.
+ * For numerics, it returns a hash value which is consistent
+ * with value equality: if two value type instances compare
+ * as true, then ## will produce the same hash value for each
+ * of them.
+ * For `null` returns a hashcode where `null.hashCode` throws a
+ * `NullPointerException`.
+ *
+ * @return a hash value consistent with ==
+ */
+ final def ##(): Int = sys.error("##")
+
+ /** Test whether the dynamic type of the receiver object is `T0`.
+ *
+ * Note that the result of the test is modulo Scala's erasure semantics.
+ * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the
+ * expression `List(1).isInstanceOf[List[String]]` will return `true`.
+ * In the latter example, because the type argument is erased as part of compilation it is
+ * not possible to check whether the contents of the list are of the specified type.
+ *
+ * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise.
+ */
+ def isInstanceOf[T0]: Boolean = sys.error("isInstanceOf")
+
+ /** Cast the receiver object to be of type `T0`.
+ *
+ * Note that the success of a cast at runtime is modulo Scala's erasure semantics.
+ * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at
+ * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not.
+ * In the latter example, because the type argument is erased as part of compilation it is
+ * not possible to check whether the contents of the list are of the requested type.
+ *
+ * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`.
+ * @return the receiver object.
+ */
+ def asInstanceOf[T0]: T0 = sys.error("asInstanceOf")
+}
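
As a hedged illustration of the equals/hashCode contract documented in Any.scala above (not part of this patch), a hand-written override pair that keeps the two methods consistent might look like the hypothetical Point class below.

class Point(val x: Int, val y: Int) {
  // Equivalence relation over the two coordinates.
  override def equals(that: Any): Boolean = that match {
    case p: Point => p.x == x && p.y == y
    case _        => false
  }
  // Consistent with equals: equal Points produce the same hash code.
  override def hashCode: Int = (x, y).##
}
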
diff --git a/test/scaladoc/resources/doc-root/AnyRef.scala b/test/scaladoc/resources/doc-root/AnyRef.scala
new file mode 100644
index 0000000000..1eefb0c806
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/AnyRef.scala
@@ -0,0 +1,131 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** Class `AnyRef` is the root class of all ''reference types''.
+ * All types except the value types descend from this class.
+ */
+trait AnyRef extends Any {
+
+ /** The equality method for reference types. Default implementation delegates to `eq`.
+ *
+ * See also `equals` in [[scala.Any]].
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ def equals(that: Any): Boolean = this eq that
+
+ /** The hashCode method for reference types. See hashCode in [[scala.Any]].
+ *
+ * @return the hash code value for this object.
+ */
+ def hashCode: Int = sys.error("hashCode")
+
+ /** Creates a String representation of this object. The default
+ * representation is platform dependent. On the java platform it
+ * is the concatenation of the class name, "@", and the object's
+ * hashcode in hexadecimal.
+ *
+ * @return a String representation of the object.
+ */
+ def toString: String = sys.error("toString")
+
+ /** Executes the code in `body` with an exclusive lock on `this`.
+ *
+ * @param body the code to execute
+ * @return the result of `body`
+ */
+ def synchronized[T](body: => T): T
+
+ /** Tests whether the argument (`arg0`) is a reference to the receiver object (`this`).
+ *
+ * The `eq` method implements an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on
+ * non-null instances of `AnyRef`, and has three additional properties:
+ *
+ * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of
+ * `x.eq(y)` consistently returns `true` or consistently returns `false`.
+ * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`.
+ * - `null.eq(null)` returns `true`.
+ *
+ * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is
+ * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they
+ * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`).
+ *
+ * @param that the object to compare against this object for reference equality.
+ * @return `true` if the argument is a reference to the receiver object; `false` otherwise.
+ */
+ final def eq(that: AnyRef): Boolean = sys.error("eq")
+
+ /** Equivalent to `!(this eq that)`.
+ *
+ * @param that the object to compare against this object for reference equality.
+ * @return `true` if the argument is not a reference to the receiver object; `false` otherwise.
+ */
+ final def ne(that: AnyRef): Boolean = !(this eq that)
+
+ /** The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
+ *
+ * @param arg0 the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ final def ==(that: AnyRef): Boolean =
+ if (this eq null) that eq null
+ else this equals that
+
+ /** Create a copy of the receiver object.
+ *
+ * The default implementation of the `clone` method is platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ * @return a copy of the receiver object.
+ */
+ protected def clone(): AnyRef
+
+ /** Called by the garbage collector on the receiver object when there
+ * are no more references to the object.
+ *
+ * The details of when and if the `finalize` method is invoked, as
+ * well as the interaction between `finalize` and non-local returns
+ * and exceptions, are all platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ protected def finalize(): Unit
+
+ /** A representation that corresponds to the dynamic class of the receiver object.
+ *
+ * The nature of the representation is platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ * @return a representation that corresponds to the dynamic class of the receiver object.
+ */
+ def getClass(): Class[_]
+
+ /** Wakes up a single thread that is waiting on the receiver object's monitor.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def notify(): Unit
+
+ /** Wakes up all threads that are waiting on the receiver object's monitor.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def notifyAll(): Unit
+
+ /** Causes the current Thread to wait until another Thread invokes
+ * the notify() or notifyAll() methods.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def wait (): Unit
+ def wait (timeout: Long, nanos: Int): Unit
+ def wait (timeout: Long): Unit
+}
diff --git a/test/scaladoc/resources/doc-root/Nothing.scala b/test/scaladoc/resources/doc-root/Nothing.scala
new file mode 100644
index 0000000000..eed6066039
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Nothing.scala
@@ -0,0 +1,23 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy.
+ *
+ * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist
+ * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is
+ * nevertheless useful in several ways. For instance, the Scala library defines a value
+ * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala,
+ * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`.
+ *
+ * Another usage for Nothing is the return type for methods which never return normally.
+ * One example is method error in [[scala.sys]], which always throws an exception.
+ */
+sealed trait Nothing
+
diff --git a/test/scaladoc/resources/doc-root/Null.scala b/test/scaladoc/resources/doc-root/Null.scala
new file mode 100644
index 0000000000..7455e78ae7
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Null.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy.
+ *
+ * `Null` is a subtype of all reference types; its only instance is the `null` reference.
+ * Since `Null` is not a subtype of value types, `null` is not a member of any such type. For instance,
+ * it is not possible to assign `null` to a variable of type [[scala.Int]].
+ */
+sealed trait Null
diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala
new file mode 100644
index 0000000000..6ed51366cb
--- /dev/null
+++ b/test/scaladoc/resources/implicits-ambiguating-res.scala
@@ -0,0 +1,72 @@
+/**
+ * Test scaladoc implicits distinguishing -- suppress all members added by implicit conversion that are shadowed by the
+ * class' own members
+ *
+ * {{{
+ * scala> class A { def foo(t: String) = 4 }
+ * defined class A
+ *
+ * scala> class B { def foo(t: Any) = 5 }
+ * defined class B
+ *
+ * scala> implicit def AtoB(a:A) = new B
+ * AtoB: (a: A)B
+ *
+ * scala> val a = new A
+ * a: A = A@28f553e3
+ *
+ * scala> a.foo("T")
+ * res1: Int = 4
+ *
+ * scala> a.foo(4)
+ * res2: Int = 5
+ * }}}
+ */
+package scala.test.scaladoc.implicits.ambiguating
+import language.implicitConversions // according to SIP18
+
+/** - conv1-5 should be ambiguous
+ * - conv6-7 should not be ambiguous
+ * - conv8 should be ambiguous
+ * - conv9 should be ambiguous
+ * - conv10 and conv11 should not be ambiguous */
+class A[T]
+/** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
+class B extends A[Int]
+/** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
+class C extends A[Double]
+/** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
+class D extends A[AnyRef]
+
+class X[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(l: Int): AnyRef = ???
+ def conv4(l: AnyRef): AnyRef = ???
+ def conv5(l: AnyRef): String = ???
+ def conv6(l: String)(m: String): AnyRef = ???
+ def conv7(l: AnyRef)(m: AnyRef): AnyRef = ???
+ def conv8(l: AnyRef): AnyRef = ???
+ def conv9(l: String): AnyRef = ???
+ def conv10(l: T): T = ???
+ def conv11(l: T): T = ???
+}
+
+class Z[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(p: Int): AnyRef = ???
+ def conv4(p: AnyRef): String = ???
+ def conv5(p: AnyRef): AnyRef = ???
+ def conv6(p: String, q: String): AnyRef = ???
+ def conv7(p: AnyRef, q: AnyRef): AnyRef = ???
+ def conv8(p: String): AnyRef = ???
+ def conv9(p: AnyRef): AnyRef = ???
+ def conv10(p: Int): T = ???
+ def conv11(p: String): T = ???
+}
+
+object A {
+ implicit def AtoX[T](a: A[T]) = new X[T]
+ implicit def AtoZ[T](a: A[T]) = new Z[T]
+}
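
As an aside, a small hypothetical sketch (editorial, not part of the test resource above) of the ambiguity this resource models: both AtoX and AtoZ can supply conv1 for an A[T], so a call through the implicit cannot be resolved, whereas conv6 is curried in X and takes two parameters in Z, so only one conversion applies to a given call.

object AmbiguitySketch {
  import scala.test.scaladoc.implicits.ambiguating._

  val a = new A[Int]
  // a.conv1                                    // would not compile: both AtoX and AtoZ provide conv1
  def viaX: AnyRef = A.AtoX(a).conv1            // fine once the conversion is chosen explicitly
  def viaZ: AnyRef = A.AtoZ(a).conv6("x", "y")  // fine: only Z offers the two-parameter form
}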
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
index 65d7bdf67c..d6c0332c10 100644
--- a/test/scaladoc/resources/implicits-base-res.scala
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -16,8 +16,9 @@ trait MyNumeric[R]
* def convToManifestA(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
* def convToMyNumericA(x: T) // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
* def convToNumericA(x: T) // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
- * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints
- * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+ * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
+ * def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
* def convToTraversableOps(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
* // should not be abstract!
* }}}
@@ -52,9 +53,10 @@ object A {
* def convToManifestA(x: Double) // pimpA7: no constraints
* def convToMyNumericA(x: Double) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
 * def convToNumericA(x: Double) // pimpA1: no constraints
- * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints
+ * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
* def convToTraversableOps(x: Double) // pimpA7: no constraints
- * // should not be abstract!
+ * // should not be abstract!
* }}}
*/
class B extends A[Double]
@@ -68,7 +70,8 @@ object B extends A
* def convToIntA(x: Int) // pimpA2: no constraints
* def convToMyNumericA(x: Int) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
* def convToNumericA(x: Int) // pimpA1: no constraints
- * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints
+ * def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints, SHADOWED
* }}}
*/
class C extends A[Int]
@@ -81,7 +84,8 @@ object C extends A
* {{{
* def convToMyNumericA(x: String) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
* def convToNumericA(x: String) // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
- * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints
+ * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
* }}}
*/
class D extends A[String]
diff --git a/test/scaladoc/resources/implicits-elimination-res.scala b/test/scaladoc/resources/implicits-elimination-res.scala
index b23667440c..5f7135c9e8 100644
--- a/test/scaladoc/resources/implicits-elimination-res.scala
+++ b/test/scaladoc/resources/implicits-elimination-res.scala
@@ -2,13 +2,13 @@
* Testing scaladoc implicits elimination
*/
package scala.test.scaladoc.implicits.elimination {
-
+
import language.implicitConversions // according to SIP18
/** No conversion, as B doesn't bring any member */
class A
class B { class C; trait V; type T; }
- object A {
- implicit def toB(a: A): B = null
+ object A {
+ implicit def toB(a: A): B = null
}
}
diff --git a/test/scaladoc/run/SI-5373.scala b/test/scaladoc/run/SI-5373.scala
index 0062abbb2a..65cf8baff5 100644
--- a/test/scaladoc/run/SI-5373.scala
+++ b/test/scaladoc/run/SI-5373.scala
@@ -12,12 +12,12 @@ object Test extends ScaladocModelTest {
def foo = ()
}
- trait B {
+ trait B extends A {
@bridge()
def foo = ()
}
- class C extends A with B
+ class C extends B
}
"""
diff --git a/test/scaladoc/run/implicits-elimination.check b/test/scaladoc/run/SI-5780.check
index 619c56180b..619c56180b 100644
--- a/test/scaladoc/run/implicits-elimination.check
+++ b/test/scaladoc/run/SI-5780.check
diff --git a/test/scaladoc/run/SI-5780.scala b/test/scaladoc/run/SI-5780.scala
new file mode 100644
index 0000000000..809567faec
--- /dev/null
+++ b/test/scaladoc/run/SI-5780.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.SI5780
+
+ object `package` { def foo: AnyRef = "hello"; class T /* so the package is not dropped */ }
+ """
+
+ // use the doc-root resources so that AnyRef is linked to its template (see the assertion below)
+ def scaladocSettings = "-doc-root-content " + resourcePath + "/doc-root"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val foo = rootPackage._package("scala")._package("test")._package("scaladoc")._package("SI5780")._method("foo")
+ // check that AnyRef is properly linked to its template:
+ assert(foo.resultType.name == "AnyRef", foo.resultType.name + " == AnyRef")
+ assert(foo.resultType.refEntity.size == 1, foo.resultType.refEntity + ".size == 1")
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-base.check b/test/scaladoc/run/diagrams-base.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/diagrams-base.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-base.scala b/test/scaladoc/run/diagrams-base.scala
new file mode 100644
index 0000000000..38bed06502
--- /dev/null
+++ b/test/scaladoc/run/diagrams-base.scala
@@ -0,0 +1,73 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams
+
+ import language.implicitConversions
+
+ trait A
+ trait B
+ trait C
+ class E extends A with B with C
+ object E { implicit def eToT(e: E) = new T }
+
+ class F extends E
+ class G extends E
+ private class H extends E /* since it's private, it won't go into the diagram */
+ class T { def t = true }
+
+ class X
+ object X { implicit def xToE(x: X) = new E}
+ class Y extends X
+ class Z
+ object Z { implicit def zToE(z: Z) = new E}
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val E = base._class("E")
+ val diag = E.inheritanceDiagram.get
+
+ // there must be a single this node
+ assert(diag.nodes.filter(_.isThisNode).length == 1)
+
+ // 1. check class E diagram
+ assert(diag.isClassDiagram)
+
+ val (incoming, outgoing) = diag.edges.partition(!_._1.isThisNode)
+ assert(incoming.length == 5)
+ assert(outgoing.head._2.length == 4)
+
+ val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
+ assert(outgoingSuperclass.length == 3)
+ assert(outgoingImplicit.length == 1)
+
+ val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
+ assert(incomingSubclass.length == 2)
+ assert(incomingImplicit.length == 3)
+
+ val classDiag = diag.asInstanceOf[ClassDiagram]
+ assert(classDiag.incomingImplicits.length == 3)
+ assert(classDiag.outgoingImplicits.length == 1)
+
+ // 2. check package diagram
+ // NOTE: Z should be eliminated because it's isolated
+ val packDiag = base.contentDiagram.get
+ assert(packDiag.isPackageDiagram)
+ assert(packDiag.nodes.length == 8) // check singular object removal
+ assert(packDiag.edges.length == 4)
+ assert(packDiag.edges.foldLeft(0)(_ + _._2.length) == 6)
+
+ // TODO: Should check numbering
+ }
+} \ No newline at end of file
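
For orientation, the counts asserted above can be read off the code string: E's outgoing edges are its three superclasses A, B and C plus the implicit conversion to T (4 in total, 3 normal and 1 implicit), and its incoming edges are the subclasses F and G (H is private and excluded) plus the implicit conversions from X, Z and, via X's inherited conversion, Y (5 in total, 2 normal and 3 implicit).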
diff --git a/test/scaladoc/run/diagrams-determinism.check b/test/scaladoc/run/diagrams-determinism.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/diagrams-determinism.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-determinism.scala b/test/scaladoc/run/diagrams-determinism.scala
new file mode 100644
index 0000000000..6c8db05d78
--- /dev/null
+++ b/test/scaladoc/run/diagrams-determinism.scala
@@ -0,0 +1,67 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams
+
+ trait A
+ trait B extends A
+ trait C extends B
+ trait D extends C with A
+ trait E extends C with A with D
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def diagramString(rootPackage: Package) = {
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val A = base._trait("A")
+ val B = base._trait("B")
+ val C = base._trait("C")
+ val D = base._trait("D")
+ val E = base._trait("E")
+
+ base.contentDiagram.get.toString + "\n" +
+ A.inheritanceDiagram.get.toString + "\n" +
+ B.inheritanceDiagram.get.toString + "\n" +
+ C.inheritanceDiagram.get.toString + "\n" +
+ D.inheritanceDiagram.get.toString + "\n" +
+ E.inheritanceDiagram.get.toString
+ }
+
+ // 1. check that several runs produce the same output
+ val run0 = diagramString(rootPackage)
+ val run1 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+ val run2 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+ val run3 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+
+ // any variation in the order of the diagram elements should make the following assertions fail:
+ assert(run0 == run1)
+ assert(run1 == run2)
+ assert(run2 == run3)
+
+ // 2. check the order in the diagram: this node, subclasses, and then implicit conversions
+ def assertRightOrder(diagram: Diagram) = {
+ for ((node, subclasses) <- diagram.edges)
+ assert(subclasses == subclasses.filter(_.isThisNode) :::
+ subclasses.filter(_.isNormalNode) :::
+ subclasses.filter(_.isImplicitNode))
+ }
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ assertRightOrder(base.contentDiagram.get)
+ assertRightOrder(base._trait("A").inheritanceDiagram.get)
+ assertRightOrder(base._trait("B").inheritanceDiagram.get)
+ assertRightOrder(base._trait("C").inheritanceDiagram.get)
+ assertRightOrder(base._trait("D").inheritanceDiagram.get)
+ assertRightOrder(base._trait("E").inheritanceDiagram.get)
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-filtering.check b/test/scaladoc/run/diagrams-filtering.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/diagrams-filtering.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-filtering.scala b/test/scaladoc/run/diagrams-filtering.scala
new file mode 100644
index 0000000000..8cb32180a1
--- /dev/null
+++ b/test/scaladoc/run/diagrams-filtering.scala
@@ -0,0 +1,93 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc
+
+ /** @contentDiagram hideNodes "scala.test.*.A" "java.*", hideEdges ("*G" -> "*E") */
+ package object diagrams {
+ def foo = 4
+ }
+
+ package diagrams {
+ import language.implicitConversions
+
+ /** @inheritanceDiagram hideIncomingImplicits, hideNodes "*E" */
+ trait A
+ trait AA extends A
+ trait B
+ trait AAA extends B
+
+ /** @inheritanceDiagram hideDiagram */
+ trait C
+ trait AAAA extends C
+
+ /** @inheritanceDiagram hideEdges("*E" -> "*A") */
+ class E extends A with B with C
+ class F extends E
+ /** @inheritanceDiagram hideNodes "*G" "G" */
+ class G extends E
+ private class H extends E /* since it's private, it won't go into the diagram */
+ class T { def t = true }
+ object E {
+ implicit def eToT(e: E) = new T
+ implicit def eToA(e: E) = new A { }
+ }
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // base package
+ // Assert we have 6 nodes and 5 edges left after filtering
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val packDiag = base.contentDiagram.get
+ assert(packDiag.nodes.length == 6)
+ assert(packDiag.edges.map(_._2.length).sum == 5)
+
+ // trait A
+ // Assert we have just 3 nodes and 2 edges
+ val A = base._trait("A")
+ val ADiag = A.inheritanceDiagram.get
+ assert(ADiag.nodes.length == 3)
+ assert(ADiag.edges.map(_._2.length).sum == 2)
+
+ // trait C
+ val C = base._trait("C")
+ assert(!C.inheritanceDiagram.isDefined)
+
+ // trait G
+ val G = base._trait("G")
+ assert(!G.inheritanceDiagram.isDefined)
+
+ // trait E
+ val E = base._class("E")
+ val EDiag = E.inheritanceDiagram.get
+
+ // there must be a single this node
+ assert(EDiag.nodes.filter(_.isThisNode).length == 1)
+
+ // 1. check class E diagram
+ val (incoming, outgoing) = EDiag.edges.partition(!_._1.isThisNode)
+ assert(incoming.length == 2) // F and G
+ assert(outgoing.head._2.length == 3) // B, C and T
+
+ val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
+ assert(outgoingSuperclass.length == 2) // B and C
+ assert(outgoingImplicit.length == 1, outgoingImplicit) // T
+
+ val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
+ assert(incomingSubclass.length == 2) // F and G
+ assert(incomingImplicit.length == 0)
+
+ assert(EDiag.nodes.length == 6) // E, B and C, F and G and the implicit conversion to T
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-inherited-nodes.check b/test/scaladoc/run/diagrams-inherited-nodes.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/diagrams-inherited-nodes.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-inherited-nodes.scala b/test/scaladoc/run/diagrams-inherited-nodes.scala
new file mode 100644
index 0000000000..8ac382aab8
--- /dev/null
+++ b/test/scaladoc/run/diagrams-inherited-nodes.scala
@@ -0,0 +1,69 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams.inherited.nodes {
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T1 {
+ trait A1
+ trait A2 extends A1
+ trait A3 extends A2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T2 extends T1 {
+ trait B1 extends A1
+ trait B2 extends A2 with B1
+ trait B3 extends A3 with B2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T3 {
+ self: T1 with T2 =>
+ trait C1 extends B1
+ trait C2 extends B2 with C1
+ trait C3 extends B3 with C2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T4 extends T3 with T2 with T1 {
+ trait D1 extends C1
+ trait D2 extends C2 with D1
+ trait D3 extends C3 with D2
+ }
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // base package
+ // Check the node and edge counts of each trait's content diagram
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")._package("inherited")._package("nodes")
+
+ def checkDiagram(t: String, nodes: Int, edges: Int) = {
+ // look up the trait and its content diagram
+ val T = base._trait(t)
+ val TDiag = T.contentDiagram.get
+ assert(TDiag.nodes.length == nodes, t + ": " + TDiag.nodes + ".length == " + nodes)
+ assert(TDiag.edges.map(_._2.length).sum == edges, t + ": " + TDiag.edges.mkString("List(\n", ",\n", "\n)") + ".map(_._2.length).sum == " + edges)
+ }
+
+ checkDiagram("T1", 3, 2)
+ checkDiagram("T2", 6, 7)
+ checkDiagram("T3", 3, 2)
+ checkDiagram("T4", 12, 17)
+ }
+} \ No newline at end of file
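
For reference, the expected counts follow from the declarations in the code string: T2's content diagram contains the inherited nodes A1-A3 plus B1-B3 (6 nodes) and the edges A2->A1, A3->A2, B1->A1, B2->A2, B2->B1, B3->A3 and B3->B2 (7 edges), while T3's diagram only contains C1-C3 (3 nodes, 2 edges) because the self-type on T1 with T2 does not make the A and B traits inherited members.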
diff --git a/test/scaladoc/run/implicits-ambiguating.check b/test/scaladoc/run/implicits-ambiguating.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-ambiguating.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-ambiguating.scala b/test/scaladoc/run/implicits-ambiguating.scala
new file mode 100644
index 0000000000..1420593b74
--- /dev/null
+++ b/test/scaladoc/run/implicits-ambiguating.scala
@@ -0,0 +1,114 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-ambiguating-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def isAmbiguous(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isAmbiguous).getOrElse(false)).getOrElse(false)
+
+ // SEE THE test/resources/implicits-ambiguating-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._object("ambiguating")
+ var conv1: ImplicitConversion = null
+ var conv2: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ conv1 = A._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = A._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** - conv1-5 should be ambiguous
+ * - conv6-7 should not be ambiguous
+ * - conv8 should be ambiguous
+ * - conv9 should be ambiguous
+ * - conv10 and conv11 should not be ambiguous */
+ def check1to9(cls: String): Unit = {
+ for (conv <- (1 to 5).map("conv" + _)) {
+ assert(isAmbiguous(conv1._member(conv)), cls + " - AtoX." + conv + " is ambiguous")
+ assert(isAmbiguous(conv2._member(conv)), cls + " - AtoZ." + conv + " is ambiguous")
+ }
+ for (conv <- (6 to 7).map("conv" + _)) {
+ assert(!isAmbiguous(conv1._member(conv)), cls + " - AtoX." + conv + " is not ambiguous")
+ assert(!isAmbiguous(conv2._member(conv)), cls + " - AtoZ." + conv + " is not ambiguous")
+ }
+ assert(isAmbiguous(conv1._member("conv8")), cls + " - AtoX.conv8 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv8")), cls + " - AtoZ.conv8 is ambiguous")
+ assert(isAmbiguous(conv1._member("conv9")), cls + " - AtoX.conv9 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv9")), cls + " - AtoZ.conv9 is ambiguous")
+ }
+ check1to9("A")
+ assert(!isAmbiguous(conv1._member("conv10")), "A - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "A - AtoZ.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "A - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "A - AtoZ.conv11 is not ambiguous")
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ conv1 = B._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = B._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
+ check1to9("B")
+ assert(isAmbiguous(conv1._member("conv10")), "B - AtoX.conv10 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv10")), "B - AtoZ.conv10 is ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "B - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "B - AtoZ.conv11 is not ambiguous")
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ conv1 = C._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = C._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
+ check1to9("C")
+ assert(!isAmbiguous(conv1._member("conv10")), "C - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "C - AtoZ.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "C - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "C - AtoZ.conv11 is not ambiguous")
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ conv1 = D._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = D._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
+ check1to9("D")
+ assert(!isAmbiguous(conv1._member("conv10")), "D - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "D - AtoZ.conv10 is not ambiguous")
+ assert(isAmbiguous(conv1._member("conv11")), "D - AtoX.conv11 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv11")), "D - AtoZ.conv11 is ambiguous")
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
index 06d017ed70..3d57306f5d 100644
--- a/test/scaladoc/run/implicits-base.scala
+++ b/test/scaladoc/run/implicits-base.scala
@@ -14,6 +14,9 @@ object Test extends ScaladocModelTest {
// get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
import access._
+ def isShadowed(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isShadowed).getOrElse(false)).getOrElse(false)
+
// SEE THE test/resources/implicits-base-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("base")
var conv: ImplicitConversion = null
@@ -22,8 +25,12 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
- // the method pimped on by pimpA0 should be shadowed by the method in class A
- assert(A._conversions(A.qualifiedName + ".pimpA0").isEmpty)
+ // def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
+ conv = A._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "T")
// def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
conv = A._conversion(A.qualifiedName + ".pimpA1")
@@ -53,6 +60,7 @@ object Test extends ScaladocModelTest {
conv = A._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
// def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
@@ -76,10 +84,16 @@ object Test extends ScaladocModelTest {
val B = base._class("B")
// these conversions should not affect B
- assert(B._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ // def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
+ conv = B._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Double")
+
    // def convToNumericA: Double // pimpA1: no constraints
conv = B._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -96,6 +110,7 @@ object Test extends ScaladocModelTest {
conv = B._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
// def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
@@ -119,11 +134,17 @@ object Test extends ScaladocModelTest {
val C = base._class("C")
// these conversions should not affect C
- assert(C._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+ // def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
+ conv = C._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Int")
+
// def convToNumericA: Int // pimpA1: no constraints
conv = C._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -140,6 +161,7 @@ object Test extends ScaladocModelTest {
conv = C._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
// def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
@@ -153,12 +175,18 @@ object Test extends ScaladocModelTest {
val D = base._class("D")
// these conversions should not affect D
- assert(D._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+ // def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
+ conv = D._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "String")
+
// def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
conv = D._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -169,6 +197,7 @@ object Test extends ScaladocModelTest {
conv = D._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
// def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
diff --git a/test/scaladoc/run/implicits-elimination.scala b/test/scaladoc/run/implicits-elimination.scala
deleted file mode 100644
index ed37b9cd90..0000000000
--- a/test/scaladoc/run/implicits-elimination.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.partest.ScaladocModelTest
-import language._
-
-object Test extends ScaladocModelTest {
-
- // test a file instead of a piece of code
- override def resourceFile = "implicits-elimination-res.scala"
-
- // start implicits
- def scaladocSettings = "-implicits"
-
- def testModel(root: Package) = {
- // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
- import access._
-
- // SEE THE test/resources/implicits-elimination-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
- val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("elimination")
- val A = base._class("A")
-
- assert(A._conversions(A.qualifiedName + ".toB").isEmpty)
- }
-}
diff --git a/test/scaladoc/run/implicits-shadowing.scala b/test/scaladoc/run/implicits-shadowing.scala
index 7835223d21..2827d31122 100644
--- a/test/scaladoc/run/implicits-shadowing.scala
+++ b/test/scaladoc/run/implicits-shadowing.scala
@@ -13,6 +13,9 @@ object Test extends ScaladocModelTest {
// get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
import access._
+ def isShadowed(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isShadowed).getOrElse(false)).getOrElse(false)
+
// SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._object("shadowing")
var conv: ImplicitConversion = null
@@ -22,12 +25,8 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
conv = A._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 5)
- conv._member("conv5")
- conv._member("conv8")
- conv._member("conv9")
- conv._member("conv10")
- conv._member("conv11")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
assert(conv.constraints.length == 0)
//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -35,11 +34,8 @@ object Test extends ScaladocModelTest {
val B = base._class("B")
conv = B._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 4)
- conv._member("conv5")
- conv._member("conv8")
- conv._member("conv9")
- conv._member("conv11")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
assert(conv.constraints.length == 0)
//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -47,12 +43,8 @@ object Test extends ScaladocModelTest {
val C = base._class("C")
conv = C._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 5)
- conv._member("conv5")
- conv._member("conv8")
- conv._member("conv9")
- conv._member("conv10")
- conv._member("conv11")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
assert(conv.constraints.length == 0)
//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -60,11 +52,8 @@ object Test extends ScaladocModelTest {
val D = base._class("D")
conv = D._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 4)
- conv._member("conv5")
- conv._member("conv8")
- conv._member("conv9")
- conv._member("conv10")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
assert(conv.constraints.length == 0)
}
-} \ No newline at end of file
+}
diff --git a/test/scaladoc/run/implicits-var-exp.check b/test/scaladoc/run/implicits-var-exp.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-var-exp.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-var-exp.scala b/test/scaladoc/run/implicits-var-exp.scala
new file mode 100644
index 0000000000..16569fe3c2
--- /dev/null
+++ b/test/scaladoc/run/implicits-var-exp.scala
@@ -0,0 +1,43 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.variable.expansion {
+ /**
+ * Blah blah blah
+ */
+ class A
+
+ object A {
+ import language.implicitConversions
+ implicit def aToB(a: A) = new B
+ }
+
+ /**
+ * @define coll collection
+ */
+ class B {
+ /**
+ * foo returns a $coll
+ */
+ def foo: Nothing = ???
+ }
+ }
+ """
+
+ // implicits must be enabled so that the members brought in by the conversion from A to B are documented
+ def scaladocSettings = "-implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("variable")._package("expansion")
+ val foo = base._class("A")._method("foo")
+
+ assert(foo.comment.get.body.toString.contains("foo returns a collection"), "\"" + foo.comment.get.body.toString + "\".contains(\"foo returns a collection\")")
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/package-object.check b/test/scaladoc/run/package-object.check
index 4297847e73..01dbcc682f 100644
--- a/test/scaladoc/run/package-object.check
+++ b/test/scaladoc/run/package-object.check
@@ -1,2 +1,3 @@
-List((test.B,B), (test.A,A), (scala.AnyRef,AnyRef), (scala.Any,Any))
+List(test.B, test.A, scala.AnyRef, scala.Any)
+List(B, A, AnyRef, Any)
Done.
diff --git a/test/scaladoc/run/package-object.scala b/test/scaladoc/run/package-object.scala
index fd36a8df7b..5fb5a4ddf1 100644
--- a/test/scaladoc/run/package-object.scala
+++ b/test/scaladoc/run/package-object.scala
@@ -9,7 +9,8 @@ object Test extends ScaladocModelTest {
import access._
val p = root._package("test")
- println(p.linearization)
+ println(p.linearizationTemplates)
+ println(p.linearizationTypes)
}
}
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index 68ca68efdd..b576ba5544 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -5,10 +5,12 @@ import scala.tools.nsc.Global
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.comment._
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
class Factory(val g: Global, val s: doc.Settings)
extends doc.model.ModelFactory(g, s) {
- thisFactory: Factory with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory =>
+ thisFactory: Factory with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with doc.model.TreeFactory =>
def strip(c: Comment): Option[Inline] = {
c.body match {
@@ -29,7 +31,7 @@ object Test extends Properties("CommentFactory") {
val settings = new doc.Settings((str: String) => {})
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
val g = new Global(settings, reporter)
- (new Factory(g, settings) with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory)
+ (new Factory(g, settings) with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with doc.model.TreeFactory)
}
def parse(src: String, dst: Inline) = {
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index a22747520c..64f62a103d 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -75,24 +75,21 @@ pushJarFile() {
local jar_dir=$(dirname $jar)
local jar_name=${jar#$jar_dir/}
pushd $jar_dir >/dev/null
- local jar_sha1=$(shasum -p $jar_name)
- local version=${jar_sha1% ?$jar_name}
+ local version=$(makeJarSha $jar_name)
local remote_uri=${version}${jar#$basedir}
echo " Pushing to ${remote_urlbase}/${remote_uri} ..."
echo " $curl"
curlUpload $remote_uri $jar_name $user $pw
echo " Making new sha1 file ...."
- echo "$jar_sha1" > "${jar_name}${desired_ext}"
+ echo "$version ?$jar_name" > "${jar_name}${desired_ext}"
popd >/dev/null
# TODO - Git remove jar and git add jar.desired.sha1
# rm $jar
}
-getJarSha() {
+makeJarSha() {
local jar=$1
- if [[ ! -f "$jar" ]]; then
- echo ""
- elif which sha1sum 2>/dev/null >/dev/null; then
+ if which sha1sum 2>/dev/null >/dev/null; then
shastring=$(sha1sum "$jar")
echo "$shastring" | sed 's/ .*//'
elif which shasum 2>/dev/null >/dev/null; then
@@ -104,6 +101,15 @@ getJarSha() {
fi
}
+getJarSha() {
+ local jar=$1
+ if [[ ! -f "$jar" ]]; then
+ echo ""
+ else
+ echo $(makeJarSha $jar)
+ fi
+}
+
# Tests whether or not the .desired.sha1 hash matches a given file.
# Argument 1 - The jar file to test for validity.
# Returns: Empty string on failure, "OK" on success.
diff --git a/tools/new-starr b/tools/new-starr
new file mode 100755
index 0000000000..5f00cc758e
--- /dev/null
+++ b/tools/new-starr
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+ant -Dscalac.args.optimise="-optimise" locker.done
+cp -R src/starr/* src/
+ant build-opt
+ant starr.done
diff --git a/tools/scaladoc-compare b/tools/scaladoc-compare
new file mode 100755
index 0000000000..74fbfd1dd4
--- /dev/null
+++ b/tools/scaladoc-compare
@@ -0,0 +1,50 @@
+#!/bin/bash
+#
+# Script to compare scaladoc raw files. For an explanation read the next echos.
+#
+
+if [ $# -ne 2 ]
+then
+ echo
+ echo "scaladoc-compare will compare the scaladoc-generated pages in two different locations and output the diff"
+ echo "it's main purpose is to track changes to scaladoc and prevent updates that break things."
+ echo
+ echo "This script is meant to be used with the scaladoc -raw-output option, as it compares .html.raw files "
+ echo "instead of markup-heavy .html files."
+ echo
+ echo "Script usage $0 <new api files path> <old api files path>"
+ echo " eg: $0 build/scaladoc/library build/scaladoc-prev/library | less"
+ echo
+ exit 1
+fi
+
+NEW_PATH=$1
+OLD_PATH=$2
+
+FILES=`find $NEW_PATH -name '*.html.raw'`
+if [ "$FILES" == "" ]
+then
+ echo "No .html.raw files found in $NEW_PATH!"
+ exit 1
+fi
+
+for NEW_FILE in $FILES
+do
+ OLD_FILE=${NEW_FILE/$NEW_PATH/$OLD_PATH}
+ if [ -f $OLD_FILE ]
+ then
+ #echo $NEW_FILE" => "$OLD_FILE
+ DIFF=`diff -q -w $NEW_FILE $OLD_FILE 2>&1`
+ if [ "$DIFF" != "" ]
+ then
+ # Redo the full diff
+ echo "$NEW_FILE:"
+ diff -w $NEW_FILE $OLD_FILE 2>&1
+ echo -e "\n\n"
+ fi
+ else
+ echo -e "$NEW_FILE: No corresponding file (expecting $OLD_FILE)\n\n"
+ fi
+done
+
+echo Done.