author     Aleksandar Prokopec <axel22@gmail.com>   2012-06-28 19:48:31 +0200
committer  Aleksandar Prokopec <axel22@gmail.com>   2012-06-28 19:48:31 +0200
commit     b379ff4f59c139ff7d2b77e7e808f0b74aa9f268 (patch)
tree       d531663cc08c71cc2631c119403af3032c71b543
parent     2f0d94c02ab328a3f8da25b5ab8f402a68143af3 (diff)
parent     6f08c06a35a0b70c49e23a296b13ac391a460584 (diff)
download   scala-b379ff4f59c139ff7d2b77e7e808f0b74aa9f268.tar.gz
           scala-b379ff4f59c139ff7d2b77e7e808f0b74aa9f268.tar.bz2
           scala-b379ff4f59c139ff7d2b77e7e808f0b74aa9f268.zip
Merge branch 'master' into issue/5846,4597,4027,4112
Conflicts:
    src/library/scala/collection/MapLike.scala
    src/library/scala/collection/SortedMapLike.scala
-rw-r--r--  META-INF/MANIFEST.MF | 4
-rw-r--r--  classpath.SAMPLE | 12
-rw-r--r--  project/Build.scala | 280
-rw-r--r--  project/Packaging.scala | 129
-rw-r--r--  project/Release.scala | 27
-rw-r--r--  project/RemoteDependencies.scala | 53
-rw-r--r--  project/Sametest.scala | 3
-rw-r--r--  project/ScalaBuildKeys.scala | 35
-rw-r--r--  project/ScalaToolRunner.scala | 21
-rw-r--r--  project/ShaResolve.scala | 70
-rw-r--r--  project/Testing.scala | 41
-rw-r--r--  src/compiler/scala/reflect/reify/Errors.scala | 5
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 14
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/ScalaDoc.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Printers.scala (renamed from src/compiler/scala/tools/nsc/ast/TreePrinters.scala) | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 172
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocFactory.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala | 58
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/Page.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 153
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala | 53
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala | 58
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 464
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala | 227
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png | bin 3910 -> 0 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css | 135
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js | 324
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png | bin 3903 -> 0 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js | 10
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 18
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js | 18
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png | bin 3882 -> 0 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/Entity.scala | 96
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 109
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 134
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/Body.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 140
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala | 143
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala | 248
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala | 197
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 118
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 232
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 106
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 270
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 200
-rw-r--r--  src/compiler/scala/tools/nsc/util/StatisticsInfo.scala | 38
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 4
-rw-r--r--  src/eclipse/README | 23
-rw-r--r--  src/eclipse/asm/.classpath | 6
-rw-r--r--  src/eclipse/asm/.project | 29
-rw-r--r--  src/eclipse/fjbg/.classpath | 7
-rw-r--r--  src/eclipse/fjbg/.project (renamed from project.SAMPLE) | 14
-rw-r--r--  src/eclipse/reflect/.classpath | 7
-rw-r--r--  src/eclipse/reflect/.project | 30
-rw-r--r--  src/eclipse/scala-compiler/.classpath | 14
-rw-r--r--  src/eclipse/scala-compiler/.project | 35
-rw-r--r--  src/eclipse/scala-library/.classpath | 7
-rw-r--r--  src/eclipse/scala-library/.project | 30
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 19
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/Iterator.scala | 3
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/SortedMapLike.scala | 8
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 7
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 17
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 289
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 318
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 33
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 21
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParIterable.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParIterable.scala | 3
-rw-r--r--  src/library/scala/concurrent/SyncVar.scala | 20
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 2
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 2
-rw-r--r--  src/library/scala/reflect/base/Trees.scala | 4
-rw-r--r--  src/library/scala/reflect/compat.scala | 33
-rw-r--r--  src/library/scala/reflect/makro/internal/package.scala | 2
-rw-r--r--  src/library/scala/reflect/package.scala | 2
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 8
-rw-r--r--  src/library/scala/util/control/ControlThrowable.scala | 5
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/ScaladocModelTest.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 94
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 36
-rw-r--r--  src/reflect/scala/reflect/api/TreePrinters.scala | 87
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 12
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala (renamed from src/reflect/scala/reflect/internal/TreePrinters.scala) | 176
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 57
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 113
-rw-r--r--  src/reflect/scala/reflect/internal/util/StatBase.scala | 97
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 290
-rw-r--r--  src/reflect/scala/reflect/makro/Universe.scala | 19
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 8
-rw-r--r--  src/scalacheck/org/scalacheck/Arbitrary.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Arg.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 23
-rw-r--r--  src/scalacheck/org/scalacheck/ConsoleReporter.scala | 27
-rw-r--r--  src/scalacheck/org/scalacheck/Gen.scala | 50
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/Prop.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Properties.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Shrink.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Test.scala | 191
-rw-r--r--  src/scalacheck/org/scalacheck/util/Buildable.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 9
-rw-r--r--  src/scalacheck/org/scalacheck/util/FreqMap.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/util/StdRand.scala | 2
-rw-r--r--  test/benchmarking/ParCtrie-size.scala | 11
-rw-r--r--  test/files/continuations-neg/trycatch2.scala | 6
-rw-r--r--  test/files/jvm/actmig-PinS.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS.scala | 112
-rw-r--r--  test/files/jvm/actmig-PinS_1.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_1.scala | 135
-rw-r--r--  test/files/jvm/actmig-PinS_2.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_2.scala | 155
-rw-r--r--  test/files/jvm/actmig-PinS_3.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_3.scala | 161
-rw-r--r--  test/files/jvm/actmig-hierarchy.check | 2
-rw-r--r--  test/files/jvm/actmig-hierarchy_1.check | 2
-rw-r--r--  test/files/jvm/actmig-instantiation.check | 8
-rw-r--r--  test/files/jvm/actmig-loop-react.check | 15
-rw-r--r--  test/files/jvm/actmig-loop-react.scala | 188
-rw-r--r--  test/files/jvm/actmig-public-methods.check | 6
-rw-r--r--  test/files/jvm/actmig-public-methods_1.check | 6
-rw-r--r--  test/files/jvm/actmig-public-methods_1.scala | 88
-rw-r--r--  test/files/jvm/actmig-react-receive.check | 16
-rw-r--r--  test/files/jvm/actmig-react-receive.scala | 111
-rw-r--r--  test/files/lib/scalacheck.jar.desired.sha1 | 2
-rw-r--r--  test/files/neg/catch-all.check | 10
-rw-r--r--  test/files/neg/catch-all.flags | 1
-rw-r--r--  test/files/neg/catch-all.scala | 19
-rw-r--r--  test/files/neg/macro-invalidret-nonuniversetree.check | 4
-rw-r--r--  test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala | 2
-rw-r--r--  test/files/neg/t2442.check | 9
-rw-r--r--  test/files/neg/t2442.flags | 1
-rw-r--r--  test/files/neg/t2442/MyEnum.java | 3
-rw-r--r--  test/files/neg/t2442/MySecondEnum.java | 6
-rw-r--r--  test/files/neg/t2442/t2442.scala | 15
-rw-r--r--  test/files/neg/t4270.check | 4
-rw-r--r--  test/files/neg/t4270.scala | 6
-rw-r--r--  test/files/neg/t4541.check | 7
-rw-r--r--  test/files/neg/t4541.scala | 16
-rw-r--r--  test/files/neg/t4541b.check | 7
-rw-r--r--  test/files/neg/t4541b.scala | 16
-rw-r--r--  test/files/neg/t4842.check | 7
-rw-r--r--  test/files/neg/t4842.scala | 7
-rw-r--r--  test/files/neg/t4989.check | 7
-rw-r--r--  test/files/neg/t4989.scala | 68
-rw-r--r--  test/files/neg/t5148.check | 2
-rw-r--r--  test/files/neg/t5148.scala | 4
-rw-r--r--  test/files/neg/t5334_1.check | 4
-rw-r--r--  test/files/neg/t5334_2.check | 4
-rw-r--r--  test/files/neg/t5376.check | 11
-rw-r--r--  test/files/neg/t5376.scala | 24
-rw-r--r--  test/files/neg/t5429.check | 20
-rw-r--r--  test/files/neg/t5617.check | 8
-rw-r--r--  test/files/neg/t5617.scala | 14
-rw-r--r--  test/files/neg/t5761.check | 16
-rw-r--r--  test/files/neg/t5761.scala | 16
-rw-r--r--  test/files/neg/t5839.check | 6
-rw-r--r--  test/files/neg/t5839.scala | 7
-rw-r--r--  test/files/pos/rangepos-anonapply.flags | 1
-rw-r--r--  test/files/pos/rangepos-anonapply.scala | 9
-rw-r--r--  test/files/pos/spec-params-new.scala | 2
-rw-r--r--  test/files/pos/t4842.scala | 26
-rw-r--r--  test/files/pos/t5899.flags | 1
-rw-r--r--  test/files/pos/t5899.scala | 19
-rw-r--r--  test/files/pos/t5910.java | 2
-rw-r--r--  test/files/pos/t5953.scala | 16
-rw-r--r--  test/files/pos/t5968.flags | 1
-rw-r--r--  test/files/pos/t5968.scala | 8
-rw-r--r--  test/files/presentation/ide-bug-1000531.check | 4
-rw-r--r--  test/files/run/collection-conversions.check | 126
-rw-r--r--  test/files/run/collection-conversions.scala | 64
-rw-r--r--  test/files/run/macro-reify-splice-splice/Macros_1.scala | 1
-rw-r--r--  test/files/run/showraw_mods.check | 1
-rw-r--r--  test/files/run/showraw_mods.scala | 6
-rw-r--r--  test/files/run/showraw_tree.check | 2
-rw-r--r--  test/files/run/showraw_tree.scala | 8
-rw-r--r--  test/files/run/showraw_tree_ids.check | 2
-rw-r--r--  test/files/run/showraw_tree_ids.scala | 8
-rw-r--r--  test/files/run/showraw_tree_kinds.check | 2
-rw-r--r--  test/files/run/showraw_tree_kinds.scala | 8
-rw-r--r--  test/files/run/showraw_tree_types_ids.check | 10
-rw-r--r--  test/files/run/showraw_tree_types_ids.scala | 10
-rw-r--r--  test/files/run/showraw_tree_types_typed.check | 10
-rw-r--r--  test/files/run/showraw_tree_types_typed.scala | 10
-rw-r--r--  test/files/run/showraw_tree_types_untyped.check | 2
-rw-r--r--  test/files/run/showraw_tree_types_untyped.scala | 8
-rw-r--r--  test/files/run/showraw_tree_ultimate.check | 10
-rw-r--r--  test/files/run/showraw_tree_ultimate.scala | 10
-rw-r--r--  test/files/run/t3326.check | 8
-rw-r--r--  test/files/run/t3326.scala | 74
-rw-r--r--  test/files/run/t4809.scala | 34
-rw-r--r--  test/files/run/t4935.check | 1
-rw-r--r--  test/files/run/t4935.flags | 1
-rw-r--r--  test/files/run/t4935.scala | 9
-rw-r--r--  test/files/run/t4954.scala | 45
-rw-r--r--  test/files/run/t5284.check | 1
-rw-r--r--  test/files/run/t5284.scala | 25
-rw-r--r--  test/files/run/t5284b.check | 1
-rw-r--r--  test/files/run/t5284b.scala | 28
-rw-r--r--  test/files/run/t5284c.check | 1
-rw-r--r--  test/files/run/t5284c.scala | 30
-rw-r--r--  test/files/run/t5914.check | 1
-rw-r--r--  test/files/run/t5914.scala | 23
-rw-r--r--  test/files/run/t5966.check | 3
-rw-r--r--  test/files/run/t5966.scala | 9
-rw-r--r--  test/files/run/t5971.check | 4
-rw-r--r--  test/files/run/t5971.scala | 23
-rw-r--r--  test/files/run/t5986.check | 15
-rw-r--r--  test/files/run/t5986.scala | 36
-rw-r--r--  test/files/scalacheck/CheckEither.scala | 1
-rw-r--r--  test/files/scalacheck/redblacktree.scala | 4
-rw-r--r--  test/pending/run/t5334_1.scala (renamed from test/files/neg/t5334_1.scala) | 0
-rw-r--r--  test/pending/run/t5334_2.scala (renamed from test/files/neg/t5334_2.scala) | 0
-rw-r--r--  test/scaladoc/resources/implicits-ambiguating-res.scala | 72
-rw-r--r--  test/scaladoc/resources/implicits-base-res.scala | 16
-rw-r--r--  test/scaladoc/resources/implicits-elimination-res.scala | 6
-rw-r--r--  test/scaladoc/run/diagrams-base.scala | 73
-rw-r--r--  test/scaladoc/run/diagrams-determinism.check | 1
-rw-r--r--  test/scaladoc/run/diagrams-determinism.scala | 67
-rw-r--r--  test/scaladoc/run/diagrams-filtering.check | 1
-rw-r--r--  test/scaladoc/run/diagrams-filtering.scala | 93
-rw-r--r--  test/scaladoc/run/implicits-ambiguating.check | 1
-rw-r--r--  test/scaladoc/run/implicits-ambiguating.scala | 111
-rw-r--r--  test/scaladoc/run/implicits-base.scala | 36
-rw-r--r--  test/scaladoc/run/implicits-elimination.check (renamed from test/scaladoc/run/diagrams-base.check) | 0
-rw-r--r--  test/scaladoc/run/implicits-elimination.scala | 23
-rw-r--r--  test/scaladoc/run/implicits-shadowing.scala | 32
-rw-r--r--  test/scaladoc/scalacheck/CommentFactoryTest.scala | 6
271 files changed, 5060 insertions, 4900 deletions
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 9cbe99ab23..28a70d2879 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -40,9 +40,13 @@ Export-Package:
scala.tools.nsc.util,
scala.tools.util,
scala.reflect.internal,
+ scala.reflect.internal.pickling,
scala.reflect.internal.settings,
+ scala.reflect.internal.util,
+ scala.reflect.makro,
scala.reflect.runtime,
scala.reflect.internal.transform,
+ scala.reflect.api,
ch.epfl.lamp.compiler.msil,
ch.epfl.lamp.compiler.msil.emit,
ch.epfl.lamp.compiler.msil.util,
diff --git a/classpath.SAMPLE b/classpath.SAMPLE
deleted file mode 100644
index 9e607a41d9..0000000000
--- a/classpath.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src/compiler"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/fjbg.jar"/>
- <classpathentry kind="lib" path="lib/forkjoin.jar"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="output" path="build/quick/classes/compiler"/>
-</classpath>
diff --git a/project/Build.scala b/project/Build.scala
index e2658e3405..58d322108b 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -1,12 +1,11 @@
import sbt._
import Keys._
import partest._
-import SameTest._
import ScalaBuildKeys._
+import Release._
-
-object ScalaBuild extends Build with Layers {
+object ScalaBuild extends Build with Layers with Packaging with Testing {
// Build wide settings:
override lazy val settings = super.settings ++ Versions.settings ++ Seq(
@@ -21,36 +20,13 @@ object ScalaBuild extends Build with Layers {
),
organization := "org.scala-lang",
version <<= Versions.mavenVersion,
- pomExtra := epflPomExtra,
- commands += Command.command("fix-uri-projects") { (state: State) =>
- if(state.get(buildFixed) getOrElse false) state
- else {
- // TODO -fix up scalacheck's dependencies!
- val extracted = Project.extract(state)
- import extracted._
- def fix(s: Setting[_]): Setting[_] = s match {
- case ScopedExternalSetting(`scalacheck`, scalaInstance.key, setting) => fullQuickScalaReference mapKey Project.mapScope(_ => s.key.scope)
- case s => s
- }
- val transformed = session.mergeSettings map ( s => fix(s) )
- val scopes = transformed collect { case ScopedExternalSetting(`scalacheck`, _, s) => s.key.scope } toSet
- // Create some fixers so we don't download scala or rely on it.
- val fixers = for { scope <- scopes
- setting <- Seq(autoScalaLibrary := false, crossPaths := false)
- } yield setting mapKey Project.mapScope(_ => scope)
- val newStructure = Load.reapply(transformed ++ fixers, structure)
- Project.setProject(session, newStructure, state).put(buildFixed, true)
- }
- },
- onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
- "fix-uri-projects" :: state
- })
- )
+ pomExtra := epflPomExtra
+ )
// Collections of projects to run 'compile' on.
- lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg)
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, actorsMigration, swing, forkjoin, fjbg)
// Collection of projects to 'package' and 'publish' together.
- lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, continuationsPlugin, jline, scalap)
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, actorsMigration, continuationsPlugin, jline, scalap)
lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
private def epflPomExtra = (
@@ -110,7 +86,6 @@ object ScalaBuild extends Build with Layers {
},
// TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
commands += Release.pushStarr
- //commands += Release.setStarrHome
)
// Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
@@ -123,6 +98,11 @@ object ScalaBuild extends Build with Layers {
)
)
+ def fixArtifactSrc(dir: File, name: String) = name match {
+ case x if x startsWith "scala-" => dir / "src" / (name drop 6)
+ case x => dir / "src" / name
+ }
+
// These are setting overrides for most artifacts in the Scala build file.
def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
crossPaths := false,
@@ -134,9 +114,8 @@ object ScalaBuild extends Build with Layers {
target <<= (baseDirectory, name) apply (_ / "target" / _),
(classDirectory in Compile) <<= target(_ / "classes"),
javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
+ scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
+ javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
unmanagedJars in Compile := Seq(),
// Most libs in the compiler use this order to build.
compileOrder in Compile := CompileOrder.JavaThenScala,
@@ -187,6 +166,7 @@ object ScalaBuild extends Build with Layers {
// Reference to quick scala instance.
lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
// Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
@@ -222,17 +202,18 @@ object ScalaBuild extends Build with Layers {
}
// TODO - in sabbus, these all use locker to build... I think tihs way is better, but let's farm this idea around.
- // TODO - Actors + swing separate jars...
lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
- lazy val actors = Project("actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
- // TODO - Remove actors dependency from pom...
- lazy val swing = Project("swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ lazy val actorsMigration = Project("scala-actors-migration", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
// This project will generate man pages (in man1 and html) for scala.
lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
// Things that compile against the compiler.
- lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+
+ lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
@@ -267,7 +248,7 @@ object ScalaBuild extends Build with Layers {
// --------------------------------------------------------------
val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
- lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin, actors).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
name := "scala-library",
crossPaths := false,
@@ -282,14 +263,13 @@ object ScalaBuild extends Build with Layers {
)
lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
- // TODO - Real Reflect instance
-
// --------------------------------------------------------------
- // Real Compiler Artifact
+ // Real Reflect Artifact
// --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
- name := "scala-compiler",
+
+ lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
+ name := "scala-reflect",
crossPaths := false,
exportJars := true,
autoScalaLibrary := false,
@@ -298,52 +278,33 @@ object ScalaBuild extends Build with Layers {
quickScalaInstance,
target <<= (baseDirectory, name) apply (_ / "target" / _)
)
- lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
- lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, quickReflect, scalaCompiler)
+ lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
+
// --------------------------------------------------------------
- // Testing
+ // Real Compiler Artifact
// --------------------------------------------------------------
- /* lazy val scalacheckSettings: Seq[Setting[_]] = Seq(fullQuickScalaReference, crossPaths := false)*/
- lazy val scalacheck = uri("git://github.com/jsuereth/scalacheck.git#scala-build")
-
- lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
- unmanagedBase <<= baseDirectory / "test/files/lib",
- fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
autoScalaLibrary := false,
- checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
- checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
- checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
- autoScalaLibrary := false
- )
- lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
- scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- },
- partestDirs <<= baseDirectory apply { bd =>
- def mkFile(name: String) = bd / "test" / "files" / name
- def mkTestType(name: String) = name.drop("continuations-".length).toString
- Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
- }
- )
- val testsuite = (
- Project("testsuite", file("."))
- settings (testsuiteSettings:_*)
- dependsOn (swing, scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
- )
- val continuationsTestsuite = (
- Project("continuations-testsuite", file("."))
- settings (continuationsTestsuiteSettings:_*)
- dependsOn (partest, swing, scalaLibrary, scalaCompiler, fjbg)
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
)
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
+
// --------------------------------------------------------------
// Generating Documentation.
// --------------------------------------------------------------
// TODO - Migrate this into the dist project.
// Scaladocs
- def distScalaInstance = makeScalaReference("dist", scalaLibrary, quickReflect, scalaCompiler)
lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
// TODO - Make these work for realz.
defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
@@ -373,163 +334,4 @@ object ScalaBuild extends Build with Layers {
settings (documentationSettings: _*)
dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
)
-
- // --------------------------------------------------------------
- // Packaging a distro
- // --------------------------------------------------------------
-
- class ScalaToolRunner(classpath: Classpath) {
- // TODO - Don't use the ant task directly...
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
- lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
- lazy val executeMethod = mainClass.getMethod("execute")
- lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
- lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
- lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
- lazy val instance = mainClass.newInstance()
-
- def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
- def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
- def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
- def execute(): Unit = executeMethod.invoke(instance)
- }
-
- def genBinTask(
- runner: ScopedTask[ScalaToolRunner],
- outputDir: ScopedSetting[File],
- classpath: ScopedTask[Classpath],
- useClasspath: Boolean
- ): Project.Initialize[sbt.Task[Map[File,String]]] = {
- (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
- IO.createDirectory(outDir)
- val classToFilename = Map(
- "scala.tools.nsc.MainGenericRunner" -> "scala",
- "scala.tools.nsc.Main" -> "scalac",
- "scala.tools.nsc.ScalaDoc" -> "scaladoc",
- "scala.tools.nsc.CompileClient" -> "fsc",
- "scala.tools.scalap.Main" -> "scalap"
- )
- if (useClasspath) {
- val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
- s.log.debug("Setting classpath = " + classpath)
- runner setClasspath classpath
- }
- def genBinFiles(cls: String, dest: File) = {
- runner.setClass(cls)
- runner.setFile(dest)
- runner.execute()
- // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
- dest.setExecutable(true)
- }
- def makeBinMappings(cls: String, binName: String): Map[File,String] = {
- val file = outDir / binName
- val winBinName = binName + ".bat"
- genBinFiles(cls, file)
- Map( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
- }
- classToFilename.flatMap((makeBinMappings _).tupled).toMap
- }
- }
- def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Map[File,String]]] =
- (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
- val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
- binaries map { bin =>
- val file = target / "man" / dir / (bin + ext)
- val classname = "scala.man1." + bin
- IO.createDirectory(file.getParentFile)
- toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
- file -> ("man/" + dir + "/" + bin + ext)
- } toMap
- }
-
- val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner",
- "Creates a utility to generate script files for Scala.")
- val genBin = TaskKey[Map[File,String]]("gen-bin",
- "Creates script files for Scala distribution.")
- val binDir = SettingKey[File]("binaries-directory",
- "Directory where binary scripts will be located.")
- val genBinQuick = TaskKey[Map[File,String]]("gen-quick-bin",
- "Creates script files for testing against current Scala build classfiles (not local dist).")
- val runManmakerMan = TaskKey[Map[File,String]]("make-man",
- "Runs the man maker project to generate man pages")
- val runManmakerHtml = TaskKey[Map[File,String]]("make-html",
- "Runs the man maker project to generate html pages")
-
- lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
- crossPaths := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
- binDir <<= target(_/"bin"),
- genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
- binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
- // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
- fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
- genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
- runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
- runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
- // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
- // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
- // really need to figure out a better way to pull jline + jansi.
- makeDistMappings <<= (genBin,
- runManmakerMan,
- runManmakerHtml,
- packageBin in scalaLibrary in Compile,
- packageBin in scalaCompiler in Compile,
- packageBin in jline in Compile,
- packageBin in continuationsPlugin in Compile,
- managedClasspath in jline in Compile,
- packageBin in scalap in Compile) map {
- (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
- val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
- binaries ++ man ++ html ++ jlineDepMap ++ Seq(
- lib -> "lib/scala-library.jar",
- comp -> "lib/scala-compiler.jar",
- jline -> "lib/jline.jar",
- continuations -> "misc/scala-devel/plugins/continuations.jar",
- scalap -> "lib/scalap.jar"
- ) toMap
- },
- // Add in some more dependencies
- makeDistMappings <<= (makeDistMappings,
- packageBin in swing in Compile) map {
- (dist, s) =>
- dist ++ Seq(s -> "lib/scala-swing.jar")
- },
- makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
- s.log.debug("Map = " + maps.mkString("\n"))
- val file = dir / "target" / "scala-dist.zip"
- IO.zip(maps, file)
- s.log.info("Created " + file.getAbsolutePath)
- file
- },
- makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
- def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
- IO.createDirectory(dir)
- IO.copy(for {
- (file, name) <- maps
- val file2 = dir / name
- if !sameFile(file,file2)
- } yield (file, file2))
- // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
- maps.values filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
- dir
- }
- )
- lazy val scaladist = (
- Project("dist", file("."))
- settings (scalaDistSettings: _*)
- )
-}
-
-/** Matcher to make updated remote project references easier. */
-object ScopedExternalSetting {
- def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
- s.key.scope.project match {
- case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
- case _ => None
- }
}
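
For context: the change above splits the monolithic build definition into traits (Packaging, Testing) mixed into the root Build object, with every project rooted at file(".") and redirected to its own target directory. A minimal sketch of that pre-1.x sbt pattern, with illustrative names (MyBuild, Docs, core) that are not part of this commit:

    import sbt._
    import Keys._

    // Illustrative trait contributed by a separate project/*.scala file.
    trait Docs { self: MyBuild.type =>
      lazy val docs = Project("docs", file(".")) settings(
        // Every project shares file("."), so each must redirect its target.
        target <<= (baseDirectory, name) apply (_ / "target" / _)
      ) dependsOn(core)
    }

    // Root build object mixing the trait in, as ScalaBuild does with Packaging and Testing.
    object MyBuild extends Build with Docs {
      lazy val core = Project("core", file(".")) settings(
        target <<= (baseDirectory, name) apply (_ / "target" / _)
      )
    }
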
diff --git a/project/Packaging.scala b/project/Packaging.scala
new file mode 100644
index 0000000000..eb4e69f99e
--- /dev/null
+++ b/project/Packaging.scala
@@ -0,0 +1,129 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+/** All the settings related to *packaging* the built scala software. */
+trait Packaging { self: ScalaBuild.type =>
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ )
+ },
+ // Add in some more dependencies
+ makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
+ makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+
+
+// Helpers to make a distribution
+
+ /** Generates runner scripts for distribution. */
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Seq(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled)
+ }
+ }
+ /** Creates man pages for distribution. */
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ }
+ }
+}
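
The packaging code above builds Seq[(File, String)] mappings, pairing a file on disk with the path it should occupy inside the archive, and hands them to sbt's IO.zip. A minimal sketch of that shape (file names and paths are illustrative):

    import sbt._

    object DistSketch {
      // Each mapping pairs a file on disk with its path inside the zip.
      def makeZip(libraryJar: File, launcher: File, out: File): File = {
        val mappings: Seq[(File, String)] = Seq(
          libraryJar -> "lib/scala-library.jar",
          launcher   -> "bin/scala"
        )
        IO.zip(mappings, out)   // same call makeDist uses above
        out
      }
    }
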
diff --git a/project/Release.scala b/project/Release.scala
index 1a17956c13..feab8bdc8c 100644
--- a/project/Release.scala
+++ b/project/Release.scala
@@ -5,11 +5,26 @@ object Release {
// TODO - Just make the STARR artifacts and dump the sha1 files.
+ val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
- lazy val pushStarr = Command.command("push-starr") { (state: State) =>
- // TODO do something
- // Revert to previous project state.
- state
- }
-
+ val pushStarr = Command.command("new-starr") { (state: State) =>
+ /*val extracted = Project.extract(state)
+ import extracted._
+ // First run tests
+ val (s1, result) = runTask(test in Test, state)
+ // If successful, package artifacts
+ val (s2, distDir) = runTask(makeExplodedDist, s1)
+ // Then copy new libs in place
+ val bd = extracted get baseDirectory
+ for {
+ jarName <- starrLibs
+ jar = distDir / "lib" / jarName
+ if jar.exists
+ } IO.copyFile(jar, bd / "lib" / jarName)
+ // Invalidate SHA1 files.
+ ShaResolve.removeInvalidShaFiles(bd)
+ // Now run tests *again*?
+ s2*/
+ state
+ }
}
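
push-starr (registered here under the name new-starr) is an sbt command: a named State => State function added to the commands setting. A minimal sketch of such a command, with an illustrative name and body:

    import sbt._

    object CommandSketch {
      // A command is a named State => State function; returning the state
      // unchanged (as the stubbed-out new-starr above does) is legal.
      val greet = Command.command("greet") { (state: State) =>
        println("greetings from a custom command")
        state
      }
      // registered via:  commands += greet
    }
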
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
new file mode 100644
index 0000000000..705b9dc402
--- /dev/null
+++ b/project/RemoteDependencies.scala
@@ -0,0 +1,53 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+
+object RemoteDependencies {
+ def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO -fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ val scalaVersionString = extracted get version
+
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p) => localScala mapKey Project.mapScope(_ => s.key.scope)
+ // TODO - Fix Actors dependency...
+ //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p) => fixProjectDeps(s)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ // Also add dependencies that disappear in 2.10 for now...
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false,
+ crossPaths := false,
+ scalaVersion := scalaVersionString)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+}
+
+
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
+
+
+
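
ScopedExternalSetting is a custom extractor: an object whose unapply lets settings be matched by project, key, and value inside a pattern match. The same technique shown on a plain, self-contained data type (all names here are illustrative, not sbt's):

    // A plain data type standing in for an sbt Setting[_].
    final case class BuildSetting(project: String, key: String, value: String)

    // Custom extractor: matching on ProjectSetting(...) calls this unapply.
    object ProjectSetting {
      def unapply(s: BuildSetting): Option[(String, String, String)] =
        Some((s.project, s.key, s.value))
    }

    object ExtractorDemo extends App {
      val settings = List(
        BuildSetting("scalacheck", "scalaVersion", "2.10.0"),
        BuildSetting("core", "crossPaths", "false")
      )
      // Keep only settings scoped to the "scalacheck" project, much as fix() filters above.
      val picked = settings collect { case ProjectSetting("scalacheck", k, v) => k + " := " + v }
      println(picked)   // List(scalaVersion := 2.10.0)
    }
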
diff --git a/project/Sametest.scala b/project/Sametest.scala
index f44fe8ec65..6f12eb24b3 100644
--- a/project/Sametest.scala
+++ b/project/Sametest.scala
@@ -5,9 +5,6 @@ import Keys._
// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
object SameTest {
- lazy val checkSame: TaskKey[Unit] = TaskKey("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
- lazy val checkSameLibrary: TaskKey[Unit] = TaskKey("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
- lazy val checkSameCompiler: TaskKey[Unit] = TaskKey("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
(classDirectory in Compile in lhs, classDirectory in Compile in rhs,
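
checkSameBinaryProjects compares the class files produced by two builds for binary sameness. The core of such a check, reduced to one byte-for-byte file comparison (paths are illustrative; Java 7+ NIO assumed):

    import java.nio.file.{Files, Paths}
    import java.util.Arrays

    object SameBinaries {
      // Compares the raw bytes of two class files (throws if either is missing).
      def sameClassFile(a: String, b: String): Boolean =
        Arrays.equals(Files.readAllBytes(Paths.get(a)), Files.readAllBytes(Paths.get(b)))

      def main(args: Array[String]): Unit =
        println(sameClassFile("quick/classes/Predef.class", "strapp/classes/Predef.class"))
    }
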
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
index dbde6bd18c..9e495de19f 100644
--- a/project/ScalaBuildKeys.scala
+++ b/project/ScalaBuildKeys.scala
@@ -2,21 +2,22 @@ import sbt._
import Keys._
object ScalaBuildKeys {
- val lockerLock: TaskKey[Unit] = TaskKey("locker-lock",
- "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
- val lockerUnlock: TaskKey[Unit] = TaskKey("locker-unlock",
- "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
- val lockFile: SettingKey[File] = SettingKey("lock-file",
- "Location of the lock file compiling this project.")
- // New tasks/settings specific to the scala build.
- val lock: TaskKey[Unit] = TaskKey("lock", "Locks this project so it won't be recompiled.")
- val unlock: TaskKey[Unit] = TaskKey("unlock", "Unlocks this project so it will be recompiled.")
- val makeDist: TaskKey[File] = TaskKey("make-dist",
- "Creates a mini-distribution (scala home directory) for this build in a zip file.")
- val makeExplodedDist: TaskKey[File] = TaskKey("make-exploded-dist",
- "Creates a mini-distribution (scala home directory) for this build in a directory.")
- val makeDistMappings: TaskKey[Map[File, String]] = TaskKey("make-dist-mappings",
- "Creates distribution mappings for creating zips,jars,directorys,etc.")
- val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
-
+ val lockerLock = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ val lockerUnlock = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ val lockFile = SettingKey[File]("lock-file", "Location of the lock file compiling this project.")
+ val lock = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
+ val unlock = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
+ val makeDist = TaskKey[File]("make-dist", "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ val makeExplodedDist = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ val makeDistMappings = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips,jars,directorys,etc.")
+ val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages")
+ val runManmakerHtml = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages")
+ val checkSame = TaskKey[Unit]("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
+ val checkSameLibrary = TaskKey[Unit]("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
+ val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
}
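
Each key above is only a declaration; a project must still wire a task or value to it. A minimal sketch of declaring one custom TaskKey and wiring it in the same sbt 0.12-style syntax (the key name is illustrative):

    import sbt._
    import Keys._

    object KeySketch {
      // Declaration: a key is just a name, a type, and a description...
      val printTarget = TaskKey[Unit]("print-target", "Prints this project's target directory.")

      // ...until a project wires an implementation to it.
      val wiring: Setting[_] = printTarget <<= target map { dir => println(dir.getAbsolutePath) }
    }
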
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
new file mode 100644
index 0000000000..d7338a54b3
--- /dev/null
+++ b/project/ScalaToolRunner.scala
@@ -0,0 +1,21 @@
+import sbt._
+import Keys._
+
+/** Reflection helper that runs ScalaTool.
+ * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
+ */
+class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+}
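
ScalaToolRunner drives scala.tools.ant.ScalaTool purely through reflection, so the build needs no compile-time dependency on it. The same load / getMethod / invoke pattern against a stand-in JDK class (chosen only for illustration):

    object ReflectionSketch extends App {
      // Load a class by name, instantiate it, look up methods, and invoke them;
      // java.lang.StringBuilder is just a convenient stand-in for ScalaTool.
      val cls      = Class.forName("java.lang.StringBuilder")
      val instance = cls.newInstance()
      val append   = cls.getMethod("append", classOf[String])
      val render   = cls.getMethod("toString")

      append.invoke(instance, "invoked reflectively")
      println(render.invoke(instance))   // prints: invoked reflectively
    }
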
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
index e6824ee464..cea2b2d6cc 100644
--- a/project/ShaResolve.scala
+++ b/project/ShaResolve.scala
@@ -7,6 +7,7 @@ import scala.collection.{ mutable, immutable }
import scala.collection.parallel.CompositeThrowable
import java.security.MessageDigest
+case class Credentials(user: String, pw: String)
/** Helpers to resolve SHA artifacts from typesafe repo. */
object ShaResolve {
@@ -19,7 +20,8 @@ object ShaResolve {
def settings: Seq[Setting[_]] = Seq(
binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
- pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs
+ pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
+ pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
)
def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
@@ -33,6 +35,34 @@ object ShaResolve {
} pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
}
+ /** This method removes all SHA1 files that don't match their corresponding JAR. */
+ def removeInvalidShaFiles(dir: File): Unit = {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ } IO.delete(jar)
+ }
+ def getCredentials: Credentials = System.out.synchronized {
+ val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
+ val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
+ Credentials(user, password)
+ }
+
+ def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
+ pushFiles(dir, getCredentials, s)
+
+ def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
+ for {
+ (jar, name) <- (files x relativeTo(dir)).par
+ shafile = dir / (name + ".desired.sha1")
+ if !shafile.exists || !isValidSha(shafile)
+ } pushFile(jar, name, cred, s)
+ }
+
@inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
case t: CompositeThrowable =>
s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
@@ -60,24 +90,17 @@ object ShaResolve {
sha
}
- // TODO - Prettier way of doing this...
- private def convertToHex(data: Array[Byte]): String = {
+ def convertToHex(data: Array[Byte]): String = {
+ def byteToHex(b: Int) =
+ if ((0 <= b) && (b <= 9)) ('0' + b).toChar
+ else ('a' + (b-10)).toChar
val buf = new StringBuffer
- for (i <- 0 until data.length) {
- var halfbyte = (data(i) >>> 4) & 0x0F;
- var two_halfs = 0;
- while(two_halfs < 2) {
- if ((0 <= halfbyte) && (halfbyte <= 9))
- buf.append(('0' + halfbyte).toChar)
- else
- buf.append(('a' + (halfbyte - 10)).toChar);
- halfbyte = data(i) & 0x0F;
- two_halfs += 1
- }
- }
- return buf.toString
- }
-
+ for (i <- 0 until data.length) {
+ buf append byteToHex((data(i) >>> 4) & 0x0F)
+ buf append byteToHex(data(i) & 0x0F)
+ }
+ buf.toString
+ }
// Parses a sha file into a file and a sha.
def parseShaFile(file: File): (File, String) =
IO.read(file).split("\\s") match {
@@ -110,10 +133,15 @@ object ShaResolve {
IO.copyFile(cachedFile, file)
}
- def pushFile(file: File, uri: String, user: String, pw: String): Unit = {
- val url = remote_urlbase + "/" + uri
- val sender = dispatch.url(url).PUT.as(user,pw) <<< (file, "application/java-archive")
+ // Pushes a file and writes the new .desired.sha1 for git.
+ def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
+ val sha = calculateSha(file)
+ val url = remote_urlbase + "/" + sha + "/" + uri
+ val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
// TODO - output to logger.
Http(sender >>> System.out)
+ val shafile = file.getParentFile / (file.getName + ".desired.sha1")
+ IO.touch(shafile)
+ IO.write(shafile, sha + " ?" + file.getName)
}
}
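
The SHA handling above boils down to hashing a file with MessageDigest and hex-encoding the digest; the " ?" marker mirrors the .desired.sha1 layout that pushFile writes. A minimal, self-contained sketch of that calculation:

    import java.io.{File, FileInputStream}
    import java.security.MessageDigest

    object Sha1Sketch {
      def sha1(file: File): String = {
        val md  = MessageDigest.getInstance("SHA-1")
        val in  = new FileInputStream(file)
        val buf = new Array[Byte](8192)
        try {
          var read = in.read(buf)
          while (read != -1) { md.update(buf, 0, read); read = in.read(buf) }
        } finally in.close()
        // Hex-encode the 20-byte digest, as convertToHex does above.
        md.digest().map(b => "%02x".format(b & 0xff)).mkString
      }

      def main(args: Array[String]): Unit = {
        val f = new File(args(0))
        // Same "<sha> ?<name>" layout that pushFile writes into *.desired.sha1.
        println(sha1(f) + " ?" + f.getName)
      }
    }
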
diff --git a/project/Testing.scala b/project/Testing.scala
new file mode 100644
index 0000000000..cec1d8c60b
--- /dev/null
+++ b/project/Testing.scala
@@ -0,0 +1,41 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+import ScalaBuildKeys._
+
+/** All settings/projects relating to testing. */
+trait Testing { self: ScalaBuild.type =>
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ())
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck, actorsMigration)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, scalaLibrary, scalaCompiler, fjbg, actorsMigration)
+ )
+
+}
+
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 714795503b..1b72b3075b 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -17,11 +17,6 @@ trait Errors {
// expected errors: these can happen if the user casually writes whatever.reify(...)
// hence we don't crash here, but nicely report a typechecking error and bail out asap
- def CannotReifyReifeeThatHasTypeLocalToReifee(tree: Tree) = {
- val msg = "implementation restriction: cannot reify block of type %s that involves a type declared inside the block being reified. consider casting the return value to a suitable type".format(tree.tpe)
- throw new ReificationError(tree.pos, msg)
- }
-
def CannotReifyType(tpe: Type) = {
val msg = "implementation restriction: cannot reify type %s (%s)".format(tpe, tpe.kind)
throw new ReificationError(defaultErrorPosition, msg)
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 00f25f0d8b..8fba7274be 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -68,20 +68,6 @@ abstract class Reifier extends States
val pipeline = mkReificationPipeline
val rtree = pipeline(tree)
- // consider the following code snippet
- //
- // val x = reify { class C; new C }
- //
- // inferred type for x will be C
- // but C ceases to exist after reification so this type is clearly incorrect
- // however, reify is "just" a library function, so it cannot affect type inference
- //
- // hence we crash here even though the reification itself goes well
- // fortunately, all that it takes to fix the error is to cast "new C" to Object
- // so I'm not very much worried about introducing this restriction
- if (tree.tpe exists (sub => sub.typeSymbol.isLocalToReifee))
- CannotReifyReifeeThatHasTypeLocalToReifee(tree)
-
val tpe = typer.packedType(tree, NoSymbol)
val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false)
state.reificationIsConcrete &= tpeReificationIsConcrete
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index ce0ab2196a..7214da597e 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -15,41 +15,6 @@ trait NodePrinters {
import Flag._
object reifiedNodeToString extends (Tree => String) {
- // [Eugene++ to Martin] can we do better?
- // didn't want to invent anything myself in order not to interfere with your line of thought
- def bitsToFlags(bits: String): String = {
- val flags = bits.toLong
- if (flags == NoFlags) nme.NoFlags.toString
- else {
- val s_flags = new collection.mutable.ListBuffer[String]
- if (flags containsAll TRAIT) s_flags += "TRAIT"
- if (flags containsAll MODULE) s_flags += "MODULE"
- if (flags containsAll MUTABLE) s_flags += "MUTABLE"
- if (flags containsAll PACKAGE) s_flags += "PACKAGE"
- if (flags containsAll METHOD) s_flags += "METHOD"
- if (flags containsAll DEFERRED) s_flags += "DEFERRED"
- if (flags containsAll ABSTRACT) s_flags += "ABSTRACT"
- if (flags containsAll FINAL) s_flags += "FINAL"
- if (flags containsAll SEALED) s_flags += "SEALED"
- if (flags containsAll IMPLICIT) s_flags += "IMPLICIT"
- if (flags containsAll LAZY) s_flags += "LAZY"
- if (flags containsAll OVERRIDE) s_flags += "OVERRIDE"
- if (flags containsAll PRIVATE) s_flags += "PRIVATE"
- if (flags containsAll PROTECTED) s_flags += "PROTECTED"
- if (flags containsAll CASE) s_flags += "CASE"
- if (flags containsAll ABSOVERRIDE) s_flags += "ABSOVERRIDE"
- if (flags containsAll BYNAMEPARAM) s_flags += "BYNAMEPARAM"
- if (flags containsAll PARAM) s_flags += "PARAM"
- if (flags containsAll PARAMACCESSOR) s_flags += "PARAMACCESSOR"
- if (flags containsAll CASEACCESSOR) s_flags += "CASEACCESSOR"
- if (flags containsAll COVARIANT) s_flags += "COVARIANT"
- if (flags containsAll CONTRAVARIANT) s_flags += "CONTRAVARIANT"
- if (flags containsAll DEFAULTPARAM) s_flags += "DEFAULTPARAM"
- if (flags containsAll INTERFACE) s_flags += "INTERFACE"
- s_flags mkString " | "
- }
- }
-
def apply(tree: Tree): String = {
var mirrorIsUsed = false
var flagsAreUsed = false
@@ -70,7 +35,7 @@ trait NodePrinters {
s = s.replace("immutable.this.Nil", "List()")
s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
flagsAreUsed = true
- bitsToFlags(m.group(1))
+ show(m.group(1).toLong)
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
@@ -87,7 +52,7 @@ trait NodePrinters {
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
flagsAreUsed = true
- buf.append(bitsToFlags(bits))
+ buf.append(show(bits.toLong))
}
val replacement = "Modifiers(" + buf.reverse.mkString(", ") + ")"
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 787c9c7f57..35bf2dd288 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -12,7 +12,7 @@ import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, Statistics, StatisticsInfo, ScalaClassLoader, returning }
+import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
@@ -39,7 +39,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
with Plugins
with PhaseAssembly
with Trees
- with TreePrinters
+ with Printers
with DocComments
with Positions { self =>
@@ -1667,13 +1667,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Compile abstract file until `globalPhase`, but at least to phase "namer".
*/
def compileLate(unit: CompilationUnit) {
- val maxId = math.max(globalPhase.id, typerPhase.id)
addUnit(unit)
- firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
- )
- refreshProgress
+ if (firstPhase ne null) { // we might get here during initialization, if a source is newer than the binary
+ val maxId = math.max(globalPhase.id, typerPhase.id)
+ firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
+ atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+ refreshProgress
+ }
}
/** Reset package class to state at typer (not sure what this
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index c6fdb4b891..5a4b4172c6 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -20,8 +20,7 @@ class ScalaDoc {
def process(args: Array[String]): Boolean = {
var reporter: ConsoleReporter = null
- val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
- msg => reporter.printMessage(msg))
+ val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"))
reporter = new ConsoleReporter(docSettings) {
// need to do this so that the Global instance doesn't trash all the
// symbols just because there was an error
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ba1f3b2e3c..4afd3545b9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -146,7 +146,7 @@ abstract class NodePrinters {
}
def printModifiers(tree: MemberDef) {
// [Eugene++] there's most likely a bug here (?)
- // see `TreePrinters.printAnnotations` for more information
+ // see `Printers.printAnnotations` for more information
val annots0 = tree.symbol.annotations match {
case Nil => tree.mods.annotations
case xs => xs map annotationInfoToString
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 3371353f25..94d0c4f45e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -10,7 +10,7 @@ import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
+trait Printers extends reflect.internal.Printers { this: Global =>
import treeInfo.{ IsTrue, IsFalse }
@@ -276,8 +276,8 @@ trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
}
}
- def asString(t: Tree): String = show(t, newStandardTreePrinter)
- def asCompactString(t: Tree): String = show(t, newCompactTreePrinter)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index fd154fe796..3232bde3b4 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -253,7 +253,18 @@ self =>
final val InBlock = 1
final val InTemplate = 2
- lazy val ScalaValueClassNames = tpnme.AnyVal :: definitions.ScalaValueClasses.map(_.name)
+ // These symbols may not yet be loaded (e.g. in the IDE) so don't go
+ // through definitions to obtain the names.
+ lazy val ScalaValueClassNames = Seq(tpnme.AnyVal,
+ tpnme.Unit,
+ tpnme.Boolean,
+ tpnme.Byte,
+ tpnme.Short,
+ tpnme.Char,
+ tpnme.Int,
+ tpnme.Long,
+ tpnme.Float,
+ tpnme.Double)
import nme.raw
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0c527fbaf4..59adcc637a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -458,6 +458,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val CLASS_CONSTRUCTOR_NAME = "<clinit>"
val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
// -----------------------------------------------------------------------------------------
// factory methods
// -----------------------------------------------------------------------------------------
@@ -644,6 +648,86 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
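As a reminder of what the relocated addInnerClasses ultimately emits, here is a minimal standalone sketch of writing an InnerClasses attribute; it uses the upstream org.objectweb.asm artifact rather than the compiler's bundled asm package, and the class names and flags are illustrative:

    import org.objectweb.asm.{ClassWriter, Opcodes}

    object InnerClassSketch {
      def main(args: Array[String]): Unit = {
        val cw = new ClassWriter(0)
        cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "Outer", null, "java/lang/Object", null)
        // arguments: name, outerName (null when method-enclosed), innerName (null for anonymous), access flags
        cw.visitInnerClass("Outer$Inner", "Outer", "Inner", Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC)
        cw.visitEnd()
        println("generated " + cw.toByteArray.length + " bytes for Outer")
      }
    }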
} // end of class JBuilder
@@ -654,10 +738,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// more constants
// -----------------------------------------------------------------------------------------
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
-
val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
@@ -969,86 +1049,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
/** Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -3033,9 +3033,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- // TODO no inner classes attribute is written. Confirm intent.
- assert(innerClassBuffer.isEmpty, innerClassBuffer)
-
+ addInnerClasses(clasz.symbol, beanInfoClass)
beanInfoClass.visitEnd()
writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d4ee9b6b48..5cc6e78e9d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -100,9 +100,29 @@ abstract class DeadCodeElimination extends SubComponent {
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
+
case LOAD_LOCAL(l) =>
defs = defs + Pair(((bb, idx)), rd.vars)
-// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs)
+
+ case STORE_LOCAL(_) =>
+ /* SI-4935 Check whether a module is on the stack top; if so, mark the instruction that loaded it
+ * (otherwise any side-effects of the module's constructor would be lost).
+ * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
+ * are already marked (case clause below).
+ * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
+ * will have the module's load marked provided `isSideEffecting(m1)`.
+ * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+ */
+ val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
+ val (bb1, idx1) = p
+ bb1(idx1) match {
+ case LOAD_MODULE(module) => isLoadNeeded(module)
+ case _ => false
+ }
+ }
+ if (necessary) worklist += ((bb, idx))
+
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
@@ -129,6 +149,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
+ private def isLoadNeeded(module: Symbol): Boolean = {
+ module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
+ }
+
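The SI-4935 case that the new STORE_LOCAL clause guards against can be reproduced with a module whose constructor has a visible side effect; a small illustration (not part of the patch):

    object Config {
      println("initializing Config")  // side effect in the module constructor
      val value = 42
    }

    object Main {
      def main(args: Array[String]): Unit = {
        val c = Config   // compiles to LOAD_MODULE + STORE_LOCAL; c itself is never read
        println("done")  // if dead-code elimination dropped the store together with the load,
                         // "initializing Config" would never be printed
      }
    }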
/** Mark useful instructions. Instructions in the worklist are each inspected and their
* dependencies are marked useful too, and added to the worklist.
*/
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index c6a9422974..e2e1ddf065 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -81,7 +81,6 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
new { override val global: compiler.type = compiler }
with model.ModelFactory(compiler, settings)
with model.ModelFactoryImplicitSupport
- with model.diagram.DiagramFactory
with model.comment.CommentFactory
with model.TreeFactory {
override def templateShouldDocument(sym: compiler.Symbol) =
@@ -91,12 +90,11 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
modelFactory.makeModel match {
case Some(madeModel) =>
- if (!settings.scaladocQuietRun)
+ if (settings.reportModel)
println("model contains " + modelFactory.templatesCount + " documentable templates")
Some(madeModel)
case None =>
- if (!settings.scaladocQuietRun)
- println("no documentable class found in compilation units")
+ println("no documentable class found in compilation units")
None
}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index f8b2711c0c..4458889d55 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -11,9 +11,8 @@ import java.lang.System
import language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
- * @param error A function that prints a string to the appropriate error stream
- * @param print A function that prints the string, without any extra boilerplate of error */
-class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
+ * @param error A function that prints a string to the appropriate error stream. */
+class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
* `html`. */
@@ -105,12 +104,6 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"(for example conversions that require Numeric[String] to be in scope)"
)
- val docImplicitsSoundShadowing = BooleanSetting (
- "-implicits-sound-shadowing",
- "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
- "only use it if you haven't defined usecase for implicitly inherited members."
- )
-
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -123,44 +116,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
val docDiagramsDotPath = PathSetting (
"-diagrams-dot-path",
- "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
+ "The path to the dot executable used to generate the inheritance diagrams. Ex: /usr/bin/dot",
"dot" // by default, just pick up the system-wide dot
)
- /** The maxium nuber of normal classes to show in the diagram */
- val docDiagramsMaxNormalClasses = IntSetting(
- "-diagrams-max-classes",
- "The maximum number of superclasses or subclasses to show in a diagram",
- 15,
- None,
- _ => None
- )
-
- /** The maxium nuber of implcit classes to show in the diagram */
- val docDiagramsMaxImplicitClasses = IntSetting(
- "-diagrams-max-implicits",
- "The maximum number of implicitly converted classes to show in a diagram",
- 10,
- None,
- _ => None
- )
-
- val docDiagramsDotTimeout = IntSetting(
- "-diagrams-dot-timeout",
- "The timeout before the graphviz dot util is forecefully closed, in seconds (default: 10)",
- 10,
- None,
- _ => None
- )
-
- val docDiagramsDotRestart = IntSetting(
- "-diagrams-dot-restart",
- "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
- 5,
- None,
- _ => None
- )
-
// Somewhere slightly before r18708 scaladoc stopped building unless the
// self-type check was suppressed. I hijacked the slotted-for-removal-anyway
// suppress-vt-warnings option and renamed it for this purpose.
@@ -170,16 +129,14 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
docDiagrams, docDiagramsDebug, docDiagramsDotPath,
- docDiagramsDotTimeout, docDiagramsDotRestart,
- docImplicits, docImplicitsDebug, docImplicitsShowAll,
- docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses
+ docImplicits, docImplicitsDebug, docImplicitsShowAll
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
override def isScaladoc = true
- // set by the testsuite, when checking test output
- var scaladocQuietRun = false
+ // unset by the testsuite, which does not need the entity count of the model to be printed
+ var reportModel = true
/**
* This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
@@ -225,8 +182,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"scala.Predef.any2stringfmt",
"scala.Predef.any2stringadd",
"scala.Predef.any2ArrowAssoc",
- "scala.Predef.any2Ensuring",
- "scala.collection.TraversableOnce.alternateImplicit")
+ "scala.Predef.any2Ensuring")
/** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
val arraySkipConversions = List(
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 84d703aa58..914824d523 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -12,8 +12,6 @@ import java.io.{ File => JFile }
import io.{ Streamable, Directory }
import scala.collection._
-import html.page.diagram.DiagramGenerator
-
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
* @author Gilles Dubochet */
@@ -31,27 +29,21 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"jquery.js",
"jquery.layout.js",
"scheduler.js",
- "diagrams.js",
"template.js",
"tools.tooltip.js",
- "modernizr.custom.js",
"index.css",
"ref-index.css",
"template.css",
- "diagrams.css",
"class.png",
"class_big.png",
- "class_diagram.png",
"object.png",
"object_big.png",
- "object_diagram.png",
"package.png",
"package_big.png",
"trait.png",
"trait_big.png",
- "trait_diagram.png",
"class_to_object_big.png",
"object_to_class_big.png",
@@ -113,8 +105,6 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
finally out.close()
}
- DiagramGenerator.initialize(universe.settings)
-
libResources foreach (s => copyResource("lib/" + s))
new page.Index(universe, index) writeFor this
@@ -125,8 +115,6 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
for (letter <- index.firstLetterIndex) {
new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
}
-
- DiagramGenerator.cleanup()
}
def writeTemplates(writeForThis: HtmlPage => Unit) {
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 34e2f7bc53..e3da8bddea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -116,25 +116,11 @@ abstract class HtmlPage extends Page { thisPage =>
case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
+ case EntityLink(entity) => templateToHtml(entity)
case Monospace(in) => <code>{ inlineToHtml(in) }</code>
case Text(text) => xml.Text(text)
case Summary(in) => inlineToHtml(in)
case HtmlTag(tag) => xml.Unparsed(tag)
- case EntityLink(target, template) => template() match {
- case Some(tpl) =>
- templateToHtml(tpl)
- case None =>
- xml.Text(target)
- }
- }
-
- def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
- case Nil =>
- sys.error("Internal Scaladoc error")
- case List(tpe) =>
- typeToHtml(tpe, hasLinks)
- case tpe :: rest =>
- typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
}
def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
index 59f4c81632..c5bf3e0e37 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala
@@ -83,4 +83,18 @@ abstract class Page {
}
relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
}
+
+ def isExcluded(dtpl: DocTemplateEntity) = {
+ val qname = dtpl.qualifiedName
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
}
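The qualified-name test above can be read in isolation as a predicate over plain strings; a standalone sketch of the same logic (illustrative, not code from the patch):

    object ExclusionSketch {
      private val kept = Set(
        "scala.Tuple1", "scala.Tuple2",
        "scala.Product", "scala.Product1", "scala.Product2",
        "scala.Function", "scala.Function1", "scala.Function2",
        "scala.runtime.AbstractFunction0", "scala.runtime.AbstractFunction1",
        "scala.runtime.AbstractFunction2")

      def excluded(qname: String): Boolean =
        (qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
         qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")) &&
        !kept(qname)

      def main(args: Array[String]): Unit = {
        println(excluded("scala.Tuple3"))  // true: high-arity boilerplate stays out of the index
        println(excluded("scala.Tuple2"))  // false: explicitly kept
        println(excluded("scala.List"))    // false: not in any excluded family
      }
    }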
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 0e894a03bf..8ed13e0da2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -61,7 +61,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
}
<ol class="templates">{
val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates filter (t => !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) )) groupBy (_.name)
+ (pack.templates filter (t => !t.isPackage && !isExcluded(t) )) groupBy (_.name)
val placeholderSeq: NodeSeq = <div class="placeholder"></div>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
index 1b0599d145..7edd4937c4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -68,7 +68,7 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
def allPackagesWithTemplates = {
Map(allPackages.map((key) => {
- key -> key.templates.filter(t => !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName))
+ key -> key.templates.filter(t => !t.isPackage && !isExcluded(t))
}) : _*)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 262e91b510..66189a6854 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -9,16 +9,9 @@ package html
package page
import model._
-import model.diagram._
-import diagram._
-
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
import language.postfixOps
-import model._
-import model.diagram._
-import diagram._
-
class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage {
val path =
@@ -36,22 +29,10 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
val headers =
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
- { if (universe.settings.docDiagrams.isSetByUser) {
- <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
- } else NodeSeq.Empty }
- <script type="text/javascript">
- if(top === self) {{
- var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
- var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
- window.location.href = url + '#' + hash;
- }}
- </script>
</xml:group>
val valueMembers =
@@ -60,12 +41,9 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
- val (deprValueMembers, nonDeprValueMembers) =
+ val (deprValueMembers, concValueMembers) =
nonAbsValueMembers partition (_.deprecation.isDefined)
- val (concValueMembers, shadowedImplicitMembers) =
- nonDeprValueMembers partition (!Template.isShadowedOrAmbiguousImplicit(_))
-
val typeMembers =
tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted
@@ -185,13 +163,6 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</div>
}
- { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
- <h3>Shadowed Implict Value Members</h3>
- <ol>{ shadowedImplicitMembers map (memberToHtml(_)) }</ol>
- </div>
- }
-
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>Deprecated Value Members</h3>
@@ -272,13 +243,11 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case _ => ""
}
val memberComment = memberToCommentHtml(mbr, false)
- <li name={ mbr.definitionName }
- visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }
- fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }>
- <a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
- { signature(mbr, false) }
- { memberComment }
+ <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
+ data-isabs={ mbr.isAbstract.toString } fullComment={ if(memberComment.isEmpty) "no" else "yes" }>
+ <a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
+ { signature(mbr, false) }
+ { memberComment }
</li>
}
@@ -330,7 +299,6 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
- val s = universe.settings
val memberComment =
if (mbr.comment.isEmpty) NodeSeq.Empty
@@ -386,8 +354,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
val implicitInformation = mbr.byConversion match {
- case Some(info) =>
- val conv = info.conversion
+ case Some(conv) =>
<dt class="implicit">Implicit information</dt> ++
{
val targetType = typeToHtml(conv.targetType, true)
@@ -420,35 +387,6 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
{ targetType } performed by method { conversionMethod } in { conversionOwner }.
{ constraintText }
</dd>
- } ++ {
- if (Template.isShadowedOrAmbiguousImplicit(mbr)) {
- // These are the members that are shadowing or ambiguating the current implicit
- // see ImplicitMemberShadowing trait for more information
- val shadowingSuggestion = {
- val params = mbr match {
- case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
- case _ => "" // no parameters
- }
- <br/> ++ xml.Text("To access this member you can use a ") ++
- <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
- target="_blank">type ascription</a> ++ xml.Text(":") ++
- <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
- }
-
- val shadowingWarning: NodeSeq =
- if (Template.isShadowedImplicit(mbr))
- xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
- "class.") ++ shadowingSuggestion
- else if (Template.isAmbiguousImplicit(mbr))
- xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
- "inherited members have similar signatures, so calling this member may produce an ambiguous " +
- "implicit conversion compiler error.") ++ shadowingSuggestion
- else NodeSeq.Empty
-
- <dt class="implicit">Shadowing</dt> ++
- <dd>{ shadowingWarning }</dd>
-
- } else NodeSeq.Empty
}
case _ =>
NodeSeq.Empty
@@ -624,29 +562,17 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
+ case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.subClasses.nonEmpty =>
<div class="toggleContainer block">
<span class="toggle">Known Subclasses</span>
<div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), xml.Text(", "))
+ templatesToHtml(dtpl.subClasses.sortBy(_.name), xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
}
- val typeHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.inheritanceDiagram.isDefined =>
- makeDiagramHtml(dtpl, dtpl.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
- case _ => NodeSeq.Empty
- } else NodeSeq.Empty // diagrams not generated
-
- val contentHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.contentDiagram.isDefined =>
- makeDiagramHtml(dtpl, dtpl.contentDiagram, "Content Hierarchy", "content-diagram")
- case _ => NodeSeq.Empty
- } else NodeSeq.Empty // diagrams not generated
-
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
+ memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses
}
def kindToString(mbr: MemberEntity): String = {
@@ -679,13 +605,13 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("private")))
case PrivateInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("private["), EntityLink(owner.qualifiedName, () => Some(owner)), CText("]")))))
+ Some(Paragraph(Chain(List(CText("private["), EntityLink(owner), CText("]")))))
case ProtectedInInstance() =>
Some(Paragraph(CText("protected[this]")))
case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("protected")))
case ProtectedInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner.qualifiedName, () => Some(owner)), CText("]")))))
+ Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner), CText("]")))))
case Public() =>
None
}
@@ -701,15 +627,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</span>
<span class="symbol">
{
- val nameClass =
- if (Template.isImplicit(mbr))
- if (Template.isShadowedOrAmbiguousImplicit(mbr))
- "implicit shadowed"
- else
- "implicit"
- else
- "name"
-
+ val nameClass = if (mbr.byConversion.isDefined) "implicit" else "name"
val nameHtml = {
val value = if (mbr.isConstructor) tpl.name else mbr.name
val span = if (mbr.deprecation.isDefined)
@@ -781,8 +699,8 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
}{ if (isReduced) NodeSeq.Empty else {
mbr match {
- case tpl: DocTemplateEntity if !tpl.parentTypes.isEmpty =>
- <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
+ case tpl: DocTemplateEntity if tpl.parentType.isDefined =>
+ <span class="result"> extends { typeToHtml(tpl.parentType.get, hasLinks) }</span>
case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
<span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
@@ -953,43 +871,4 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
typeToHtml(ub.upperBound, true) ++ xml.Text(")")
}
- def makeDiagramHtml(tpl: DocTemplateEntity, diagram: Option[Diagram], description: String, id: String) = {
- val s = universe.settings
- val diagramSvg = Template.diagramGenerator(s).generate(diagram.get, tpl, this)
- if (diagramSvg != NodeSeq.Empty) {
- <div class="toggleContainer block diagram-container" id={ id + "-container"}>
- <span class="toggle diagram-link">{ description }</span>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
- <div class="diagram" id={ id }>{
- diagramSvg
- }</div>
- </div>
- } else NodeSeq.Empty
- }
-}
-
-object Template {
-
- private var _diagramGenerator: DiagramGenerator = null
- /**
- * Get a diagram generator instance based on the given settings.
- * NOTE: This function does not take into account changing settings, so the first setting will initialize the diagram
- * generator and the other settings will be ignored!
- */
- def diagramGenerator(settings: doc.Settings): DiagramGenerator = {
- if (_diagramGenerator == null) {
- // for now, the diagram generator is hard-coded to use GraphViz, in the future we should use a more general
- // approach. Also the DiagramGenerator should be stateless and a single instance should be used to generate all
- // the diagrams
- _diagramGenerator = new DotDiagramGenerator(settings)
- }
- _diagramGenerator
- }
-
- def isImplicit(mbr: MemberEntity) = mbr.byConversion.isDefined
- def isShadowedImplicit(mbr: MemberEntity) = mbr.byConversion.isDefined && mbr.byConversion.get.isShadowed
- def isAmbiguousImplicit(mbr: MemberEntity) = mbr.byConversion.isDefined && mbr.byConversion.get.isAmbiguous
- def isShadowedOrAmbiguousImplicit(mbr: MemberEntity) = isShadowedImplicit(mbr) || isAmbiguousImplicit(mbr)
-
- def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
deleted file mode 100644
index 61c1819d11..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import scala.xml.NodeSeq
-import scala.tools.nsc.doc.html.HtmlPage
-import scala.tools.nsc.doc.model.diagram.Diagram
-import scala.tools.nsc.doc.model.DocTemplateEntity
-
-trait DiagramGenerator {
-
- /**
- * Generates a visualization of the internal representation
- * of a diagram.
- *
- * @param d The model of the diagram
- * @param p The page the diagram will be embedded in (needed for link generation)
- * @return The HTML to be embedded in the Scaladoc page
- */
- def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
-}
-
-object DiagramGenerator {
-
- // TODO: This is tailored towards the dot generator, since it's the only generator. In the future it should be more
- // general.
-
- private[this] var dotRunner: DotRunner = null
- private[this] var settings: doc.Settings = null
-
- def initialize(s: doc.Settings) =
- settings = s
-
- def getDotRunner() = {
- if (dotRunner == null)
- dotRunner = new DotRunner(settings)
- dotRunner
- }
-
- def cleanup() = {
- DiagramStats.printStats(settings)
- if (dotRunner != null) {
- dotRunner.cleanup()
- dotRunner = null
- }
- }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
deleted file mode 100644
index 16d859894f..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * @author Vlad Ureche
- */
-package scala.tools.nsc.doc
-package html.page.diagram
-
-object DiagramStats {
-
- class TimeTracker(title: String) {
- var totalTime: Long = 0l
- var maxTime: Long = 0l
- var instances: Int = 0
-
- def addTime(ms: Long) = {
- if (maxTime < ms)
- maxTime = ms
- totalTime += ms
- instances += 1
- }
-
- def printStats(print: String => Unit) = {
- if (instances == 0)
- print(title + ": no stats gathered")
- else {
- print(" " + title)
- print(" " + "=" * title.length)
- print(" count: " + instances + " items")
- print(" total time: " + totalTime + " ms")
- print(" average time: " + (totalTime/instances) + " ms")
- print(" maximum time: " + maxTime + " ms")
- print("")
- }
- }
- }
-
- private[this] val filterTrack = new TimeTracker("diagrams model filtering")
- private[this] val modelTrack = new TimeTracker("diagrams model generation")
- private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
- private[this] val dotRunTrack = new TimeTracker("dot process runnning")
- private[this] val svgTrack = new TimeTracker("svg processing")
-
- def printStats(settings: Settings) = {
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\nDiagram generation running time breakdown:\n")
- filterTrack.printStats(settings.printMsg)
- modelTrack.printStats(settings.printMsg)
- dotGenTrack.printStats(settings.printMsg)
- dotRunTrack.printStats(settings.printMsg)
- svgTrack.printStats(settings.printMsg)
- }
- }
-
- def addFilterTime(ms: Long) = filterTrack.addTime(ms)
- def addModelTime(ms: Long) = modelTrack.addTime(ms)
- def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
- def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
- def addSvgTime(ms: Long) = svgTrack.addTime(ms)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
deleted file mode 100644
index 4291fc6810..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ /dev/null
@@ -1,464 +0,0 @@
-/**
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
-import scala.collection.immutable._
-import javax.xml.parsers.SAXParser
-import model._
-import model.diagram._
-
-class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
-
- // the page where the diagram will be embedded
- private var page: HtmlPage = null
- // path to the "lib" folder relative to the page
- private var pathToLib: String = null
- // maps nodes to unique indices
- private var node2Index: Map[Node, Int] = null
- // maps an index to its corresponding node
- private var index2Node: Map[Int, Node] = null
- // true if the current diagram is a class diagram
- private var isClassDiagram = false
- // incoming implicit nodes (needed for determining the CSS class of a node)
- private var incomingImplicitNodes: List[Node] = List()
- // the suffix used when there are two many classes to show
- private final val MultiSuffix = " classes/traits"
- // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
- private var counter = 0
-
- def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
- counter = counter + 1;
- this.page = page
- pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
- val dot = generateDot(diagram)
- generateSVG(dot, template)
- }
-
- /**
- * Generates a dot string for a given diagram.
- */
- private def generateDot(d: Diagram) = {
- // inheritance nodes (all nodes except thisNode and implicit nodes)
- var nodes: List[Node] = null
- // inheritance edges (all edges except implicit edges)
- var edges: List[(Node, List[Node])] = null
-
- // timing
- var tDot = -System.currentTimeMillis
-
- // variables specific to class diagrams:
- // current node of a class diagram
- var thisNode:Node = null
- var subClasses = List[Node]()
- var superClasses = List[Node]()
- var incomingImplicits = List[Node]()
- var outgoingImplicits = List[Node]()
- var subClassesTooltip: Option[String] = None
- var superClassesTooltip: Option[String] = None
- var incomingImplicitsTooltip: Option[String] = None
- var outgoingImplicitsTooltip: Option[String] = None
- isClassDiagram = false
-
- d match {
- case ClassDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
-
- def textTypeEntity(text: String) =
- new TypeEntity {
- val name = text
- def refEntity: SortedMap[Int, (TemplateEntity, Int)] = SortedMap()
- }
-
- // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string
- // conservatively, we'll limit at 4000, to be sure:
- def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
-
- // avoid overcrowding the diagram:
- // if there are too many super / sub / implicit nodes, represent
- // them by on node with a corresponding tooltip
- superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
- superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
- List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None))
- } else _superClasses
-
- subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
- subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
- List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None))
- } else _subClasses
-
- incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
- incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
- List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None))
- } else _incomingImplicits
-
- outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
- outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
- List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None))
- } else _outgoingImplicits
-
- thisNode = _thisNode
- nodes = List()
- edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
- node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
- isClassDiagram = true
- incomingImplicitNodes = incomingImplicits
- case _ =>
- nodes = d.nodes
- edges = d.edges
- node2Index = d.nodes.zipWithIndex.toMap
- incomingImplicitNodes = List()
- }
- index2Node = node2Index map {_.swap}
-
- val implicitsDot = {
- if (!isClassDiagram) ""
- else {
- // dot cluster containing thisNode
- val thisCluster = "subgraph clusterThis {\n" +
- "style=\"invis\"\n" +
- node2Dot(thisNode, None) +
- "}"
- // dot cluster containing incoming implicit nodes, if any
- val incomingCluster = {
- if(incomingImplicits.isEmpty) ""
- else "subgraph clusterIncoming {\n" +
- "style=\"invis\"\n" +
- incomingImplicits.reverse.map(n => node2Dot(n, incomingImplicitsTooltip)).mkString +
- (if (incomingImplicits.size > 1)
- incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
- " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
- else "") +
- "}"
- }
- // dot cluster containing outgoing implicit nodes, if any
- val outgoingCluster = {
- if(outgoingImplicits.isEmpty) ""
- else "subgraph clusterOutgoing {\n" +
- "style=\"invis\"\n" +
- outgoingImplicits.reverse.map(n => node2Dot(n, outgoingImplicitsTooltip)).mkString +
- (if (outgoingImplicits.size > 1)
- outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
- " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
- else "") +
- "}"
- }
-
- // assemble clusters into another cluster
- val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
- val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
- "subgraph clusterAll {\n" +
- "style=\"invis\"\n" +
- outgoingCluster + "\n" +
- thisCluster + "\n" +
- incomingCluster + "\n" +
- // incoming implicit edge
- (if (!incomingImplicits.isEmpty) {
- val n = incomingImplicits.last
- "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
- " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
- ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
- } else "") +
- // outgoing implicit edge
- (if (!outgoingImplicits.isEmpty) {
- val n = outgoingImplicits.head
- "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
- " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
- ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
- } else "") +
- "}"
- }
- }
-
- // assemble graph
- val graph = "digraph G {\n" +
- // graph / node / edge attributes
- graphAttributesStr +
- "node [" + nodeAttributesStr + "];\n" +
- "edge [" + edgeAttributesStr + "];\n" +
- implicitsDot + "\n" +
- // inheritance nodes
- nodes.map(n => node2Dot(n, None)).mkString +
- subClasses.map(n => node2Dot(n, subClassesTooltip)).mkString +
- superClasses.map(n => node2Dot(n, superClassesTooltip)).mkString +
- // inheritance edges
- edges.map{ case (from, tos) => tos.map(to => {
- val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
- // the X -> Y edge is inverted twice to keep the diagram flowing the right way
- // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
- "node" + node2Index(to) + " -> node" + node2Index(from) +
- " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
- "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
- to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
- }).mkString}.mkString +
- "}"
-
- tDot += System.currentTimeMillis
- DiagramStats.addDotGenerationTime(tDot)
-
- graph
- }
-
- /**
- * Generates the dot string of a given node.
- */
- private def node2Dot(node: Node, tooltip: Option[String]) = {
-
- // escape HTML characters in node names
- def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
-
-    // assemble node attributes in a map
- var attr = scala.collection.mutable.Map[String, String]()
-
- // link
- node.doctpl match {
- case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
- case _ =>
- }
-
- // tooltip
- tooltip match {
- case Some(text) => attr += "tooltip" -> text
- // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
- case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
- case _ =>
- }
-
- // styles
- if(node.isImplicitNode)
- attr ++= implicitStyle
- else if(node.isOutsideNode)
- attr ++= outsideStyle
- else if(node.isTraitNode)
- attr ++= traitStyle
- else if(node.isClassNode)
- attr ++= classStyle
- else if(node.isObjectNode)
- attr ++= objectStyle
- else
- attr ++= defaultStyle
-
- // HTML label
- var name = escape(node.name)
- var img = ""
- if(node.isTraitNode)
- img = "trait_diagram.png"
- else if(node.isClassNode)
- img = "class_diagram.png"
- else if(node.isObjectNode)
- img = "object_diagram.png"
-
- if(!img.equals("")) {
- img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
- name = name + " "
- }
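-    // graphviz HTML-like label: a borderless one-row table holding the optional
-    // kind icon and the node name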
- val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
- "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
- "</TABLE>>"
-
-    // dot does not allow specifying a CSS class directly, so the id is
-    // set to "{class}|{id}" and split apart again in the transform method
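-    // e.g. the central node of a class diagram ends up with an id like
-    // "this class|graph1_0" (the counter value here is illustrative)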
- val id = "graph" + counter + "_" + node2Index(node)
- attr += ("id" -> (cssClass(node) + "|" + id))
-
- // return dot string
- "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
- }
-
- /**
- * Returns the CSS class for an edge connecting node1 and node2.
- */
- private def cssClass(node1: Node, node2: Node): String = {
- if (node1.isImplicitNode && node2.isThisNode)
- "implicit-incoming"
- else if (node1.isThisNode && node2.isImplicitNode)
- "implicit-outgoing"
- else
- "inheritance"
- }
-
- /**
- * Returns the CSS class for a node.
- */
- private def cssClass(node: Node): String =
- if (node.isImplicitNode && incomingImplicitNodes.contains(node))
- "implicit-incoming"
- else if (node.isImplicitNode)
- "implicit-outgoing"
- else if (node.isObjectNode)
- "object"
- else if (node.isThisNode)
- "this" + cssBaseClass(node, "")
- else if (node.isOutsideNode)
- "outside" + cssBaseClass(node, "")
- else
- cssBaseClass(node, "default")
-
- private def cssBaseClass(node: Node, default: String) =
- if (node.isClassNode)
- " class"
- else if (node.isTraitNode)
- " trait"
- else if (node.isObjectNode)
-      " object"
- else
- default
-
- /**
- * Calls dot with a given dot string and returns the SVG output.
- */
- private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
- val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
- var tSVG = -System.currentTimeMillis
-
- val result = if (dotOutput != null) {
- val src = scala.io.Source.fromString(dotOutput);
- try {
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
- val doc = cpa.document()
- if (doc != null)
- transform(doc.docElem)
- else
- NodeSeq.Empty
- } catch {
- case exc =>
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\n\n**********************************************************************")
- settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
- settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
- settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
- settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
- settings.printMsg("\n\n**********************************************************************")
- } else {
- settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
- settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
- }
- NodeSeq.Empty
- }
- } else
- NodeSeq.Empty
-
- tSVG += System.currentTimeMillis
- DiagramStats.addSvgTime(tSVG)
-
- result
- }
-
- /**
- * Transforms the SVG generated by dot:
- * - adds a class attribute to the SVG element
- * - changes the path of the node images from absolute to relative
- * - assigns id and class attributes to nodes and edges
- * - removes title elements
- */
- private def transform(e:scala.xml.Node): scala.xml.Node = e match {
- // add an id and class attribute to the SVG element
- case Elem(prefix, "svg", attribs, scope, child @ _*) => {
- val klass = if (isClassDiagram) "class-diagram" else "package-diagram"
- Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
- new UnprefixedAttribute("id", "graph" + counter, Null) %
- new UnprefixedAttribute("class", klass, Null)
- }
- // change the path of the node images from absolute to relative
- case img @ <image></image> => {
- val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
- val file = href.substring(href.lastIndexOf("/") + 1, href.size)
- img.asInstanceOf[Elem] %
- new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
- }
- // assign id and class attributes to edges and nodes:
- // the id attribute generated by dot has the format: "{class}|{id}"
- case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
- val res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
- val dotId = (g \ "@id").toString
- if (dotId.count(_ == '|') == 1) {
- val Array(klass, id) = dotId.toString.split("\\|")
- res % new UnprefixedAttribute("id", id, Null) %
- new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
- }
- else res
- }
- // remove titles
- case <title>{ _* }</title> =>
- scala.xml.Text("")
- // apply recursively
- case Elem(prefix, label, attribs, scope, child @ _*) =>
- Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
- case x => x
- }
-
- /* graph / node / edge attributes */
-
- private val graphAttributes: Map[String, String] = Map(
- "compound" -> "true",
- "rankdir" -> "TB"
- )
-
- private val nodeAttributes = Map(
- "shape" -> "rectangle",
- "style" -> "filled",
- "penwidth" -> "1",
- "margin" -> "0.08,0.01",
- "width" -> "0.0",
- "height" -> "0.0",
- "fontname" -> "Arial",
- "fontsize" -> "10.00"
- )
-
- private val edgeAttributes = Map(
- "color" -> "#d4d4d4",
- "arrowsize" -> "0.5",
- "fontcolor" -> "#aaaaaa",
- "fontsize" -> "10.00",
- "fontname" -> "Arial"
- )
-
- private val defaultStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d",
- "margin" -> "0.1,0.04"
- )
-
- private val implicitStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d"
- )
-
- private val outsideStyle = Map(
- "color" -> "#ababab",
- "fillcolor" -> "#e1e1e1",
- "fontcolor" -> "#7d7d7d"
- )
-
- private val traitStyle = Map(
- "color" -> "#37657D",
- "fillcolor" -> "#498AAD",
- "fontcolor" -> "#ffffff"
- )
-
- private val classStyle = Map(
- "color" -> "#115F3B",
- "fillcolor" -> "#0A955B",
- "fontcolor" -> "#ffffff"
- )
-
- private val objectStyle = Map(
- "color" -> "#102966",
- "fillcolor" -> "#3556a7",
- "fontcolor" -> "#ffffff"
- )
-
- private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
-
- private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
- private val nodeAttributesStr = flatten(nodeAttributes)
- private val edgeAttributesStr = flatten(edgeAttributes)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
deleted file mode 100644
index 37600fa908..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ /dev/null
@@ -1,227 +0,0 @@
-package scala.tools.nsc
-package doc
-package html
-package page
-package diagram
-
-import java.io.InputStream
-import java.io.OutputStream
-import java.io.InputStreamReader
-import java.io.OutputStreamWriter
-import java.io.BufferedWriter
-import java.io.BufferedReader
-import java.io.IOException
-import scala.sys.process._
-import scala.concurrent.SyncVar
-
-import model._
-import model.diagram._
-
-/** This class takes care of running the graphviz dot utility */
-class DotRunner(settings: doc.Settings) {
-
- private[this] var dotRestarts = 0
- private[this] var dotProcess: DotProcess = null
-
- def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
-
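-    // dot is restarted at most settings.docDiagramsDotRestart.value times; once that
-    // budget is exhausted, null is returned and diagrams are disabled for this run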
- if (dotProcess == null) {
- if (dotRestarts < settings.docDiagramsDotRestart.value) {
- if (dotRestarts != 0)
- settings.printMsg("A new graphviz dot process will be created...\n")
- dotRestarts += 1
- dotProcess = new DotProcess(settings)
- } else
- return null
- }
-
- val tStart = System.currentTimeMillis
- val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
- val tFinish = System.currentTimeMillis
- DiagramStats.addDotRunningTime(tFinish - tStart)
-
- if (result == null) {
- dotProcess.cleanup()
- dotProcess = null
- if (dotRestarts == settings.docDiagramsDotRestart.value) {
- settings.printMsg("\n")
- settings.printMsg("**********************************************************************")
-        settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
- settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
- settings.printMsg("")
- val baseList = List(settings.docDiagramsDebug,
- settings.docDiagramsDotPath,
- settings.docDiagramsDotRestart,
- settings.docDiagramsDotTimeout)
- val width = (baseList map (_.helpSyntax.length)).max
- def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + " " + s.helpDescription
- baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
- settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
- settings.printMsg("**********************************************************************\n\n")
-
- }
- }
-
- result
- }
-
- def cleanup() =
- if (dotProcess != null)
- dotProcess.cleanup()
-}
-
-class DotProcess(settings: doc.Settings) {
-
- @volatile var error: Boolean = false // signal an error
- val inputString = new SyncVar[String] // used for the dot process input
- val outputString = new SyncVar[String] // used for the dot process output
- val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
-
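-  // Handshake between the calling thread and the dot I/O threads:
-  //   - feedToDot puts the dot source into inputString and blocks on outputString.take(timeout)
-  //   - the stdin thread (inputFn) takes from inputString and writes it to the dot process
-  //   - the stdout thread (outputFn) buffers dot's output and puts the SVG into outputString
-  //     once it reads the closing "</svg>" tag
-  //   - the stderr thread (errorFn) collects dot's error output and raises the error flag
-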
- // set in only one place, in the main thread
- var process: Process = null
- var templateName: String = ""
- var templateInput: String = ""
-
- def feedToDot(input: String, template: String): String = {
-
- templateName = template
- templateInput = input
-
- try {
-
- // process creation
- if (process == null) {
- val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
- val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
- process = processBuilder.run(procIO)
- }
-
- // pass the input and wait for the output
- assert(!inputString.isSet)
- assert(!outputString.isSet)
- inputString.put(input)
- var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
- if (error) result = null
-
- result
-
- } catch {
- case exc =>
- errorBuffer.append(" Main thread in " + templateName + ": " +
- (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
- error = true
- return null
- }
- }
-
- def cleanup(): Unit = {
-
- // we'll need to know if there was any error for reporting
- val _error = error
-
- if (process != null) {
- // if there's no error, this should exit cleanly
- if (!error) feedToDot("<finish>", "<finishing>")
-
- // just in case there's any thread hanging, this will take it out of the loop
- error = true
- process.destroy()
- // we'll need to unblock the input again
- if (!inputString.isSet) inputString.put("")
- if (outputString.isSet) outputString.take()
- }
-
- if (_error) {
- if (settings.docDiagramsDebug.value) {
- settings.printMsg("\n**********************************************************************")
- settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
- settings.printMsg("\nThe following is the log of the failure:")
- settings.printMsg(errorBuffer.toString)
- settings.printMsg(" Cleanup: Last template: " + templateName)
- settings.printMsg(" Cleanup: Last dot input: \n " + templateInput.replaceAll("\n","\n ") + "\n")
- settings.printMsg(" Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
- if (process != null)
- settings.printMsg(" Cleanup: Dot exit code: " + process.exitValue)
- settings.printMsg("**********************************************************************")
- } else {
- // we shouldn't just sit there for 50s not reporting anything, no?
- settings.printMsg("Graphviz dot encountered an error when generating the diagram for")
- settings.printMsg(templateName + ". Use the " + settings.docDiagramsDebug.name + " flag")
- settings.printMsg("for more information.")
- }
- }
- }
-
- /* The standard input passing function */
- private[this] def inputFn(stdin: OutputStream): Unit = {
- val writer = new BufferedWriter(new OutputStreamWriter(stdin))
- try {
- var input = inputString.take()
-
- while (!error) {
- if (input == "<finish>") {
-          // "<finish>" sentinel => close stdin and finish
- stdin.close()
- return
- } else {
-          // write the dot input to the process' stdin
- writer.write(input + "\n\n")
- writer.flush()
- }
-
- if (!error) input = inputString.take()
- }
- stdin.close()
- } catch {
- case exc =>
- error = true
- stdin.close()
- errorBuffer.append(" Input thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-
- private[this] def outputFn(stdOut: InputStream): Unit = {
- val reader = new BufferedReader(new InputStreamReader(stdOut))
- var buffer: StringBuilder = new StringBuilder()
- try {
- var line = reader.readLine
- while (!error && line != null) {
- buffer.append(line + "\n")
-        // "</svg>" closes a complete diagram: hand the buffered SVG back via outputString
- if (line == "</svg>") {
- outputString.put(buffer.toString)
- buffer.setLength(0)
- }
- if (error) { stdOut.close(); return }
- line = reader.readLine
- }
- assert(!outputString.isSet)
- outputString.put(buffer.toString)
- stdOut.close()
- } catch {
- case exc =>
- error = true
- stdOut.close()
- errorBuffer.append(" Output thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-
- private[this] def errorFn(stdErr: InputStream): Unit = {
- val reader = new BufferedReader(new InputStreamReader(stdErr))
- var buffer: StringBuilder = new StringBuilder()
- try {
- var line = reader.readLine
- while (line != null) {
- errorBuffer.append(" DOT <error console>: " + line + "\n")
- error = true
- line = reader.readLine
- }
- stdErr.close()
- } catch {
- case exc =>
- error = true
- stdErr.close()
- errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
- }
- }
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
deleted file mode 100644
index 9d7aec792b..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
deleted file mode 100644
index 04d29580b7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ /dev/null
@@ -1,135 +0,0 @@
-.diagram-container
-{
- display: none;
-}
-
-.diagram
-{
- overflow: hidden;
- padding-top:15px;
-}
-
-.diagram svg
-{
- display: block;
- position: absolute;
- visibility: hidden;
- margin: auto;
-}
-
-.diagram-help
-{
- float:right;
- display:none;
-}
-
-.magnifying
-{
- cursor: -webkit-zoom-in ! important;
- cursor: -moz-zoom-in ! important;
- cursor: pointer;
-}
-
-#close-link
-{
- position: absolute;
- z-index: 100;
- font-family: Arial, sans-serif;
- font-size: 10pt;
- text-decoration: underline;
- color: #315479;
-}
-
-#close:hover
-{
- text-decoration: none;
-}
-
-svg a
-{
- cursor:pointer;
-}
-
-svg text
-{
- font-size: 10px;
-}
-
-/* try to move the node text 1px in order to be vertically
- centered (does not work in all browsers) */
-svg .node text
-{
- transform: translate(0px,1px);
- -ms-transform: translate(0px,1px);
- -webkit-transform: translate(0px,1px);
- -o-transform: translate(0px,1px);
- -moz-transform: translate(0px,1px);
-}
-
-/* hover effect for edges */
-
-svg .edge.over text,
-svg .edge.implicit-incoming.over polygon,
-svg .edge.implicit-outgoing.over polygon
-{
- fill: #202020;
-}
-
-svg .edge.over path,
-svg .edge.over polygon
-{
- stroke: #202020;
-}
-
-/* hover effect for nodes in class diagrams */
-
-svg.class-diagram .node
-{
- opacity: 0.75;
-}
-
-svg.class-diagram .node.this
-{
- opacity: 1.0;
-}
-
-svg.class-diagram .node.over
-{
- opacity: 1.0;
-}
-
-svg .node.over polygon
-{
- stroke: #202020;
-}
-
-/* hover effect for nodes in package diagrams */
-
-svg.package-diagram .node.class.over polygon,
-svg.class-diagram .node.this.class.over polygon
-{
- fill: #098552;
- fill: #04663e;
-}
-
-svg.package-diagram .node.trait.over polygon,
-svg.class-diagram .node.this.trait.over polygon
-{
- fill: #3c7b9b;
- fill: #235d7b;
-}
-
-svg.package-diagram .node.object.over polygon
-{
- fill: #183377;
-}
-
-svg.package-diagram .node.outside.over polygon
-{
- fill: #d4d4d4;
-}
-
-svg.package-diagram .node.default.over polygon
-{
- fill: #d4d4d4;
-}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
deleted file mode 100644
index 478f2e38ac..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ /dev/null
@@ -1,324 +0,0 @@
-/**
- * JavaScript functions enhancing the SVG diagrams.
- *
- * @author Damien Obrist
- */
-
-var diagrams = {};
-
-/**
- * Initializes the diagrams in the main window.
- */
-$(document).ready(function()
-{
- // hide diagrams in browsers not supporting SVG
- if(Modernizr && !Modernizr.inlinesvg)
- return;
-
- // only execute this in the main window
- if(diagrams.isPopup)
- return;
-
- if($("#content-diagram").length)
- $("#inheritance-diagram").css("padding-bottom", "20px");
-
- $(".diagram-container").css("display", "block");
-
- $(".diagram").each(function() {
-        // store initial dimensions
- $(this).data("width", $("svg", $(this)).width());
- $(this).data("height", $("svg", $(this)).height());
- // store unscaled clone of SVG element
- $(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true));
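-        // this clone is what diagrams.popup / diagrams.initPopup later inject
-        // into the popup window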
- });
-
-    // hide the diagrams (shown again on toggle) and make the SVG elements visible
- $(".diagram").css("display", "none");
- $(".diagram svg").css({
- "position": "static",
- "visibility": "visible",
- "z-index": "auto"
- });
-
- // enable linking to diagrams
- if($(location).attr("hash") == "#inheritance-diagram") {
- diagrams.toggle($("#inheritance-diagram-container"), true);
- } else if($(location).attr("hash") == "#content-diagram") {
- diagrams.toggle($("#content-diagram-container"), true);
- }
-
- $(".diagram-link").click(function() {
- diagrams.toggle($(this).parent());
- });
-
- // register resize function
- $(window).resize(diagrams.resize);
-
- // don't bubble event to parent div
- // when clicking on a node of a resized
- // diagram
- $("svg a").click(function(e) {
- e.stopPropagation();
- });
-
- diagrams.initHighlighting();
-});
-
-/**
- * Initializes the diagrams in the popup.
- */
-diagrams.initPopup = function(id)
-{
- // copy diagram from main window
- if(!jQuery.browser.msie)
- $("body").append(opener.$("#" + id).data("svg"));
-
- // positioning
- $("svg").css("position", "absolute");
- $(window).resize(function()
- {
- var svg_w = $("svg").css("width").replace("px", "");
- var svg_h = $("svg").css("height").replace("px", "");
- var x = $(window).width() / 2 - svg_w / 2;
- if(x < 0) x = 0;
- var y = $(window).height() / 2 - svg_h / 2;
- if(y < 0) y = 0;
- $("svg").css("left", x + "px");
- $("svg").css("top", y + "px");
- });
- $(window).resize();
-
- diagrams.initHighlighting();
- $("svg a").click(function(e) {
- opener.diagrams.redirectFromPopup(this.href.baseVal);
- window.close();
- });
- $(document).keyup(function(e) {
- if (e.keyCode == 27) window.close();
- });
-}
-
-/**
- * Initializes highlighting for nodes and edges.
- */
-diagrams.initHighlighting = function()
-{
- // helper function since $.hover doesn't work in IE
-
- function hover(elements, fn)
- {
- elements.mouseover(fn);
- elements.mouseout(fn);
- }
-
- // inheritance edges
-
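-    // edge ids have the form "graphN_X_Y", where "graphN_X" and "graphN_Y" are the
-    // ids of the two endpoint nodes, so both nodes can be highlighted together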
- hover($("svg .edge.inheritance"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- var parts = $(this).attr("id").split("_");
- toggleClass($("#" + parts[0] + "_" + parts[1]));
- toggleClass($("#" + parts[0] + "_" + parts[2]));
- toggleClass($(this));
- });
-
- // nodes
-
- hover($("svg .node"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- var parts = $(this).attr("id").split("_");
- var index = parts[1];
- $("svg#" + parts[0] + " .edge.inheritance").each(function(){
- var parts2 = $(this).attr("id").split("_");
- if(parts2[1] == index)
- {
- toggleClass($("#" + parts2[0] + "_" + parts2[2]));
- toggleClass($(this));
- } else if(parts2[2] == index)
- {
- toggleClass($("#" + parts2[0] + "_" + parts2[1]));
- toggleClass($(this));
- }
- });
- });
-
- // incoming implicits
-
- hover($("svg .node.implicit-incoming"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .edge.implicit-incoming"));
- toggleClass($("svg .node.this"));
- });
-
- hover($("svg .edge.implicit-incoming"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .node.this"));
- $("svg .node.implicit-incoming").each(function(){
- toggleClass($(this));
- });
- });
-
- // implicit outgoing nodes
-
- hover($("svg .node.implicit-outgoing"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .edge.implicit-outgoing"));
- toggleClass($("svg .node.this"));
- });
-
- hover($("svg .edge.implicit-outgoing"), function(evt){
- var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
- toggleClass($(this));
- toggleClass($("svg .node.this"));
- $("svg .node.implicit-outgoing").each(function(){
- toggleClass($(this));
- });
- });
-};
-
-/**
- * Resizes the diagrams according to the available width.
- */
-diagrams.resize = function()
-{
- // available width
- var availableWidth = $("body").width() - 20;
-
- $(".diagram-container").each(function() {
- // unregister click event on whole div
- $(".diagram", this).unbind("click");
- var diagramWidth = $(".diagram", this).data("width");
- var diagramHeight = $(".diagram", this).data("height");
-
- if(diagramWidth > availableWidth)
- {
- // resize diagram
- var height = diagramHeight / diagramWidth * availableWidth;
- $(".diagram svg", this).width(availableWidth);
- $(".diagram svg", this).height(height);
-
- // register click event on whole div
- $(".diagram", this).click(function() {
- diagrams.popup($(this));
- });
- $(".diagram", this).addClass("magnifying");
- }
- else
- {
- // restore full size of diagram
- $(".diagram svg", this).width(diagramWidth);
- $(".diagram svg", this).height(diagramHeight);
- // don't show custom cursor any more
- $(".diagram", this).removeClass("magnifying");
- }
- });
-};
-
-/**
- * Shows or hides a diagram depending on its current state.
- */
-diagrams.toggle = function(container, dontAnimate)
-{
- // change class of link
- $(".diagram-link", container).toggleClass("open");
- // get element to show / hide
- var div = $(".diagram", container);
- if (div.is(':visible'))
- {
- $(".diagram-help", container).hide();
- div.unbind("click");
- div.removeClass("magnifying");
- div.slideUp(100);
- }
- else
- {
- diagrams.resize();
- if(dontAnimate)
- div.show();
- else
- div.slideDown(100);
- $(".diagram-help", container).show();
- }
-};
-
-/**
- * Opens a popup containing a copy of a diagram.
- */
-diagrams.windows = {};
-diagrams.popup = function(diagram)
-{
- var id = diagram.attr("id");
- if(!diagrams.windows[id] || diagrams.windows[id].closed) {
- var title = $(".symbol .name", $("#signature")).text();
- // cloning from parent window to popup somehow doesn't work in IE
- // therefore include the SVG as a string into the HTML
- var svgIE = jQuery.browser.msie ? $("<div />").append(diagram.data("svg")).html() : "";
- var html = '' +
- '<?xml version="1.0" encoding="UTF-8"?>\n' +
- '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' +
- '<html>\n' +
- ' <head>\n' +
- ' <title>' + title + '</title>\n' +
- ' <link href="' + $("#diagrams-css").attr("href") + '" media="screen" type="text/css" rel="stylesheet" />\n' +
- ' <script type="text/javascript" src="' + $("#jquery-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript" src="' + $("#diagrams-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript">\n' +
- ' diagrams.isPopup = true;\n' +
- ' </script>\n' +
- ' </head>\n' +
- ' <body onload="diagrams.initPopup(\'' + id + '\');">\n' +
- ' <a href="#" onclick="window.close();" id="close-link">Close this window</a>\n' +
- ' ' + svgIE + '\n' +
- ' </body>\n' +
- '</html>';
-
- var padding = 30;
- var screenHeight = screen.availHeight;
- var screenWidth = screen.availWidth;
- var w = Math.min(screenWidth, diagram.data("width") + 2 * padding);
- var h = Math.min(screenHeight, diagram.data("height") + 2 * padding);
- var left = (screenWidth - w) / 2;
- var top = (screenHeight - h) / 2;
- var parameters = "height=" + h + ", width=" + w + ", left=" + left + ", top=" + top + ", scrollbars=yes, location=no, resizable=yes";
- var win = window.open("about:blank", "_blank", parameters);
- win.document.open();
- win.document.write(html);
- win.document.close();
- diagrams.windows[id] = win;
- }
-    diagrams.windows[id].focus();
-};
-
-/**
- * This method is called from within the popup when a node is clicked.
- */
-diagrams.redirectFromPopup = function(url)
-{
- window.location = url;
-};
-
-/**
- * Helper method that adds a class to a SVG element.
- */
-diagrams.addClass = function(svgElem, newClass) {
- newClass = newClass || "over";
- var classes = svgElem.attr("class");
- if ($.inArray(newClass, classes.split(/\s+/)) == -1) {
- classes += (classes ? ' ' : '') + newClass;
- svgElem.attr("class", classes);
- }
-};
-
-/**
- * Helper method that removes a class from a SVG element.
- */
-diagrams.removeClass = function(svgElem, oldClass) {
- oldClass = oldClass || "over";
- var classes = svgElem.attr("class");
- classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' ');
- svgElem.attr("class", classes);
-};
-
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
deleted file mode 100644
index 4688d633fe..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
+++ /dev/null
@@ -1,4 +0,0 @@
-/* Modernizr 2.5.3 (Custom Build) | MIT & BSD
- * Build: http://www.modernizr.com/download/#-inlinesvg
- */
-;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="<svg/>",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
deleted file mode 100644
index 6e9f2f743f..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
deleted file mode 100644
index d30dbad858..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// ┌────────────────────────────────────────────────────────────────────┐ \\
-// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\
-// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\
-// └────────────────────────────────────────────────────────────────────┘ \\
-
-(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;t<u;t++)"zIndex"in f[t]&&(o.push(f[t].zIndex),f[t].zIndex<0&&(p[f[t].zIndex]=f[t]));o.sort(g);while(o[l]<0){n=p[o[l++]],q.push(n.apply(b,e));if(i){i=d;return q}}for(t=0;t<u;t++){n=f[t];if("zIndex"in n)if(n.zIndex==o[l]){q.push(n.apply(b,e));if(i)break;do{l++,n=p[o[l]],n&&q.push(n.apply(b,e));if(i)break}while(n)}else p[n.zIndex]=n;else{q.push(n.apply(b,e));if(i)break}}i=d,h=r;return q.length?q:null};k.listeners=function(a){var b=a.split(d),c=j,f,g,h,i,k,l,m,n,o=[c],p=[];for(i=0,k=b.length;i<k;i++){n=[];for(l=0,m=o.length;l<m;l++){c=o[l].n,g=[c[b[i]],c[e]],h=2;while(h--)f=g[h],f&&(n.push(f),p=p.concat(f.f||[]))}o=n}return p},k.on=function(a,b){var c=a.split(d),e=j;for(var g=0,h=c.length;g<h;g++)e=e.n,!e[c[g]]&&(e[c[g]]={n:{}}),e=e[c[g]];e.f=e.f||[];for(g=0,h=e.f.length;g<h;g++)if(e.f[g]==b)return f;e.f.push(b);return function(a){+a==+a&&(b.zIndex=+a)}},k.stop=function(){i=1},k.nt=function(a){if(a)return(new RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)")).test(h);return h},k.off=k.unbind=function(a,b){var f=a.split(d),g,h,i,k,l,m,n,o=[j];for(k=0,l=f.length;k<l;k++)for(m=0;m<o.length;m+=i.length-2){i=[m,1],g=o[m].n;if(f[k]!=e)g[f[k]]&&i.push(g[f[k]]);else for(h in g)g[c](h)&&i.push(g[h]);o.splice.apply(o,i)}for(k=0,l=o.length;k<l;k++){g=o[k];while(g.n){if(b){if(g.f){for(m=0,n=g.f.length;m<n;m++)if(g.f[m]==b){g.f.splice(m,1);break}!g.f.length&&delete g.f}for(h in g.n)if(g.n[c](h)&&g.n[h].f){var p=g.n[h].f;for(m=0,n=p.length;m<n;m++)if(p[m]==b){p.splice(m,1);break}!p.length&&delete g.n[h].f}}else{delete g.f;for(h in g.n)g.n[c](h)&&g.n[h].f&&delete g.n[h].f}g=g.n}}},k.once=function(a,b){var c=function(){var d=b.apply(this,arguments);k.unbind(a,c);return d};return k.on(a,c)},k.version=b,k.toString=function(){return"You are running Eve "+b},typeof module!="undefined"&&module.exports?module.exports=k:typeof define!="undefined"?define("eve",[],function(){return k}):a.eve=k})(this),function(){function cF(a){for(var b=0;b<cy.length;b++)cy[b].el.paper==a&&cy.splice(b--,1)}function cE(b,d,e,f,h,i){e=Q(e);var j,k,l,m=[],o,p,q,t=b.ms,u={},v={},w={};if(f)for(y=0,z=cy.length;y<z;y++){var x=cy[y];if(x.el.id==d.id&&x.anim==b){x.percent!=e?(cy.splice(y,1),l=1):k=x,d.attr(x.totalOrigin);break}}else f=+v;for(var y=0,z=b.percents.length;y<z;y++){if(b.percents[y]==e||b.percents[y]>f*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;y<z;y++){w[A][y]=[0];for(var F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(E[y][F]-u[A][y][F])/t}break;case"transform":var H=d._,I=ca(H[A],v[A]);if(I){u[A]=I.from,v[A]=I.to,w[A]=[],w[A].real=!0;for(y=0,z=u[A].length;y<z;y++){w[A][y]=[u[A][y][0]];for(F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(v[A][y][F]-u[A][y][F])/t}}else{var J=d.matrix||new cb,K={_:{transform:H.transform},getBBox:function(){return 
d.getBBox(1)}};u[A]=[J.a,J.b,J.c,J.d,J.e,J.f],b$(K,v[A]),v[A]=K._.transform,w[A]=[(K.matrix.a-J.a)/t,(K.matrix.b-J.b)/t,(K.matrix.c-J.c)/t,(K.matrix.d-J.d)/t,(K.matrix.e-J.e)/t,(K.matrix.f-J.f)/t]}break;case"csv":var L=r(j[A])[s](c),M=r(u[A])[s](c);if(A=="clip-rect"){u[A]=M,w[A]=[],y=M.length;while(y--)w[A][y]=(L[y]-u[A][y])/t}v[A]=L;break;default:L=[][n](j[A]),M=[][n](u[A]),w[A]=[],y=d.paper.customAttributes[A].length;while(y--)w[A][y]=((L[y]||0)-(M[y]||0))/t}}var O=j.easing,P=a.easing_formulas[O];if(!P){P=r(O).match(N);if(P&&P.length==5){var R=P;P=function(a){return cC(a,+R[1],+R[2],+R[3],+R[4],t)}}else P=bf}q=j.start||b.start||+(new Date),x={anim:b,percent:e,timestamp:q,start:q+(b.del||0),status:0,initstatus:f||0,stop:!1,ms:t,easing:P,from:u,diff:w,to:v,el:d,callback:j.callback,prev:p,next:o,repeat:i||b.times,origin:d.attr(),totalOrigin:h},cy.push(x);if(f&&!k&&!l){x.stop=!0,x.start=new Date-t*f;if(cy.length==1)return cA()}l&&(x.start=new Date-x.ms*f),cy.length==1&&cz(cA)}else k.initstatus=f,k.start=new Date-k.ms*f;eve("raphael.anim.start."+d.id,d,b)}}function cD(a,b){var c=[],d={};this.ms=b,this.times=1;if(a){for(var e in a)a[g](e)&&(d[Q(e)]=a[e],c.push(Q(e)));c.sort(bd)}this.anim=d,this.top=c[c.length-1],this.percents=c}function cC(a,b,c,d,e,f){function o(a,b){var c,d,e,f,j,k;for(e=a,k=0;k<8;k++){f=m(e)-a;if(z(f)<b)return e;j=(3*i*e+2*h)*e+g;if(z(j)<1e-6)break;e=e-f/j}c=0,d=1,e=a;if(e<c)return c;if(e>d)return d;while(c<d){f=m(e);if(z(f-a)<b)return e;a>f?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" × "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p<q;p++){var r=b[p];if(r[0]=="M")e=i=r[1],f=j=r[2];else{r[0]=="C"?(m=[e,f].concat(r.slice(1)),e=m[6],f=m[7]):(m=[e,f,e,f,i,j,i,j],e=i,f=j);for(var s=0,t=c.length;s<t;s++){var u=c[s];if(u[0]=="M")g=k=u[1],h=l=u[2];else{u[0]=="C"?(n=[g,h].concat(u.slice(1)),g=n[6],h=n[7]):(n=[g,h,g,h,k,l,k,l],g=k,h=l);var v=bG(m,n,d);if(d)o+=v;else{for(var w=0,x=v.length;w<x;w++)v[w].segment1=p,v[w].segment2=s,v[w].bez1=m,v[w].bez2=n;o=o.concat(v)}}}}}return o}function bG(b,c,d){var e=a.bezierBBox(b),f=a.bezierBBox(c);if(!a.isBBoxIntersect(e,f))return d?0:[];var g=bB.apply(0,b),h=bB.apply(0,c),i=~~(g/5),j=~~(h/5),k=[],l=[],m={},n=d?0:[];for(var o=0;o<i+1;o++){var p=a.findDotsAtSegment.apply(a,b.concat(o/i));k.push({x:p.x,y:p.y,t:o/i})}for(o=0;o<j+1;o++)p=a.findDotsAtSegment.apply(a,c.concat(o/j)),l.push({x:p.x,y:p.y,t:o/j});for(o=0;o<i;o++)for(var q=0;q<j;q++){var r=k[o],s=k[o+1],t=l[q],u=l[q+1],v=z(s.x-r.x)<.001?"y":"x",w=z(u.x-t.x)<.001?"y":"x",x=bD(r.x,r.y,s.x,s.y,t.x,t.y,u.x,u.y);if(x){if(m[x.x.toFixed(4)]==x.y.toFixed(4))continue;m[x.x.toFixed(4)]=x.y.toFixed(4);var y=r.t+z((x[v]-r[v])/(s[v]-r[v]))*(s.t-r.t),A=t.t+z((x[w]-t[w])/(u[w]-t[w]))*(u.t-t.t);y>=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)<y(e,g)||y(a,c)>x(e,g)||x(b,d)<y(f,h)||y(b,d)>x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var 
l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)<i)){var j=1,k=j/2,l=j-k,m,n=.01;m=bB(a,b,c,d,e,f,g,h,l);while(z(m-i)>n)k/=2,l+=(m<i?1:-1)*k,m=bB(a,b,c,d,e,f,g,h,l);return l}}function bB(a,b,c,d,e,f,g,h,i){i==null&&(i=1),i=i>1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;o<k;o++){var p=j*l[o]+j,q=bA(p,a,c,e,g),r=bA(p,b,d,f,h),s=q*q+r*r;n+=m[o]*w.sqrt(s)}return j*n}function bA(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function by(a,b){var c=[];for(var d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function bm(a){if(Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[g](c)&&(b[c]=bm(a[c]));return b}function a(c){if(a.is(c,"function"))return b?c():eve.on("raphael.DOMload",c);if(a.is(c,E))return a._engine.create[m](a,c.splice(0,3+a.is(c[0],C))).add(c);var d=Array.prototype.slice.call(arguments,0);if(a.is(d[d.length-1],"function")){var e=d.pop();return b?e.call(a._engine.create[m](a,d)):eve.on("raphael.DOMload",function(){e.call(a._engine.create[m](a,d))})}return a._engine.create[m](a,arguments)}a.version="2.1.0",a.eve=eve;var b,c=/[, ]+/,d={circle:1,rect:1,path:1,ellipse:1,text:1,image:1},e=/\{(\d+)\}/g,f="prototype",g="hasOwnProperty",h={doc:document,win:window},i={was:Object.prototype[g].call(h.win,"Raphael"),is:h.win.Raphael},j=function(){this.ca=this.customAttributes={}},k,l="appendChild",m="apply",n="concat",o="createTouch"in h.doc,p="",q=" ",r=String,s="split",t="click dblclick mousedown mousemove mouseout mouseover mouseup touchstart touchmove touchend 
touchcancel"[s](q),u={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},v=r.prototype.toLowerCase,w=Math,x=w.max,y=w.min,z=w.abs,A=w.pow,B=w.PI,C="number",D="string",E="array",F="toString",G="fill",H=Object.prototype.toString,I={},J="push",K=a._ISURL=/^url\(['"]?([^\)]+?)['"]?\)$/i,L=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\))\s*$/i,M={NaN:1,Infinity:1,"-Infinity":1},N=/^(?:cubic-)?bezier\(([^,]+),([^,]+),([^,]+),([^\)]+)\)/,O=w.round,P="setAttribute",Q=parseFloat,R=parseInt,S=r.prototype.toUpperCase,T=a._availableAttrs={"arrow-end":"none","arrow-start":"none",blur:0,"clip-rect":"0 0 1e9 1e9",cursor:"default",cx:0,cy:0,fill:"#fff","fill-opacity":1,font:'10px "Arial"',"font-family":'"Arial"',"font-size":"10","font-style":"normal","font-weight":400,gradient:0,height:0,href:"http://raphaeljs.com/","letter-spacing":0,opacity:1,path:"M0,0",r:0,rx:0,ry:0,src:"",stroke:"#000","stroke-dasharray":"","stroke-linecap":"butt","stroke-linejoin":"butt","stroke-miterlimit":0,"stroke-opacity":1,"stroke-width":1,target:"_blank","text-anchor":"middle",title:"Raphael",transform:"",width:0,x:0,y:0},U=a._availableAnimAttrs={blur:C,"clip-rect":"csv",cx:C,cy:C,fill:"colour","fill-opacity":C,"font-size":C,height:C,opacity:C,path:"path",r:C,rx:C,ry:C,stroke:"colour","stroke-opacity":C,"stroke-width":C,transform:"transform",width:C,x:C,y:C},V=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]/g,W=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/,X={hs:1,rg:1},Y=/,?([achlmqrstvxz]),?/gi,Z=/([achlmrqstvz])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,$=/([rstm])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,_=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/ig,ba=a._radial_gradient=/^r(?:\(([^,]+?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u20
00\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*([^\)]+?)\))?/,bb={},bc=function(a,b){return a.key-b.key},bd=function(a,b){return Q(a)-Q(b)},be=function(){},bf=function(a){return a},bg=a._rectPath=function(a,b,c,d,e){if(e)return[["M",a+e,b],["l",c-e*2,0],["a",e,e,0,0,1,e,e],["l",0,d-e*2],["a",e,e,0,0,1,-e,e],["l",e*2-c,0],["a",e,e,0,0,1,-e,-e],["l",0,e*2-d],["a",e,e,0,0,1,e,-e],["z"]];return[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]]},bh=function(a,b,c,d){d==null&&(d=c);return[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]]},bi=a._getPath={path:function(a){return a.attr("path")},circle:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.r)},ellipse:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.rx,b.ry)},rect:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height,b.r)},image:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height)},text:function(a){var b=a._getBBox();return bg(b.x,b.y,b.width,b.height)}},bj=a.mapPath=function(a,b){if(!b)return a;var c,d,e,f,g,h,i;a=bR(a);for(e=0,g=a.length;e<g;e++){i=a[e];for(f=1,h=i.length;f<h;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d}return a};a._g=h,a.type=h.win.SVGAngle||h.doc.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure","1.1")?"SVG":"VML";if(a.type=="VML"){var bk=h.doc.createElement("div"),bl;bk.innerHTML='<v:shape adj="1"/>',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(f<d)return c-f;if(f>b-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write("<body>"),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="Raphaël Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var 
e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return 
a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r<t)&&(z+=180);return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:x,y:y},alpha:z}},a.bezierBBox=function(b,c,d,e,f,g,h,i){a.is(b,"array")||(b=[b,c,d,e,f,g,h,i]);var j=bQ.apply(null,b);return{x:j.min.x,y:j.min.y,x2:j.max.x,y2:j.max.y,width:j.max.x-j.min.x,height:j.max.y-j.min.y}},a.isPointInsideBBox=function(a,b,c){return b>=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.x<c.x2&&b.x>c.x||c.x<b.x2&&c.x>b.x)&&(b.y<c.y2&&b.y>c.y||c.y<b.y2&&c.y>b.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"Raphaël: you are calling to method “"+a+"” of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h<i;h++){g=a[h];if(g[0]=="M")c=g[1],d=g[2],e.push(c),f.push(d);else{var j=bQ(c,d,g[1],g[2],g[3],g[4],g[5],g[6]);e=e[n](j.min.x,j.max.x),f=f[n](j.min.y,j.max.y),c=g[5],d=g[6]}}var k=y[m](0,e),l=y[m](0,f),o=x[m](0,e),p=x[m](0,f),q={x:k,y:l,x2:o,y2:p,width:o-k,height:p-l};b.bbox=bm(q);return q},bJ=function(b){var c=bm(b);c.toString=a._path2string;return c},bK=a._pathToRelative=function(b){var c=bz(b);if(c.rel)return bJ(c.rel);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=b[0][1],f=b[0][2],g=e,h=f,i++,d.push(["M",e,f]));for(var j=i,k=b.length;j<k;j++){var 
l=d[j]=[],m=b[j];if(m[0]!=v.call(m[0])){l[0]=v.call(m[0]);switch(l[0]){case"a":l[1]=m[1],l[2]=m[2],l[3]=m[3],l[4]=m[4],l[5]=m[5],l[6]=+(m[6]-e).toFixed(3),l[7]=+(m[7]-f).toFixed(3);break;case"v":l[1]=+(m[1]-f).toFixed(3);break;case"m":g=m[1],h=m[2];default:for(var n=1,o=m.length;n<o;n++)l[n]=+(m[n]-(n%2?e:f)).toFixed(3)}}else{l=d[j]=[],m[0]=="m"&&(g=m[1]+e,h=m[2]+f);for(var p=0,q=m.length;p<q;p++)d[j][p]=m[p]}var r=d[j].length;switch(d[j][0]){case"z":e=g,f=h;break;case"h":e+=+d[j][r-1];break;case"v":f+=+d[j][r-1];break;default:e+=+d[j][r-2],f+=+d[j][r-1]}}d.toString=a._path2string,c.rel=bJ(d);return d},bL=a._pathToAbsolute=function(b){var c=bz(b);if(c.abs)return bJ(c.abs);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);if(!b||!b.length)return[["M",0,0]];var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=+b[0][1],f=+b[0][2],g=e,h=f,i++,d[0]=["M",e,f]);var j=b.length==3&&b[0][0]=="M"&&b[1][0].toUpperCase()=="R"&&b[2][0].toUpperCase()=="Z";for(var k,l,m=i,o=b.length;m<o;m++){d.push(k=[]),l=b[m];if(l[0]!=S.call(l[0])){k[0]=S.call(l[0]);switch(k[0]){case"A":k[1]=l[1],k[2]=l[2],k[3]=l[3],k[4]=l[4],k[5]=l[5],k[6]=+(l[6]+e),k[7]=+(l[7]+f);break;case"V":k[1]=+l[1]+f;break;case"H":k[1]=+l[1]+e;break;case"R":var p=[e,f][n](l.slice(1));for(var q=2,r=p.length;q<r;q++)p[q]=+p[q]+e,p[++q]=+p[q]+f;d.pop(),d=d[n](by(p,j));break;case"M":g=+l[1]+e,h=+l[2]+f;default:for(q=1,r=l.length;q<r;q++)k[q]=+l[q]+(q%2?e:f)}}else if(l[0]=="R")p=[e,f][n](l.slice(1)),d.pop(),d=d[n](by(p,j)),k=["R"][n](l.slice(-2));else for(var s=0,t=l.length;s<t;s++)k[s]=l[s];switch(k[0]){case"Z":e=g,f=h;break;case"H":e=k[1];break;case"V":f=k[1];break;case"M":g=k[k.length-2],h=k[k.length-1];default:e=k[k.length-2],f=k[k.length-1]}}d.toString=a._path2string,c.abs=bJ(d);return d},bM=function(a,b,c,d){return[a,b,c,d,c,d]},bN=function(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]},bO=function(a,b,c,d,e,f,g,h,i,j){var k=B*120/180,l=B/180*(+e||0),m=[],o,p=bv(function(a,b,c){var d=a*w.cos(c)-b*w.sin(c),e=a*w.sin(c)+b*w.cos(c);return{x:d,y:e}});if(!j){o=p(a,b,-l),a=o.x,b=o.y,o=p(h,i,-l),h=o.x,i=o.y;var q=w.cos(B/180*e),r=w.sin(B/180*e),t=(a-h)/2,u=(b-i)/2,v=t*t/(c*c)+u*u/(d*d);v>1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=a<C?B-E:E,F=h<C?B-F:F,E<0&&(E=B*2+E),F<0&&(F=B*2+F),g&&E>F&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W<X;W++)V[W]=W%2?p(m[W-1],m[W],l).y:p(m[W],m[W+1],l).x;return V},bP=function(a,b,c,d,e,f,g,h,i){var j=1-i;return{x:A(j,3)*a+A(j,2)*3*i*c+j*3*i*i*e+A(i,3)*g,y:A(j,3)*b+A(j,2)*3*i*d+j*3*i*i*f+A(i,3)*h}},bQ=bv(function(a,b,c,d,e,f,g,h){var 
i=e-2*c+a-(g-2*e+c),j=2*(c-a)-2*(e-c),k=a-c,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,o=[b,h],p=[a,g],q;z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var k=0,l=x(d.length,e&&e.length||0);k<l;k++){d[k]=h(d[k],f),i(d,k),e&&(e[k]=h(e[k],g)),e&&i(e,k),j(d,e,f,g,k),j(e,d,g,f,k);var o=d[k],p=e&&e[k],q=o.length,r=e&&p.length;f.x=o[q-2],f.y=o[q-1],f.bx=Q(o[q-4])||f.x,f.by=Q(o[q-3])||f.y,g.bx=e&&(Q(p[r-4])||g.x),g.by=e&&(Q(p[r-3])||g.y),g.x=e&&p[r-2],g.y=e&&p[r-1]}e||(c.curve=bJ(d));return e?[d,e]:d},null,bJ),bS=a._parseDots=bv(function(b){var c=[];for(var d=0,e=b.length;d<e;d++){var f={},g=b[d].match(/^([^:]*):?([\d\.]*)/);f.color=a.getRGB(g[1]);if(f.color.error)return null;f.color=f.color.hex,g[2]&&(f.offset=g[2]+"%"),c.push(f)}for(d=1,e=c.length-1;d<e;d++)if(!c[d].offset){var h=Q(c[d-1].offset||0),i=0;for(var j=d+1;j<e;j++)if(c[j].offset){i=c[j].offset;break}i||(i=100,j=e),i=Q(i);var k=(i-h)/(j-d+1);for(;d<j;d++)h+=k,c[d].offset=h+"%"}return c}),bT=a._tear=function(a,b){a==b.top&&(b.top=a.prev),a==b.bottom&&(b.bottom=a.next),a.next&&(a.next.prev=a.prev),a.prev&&(a.prev.next=a.next)},bU=a._tofront=function(a,b){b.top!==a&&(bT(a,b),a.next=null,a.prev=b.top,b.top.next=a,b.top=a)},bV=a._toback=function(a,b){b.bottom!==a&&(bT(a,b),a.next=b.bottom,a.prev=null,b.bottom.prev=a,b.bottom=a)},bW=a._insertafter=function(a,b,c){bT(a,c),b==c.top&&(c.top=a),b.next&&(b.next.prev=a),a.next=b.next,a.prev=b,b.next=a},bX=a._insertbefore=function(a,b,c){bT(a,c),b==c.bottom&&(c.bottom=a),b.prev&&(b.prev.next=a),a.prev=b.prev,b.prev=a,a.next=b},bY=a.toMatrix=function(a,b){var c=bI(a),d={_:{transform:p},getBBox:function(){return c}};b$(d,b);return d.matrix},bZ=a.transformPath=function(a,b){return bj(a,bY(a,b))},b$=a._extractTransform=function(b,c){if(c==null)return b._.transform;c=r(c).replace(/\.{3}|\u2026/g,b._.transform||p);var d=a.parseTransformString(c),e=0,f=0,g=0,h=1,i=1,j=b._,k=new 
cb;j.transform=d||[];if(d)for(var l=0,m=d.length;l<m;l++){var n=d[l],o=n.length,q=r(n[0]).toLowerCase(),s=n[0]!=q,t=s?k.invert():0,u,v,w,x,y;q=="t"&&o==3?s?(u=t.x(0,0),v=t.y(0,0),w=t.x(n[1],n[2]),x=t.y(n[1],n[2]),k.translate(w-u,x-v)):k.translate(n[1],n[2]):q=="r"?o==2?(y=y||b.getBBox(1),k.rotate(n[1],y.x+y.width/2,y.y+y.height/2),e+=n[1]):o==4&&(s?(w=t.x(n[2],n[3]),x=t.y(n[2],n[3]),k.rotate(n[1],w,x)):k.rotate(n[1],n[2],n[3]),e+=n[1]):q=="s"?o==2||o==3?(y=y||b.getBBox(1),k.scale(n[1],n[o-1],y.x+y.width/2,y.y+y.height/2),h*=n[1],i*=n[o-1]):o==5&&(s?(w=t.x(n[3],n[4]),x=t.y(n[3],n[4]),k.scale(n[1],n[2],w,x)):k.scale(n[1],n[2],n[3],n[4]),h*=n[1],i*=n[2]):q=="m"&&o==7&&k.add(n[1],n[2],n[3],n[4],n[5],n[6]),j.dirtyT=1,b.matrix=k}b.matrix=k,j.sx=h,j.sy=i,j.deg=e,j.dx=f=k.e,j.dy=g=k.f,h==1&&i==1&&!e&&j.bbox?(j.bbox.x+=+f,j.bbox.y+=+g):j.dirtyT=1},b_=function(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return a.length==4?[b,0,a[2],a[3]]:[b,0];case"s":return a.length==5?[b,1,1,a[3],a[4]]:a.length==3?[b,1,1]:[b,1]}},ca=a._equaliseTransform=function(b,c){c=r(c).replace(/\.{3}|\u2026/g,b),b=a.parseTransformString(b)||[],c=a.parseTransformString(c)||[];var d=x(b.length,c.length),e=[],f=[],g=0,h,i,j,k;for(;g<d;g++){j=b[g]||b_(c[g]),k=c[g]||b_(j);if(j[0]!=k[0]||j[0].toLowerCase()=="r"&&(j[2]!=k[2]||j[3]!=k[3])||j[0].toLowerCase()=="s"&&(j[3]!=k[3]||j[4]!=k[4]))return;e[g]=[],f[g]=[];for(h=0,i=x(j.length,k.length);h<i;h++)h in j&&(e[g][h]=j[h]),h in k&&(f[g][h]=k[h])}return{from:e,to:f}};a._getContainer=function(b,c,d,e){var f;f=e==null&&!a.is(b,"object")?h.doc.getElementById(b):b;if(f!=null){if(f.tagName)return c==null?{container:f,width:f.style.pixelWidth||f.offsetWidth,height:f.style.pixelHeight||f.offsetHeight}:{container:f,width:c,height:d};return{container:1,x:b,y:c,width:d,height:e}}},a.pathToRelative=bK,a._engine={},a.path2curve=bR,a.matrix=function(a,b,c,d,e,f){return new cb(a,b,c,d,e,f)},function(b){function d(a){var b=w.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}function c(a){return a[0]*a[0]+a[1]*a[1]}b.add=function(a,b,c,d,e,f){var g=[[],[],[]],h=[[this.a,this.c,this.e],[this.b,this.d,this.f],[0,0,1]],i=[[a,c,e],[b,d,f],[0,0,1]],j,k,l,m;a&&a instanceof cb&&(i=[[a.a,a.c,a.e],[a.b,a.d,a.f],[0,0,1]]);for(j=0;j<3;j++)for(k=0;k<3;k++){m=0;for(l=0;l<3;l++)m+=h[j][l]*i[l][k];g[j][k]=m}this.a=g[0][0],this.b=g[1][0],this.c=g[0][1],this.d=g[1][1],this.e=g[0][2],this.f=g[1][2]},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new cb(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new cb(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){this.add(1,0,0,1,a,b)},b.scale=function(a,b,c,d){b==null&&(b=a),(c||d)&&this.add(1,0,0,1,c,d),this.add(a,0,0,b,0,0),(c||d)&&this.add(1,0,0,1,-c,-d)},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+w.cos(b).toFixed(9),f=+w.sin(b).toFixed(9);this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[r.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return a.svg?"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")":[this.get(0),this.get(2),this.get(1),this.get(3),0,0].join()},b.toFilter=function(){return"progid:DXImageTransform.Microsoft.Matrix(M11="+this.get(0)+", M12="+this.get(2)+", M21="+this.get(1)+", M22="+this.get(3)+", Dx="+this.get(4)+", Dy="+this.get(5)+", 
sizingmethod='auto expand')"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.c],[this.b,this.d]];b.scalex=w.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=w.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley;var f=-e[0][1],g=e[1][1];g<0?(b.rotate=a.deg(w.acos(g)),f<0&&(b.rotate=360-b.rotate)):b.rotate=a.deg(w.asin(f)),b.isSimple=!+b.shear.toFixed(9)&&(b.scalex.toFixed(9)==b.scaley.toFixed(9)||!b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate;return b},b.toTransformString=function(a){var b=a||this[s]();if(b.isSimple){b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4);return(b.dx||b.dy?"t"+[b.dx,b.dy]:p)+(b.scalex!=1||b.scaley!=1?"s"+[b.scalex,b.scaley,0,0]:p)+(b.rotate?"r"+[b.rotate,0,0]:p)}return"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]}}(cb.prototype);var cc=navigator.userAgent.match(/Version\/(.*?)\s/)||navigator.userAgent.match(/Chrome\/(\d+)/);navigator.vendor=="Apple Computer, Inc."&&(cc&&cc[1]<4||navigator.platform.slice(0,2)=="iP")||navigator.vendor=="Google Inc."&&cc&&cc[1]<8?k.safari=function(){var a=this.rect(-99,-99,this.width+99,this.height+99).attr({stroke:"none"});setTimeout(function(){a.remove()})}:k.safari=be;var cd=function(){this.returnValue=!1},ce=function(){return this.originalEvent.preventDefault()},cf=function(){this.cancelBubble=!0},cg=function(){return this.originalEvent.stopPropagation()},ch=function(){if(h.doc.addEventListener)return function(a,b,c,d){var e=o&&u[b]?u[b]:b,f=function(e){var f=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,i=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,j=e.clientX+i,k=e.clientY+f;if(o&&u[g](b))for(var l=0,m=e.targetTouches&&e.targetTouches.length;l<m;l++)if(e.targetTouches[l].target==a){var n=e;e=e.targetTouches[l],e.originalEvent=n,e.preventDefault=ce,e.stopPropagation=cg;break}return c.call(d,e,j,k)};a.addEventListener(e,f,!1);return function(){a.removeEventListener(e,f,!1);return!0}};if(h.doc.attachEvent)return function(a,b,c,d){var e=function(a){a=a||h.win.event;var b=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f=a.clientX+e,g=a.clientY+b;a.preventDefault=a.preventDefault||cd,a.stopPropagation=a.stopPropagation||cf;return c.call(d,a,f,g)};a.attachEvent("on"+b,e);var f=function(){a.detachEvent("on"+b,e);return!0};return f}}(),ci=[],cj=function(a){var b=a.clientX,c=a.clientY,d=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f,g=ci.length;while(g--){f=ci[g];if(o){var i=a.touches.length,j;while(i--){j=a.touches[i];if(j.identifier==f.el._drag.id){b=j.clientX,c=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}}else a.preventDefault();var k=f.el.node,l,m=k.nextSibling,n=k.parentNode,p=k.style.display;h.win.opera&&n.removeChild(k),k.style.display="none",l=f.el.paper.getElementByPoint(b,c),k.style.display=p,h.win.opera&&(m?n.insertBefore(k,m):n.appendChild(k)),l&&eve("raphael.drag.over."+f.el.id,f.el,l),b+=e,c+=d,eve("raphael.drag.move."+f.el.id,f.move_scope||f.el,b-f.el._drag.x,c-f.el._drag.y,b,c,a)}},ck=function(b){a.unmousemove(cj).unmouseup(ck);var 
c=ci.length,d;while(c--)d=ci[c],d.el._drag={},eve("raphael.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,b);ci=[]},cl=a.el={};for(var cm=t.length;cm--;)(function(b){a[b]=cl[b]=function(c,d){a.is(c,"function")&&(this.events=this.events||[],this.events.push({name:b,f:c,unbind:ch(this.shape||this.node||h.doc,b,c,d||this)}));return this},a["un"+b]=cl["un"+b]=function(a){var c=this.events||[],d=c.length;while(d--)if(c[d].name==b&&c[d].f==a){c[d].unbind(),c.splice(d,1),!c.length&&delete this.events;return this}return this}})(t[cm]);cl.data=function(b,c){var d=bb[this.id]=bb[this.id]||{};if(arguments.length==1){if(a.is(b,"object")){for(var e in b)b[g](e)&&this.data(e,b[e]);return this}eve("raphael.data.get."+this.id,this,d[b],b);return d[b]}d[b]=c,eve("raphael.data.set."+this.id,this,c,b);return this},cl.removeData=function(a){a==null?bb[this.id]={}:bb[this.id]&&delete bb[this.id][a];return this},cl.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},cl.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var cn=[];cl.drag=function(b,c,d,e,f,g){function i(i){(i.originalEvent||i).preventDefault();var j=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,k=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft;this._drag.x=i.clientX+k,this._drag.y=i.clientY+j,this._drag.id=i.identifier,!ci.length&&a.mousemove(cj).mouseup(ck),ci.push({el:this,move_scope:e,start_scope:f,end_scope:g}),c&&eve.on("raphael.drag.start."+this.id,c),b&&eve.on("raphael.drag.move."+this.id,b),d&&eve.on("raphael.drag.end."+this.id,d),eve("raphael.drag.start."+this.id,f||e||this,i.clientX+k,i.clientY+j,i)}this._drag={},cn.push({el:this,start:i}),this.mousedown(i);return this},cl.onDragOver=function(a){a?eve.on("raphael.drag.over."+this.id,a):eve.unbind("raphael.drag.over."+this.id)},cl.undrag=function(){var b=cn.length;while(b--)cn[b].el==this&&(this.unmousedown(cn[b].start),cn.splice(b,1),eve.unbind("raphael.drag.*."+this.id));!cn.length&&a.unmousemove(cj).unmouseup(ck)},k.circle=function(b,c,d){var e=a._engine.circle(this,b||0,c||0,d||0);this.__set__&&this.__set__.push(e);return e},k.rect=function(b,c,d,e,f){var g=a._engine.rect(this,b||0,c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.ellipse=function(b,c,d,e){var f=a._engine.ellipse(this,b||0,c||0,d||0,e||0);this.__set__&&this.__set__.push(f);return f},k.path=function(b){b&&!a.is(b,D)&&!a.is(b[0],E)&&(b+=p);var c=a._engine.path(a.format[m](a,arguments),this);this.__set__&&this.__set__.push(c);return c},k.image=function(b,c,d,e,f){var g=a._engine.image(this,b||"about:blank",c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.text=function(b,c,d){var e=a._engine.text(this,b||0,c||0,r(d));this.__set__&&this.__set__.push(e);return e},k.set=function(b){!a.is(b,"array")&&(b=Array.prototype.splice.call(arguments,0,arguments.length));var c=new cG(b);this.__set__&&this.__set__.push(c);return c},k.setStart=function(a){this.__set__=a||this.set()},k.setFinish=function(a){var b=this.__set__;delete this.__set__;return b},k.setSize=function(b,c){return a._engine.setSize.call(this,b,c)},k.setViewBox=function(b,c,d,e,f){return a._engine.setViewBox.call(this,b,c,d,e,f)},k.top=k.bottom=null,k.raphael=a;var co=function(a){var 
b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,g=e.clientLeft||d.clientLeft||0,i=b.top+(h.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(h.win.pageXOffset||e.scrollLeft||d.scrollLeft)-g;return{y:i,x:j}};k.getElementByPoint=function(a,b){var c=this,d=c.canvas,e=h.doc.elementFromPoint(a,b);if(h.win.opera&&e.tagName=="svg"){var f=co(d),g=d.createSVGRect();g.x=a-f.x,g.y=b-f.y,g.width=g.height=1;var i=d.getIntersectionList(g,null);i.length&&(e=i[i.length-1])}if(!e)return null;while(e.parentNode&&e!=d.parentNode&&!e.raphael)e=e.parentNode;e==c.canvas.parentNode&&(e=d),e=e&&e.raphael?c.getById(e.raphaelid):null;return e},k.getById=function(a){var b=this.bottom;while(b){if(b.id==a)return b;b=b.next}return null},k.forEach=function(a,b){var c=this.bottom;while(c){if(a.call(b,c)===!1)return this;c=c.next}return this},k.getElementsByPoint=function(a,b){var c=this.set();this.forEach(function(d){d.isPointInside(a,b)&&c.push(d)});return c},cl.isPointInside=function(b,c){var d=this.realPath=this.realPath||bi[this.type](this);return a.isPointInsidePath(d,b,c)},cl.getBBox=function(a){if(this.removed)return{};var b=this._;if(a){if(b.dirty||!b.bboxwt)this.realPath=bi[this.type](this),b.bboxwt=bI(this.realPath),b.bboxwt.toString=cq,b.dirty=0;return b.bboxwt}if(b.dirty||b.dirtyT||!b.bbox){if(b.dirty||!this.realPath)b.bboxwt=0,this.realPath=bi[this.type](this);b.bbox=bI(bj(this.realPath,this.matrix)),b.bbox.toString=cq,b.dirty=b.dirtyT=0}return b.bbox},cl.clone=function(){if(this.removed)return null;var a=this.paper[this.type]().attr(this.attr());this.__set__&&this.__set__.push(a);return a},cl.glow=function(a){if(this.type=="text")return null;a=a||{};var b={width:(a.width||10)+(+this.attr("stroke-width")||1),fill:a.fill||!1,opacity:a.opacity||.5,offsetx:a.offsetx||0,offsety:a.offsety||0,color:a.color||"#000"},c=b.width/2,d=this.paper,e=d.set(),f=this.realPath||bi[this.type](this);f=this.matrix?bj(f,this.matrix):f;for(var g=1;g<c+1;g++)e.push(d.path(f).attr({stroke:b.color,fill:b.fill?b.color:"none","stroke-linejoin":"round","stroke-linecap":"round","stroke-width":+(b.width/c*g).toFixed(3),opacity:+(b.opacity/c).toFixed(3)}));return e.insertBefore(this).translate(b.offsetx,b.offsety)};var cr={},cs=function(b,c,d,e,f,g,h,i,j){return j==null?bB(b,c,d,e,f,g,h,i):a.findDotsAtSegment(b,c,d,e,f,g,h,i,bC(b,c,d,e,f,g,h,i,j))},ct=function(b,c){return function(d,e,f){d=bR(d);var g,h,i,j,k="",l={},m,n=0;for(var o=0,p=d.length;o<p;o++){i=d[o];if(i[0]=="M")g=+i[1],h=+i[2];else{j=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6]);if(n+j>e){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return 
a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c<cy.length;c++){var d=cy[c];if(d.el.removed||d.paused)continue;var e=b-d.start,f=d.ms,h=d.easing,i=d.from,j=d.diff,k=d.to,l=d.t,m=d.el,o={},p,r={},s;d.initstatus?(e=(d.initstatus*d.anim.top-d.prev)/(d.percent-d.prev)*f,d.status=d.initstatus,delete d.initstatus,d.stop&&cy.splice(c--,1)):d.status=(d.prev+(d.percent-d.prev)*(e/f))/d.anim.top;if(e<0)continue;if(e<f){var t=h(e/f);for(var u in i)if(i[g](u)){switch(U[u]){case C:p=+i[u]+t*f*j[u];break;case"colour":p="rgb("+[cB(O(i[u].r+t*f*j[u].r)),cB(O(i[u].g+t*f*j[u].g)),cB(O(i[u].b+t*f*j[u].b))].join(",")+")";break;case"path":p=[];for(var v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(var x=1,y=i[u][v].length;x<y;x++)p[v][x]=+i[u][v][x]+t*f*j[u][v][x];p[v]=p[v].join(q)}p=p.join(q);break;case"transform":if(j[u].real){p=[];for(v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(x=1,y=i[u][v].length;x<y;x++)p[v][x]=i[u][v][x]+t*f*j[u][v][x]}}else{var z=function(a){return+i[u][a]+t*f*j[u][a]};p=[["m",z(0),z(1),z(2),z(3),z(4),z(5)]]}break;case"csv":if(u=="clip-rect"){p=[],v=4;while(v--)p[v]=+i[u][v]+t*f*j[u][v]}break;default:var A=[][n](i[u]);p=[],v=m.paper.customAttributes[u].length;while(v--)p[v]=+A[v]+t*f*j[u][v]}o[u]=p}m.attr(o),function(a,b,c){setTimeout(function(){eve("raphael.anim.frame."+a,b,c)})}(m.id,m,d.anim)}else{(function(b,c,d){setTimeout(function(){eve("raphael.anim.frame."+c.id,c,d),eve("raphael.anim.finish."+c.id,c,d),a.is(b,"function")&&b.call(c)})})(d.callback,m,d.anim),m.attr(k),cy.splice(c--,1);if(d.repeat>1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l<m;l++)if(cy[l].anim==c&&cy[l].el==b){cy[m-1].start=cy[l].start;break}return h},cl.onAnimation=function(a){a?eve.on("raphael.anim.frame."+this.id,a):eve.unbind("raphael.anim.frame."+this.id);return this},cD.prototype.delay=function(a){var b=new cD(this.anim,this.ms);b.times=this.times,b.del=+a||0;return b},cD.prototype.repeat=function(a){var b=new cD(this.anim,this.ms);b.del=this.del,b.times=w.floor(x(a,0))||1;return b},a.animation=function(b,c,d,e){if(b instanceof cD)return 
b;if(a.is(d,"function")||!d)e=e||d||null,d=null;b=Object(b),c=+c||0;var f={},h,i;for(i in b)b[g](i)&&Q(i)!=i&&Q(i)+"%"!=i&&(h=!0,f[i]=b[i]);if(!h)return new cD(b,c);d&&(f.easing=d),e&&(f.callback=e);return new cD({100:f},c)},cl.animate=function(b,c,d,e){var f=this;if(f.removed){e&&e.call(f);return f}var g=b instanceof cD?b:a.animation(b,c,d,e);cE(g,f,g.percents[0],null,f.attr());return f},cl.setTime=function(a,b){a&&b!=null&&this.status(a,y(b,a.ms)/a.ms);return this},cl.status=function(a,b){var c=[],d=0,e,f;if(b!=null){cE(a,this,-1,y(b,1));return this}e=cy.length;for(;d<e;d++){f=cy[d];if(f.el.id==this.id&&(!a||f.anim==a)){if(a)return f.status;c.push({anim:f.anim,status:f.status})}}if(a)return 0;return c},cl.pause=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.pause."+this.id,this,cy[b].anim)!==!1&&(cy[b].paused=!0);return this},cl.resume=function(a){for(var b=0;b<cy.length;b++)if(cy[b].el.id==this.id&&(!a||cy[b].anim==a)){var c=cy[b];eve("raphael.anim.resume."+this.id,this,c.anim)!==!1&&(delete c.paused,this.status(c.anim,c.status))}return this},cl.stop=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.stop."+this.id,this,cy[b].anim)!==!1&&cy.splice(b--,1);return this},eve.on("raphael.remove",cF),eve.on("raphael.clear",cF),cl.toString=function(){return"Raphaël’s object"};var cG=function(a){this.items=[],this.length=0,this.type="set";if(a)for(var b=0,c=a.length;b<c;b++)a[b]&&(a[b].constructor==cl.constructor||a[b].constructor==cG)&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},cH=cG.prototype;cH.push=function(){var a,b;for(var c=0,d=arguments.length;c<d;c++)a=arguments[c],a&&(a.constructor==cl.constructor||a.constructor==cG)&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},cH.pop=function(){this.length&&delete this[this.length--];return this.items.pop()},cH.forEach=function(a,b){for(var c=0,d=this.items.length;c<d;c++)if(a.call(b,this.items[c],c)===!1)return this;return this};for(var cI in cl)cl[g](cI)&&(cH[cI]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a][m](c,b)})}}(cI));cH.attr=function(b,c){if(b&&a.is(b,E)&&a.is(b[0],"object"))for(var d=0,e=b.length;d<e;d++)this.items[d].attr(b[d]);else for(var f=0,g=this.items.length;f<g;f++)this.items[f].attr(b,c);return this},cH.clear=function(){while(this.length)this.pop()},cH.splice=function(a,b,c){a=a<0?x(this.length+a,0):a,b=x(0,y(this.length-a,b));var d=[],e=[],f=[],g;for(g=2;g<arguments.length;g++)f.push(arguments[g]);for(g=0;g<b;g++)e.push(this[a+g]);for(;g<this.length-a;g++)d.push(this[a+g]);var h=f.length;for(g=0;g<h+d.length;g++)this.items[a+g]=this[a+g]=g<h?f[g]:d[g-h];g=this.items.length=this.length-=b-h;while(this[g])delete this[g++];return new cG(e)},cH.exclude=function(a){for(var b=0,c=this.length;b<c;b++)if(this[b]==a){this.splice(b,1);return!0}},cH.animate=function(b,c,d,e){(a.is(d,"function")||!d)&&(e=d||null);var f=this.items.length,g=f,h,i=this,j;if(!f)return this;e&&(j=function(){!--f&&e.call(i)}),d=a.is(d,D)?d:j;var k=a.animation(b,c,d,j);h=this.items[--g].animate(k);while(g--)this.items[g]&&!this.items[g].removed&&this.items[g].animateWith(h,k,k);return this},cH.insertAfter=function(a){var b=this.items.length;while(b--)this.items[b].insertAfter(a);return this},cH.getBBox=function(){var a=[],b=[],c=[],d=[];for(var e=this.items.length;e--;)if(!this.items[e].removed){var 
f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}a=y[m](0,a),b=y[m](0,b),c=x[m](0,c),d=x[m](0,d);return{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b}},cH.clone=function(a){a=new cG;for(var b=0,c=this.items.length;b<c;b++)a.push(this.items[b].clone());return a},cH.toString=function(){return"Raphaël‘s set"},a.registerFont=function(a){if(!a.face)return a;this.fonts=this.fonts||{};var b={w:a.w,face:{},glyphs:{}},c=a.face["font-family"];for(var d in a.face)a.face[g](d)&&(b.face[d]=a.face[d]);this.fonts[c]?this.fonts[c].push(b):this.fonts[c]=[b];if(!a.svg){b.face["units-per-em"]=R(a.face["units-per-em"],10);for(var e in a.glyphs)if(a.glyphs[g](e)){var f=a.glyphs[e];b.glyphs[e]={w:f.w,k:{},d:f.d&&"M"+f.d.replace(/[mlcxtrv]/g,function(a){return{l:"L",c:"C",x:"z",t:"m",r:"l",v:"c"}[a]||"M"})+"z"};if(f.k)for(var h in f.k)f[g](h)&&(b.glyphs[e].k[h]=f.k[h])}}return a},k.getFont=function(b,c,d,e){e=e||"normal",d=d||"normal",c=+c||{normal:400,bold:700,lighter:300,bolder:800}[c]||400;if(!!a.fonts){var f=a.fonts[b];if(!f){var h=new RegExp("(^|\\s)"+b.replace(/[^\w\d\s+!~.:_-]/g,p)+"(\\s|$)","i");for(var i in a.fonts)if(a.fonts[g](i)&&h.test(i)){f=a.fonts[i];break}}var j;if(f)for(var k=0,l=f.length;k<l;k++){j=f[k];if(j.face["font-weight"]==c&&(j.face["font-style"]==d||!j.face["font-style"])&&j.face["font-stretch"]==e)break}return j}},k.print=function(b,d,e,f,g,h,i){h=h||"middle",i=x(y(i||0,1),-1);var j=r(e)[s](p),k=0,l=0,m=p,n;a.is(f,e)&&(f=this.getFont(f));if(f){n=(g||16)/f.face["units-per-em"];var o=f.face.bbox[s](c),q=+o[0],t=o[3]-o[1],u=0,v=+o[1]+(h=="baseline"?t+ +f.face.descent:t/2);for(var w=0,z=j.length;w<z;w++){if(j[w]=="\n")k=0,B=0,l=0,u+=t;else{var A=l&&f.glyphs[j[w-1]]||{},B=f.glyphs[j[w]];k+=l?(A.w||f.w)+(A.k&&A.k[j[w]]||0)+f.w*i:0,l=1}B&&B.d&&(m+=a.transformPath(B.d,["t",k*n,u*n,"s",n,n,q,v,"t",(b-q)/n,(d-v)/n]))}}return this.path(m).attr({fill:"#000",stroke:"none"})},k.add=function(b){if(a.is(b,"array")){var c=this.set(),e=0,f=b.length,h;for(;e<f;e++)h=b[e]||{},d[g](h.type)&&c.push(this[h.type]().attr(h))}return c},a.format=function(b,c){var d=a.is(c,E)?[0][n](c):arguments;b&&a.is(b,D)&&d.length-1&&(b=b.replace(e,function(a,b){return d[++b]==null?p:d[b]}));return b||p},a.fullfill=function(){var a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),typeof e=="function"&&f&&(e=e()))}),e=(e==null||e==d?a:e)+"";return e};return function(b,d){return String(b).replace(a,function(a,b){return c(a,b,d)})}}(),a.ninja=function(){i.was?h.win.Raphael=i.is:delete Raphael;return a},a.st=cH,function(b,c,d){function e(){/in/.test(b.readyState)?setTimeout(e,9):a.eve("raphael.DOMload")}b.readyState==null&&b.addEventListener&&(b.addEventListener(c,d=function(){b.removeEventListener(c,d,!1),b.readyState="complete"},!1),b.readyState="loading"),e()}(document,"DOMContentLoaded"),i.was?h.win.Raphael=a:Raphael=a,eve.on("raphael.DOMload",function(){b=!0})}(),window.Raphael.svg&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=parseInt,f=Math,g=f.max,h=f.abs,i=f.pow,j=/[, ]+/,k=a.eve,l="",m=" ",n="http://www.w3.org/1999/xlink",o={block:"M5,0 0,2.5 5,5z",classic:"M5,0 0,2.5 5,5 3.5,3 3.5,2z",diamond:"M2.5,0 5,2.5 2.5,5 0,2.5z",open:"M6,1 1,3.5 6,6",oval:"M2.5,0A2.5,2.5,0,0,1,2.5,5 2.5,2.5,0,0,1,2.5,0z"},p={};a.toString=function(){return"Your browser supports SVG.\nYou are running Raphaël "+this.version};var q=function(d,e){if(e){typeof d=="string"&&(d=q(d));for(var 
f in e)e[b](f)&&(f.substring(0,6)=="xlink:"?d.setAttributeNS(n,f.substring(6),c(e[f])):d.setAttribute(f,c(e[f])))}else d=a._g.doc.createElementNS("http://www.w3.org/2000/svg",d),d.style&&(d.style.webkitTapHighlightColor="rgba(0,0,0,0)");return d},r=function(b,e){var j="linear",k=b.id+e,m=.5,n=.5,o=b.node,p=b.paper,r=o.style,s=a._g.doc.getElementById(k);if(!s){e=c(e).replace(a._radial_gradient,function(a,b,c){j="radial";if(b&&c){m=d(b),n=d(c);var e=(n>.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x<y;x++)s.appendChild(q("stop",{offset:w[x].offset?w[x].offset:x?"100%":"0%","stop-color":w[x].color||"#fff"}))}}q(o,{fill:"url(#"+k+")",opacity:1,"fill-opacity":1}),r.fill=l,r.opacity=1,r.fillOpacity=1;return 1},s=function(a){var b=a.getBBox(1);q(a.pattern,{patternTransform:a.matrix.invert()+" translate("+b.x+","+b.y+")"})},t=function(d,e,f){if(d.type=="path"){var g=c(e).toLowerCase().split("-"),h=d.paper,i=f?"end":"start",j=d.node,k=d.attrs,m=k["stroke-width"],n=g.length,r="classic",s,t,u,v,w,x=3,y=3,z=5;while(n--)switch(g[n]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":r=g[n];break;case"wide":y=5;break;case"narrow":y=2;break;case"long":x=5;break;case"short":x=2}r=="open"?(x+=2,y+=2,z+=2,u=1,v=f?4:1,w={fill:"none",stroke:k.stroke}):(v=u=x/2,w={fill:k.stroke,stroke:"none"}),d._.arrows?f?(d._.arrows.endPath&&p[d._.arrows.endPath]--,d._.arrows.endMarker&&p[d._.arrows.endMarker]--):(d._.arrows.startPath&&p[d._.arrows.startPath]--,d._.arrows.startMarker&&p[d._.arrows.startMarker]--):d._.arrows={};if(r!="none"){var A="raphael-marker-"+r,B="raphael-marker-"+i+r+x+y;a._g.doc.getElementById(A)?p[A]++:(h.defs.appendChild(q(q("path"),{"stroke-linecap":"round",d:o[r],id:A})),p[A]=1);var C=a._g.doc.getElementById(B),D;C?(p[B]++,D=C.getElementsByTagName("use")[0]):(C=q(q("marker"),{id:B,markerHeight:y,markerWidth:x,orient:"auto",refX:v,refY:y/2}),D=q(q("use"),{"xlink:href":"#"+A,transform:(f?"rotate(180 "+x/2+" "+y/2+") ":l)+"scale("+x/z+","+y/z+")","stroke-width":(1/((x/z+y/z)/2)).toFixed(4)}),C.appendChild(D),h.defs.appendChild(C),p[B]=1),q(D,w);var F=u*(r!="diamond"&&r!="oval");f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-F*m):(s=F*m,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),w={},w["marker-"+i]="url(#"+B+")";if(t||s)w.d=Raphael.getSubpath(k.path,s,t);q(j,w),d._.arrows[i+"Path"]=A,d._.arrows[i+"Marker"]=B,d._.arrows[i+"dx"]=F,d._.arrows[i+"Type"]=r,d._.arrows[i+"String"]=e}else f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-s):(s=0,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),d._.arrows[i+"Path"]&&q(j,{d:Raphael.getSubpath(k.path,s,t)}),delete d._.arrows[i+"Path"],delete d._.arrows[i+"Marker"],delete d._.arrows[i+"dx"],delete d._.arrows[i+"Type"],delete d._.arrows[i+"String"];for(w in p)if(p[b](w)&&!p[w]){var 
G=a._g.doc.getElementById(w);G&&G.parentNode.removeChild(G)}}},u={"":[0],none:[0],"-":[3,1],".":[1,1],"-.":[3,1,1,1],"-..":[3,1,1,1,1,1],". ":[1,3],"- ":[4,3],"--":[8,3],"- .":[4,3,1,3],"--.":[8,3,1,3],"--..":[8,3,1,3,1,3]},v=function(a,b,d){b=u[c(b).toLowerCase()];if(b){var e=a.attrs["stroke-width"]||"1",f={round:e,square:e,butt:0}[a.attrs["stroke-linecap"]||d["stroke-linecap"]]||0,g=[],h=b.length;while(h--)g[h]=b[h]*e+(h%2?1:-1)*f;q(a.node,{"stroke-dasharray":g.join(",")})}},w=function(d,f){var i=d.node,k=d.attrs,m=i.style.visibility;i.style.visibility="hidden";for(var o in f)if(f[b](o)){if(!a._availableAttrs[b](o))continue;var p=f[o];k[o]=p;switch(o){case"blur":d.blur(p);break;case"href":case"title":case"target":var u=i.parentNode;if(u.tagName.toLowerCase()!="a"){var w=q("a");u.insertBefore(w,i),w.appendChild(i),u=w}o=="target"?u.setAttributeNS(n,"show",p=="blank"?"new":p):u.setAttributeNS(n,o,p);break;case"cursor":i.style.cursor=p;break;case"transform":d.transform(p);break;case"arrow-start":t(d,p);break;case"arrow-end":t(d,p,1);break;case"clip-rect":var x=c(p).split(j);if(x.length==4){d.clip&&d.clip.parentNode.parentNode.removeChild(d.clip.parentNode);var z=q("clipPath"),A=q("rect");z.id=a.createUUID(),q(A,{x:x[0],y:x[1],width:x[2],height:x[3]}),z.appendChild(A),d.paper.defs.appendChild(z),q(i,{"clip-path":"url(#"+z.id+")"}),d.clip=A}if(!p){var B=i.getAttribute("clip-path");if(B){var C=a._g.doc.getElementById(B.replace(/(^url\(#|\)$)/g,l));C&&C.parentNode.removeChild(C),q(i,{"clip-path":l}),delete d.clip}}break;case"path":d.type=="path"&&(q(i,{d:p?k.path=a._pathToAbsolute(p):"M0,0"}),d._.dirty=1,d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1)));break;case"width":i.setAttribute(o,p),d._.dirty=1;if(k.fx)o="x",p=k.x;else break;case"x":k.fx&&(p=-k.x-(k.width||0));case"rx":if(o=="rx"&&d.type=="rect")break;case"cx":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"height":i.setAttribute(o,p),d._.dirty=1;if(k.fy)o="y",p=k.y;else break;case"y":k.fy&&(p=-k.y-(k.height||0));case"ry":if(o=="ry"&&d.type=="rect")break;case"cy":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"r":d.type=="rect"?q(i,{rx:p,ry:p}):i.setAttribute(o,p),d._.dirty=1;break;case"src":d.type=="image"&&i.setAttributeNS(n,"href",p);break;case"stroke-width":if(d._.sx!=1||d._.sy!=1)p/=g(h(d._.sx),h(d._.sy))||1;d.paper._vbSize&&(p*=d.paper._vbSize),i.setAttribute(o,p),k["stroke-dasharray"]&&v(d,k["stroke-dasharray"],f),d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"stroke-dasharray":v(d,p,f);break;case"fill":var D=c(p).match(a._ISURL);if(D){z=q("pattern");var F=q("image");z.id=a.createUUID(),q(z,{x:0,y:0,patternUnits:"userSpaceOnUse",height:1,width:1}),q(F,{x:0,y:0,"xlink:href":D[1]}),z.appendChild(F),function(b){a._preload(D[1],function(){var a=this.offsetWidth,c=this.offsetHeight;q(b,{width:a,height:c}),q(F,{width:a,height:c}),d.paper.safari()})}(z),d.paper.defs.appendChild(z),q(i,{fill:"url(#"+z.id+")"}),d.pattern=z,d.pattern&&s(d);break}var G=a.getRGB(p);if(!G.error)delete f.gradient,delete k.gradient,!a.is(k.opacity,"undefined")&&a.is(f.opacity,"undefined")&&q(i,{opacity:k.opacity}),!a.is(k["fill-opacity"],"undefined")&&a.is(f["fill-opacity"],"undefined")&&q(i,{"fill-opacity":k["fill-opacity"]});else if((d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p)){if("opacity"in k||"fill-opacity"in k){var 
H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l));if(H){var I=H.getElementsByTagName("stop");q(I[I.length-1],{"stop-opacity":("opacity"in k?k.opacity:1)*("fill-opacity"in k?k["fill-opacity"]:1)})}}k.gradient=p,k.fill="none";break}G[b]("opacity")&&q(i,{"fill-opacity":G.opacity>1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n<o;n++)m=q("tspan"),n&&q(m,{dy:i*x,x:g.x}),m.appendChild(a._g.doc.createTextNode(j[n])),h.appendChild(m),k[n]=m}else{k=h.getElementsByTagName("tspan");for(n=0,o=k.length;n<o;n++)n?q(k[n],{dy:i*x,x:g.x}):q(k[0],{dy:0})}q(h,{x:g.x,y:g.y}),d._.dirty=1;var p=d._getBBox(),r=g.y-(p.y+p.height/2);r&&a.is(r,"finite")&&q(k[0],{dy:r})}},z=function(b,c){var d=0,e=0;this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.matrix=a.matrix(),this.realPath=null,this.paper=c,this.attrs=this.attrs||{},this._={transform:[],sx:1,sy:1,deg:0,dx:0,dy:0,dirty:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},A=a.el;z.prototype=A,A.constructor=z,a._engine.path=function(a,b){var c=q("path");b.canvas&&b.canvas.appendChild(c);var d=new z(c,b);d.type="path",w(d,{fill:"none",stroke:"#000",path:a});return d},A.rotate=function(a,b,e){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this.transform(this._.transform.concat([["r",a,b,e]]));return this},A.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3])),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]]));return this},A.translate=function(a,b){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this.transform(this._.transform.concat([["t",a,b]]));return this},A.transform=function(c){var d=this._;if(c==null)return d.transform;a._extractTransform(this,c),this.clip&&q(this.clip,{transform:this.matrix.invert()}),this.pattern&&s(this),this.node&&q(this.node,{transform:this.matrix});if(d.sx!=1||d.sy!=1){var e=this.attrs[b]("stroke-width")?this.attrs["stroke-width"]:1;this.attr({"stroke-width":e})}return 
this},A.hide=function(){!this.removed&&this.paper.safari(this.node.style.display="none");return this},A.show=function(){!this.removed&&this.paper.safari(this.node.style.display="");return this},A.remove=function(){if(!this.removed&&!!this.node.parentNode){var b=this.paper;b.__set__&&b.__set__.exclude(this),k.unbind("raphael.*.*."+this.id),this.gradient&&b.defs.removeChild(this.gradient),a._tear(this,b),this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.removeChild(this.node.parentNode):this.node.parentNode.removeChild(this.node);for(var c in this)this[c]=typeof this[c]=="function"?a._removedFactory(c):null;this.removed=!0}},A._getBBox=function(){if(this.node.style.display=="none"){this.show();var a=!0}var b={};try{b=this.node.getBBox()}catch(c){}finally{b=b||{}}a&&this.hide();return b},A.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c=="fill"&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;if(c=="transform")return this._.transform;var g=c.split(j),h={};for(var i=0,l=g.length;i<l;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return l-1?h:h[g[0]]}if(d==null&&a.is(c,"array")){h={};for(i=0,l=c.length;i<l;i++)h[c[i]]=this.attr(c[i]);return h}if(d!=null){var m={};m[c]=d}else c!=null&&a.is(c,"object")&&(m=c);for(var n in m)k("raphael.attr."+n+"."+this.id,this,m[n]);for(n in this.paper.customAttributes)if(this.paper.customAttributes[b](n)&&m[b](n)&&a.is(this.paper.customAttributes[n],"function")){var o=this.paper.customAttributes[n].apply(this,[].concat(m[n]));this.attrs[n]=m[n];for(var p in o)o[b](p)&&(m[p]=o[p])}w(this,m);return this},A.toFront=function(){if(this.removed)return this;this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.appendChild(this.node.parentNode):this.node.parentNode.appendChild(this.node);var b=this.paper;b.top!=this&&a._tofront(this,b);return this},A.toBack=function(){if(this.removed)return this;var b=this.node.parentNode;b.tagName.toLowerCase()=="a"?b.parentNode.insertBefore(this.node.parentNode,this.node.parentNode.parentNode.firstChild):b.firstChild!=this.node&&b.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper);var c=this.paper;return this},A.insertAfter=function(b){if(this.removed)return this;var c=b.node||b[b.length-1].node;c.nextSibling?c.parentNode.insertBefore(this.node,c.nextSibling):c.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},A.insertBefore=function(b){if(this.removed)return this;var c=b.node||b[0].node;c.parentNode.insertBefore(this.node,c),a._insertbefore(this,b,this.paper);return this},A.blur=function(b){var c=this;if(+b!==0){var d=q("filter"),e=q("feGaussianBlur");c.attrs.blur=b,d.id=a.createUUID(),q(e,{stdDeviation:+b||1.5}),d.appendChild(e),c.paper.defs.appendChild(d),c._blur=d,q(c.node,{filter:"url(#"+d.id+")"})}else c._blur&&(c._blur.parentNode.removeChild(c._blur),delete c._blur,delete c.attrs.blur),c.node.removeAttribute("filter")},a._engine.circle=function(a,b,c,d){var e=q("circle");a.canvas&&a.canvas.appendChild(e);var f=new z(e,a);f.attrs={cx:b,cy:c,r:d,fill:"none",stroke:"#000"},f.type="circle",q(e,f.attrs);return f},a._engine.rect=function(a,b,c,d,e,f){var 
g=q("rect");a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:b,y:c,width:d,height:e,r:f||0,rx:f||0,ry:f||0,fill:"none",stroke:"#000"},h.type="rect",q(g,h.attrs);return h},a._engine.ellipse=function(a,b,c,d,e){var f=q("ellipse");a.canvas&&a.canvas.appendChild(f);var g=new z(f,a);g.attrs={cx:b,cy:c,rx:d,ry:e,fill:"none",stroke:"#000"},g.type="ellipse",q(f,g.attrs);return g},a._engine.image=function(a,b,c,d,e,f){var g=q("image");q(g,{x:c,y:d,width:e,height:f,preserveAspectRatio:"none"}),g.setAttributeNS(n,"href",b),a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:c,y:d,width:e,height:f,src:b},h.type="image";return h},a._engine.text=function(b,c,d,e){var f=q("text");b.canvas&&b.canvas.appendChild(f);var g=new z(f,b);g.attrs={x:c,y:d,"text-anchor":"middle",text:e,font:a._availableAttrs.font,stroke:"none",fill:"#000"},g.type="text",w(g,g.attrs);return g},a._engine.setSize=function(a,b){this.width=a||this.width,this.height=b||this.height,this.canvas.setAttribute("width",this.width),this.canvas.setAttribute("height",this.height),this._viewBox&&this.setViewBox.apply(this,this._viewBox);return this},a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b&&b.container,d=b.x,e=b.y,f=b.width,g=b.height;if(!c)throw new Error("SVG container not found.");var h=q("svg"),i="overflow:hidden;",j;d=d||0,e=e||0,f=f||512,g=g||342,q(h,{height:g,version:1.1,width:f,xmlns:"http://www.w3.org/2000/svg"}),c==1?(h.style.cssText=i+"position:absolute;left:"+d+"px;top:"+e+"px",a._g.doc.body.appendChild(h),j=1):(h.style.cssText=i+"position:relative",c.firstChild?c.insertBefore(h,c.firstChild):c.appendChild(h)),c=new a._Paper,c.width=f,c.height=g,c.canvas=h,c.clear(),c._left=c._top=0,j&&(c.renderfix=function(){}),c.renderfix();return c},a._engine.setViewBox=function(a,b,c,d,e){k("raphael.setViewBox",this,this._viewBox,[a,b,c,d,e]);var f=g(c/this.width,d/this.height),h=this.top,i=e?"meet":"xMinYMin",j,l;a==null?(this._vbSize&&(f=1),delete this._vbSize,j="0 0 "+this.width+m+this.height):(this._vbSize=f,j=a+m+b+m+c+m+d),q(this.canvas,{viewBox:j,preserveAspectRatio:i});while(f&&h)l="stroke-width"in h.attrs?h.attrs["stroke-width"]:1,h.attr({"stroke-width":l}),h._.dirty=1,h._.dirtyT=1,h=h.prev;this._viewBox=[a,b,c,d,!!e];return this},a.prototype.renderfix=function(){var a=this.canvas,b=a.style,c;try{c=a.getScreenCTM()||a.createSVGMatrix()}catch(d){c=a.createSVGMatrix()}var e=-c.e%1,f=-c.f%1;if(e||f)e&&(this._left=(this._left+e)%1,b.left=this._left+"px"),f&&(this._top=(this._top+f)%1,b.top=this._top+"px")},a.prototype.clear=function(){a.eve("raphael.clear",this);var b=this.canvas;while(b.firstChild)b.removeChild(b.firstChild);this.bottom=this.top=null,(this.desc=q("desc")).appendChild(a._g.doc.createTextNode("Created with Raphaël "+a.version)),b.appendChild(this.desc),b.appendChild(this.defs=q("defs"))},a.prototype.remove=function(){k("raphael.remove",this),this.canvas.parentNode&&this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null};var B=a.st;for(var C in A)A[b](C)&&!B[b](C)&&(B[C]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(C))}(window.Raphael),window.Raphael.vml&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=Math,f=e.round,g=e.max,h=e.min,i=e.abs,j="fill",k=/[, ]+/,l=a.eve,m=" progid:DXImageTransform.Microsoft",n=" ",o="",p={M:"m",L:"l",C:"c",Z:"x",m:"t",l:"r",c:"v",z:"x"},q=/([clmz]),?([^clmz]*)/gi,r=/ 
progid:\S+Blur\([^\)]+\)/g,s=/-?[^,\s-]+/g,t="position:absolute;left:0;top:0;width:1px;height:1px",u=21600,v={path:1,rect:1,image:1},w={circle:1,ellipse:1},x=function(b){var d=/[ahqstv]/ig,e=a._pathToAbsolute;c(b).match(d)&&(e=a._path2curve),d=/[clmz]/g;if(e==a._pathToAbsolute&&!c(b).match(d)){var g=c(b).replace(q,function(a,b,c){var d=[],e=b.toLowerCase()=="m",g=p[b];c.replace(s,function(a){e&&d.length==2&&(g+=d+p[b=="m"?"l":"L"],d=[]),d.push(f(a*u))});return g+d});return g}var h=e(b),i,j;g=[];for(var k=0,l=h.length;k<l;k++){i=h[k],j=h[k][0].toLowerCase(),j=="z"&&(j="x");for(var m=1,r=i.length;m<r;m++)j+=f(i[m]*u)+(m!=r-1?",":o);g.push(j)}return g.join(n)},y=function(b,c,d){var e=a.matrix();e.rotate(-b,.5,.5);return{dx:e.x(c,d),dy:e.y(c,d)}},z=function(a,b,c,d,e,f){var g=a._,h=a.matrix,k=g.fillpos,l=a.node,m=l.style,o=1,p="",q,r=u/b,s=u/c;m.visibility="hidden";if(!!b&&!!c){l.coordsize=i(r)+n+i(s),m.rotation=f*(b*c<0?-1:1);if(f){var t=y(f,d,e);d=t.dx,e=t.dy}b<0&&(p+="x"),c<0&&(p+=" y")&&(o=-1),m.flip=p,l.coordorigin=d*-r+n+e*-s;if(k||g.fillsize){var v=l.getElementsByTagName(j);v=v&&v[0],l.removeChild(v),k&&(t=y(f,h.x(k[0],k[1]),h.y(k[0],k[1])),v.position=t.dx*o+n+t.dy*o),g.fillsize&&(v.size=g.fillsize[0]*i(b)+n+g.fillsize[1]*i(c)),l.appendChild(v)}m.visibility="visible"}};a.toString=function(){return"Your browser doesn’t support SVG. Falling down to VML.\nYou are running Raphaël "+this.version};var A=function(a,b,d){var e=c(b).toLowerCase().split("-"),f=d?"end":"start",g=e.length,h="classic",i="medium",j="medium";while(g--)switch(e[g]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":h=e[g];break;case"wide":case"narrow":j=e[g];break;case"long":case"short":i=e[g]}var k=a.node.getElementsByTagName("stroke")[0];k[f+"arrow"]=h,k[f+"arrowlength"]=i,k[f+"arrowwidth"]=j},B=function(e,i){e.attrs=e.attrs||{};var l=e.node,m=e.attrs,p=l.style,q,r=v[e.type]&&(i.x!=m.x||i.y!=m.y||i.width!=m.width||i.height!=m.height||i.cx!=m.cx||i.cy!=m.cy||i.rx!=m.rx||i.ry!=m.ry||i.r!=m.r),s=w[e.type]&&(m.cx!=i.cx||m.cy!=i.cy||m.r!=i.r||m.rx!=i.rx||m.ry!=i.ry),t=e;for(var y in i)i[b](y)&&(m[y]=i[y]);r&&(m.path=a._getPath[e.type](e),e._.dirty=1),i.href&&(l.href=i.href),i.title&&(l.title=i.title),i.target&&(l.target=i.target),i.cursor&&(p.cursor=i.cursor),"blur"in i&&e.blur(i.blur);if(i.path&&e.type=="path"||r)l.path=x(~c(m.path).toLowerCase().indexOf("r")?a._pathToAbsolute(m.path):m.path),e.type=="image"&&(e._.fillpos=[m.x,m.y],e._.fillsize=[m.width,m.height],z(e,1,1,0,0,0));"transform"in i&&e.transform(i.transform);if(s){var B=+m.cx,D=+m.cy,E=+m.rx||+m.r||0,G=+m.ry||+m.r||0;l.path=a.format("ar{0},{1},{2},{3},{4},{1},{4},{1}x",f((B-E)*u),f((D-G)*u),f((B+E)*u),f((D+G)*u),f(B*u))}if("clip-rect"in i){var H=c(i["clip-rect"]).split(k);if(H.length==4){H[2]=+H[2]+ +H[0],H[3]=+H[3]+ +H[1];var I=l.clipRect||a._g.doc.createElement("div"),J=I.style;J.clip=a.format("rect({1}px {2}px {3}px {0}px)",H),l.clipRect||(J.position="absolute",J.top=0,J.left=0,J.width=e.paper.width+"px",J.height=e.paper.height+"px",l.parentNode.insertBefore(I,l),I.appendChild(l),l.clipRect=I)}i["clip-rect"]||l.clipRect&&(l.clipRect.style.clip="auto")}if(e.textpath){var K=e.textpath.style;i.font&&(K.font=i.font),i["font-family"]&&(K.fontFamily='"'+i["font-family"].split(",")[0].replace(/^['"]+|['"]+$/g,o)+'"'),i["font-size"]&&(K.fontSize=i["font-size"]),i["font-weight"]&&(K.fontWeight=i["font-weight"]),i["font-style"]&&(K.fontStyle=i["font-style"])}"arrow-start"in i&&A(t,i["arrow-start"]),"arrow-end"in 
i&&A(t,i["arrow-end"],1);if(i.opacity!=null||i["stroke-width"]!=null||i.fill!=null||i.src!=null||i.stroke!=null||i["stroke-width"]!=null||i["stroke-opacity"]!=null||i["fill-opacity"]!=null||i["stroke-dasharray"]!=null||i["stroke-miterlimit"]!=null||i["stroke-linejoin"]!=null||i["stroke-linecap"]!=null){var L=l.getElementsByTagName(j),M=!1;L=L&&L[0],!L&&(M=L=F(j)),e.type=="image"&&i.src&&(L.src=i.src),i.fill&&(L.on=!0);if(L.on==null||i.fill=="none"||i.fill===null)L.on=!1;if(L.on&&i.fill){var N=c(i.fill).match(a._ISURL);if(N){L.parentNode==l&&l.removeChild(L),L.rotate=!0,L.src=N[1],L.type="tile";var O=e.getBBox(1);L.position=O.x+n+O.y,e._.fillpos=[O.x,O.y],a._preload(N[1],function(){e._.fillsize=[this.offsetWidth,this.offsetHeight]})}else L.color=a.getRGB(i.fill).hex,L.src=o,L.type="solid",a.getRGB(i.fill).error&&(t.type in{circle:1,ellipse:1}||c(i.fill).charAt()!="r")&&C(t,i.fill,L)&&(m.fill="none",m.gradient=i.fill,L.rotate=!1)}if("fill-opacity"in i||"opacity"in i){var P=((+m["fill-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+a.getRGB(i.fill).o+1||2)-1);P=h(g(P,0),1),L.opacity=P,L.src&&(L.color="none")}l.appendChild(L);var Q=l.getElementsByTagName("stroke")&&l.getElementsByTagName("stroke")[0],T=!1;!Q&&(T=Q=F("stroke"));if(i.stroke&&i.stroke!="none"||i["stroke-width"]||i["stroke-opacity"]!=null||i["stroke-dasharray"]||i["stroke-miterlimit"]||i["stroke-linejoin"]||i["stroke-linecap"])Q.on=!0;(i.stroke=="none"||i.stroke===null||Q.on==null||i.stroke==0||i["stroke-width"]==0)&&(Q.on=!1);var U=a.getRGB(i.stroke);Q.on&&i.stroke&&(Q.color=U.hex),P=((+m["stroke-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+U.o+1||2)-1);var V=(d(i["stroke-width"])||1)*.75;P=h(g(P,0),1),i["stroke-width"]==null&&(V=m["stroke-width"]),i["stroke-width"]&&(Q.weight=V),V&&V<1&&(P*=V)&&(Q.weight=1),Q.opacity=P,i["stroke-linejoin"]&&(Q.joinstyle=i["stroke-linejoin"]||"miter"),Q.miterlimit=i["stroke-miterlimit"]||8,i["stroke-linecap"]&&(Q.endcap=i["stroke-linecap"]=="butt"?"flat":i["stroke-linecap"]=="square"?"square":"round");if(i["stroke-dasharray"]){var W={"-":"shortdash",".":"shortdot","-.":"shortdashdot","-..":"shortdashdotdot",". 
":"dot","- ":"dash","--":"longdash","- .":"dashdot","--.":"longdashdot","--..":"longdashdotdot"};Q.dashstyle=W[b](i["stroke-dasharray"])?W[i["stroke-dasharray"]]:o}T&&l.appendChild(Q)}if(t.type=="text"){t.paper.canvas.style.display=o;var X=t.paper.span,Y=100,Z=m.font&&m.font.match(/\d+(?:\.\d*)?(?=px)/);p=X.style,m.font&&(p.font=m.font),m["font-family"]&&(p.fontFamily=m["font-family"]),m["font-weight"]&&(p.fontWeight=m["font-weight"]),m["font-style"]&&(p.fontStyle=m["font-style"]),Z=d(m["font-size"]||Z&&Z[0])||10,p.fontSize=Z*Y+"px",t.textpath.string&&(X.innerHTML=c(t.textpath.string).replace(/</g,"&#60;").replace(/&/g,"&#38;").replace(/\n/g,"<br>"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba<bb;ba++)if(_[ba]in i){t._.dirty=1;break}switch(m["text-anchor"]){case"start":t.textpath.style["v-text-align"]="left",t.bbx=t.W/2;break;case"end":t.textpath.style["v-text-align"]="right",t.bbx=-t.W/2;break;default:t.textpath.style["v-text-align"]="center",t.bbx=0}t.textpath.style["v-text-kern"]=!0}},C=function(b,f,g){b.attrs=b.attrs||{};var h=b.attrs,i=Math.pow,j,k,l="linear",m=".5 .5";b.attrs.gradient=f,f=c(f).replace(a._radial_gradient,function(a,b,c){l="radial",b&&c&&(b=d(b),c=d(c),i(b-.5,2)+i(c-.5,2)>.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s<t;s++)q[s].offset&&r.push(q[s].offset+n+q[s].color);g.colors=r.length?r.join():"0% "+g.color,l=="radial"?(g.type="gradientTitle",g.focus="100%",g.focussize="0 0",g.focusposition=m,g.angle=0):(g.type="gradient",g.angle=(270-p)%360),b.appendChild(g)}return 1},D=function(b,c){this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.X=0,this.Y=0,this.attrs={},this.paper=c,this.matrix=a.matrix(),this._={transform:[],sx:1,sy:1,dx:0,dy:0,deg:0,dirty:1,dirtyT:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},E=a.el;D.prototype=E,E.constructor=D,E.transform=function(b){if(b==null)return this._.transform;var d=this.paper._viewBoxShift,e=d?"s"+[d.scale,d.scale]+"-1-1t"+[d.dx,d.dy]:o,f;d&&(f=b=c(b).replace(/\.{3}|\u2026/g,this._.transform||o)),a._extractTransform(this,e+b);var g=this.matrix.clone(),h=this.skew,i=this.node,j,k=~c(this.attrs.fill).indexOf("-"),l=!c(this.attrs.fill).indexOf("url(");g.translate(-0.5,-0.5);if(l||k||this.type=="image"){h.matrix="1 0 0 1",h.offset="0 0",j=g.split();if(k&&j.noRotation||!j.isSimple){i.style.filter=g.toFilter();var m=this.getBBox(),p=this.getBBox(1),q=m.x-p.x,r=m.y-p.y;i.coordorigin=q*-u+n+r*-u,z(this,1,1,q,r,0)}else i.style.filter=o,z(this,j.scalex,j.scaley,j.dx,j.dy,j.rotate)}else i.style.filter=o,h.matrix=c(g),h.offset=g.offset();f&&(this._.transform=f);return this},E.rotate=function(a,b,e){if(this.removed)return this;if(a!=null){a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this._.dirtyT=1,this.transform(this._.transform.concat([["r",a,b,e]]));return 
this}},E.translate=function(a,b){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this._.bbox&&(this._.bbox.x+=a,this._.bbox.y+=b),this.transform(this._.transform.concat([["t",a,b]]));return this},E.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3]),isNaN(e)&&(e=null),isNaN(f)&&(f=null)),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]])),this._.dirtyT=1;return this},E.hide=function(){!this.removed&&(this.node.style.display="none");return this},E.show=function(){!this.removed&&(this.node.style.display=o);return this},E._getBBox=function(){if(this.removed)return{};return{x:this.X+(this.bbx||0)-this.W/2,y:this.Y-this.H,width:this.W,height:this.H}},E.remove=function(){if(!this.removed&&!!this.node.parentNode){this.paper.__set__&&this.paper.__set__.exclude(this),a.eve.unbind("raphael.*.*."+this.id),a._tear(this,this.paper),this.node.parentNode.removeChild(this.node),this.shape&&this.shape.parentNode.removeChild(this.shape);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;this.removed=!0}},E.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c==j&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;var g=c.split(k),h={};for(var i=0,m=g.length;i<m;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return m-1?h:h[g[0]]}if(this.attrs&&d==null&&a.is(c,"array")){h={};for(i=0,m=c.length;i<m;i++)h[c[i]]=this.attr(c[i]);return h}var n;d!=null&&(n={},n[c]=d),d==null&&a.is(c,"object")&&(n=c);for(var o in n)l("raphael.attr."+o+"."+this.id,this,n[o]);if(n){for(o in this.paper.customAttributes)if(this.paper.customAttributes[b](o)&&n[b](o)&&a.is(this.paper.customAttributes[o],"function")){var p=this.paper.customAttributes[o].apply(this,[].concat(n[o]));this.attrs[o]=n[o];for(var q in p)p[b](q)&&(n[q]=p[q])}n.text&&this.type=="text"&&(this.textpath.string=n.text),B(this,n)}return this},E.toFront=function(){!this.removed&&this.node.parentNode.appendChild(this.node),this.paper&&this.paper.top!=this&&a._tofront(this,this.paper);return this},E.toBack=function(){if(this.removed)return this;this.node.parentNode.firstChild!=this.node&&(this.node.parentNode.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper));return this},E.insertAfter=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[b.length-1]),b.node.nextSibling?b.node.parentNode.insertBefore(this.node,b.node.nextSibling):b.node.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},E.insertBefore=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[0]),b.node.parentNode.insertBefore(this.node,b.node),a._insertbefore(this,b,this.paper);return this},E.blur=function(b){var c=this.node.runtimeStyle,d=c.filter;d=d.replace(r,o),+b!==0?(this.attrs.blur=b,c.filter=d+n+m+".Blur(pixelradius="+(+b||1.5)+")",c.margin=a.format("-{0}px 0 0 -{0}px",f(+b||1.5))):(c.filter=d,c.margin=0,delete this.attrs.blur)},a._engine.path=function(a,b){var 
c=F("shape");c.style.cssText=t,c.coordsize=u+n+u,c.coordorigin=b.coordorigin;var d=new D(c,b),e={fill:"none",stroke:"#000"};a&&(e.path=a),d.type="path",d.path=[],d.Path=o,B(d,e),b.canvas.appendChild(c);var f=F("skew");f.on=!0,c.appendChild(f),d.skew=f,d.transform(o);return d},a._engine.rect=function(b,c,d,e,f,g){var h=a._rectPath(c,d,e,f,g),i=b.path(h),j=i.attrs;i.X=j.x=c,i.Y=j.y=d,i.W=j.width=e,i.H=j.height=f,j.r=g,j.path=h,i.type="rect";return i},a._engine.ellipse=function(a,b,c,d,e){var f=a.path(),g=f.attrs;f.X=b-d,f.Y=c-e,f.W=d*2,f.H=e*2,f.type="ellipse",B(f,{cx:b,cy:c,rx:d,ry:e});return f},a._engine.circle=function(a,b,c,d){var e=a.path(),f=e.attrs;e.X=b-d,e.Y=c-d,e.W=e.H=d*2,e.type="circle",B(e,{cx:b,cy:c,r:d});return e},a._engine.image=function(b,c,d,e,f,g){var h=a._rectPath(d,e,f,g),i=b.path(h).attr({stroke:"none"}),k=i.attrs,l=i.node,m=l.getElementsByTagName(j)[0];k.src=c,i.X=k.x=d,i.Y=k.y=e,i.W=k.width=f,i.H=k.height=g,k.path=h,i.type="image",m.parentNode==l&&l.removeChild(m),m.rotate=!0,m.src=c,m.type="tile",i._.fillpos=[d,e],i._.fillsize=[f,g],l.appendChild(m),z(i,1,1,0,0,0);return i},a._engine.text=function(b,d,e,g){var h=F("shape"),i=F("path"),j=F("textpath");d=d||0,e=e||0,g=g||"",i.v=a.format("m{0},{1}l{2},{1}",f(d*u),f(e*u),f(d*u)+1),i.textpathok=!0,j.string=c(g),j.on=!0,h.style.cssText=t,h.coordsize=u+n+u,h.coordorigin="0 0";var k=new D(h,b),l={fill:"#000",stroke:"none",font:a._availableAttrs.font,text:g};k.shape=h,k.path=i,k.textpath=j,k.type="text",k.attrs.text=c(g),k.attrs.x=d,k.attrs.y=e,k.attrs.w=1,k.attrs.h=1,B(k,l),h.appendChild(j),h.appendChild(i),b.canvas.appendChild(h);var m=F("skew");m.on=!0,h.appendChild(m),k.skew=m,k.transform(o);return k},a._engine.setSize=function(b,c){var d=this.canvas.style;this.width=b,this.height=c,b==+b&&(b+="px"),c==+c&&(c+="px"),d.width=b,d.height=c,d.clip="rect(0 "+b+" "+c+" 0)",this._viewBox&&a._engine.setViewBox.apply(this,this._viewBox);return this},a._engine.setViewBox=function(b,c,d,e,f){a.eve("raphael.setViewBox",this,this._viewBox,[b,c,d,e,f]);var h=this.width,i=this.height,j=1/g(d/h,e/i),k,l;f&&(k=i/e,l=h/d,d*k<h&&(b-=(h-d*k)/2/k),e*l<i&&(c-=(i-e*l)/2/l)),this._viewBox=[b,c,d,e,!!f],this._viewBoxShift={dx:-b,dy:-c,scale:j},this.forEach(function(a){a.transform("...")});return this};var F;a._engine.initWin=function(a){var b=a.document;b.createStyleSheet().addRule(".rvml","behavior:url(#default#VML)");try{!b.namespaces.rvml&&b.namespaces.add("rvml","urn:schemas-microsoft-com:vml"),F=function(a){return b.createElement("<rvml:"+a+' class="rvml">')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return 
i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 1bee55313b..5a1779bba5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -163,7 +163,7 @@ text-decoration: none;
background: url("arrow-right.png") no-repeat 0 3px transparent;
}
-.toggleContainer .toggle.open {
+.toggleContainer.open .toggle {
background: url("arrow-down.png") no-repeat 0 3px transparent;
}
@@ -171,6 +171,10 @@ text-decoration: none;
margin-top: 5px;
}
+.toggleContainer .showElement {
+ padding-left: 15px;
+}
+
.value #definition {
background-color: #2C475C; /* blue */
background-image:url('defbg-blue.gif');
@@ -329,15 +333,15 @@ div.members > ol > li:last-child {
color: darkgreen;
}
-.signature .symbol .shadowed {
- color: darkseagreen;
-}
-
.signature .symbol .params > .implicit {
font-style: italic;
}
-.signature .symbol .deprecated {
+.signature .symbol .implicit.deprecated {
+ text-decoration: line-through;
+}
+
+.signature .symbol .name.deprecated {
text-decoration: line-through;
}
@@ -798,4 +802,4 @@ div.fullcomment dl.paramcmts > dd {
#mbrsel .showall span {
color: #4C4C4C;
font-weight: bold;
-}*/
+}*/ \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index c418c3280b..33fbd83bee 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -8,8 +8,7 @@ $(document).ready(function(){
name == 'scala.Predef.any2stringfmt' ||
name == 'scala.Predef.any2stringadd' ||
name == 'scala.Predef.any2ArrowAssoc' ||
- name == 'scala.Predef.any2Ensuring' ||
- name == 'scala.collection.TraversableOnce.alternateImplicit'
+ name == 'scala.Predef.any2Ensuring'
};
$("#linearization li:gt(0)").filter(function(){
@@ -185,18 +184,21 @@ $(document).ready(function(){
});
/* Linear super types and known subclasses */
- function toggleShowContentFct(e){
- e.toggleClass("open");
- var content = $(".hiddenContent", e.parent().get(0));
- if (content.is(':visible')) {
+ function toggleShowContentFct(outerElement){
+ var content = $(".hiddenContent", outerElement);
+ var vis = $(":visible", content);
+ if (vis.length > 0) {
content.slideUp(100);
+ $(".showElement", outerElement).show();
+ $(".hideElement", outerElement).hide();
}
else {
content.slideDown(100);
+ $(".showElement", outerElement).hide();
+ $(".hideElement", outerElement).show();
}
};
-
- $(".toggle:not(.diagram-link)").click(function() {
+ $(".toggleContainer").click(function() {
toggleShowContentFct($(this));
});
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
deleted file mode 100644
index 88983254ce..0000000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
+++ /dev/null
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 5d1413abd4..6488847049 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -10,7 +10,7 @@ package model
import scala.collection._
import comment._
-import diagram._
+
/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
* compiler. Entities model the following Scala concepts:
@@ -86,20 +86,9 @@ trait TemplateEntity extends Entity {
/** Whether this template is a case class. */
def isCaseClass: Boolean
- /** The type of this entity, with type members */
- def ownType: TypeEntity
-
/** The self-type of this template, if it differs from the template type. */
def selfType : Option[TypeEntity]
- /** The full template name `[kind] qualifiedName` */
- def fullName =
- (if (isPackage) "package "
- else if (isCaseClass) "case class "
- else if (isClass) "class "
- else if (isTrait) "trait "
- else if (isObject) "object "
- else "") + qualifiedName
}
@@ -178,12 +167,8 @@ trait MemberEntity extends Entity {
/** Whether this member is abstract. */
def isAbstract: Boolean
- /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
- * signature and the complete parameter descriptions. */
- def useCaseOf: Option[MemberEntity] = None
-
/** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
- def byConversion: Option[ImplicitConversionInfo]
+ def byConversion: Option[ImplicitConversion]
}
object MemberEntity {
// Oh contravariance, contravariance, wherefore art thou contravariance?
@@ -221,10 +206,8 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* only if the `docsourceurl` setting has been set. */
def sourceUrl: Option[java.net.URL]
- /** The direct super-type of this template
- e.g: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D
- NOTE: we are dropping the refinement here! */
- def parentTypes: List[(TemplateEntity, TypeEntity)]
+ /** The direct super-type of this template. */
+ def parentType: Option[TypeEntity]
@deprecated("Use `linearizationTemplates` and `linearizationTypes` instead", "2.9.0")
def linearization: List[(TemplateEntity, TypeEntity)]
@@ -237,13 +220,9 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
- /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def allSubClasses: List[DocTemplateEntity]
-
- /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def directSubClasses: List[DocTemplateEntity]
+ /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
+ * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+ def subClasses: List[DocTemplateEntity]
/** All members of this template. If this template is a package, only templates for which documentation is available
* in the universe (`DocTemplateEntity`) are listed. */
@@ -271,19 +250,6 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
/** The implicit conversions this template (class or trait, objects and packages are not affected) */
def conversions: List[ImplicitConversion]
-
- /** Classes that can be implicitly converted to this class */
- def incomingImplicitlyConvertedClasses: List[DocTemplateEntity]
-
- /** Classes to which this class can be implicitly converted
- NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */
- def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity)]
-
- /** If this template takes part in inheritance and implicit conversion relations, it will be shown in this diagram */
- def inheritanceDiagram: Option[Diagram]
-
- /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
- def contentDiagram: Option[Diagram]
}
@@ -339,6 +305,10 @@ trait NonTemplateMemberEntity extends MemberEntity {
* It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
def isUseCase: Boolean
+ /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
+ * signature and the complete parameter descriptions. */
+ def useCaseOf: Option[MemberEntity]
+
/** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
* and should not appear in ScalaDoc. */
def isBridge: Boolean
@@ -457,15 +427,6 @@ trait ImplicitConversion {
/** The result type after the conversion */
def targetType: TypeEntity
- /** The result type after the conversion
- * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
- * types. Need to check it's not option!
- */
- def targetTemplate: Option[TemplateEntity]
-
- /** The components of the implicit conversion type parents */
- def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
-
/** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
def convertorMethod: Either[MemberEntity, String]
@@ -485,38 +446,11 @@ trait ImplicitConversion {
def members: List[MemberEntity]
}
-trait ImplicitConversionInfo {
- /** The implicit conversion this member originates from */
- def conversion: ImplicitConversion
-
- /** The shadowing information for this member */
- def shadowing: ImplicitMemberShadowing
-
- /* Quick getters */
- def isShadowed: Boolean = !shadowing.shadowingMembers.isEmpty
- def isAmbiguous: Boolean = !shadowing.ambiguatingMembers.isEmpty
-}
-
-/** Shadowing captures the information that the member is shadowed by some other members
- * There are two cases of implicitly added member shadowing:
- * 1) shadowing from an original class member (the class already has that member)
- * in this case, it won't be possible to call the member directly, the type checker will fail attempting to adapt
- * the call arguments (or if they fit it will call the original class' method)
- * 2) shadowing from other possible implicit conversions
- * this will result in an ambiguous implicit conversion error
- */
-trait ImplicitMemberShadowing {
- /** The members that shadow the current entry use .inTemplate to get to the template name */
- def shadowingMembers: List[MemberEntity]
-
- /** The members that ambiguate this implicit conversion
- Note: for ambiguatingMembers you have the following invariant:
- assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
- def ambiguatingMembers: List[MemberEntity]
-}
-
/** A trait that encapsulates a constraint necessary for implicit conversion */
-trait Constraint
+trait Constraint {
+ // /** The implicit conversion during which this constraint appears */
+ // def conversion: ImplicitConversion
+}
/** A constraint involving a type parameter which must be in scope */
trait ImplicitInScopeConstraint extends Constraint {
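The Entity.scala hunks above simplify the model API: a member now either is declared directly (byConversion is None) or points straight at the ImplicitConversion that contributed it, with the intermediate ImplicitConversionInfo and shadowing data gone. Below is a minimal, self-contained sketch of that shape, using hypothetical stand-in traits rather than the compiler's actual model classes, showing how a consumer might tell the two kinds of members apart.

object EntityModelSketch {
  // Stand-ins for the scaladoc model traits; only the fields needed here.
  trait ImplicitConversion { def conversionQualifiedName: String }
  trait MemberEntity {
    def name: String
    def byConversion: Option[ImplicitConversion]
  }

  // A direct member reports itself; an implicitly added member names its conversion.
  def origin(m: MemberEntity): String = m.byConversion match {
    case Some(conv) => m.name + " (added by " + conv.conversionQualifiedName + ")"
    case None       => m.name + " (declared directly)"
  }

  def main(args: Array[String]): Unit = {
    val conv   = new ImplicitConversion { val conversionQualifiedName = "scala.Predef.any2ArrowAssoc" }
    val direct = new MemberEntity { val name = "map"; val byConversion = None }
    val added  = new MemberEntity { val name = "->";  val byConversion = Some(conv) }
    List(direct, added).map(origin).foreach(println)
  }
}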
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index eb916333ab..3dd77d47da 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -6,8 +6,6 @@ package model
import comment._
-import diagram._
-
import scala.collection._
import scala.util.matching.Regex
@@ -19,7 +17,7 @@ import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with TreeFactory =>
+ thisFactory: ModelFactory with ModelFactoryImplicitSupport with CommentFactory with TreeFactory =>
import global._
import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
@@ -91,7 +89,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isObject = sym.isModule && !sym.isPackage
def isCaseClass = sym.isCaseClass
def isRootPackage = false
- def ownType = makeType(sym.tpe, this)
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
@@ -99,7 +96,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isDocTemplate = false
}
- abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionInfoImpl = null, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionImpl = null, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
lazy val comment =
if (inTpl == null) None else thisFactory.comment(sym, inTpl)
override def inTemplate = inTpl
@@ -173,7 +170,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- val tpe = if (implConv eq null) sym.tpe else implConv.conversion.toType memberInfo sym
+ val tpe = if (implConv eq null) sym.tpe else implConv.toType memberInfo sym
makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
@@ -229,26 +226,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else None
}
-
- def parentTemplates =
- if (sym.isPackage || sym == AnyClass)
- List()
- else
- sym.tpe.parents.flatMap { tpe: Type =>
- val tSym = tpe.typeSymbol
- if (tSym != NoSymbol)
- List(makeTemplate(tSym))
- else
- List()
- } filter (_.isInstanceOf[DocTemplateEntity])
-
- def parentTypes =
- if (sym.isPackage || sym == AnyClass) List() else {
+ def parentType = {
+ if (sym.isPackage || sym == AnyClass) None else {
val tps = sym.tpe.parents map { _.asSeenFrom(sym.thisType, sym) }
- makeParentTypes(RefinedType(tps, EmptyScope), inTpl)
+ Some(makeType(RefinedType(tps, EmptyScope), inTpl))
}
+ }
- protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
+ protected def linearizationFromSymbol(symbol: Symbol) = {
symbol.ancestors map { ancestor =>
val typeEntity = makeType(symbol.info.baseType(ancestor), this)
val tmplEntity = makeTemplate(ancestor) match {
@@ -263,7 +248,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def linearizationTemplates = linearization map { _._1 }
def linearizationTypes = linearization map { _._2 }
- /* Subclass cache */
private lazy val subClassesCache = (
if (noSubclassCache(sym)) null
else mutable.ListBuffer[DocTemplateEntity]()
@@ -272,47 +256,16 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (subClassesCache != null)
subClassesCache += sc
}
- def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
-
- /* Implicitly convertible class cache */
- private var implicitlyConvertibleClassesCache: mutable.ListBuffer[DocTemplateEntity] = null
- def registerImplicitlyConvertibleClass(sc: DocTemplateEntity): Unit = {
- if (implicitlyConvertibleClassesCache == null)
- implicitlyConvertibleClassesCache = mutable.ListBuffer[DocTemplateEntity]()
- implicitlyConvertibleClassesCache += sc
- }
-
- def incomingImplicitlyConvertedClasses: List[DocTemplateEntity] =
- if (implicitlyConvertibleClassesCache == null)
- List()
- else
- implicitlyConvertibleClassesCache.toList
+ def subClasses = if (subClassesCache == null) Nil else subClassesCache.toList
val conversions = if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
- val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity)] = conversions flatMap (conv =>
- if (!implicitExcluded(conv.conversionQualifiedName))
- conv.targetTypeComponents map {
- case pair@(template, tpe) =>
- template match {
- case d: DocTemplateImpl => d.registerImplicitlyConvertibleClass(this)
- case _ => // nothing
- }
- pair
- }
- else List()
- )
-
lazy val memberSyms =
// Only this class's constructors are part of its members, inherited constructors are not.
sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym) && !isPureBridge(sym) )
- // in members, we also take in the members from implicit conversions
- lazy val ownMembers = (memberSyms.flatMap(makeMember(_, null, this)))
- lazy val allOwnMembers = (ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl]))).distinct
-
- val members = ownMembers ::: (conversions.flatMap((_.members)))
+ val members = (memberSyms.flatMap(makeMember(_, null, this))) :::
+ (conversions.flatMap((_.members))) // also take in the members from implicit conversions
val templates = members collect { case c: DocTemplateEntity => c }
val methods = members collect { case d: Def => d }
@@ -327,10 +280,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Some(makeDocTemplate(comSym, inTpl))
case _ => None
}
-
- // We make the diagram a lazy val, since we're not sure we'll include the diagrams in the page
- lazy val inheritanceDiagram = makeInheritanceDiagram(this)
- lazy val contentDiagram = makeContentDiagram(this)
}
abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
@@ -347,18 +296,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
- abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionInfoImpl, inTpl: => DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
+ abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
lazy val definitionName =
if (implConv == null) optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
- else optimize(implConv.conversion.conversionQualifiedName + "#" + name)
+ else optimize(implConv.conversionQualifiedName + "#" + name)
def isUseCase = sym.isSynthetic
def isBridge = sym.isBridge
}
- abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionInfoImpl, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
def valueParams = {
- val info = if (implConv eq null) sym.info else implConv.conversion.toType memberInfo sym
+ val info = if (implConv eq null) sym.info else implConv.toType memberInfo sym
info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
@@ -386,6 +335,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def typeParams =
sym.typeParams map (makeTypeParam(_, inTemplate))
}
+
/* ============== MAKER METHODS ============== */
/** */
@@ -533,7 +483,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeMember(aSym: Symbol, implConv: ImplicitConversionInfoImpl, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+ // TODO: Should be able to override the type
+ def makeMember(aSym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, _useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
@@ -553,10 +504,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (bSym == definitions.Object_synchronized) {
val cSymInfo = (bSym.info: @unchecked) match {
case PolyType(ts, MethodType(List(bp), mt)) =>
- val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
+ val cp = bp.cloneSymbol.setInfo(definitions.byNameType(bp.info))
PolyType(ts, MethodType(List(cp), mt))
}
- bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
+ bSym.cloneSymbol.setInfo(cSymInfo)
}
else bSym
}
@@ -668,20 +619,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
makeType(tpe, inTpl)
}
- /** Get the types of the parents of the current class, ignoring the refinements */
- def makeParentTypes(aType: Type, inTpl: => TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
- case RefinedType(parents, defs) =>
- val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
- val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol))
- filtParents.map(parent => {
- val templateEntity = makeTemplate(parent.typeSymbol)
- val typeEntity = makeType(parent, inTpl)
- (templateEntity, typeEntity)
- })
- case _ =>
- List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
- }
-
/** */
def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity = {
def templatePackage = closestPackage(inTpl.sym)
@@ -801,10 +738,4 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
-
- // the classes that are excluded from the index should also be excluded from the diagrams
- def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
-
- // the implicit conversions that are excluded from the pages should not appear in the diagram
- def implicitExcluded(convertorMethod: String): Boolean = settings.hardcoded.commonConversionTargets.contains(convertorMethod)
}
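The ModelFactory.scala hunks above collapse the two subclass lists into a single subClasses, backed by a cache that stays null when caching is disabled for the symbol and is filled through registerSubClass. A simplified standalone sketch of that registration pattern follows; Template is a hypothetical stand-in for DocTemplateImpl, not the real class.

object SubClassCacheSketch {
  import scala.collection.mutable

  final class Template(val name: String, cacheEnabled: Boolean = true) {
    // null mirrors the 'noSubclassCache' case in the diff: registration becomes a no-op
    // and subClasses reads back as Nil.
    private val subClassesCache =
      if (cacheEnabled) mutable.ListBuffer[Template]() else null
    def registerSubClass(sc: Template): Unit =
      if (subClassesCache != null) subClassesCache += sc
    def subClasses: List[Template] =
      if (subClassesCache == null) Nil else subClassesCache.toList
  }

  def main(args: Array[String]): Unit = {
    val traversable = new Template("Traversable")
    val list = new Template("List")
    traversable.registerSubClass(list)
    println(traversable.subClasses.map(_.name)) // List(List)
  }
}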
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index 64205104aa..c3525037cd 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -63,8 +63,8 @@ trait ModelFactoryImplicitSupport {
// debugging:
val DEBUG: Boolean = settings.docImplicitsDebug.value
val ERROR: Boolean = true // currently we show all errors
- @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
- @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
+ @inline final def debug(msg: => String) = if (DEBUG) println(msg)
+ @inline final def error(msg: => String) = if (ERROR) println(msg)
/** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
* For example, if an implicit conversion requires that there is a Numeric[T] in scope:
@@ -96,17 +96,6 @@ trait ModelFactoryImplicitSupport {
def targetType: TypeEntity = makeType(toType, inTpl)
- def targetTemplate: Option[TemplateEntity] = toType match {
- // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
- // such as refinement types because the template can't represent the type correctly (a template corresponds to a
- // package, class, trait or object)
- case t: TypeRef => Some(makeTemplate(t.sym))
- case RefinedType(parents, decls) => None
- case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
- }
-
- def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, inTpl)
-
def convertorOwner: TemplateEntity =
if (convSym != NoSymbol)
makeTemplate(convSym.owner)
@@ -137,15 +126,20 @@ trait ModelFactoryImplicitSupport {
lazy val constraints: List[Constraint] = constrs
- private val memberImpls: List[MemberImpl] = {
+ val members: List[MemberEntity] = {
// Obtain the members inherited by the implicit conversion
- val memberSyms = toType.members.filter(implicitShouldDocument(_))
- val existingSyms = sym.info.members
+ var memberSyms = toType.members.filter(implicitShouldDocument(_))
+ val existingMembers = sym.info.members
// Debugging part :)
debug(sym.nameString + "\n" + "=" * sym.nameString.length())
debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+ // Members inherited by implicit conversions cannot override actual members
+ memberSyms = memberSyms.filterNot((sym1: Symbol) =>
+ existingMembers.exists(sym2 => sym1.name == sym2.name &&
+ !isDistinguishableFrom(toType.memberInfo(sym1), sym.info.memberInfo(sym2))))
+
debug(" -> full type: " + toType)
if (constraints.length != 0) {
debug(" -> constraints: ")
@@ -155,89 +149,10 @@ trait ModelFactoryImplicitSupport {
memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
debug("")
- memberSyms.flatMap({ aSym =>
- makeTemplate(aSym.owner) match {
- case d: DocTemplateImpl =>
- // we can't just pick up nodes from the previous template, although that would be very convenient:
- // they need the byConversion field to be attached to themselves -- this is a design decision I should
- // revisit soon
- //
- // d.ownMembers.collect({
- // // it's either a member or has a couple of usecases it's hidden behind
- // case m: MemberImpl if m.sym == aSym =>
- // m // the member itself
- // case m: MemberImpl if m.useCaseOf.isDefined && m.useCaseOf.get.asInstanceOf[MemberImpl].sym == aSym =>
- // m.useCaseOf.get.asInstanceOf[MemberImpl] // the usecase
- // })
- makeMember(aSym, new ImplicitConversionInfoImpl(this), d)
- case _ =>
- // should only happen if the code for this template is not part of the scaladoc run =>
- // members won't have any comments
- makeMember(aSym, new ImplicitConversionInfoImpl(this), inTpl)
- }
- })
- }
-
- def members: List[MemberEntity] = memberImpls
-
- def populateShadowingTables(allConvs: List[ImplicitConversionImpl]): Unit = {
-
- // TODO: This is not clean, we need to put sym.info.members here instead of tpl.memberSyms to avoid
- // the localShouldDocument(_) filtering in ModelFactory
- val originalClassMembers = inTpl.memberSyms
- val otherConversions = allConvs.filterNot(_ == this)
- assert(otherConversions.length == allConvs.length - 1)
-
- for (member <- memberImpls) {
- // for each member in our list
- val sym1 = member.sym
- val tpe1 = toType.memberInfo(sym1)
-
- // check if it's shadowed by a member in the original class
- var shadowedBySyms: List[Symbol] = List()
- for (sym2 <- originalClassMembers)
- if (sym1.name == sym2.name) {
- val shadowed = !settings.docImplicitsSoundShadowing.value || {
- val tpe2 = inTpl.sym.info.memberInfo(sym2)
- !isDistinguishableFrom(tpe1, tpe2)
- }
- if (shadowed)
- shadowedBySyms ::= sym2
- }
-
- val shadowedByMembers = inTpl.allOwnMembers.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
-
- // check if it's shadowed by another member
- var ambiguousByMembers: List[MemberEntity] = List()
- for (conv <- otherConversions)
- for (member2 <- conv.memberImpls) {
- val sym2 = member2.sym
- if (sym1.name == sym2.name) {
- val tpe2 = conv.toType.memberInfo(sym2)
- // Ambiguity should be an equivalence relation
- val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
- if (ambiguated)
- ambiguousByMembers ::= member2
- }
- }
-
- // we finally have the shadowing info
- val shadowing = new ImplicitMemberShadowing {
- def shadowingMembers: List[MemberEntity] = shadowedByMembers
- def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
- }
-
- member.byConversion.get.asInstanceOf[ImplicitConversionInfoImpl].shadowing = shadowing
- }
+ memberSyms.flatMap((makeMember(_, this, inTpl)))
}
}
- class ImplicitConversionInfoImpl(
- val conversion: ImplicitConversionImpl) extends ImplicitConversionInfo {
- // this will be updated as a side effect
- var shadowing: ImplicitMemberShadowing = null
- }
-
/* ============== MAKER METHODS ============== */
/**
@@ -256,22 +171,18 @@ trait ModelFactoryImplicitSupport {
val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
- // also keep empty conversions, so they appear in diagrams
- // conversions = conversions.filter(!_.members.isEmpty)
+ conversions = conversions.filterNot(_.members.isEmpty)
// Filter out specialized conversions from array
if (sym == ArrayClass)
- conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
+ conversions = conversions.filterNot((conv: ImplicitConversion) =>
hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
// Filter out non-sensical conversions from value types
if (isPrimitiveValueType(sym.tpe))
- conversions = conversions.filter((ic: ImplicitConversionImpl) =>
+ conversions = conversions.filter((ic: ImplicitConversion) =>
hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
- // side-effecting and ugly shadowing table population -- ugly but effective
- conversions.map(_.populateShadowingTables(conversions))
-
// Put the class-specific conversions in front
val (ownConversions, commonConversions) =
conversions.partition(conv => !hardcoded.commonConversionTargets.contains(conv.conversionQualifiedName))
@@ -307,7 +218,7 @@ trait ModelFactoryImplicitSupport {
* - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
* appears as a constraint
*/
- def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: => DocTemplateImpl): List[ImplicitConversionImpl] =
+ def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
if (result.tree == EmptyTree) Nil
else {
// `result` will contain the type of the view (= implicit conversion method)
@@ -582,8 +493,8 @@ trait ModelFactoryImplicitSupport {
// - common methods (in Any, AnyRef, Object) as they are automatically removed
// - private and protected members (not accessible following an implicit conversion)
// - members starting with _ (usually reserved for internal stuff)
- localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
- (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
+ localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != ObjectClass) &&
+ (aSym.owner != AnyClass) && (aSym.owner != AnyRefClass) &&
(!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
(aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
(aSym.nameString != "getClass")
@@ -598,11 +509,12 @@ trait ModelFactoryImplicitSupport {
def isDistinguishableFrom(t1: Type, t2: Type): Boolean =
if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
- if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
- return true // if on the corresponding parameter you give a type that is in t1 but not in t2
- // def foo(a: Either[Int, Double]): Int = 3
- // def foo(b: Left[T1]): Int = 6
- // a.foo(Right(4.5d)) prints out 3 :)
+ if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
+ return true // if on the corresponding parameter you give a type that is in t1 but not in t2
+ // example:
+ // def foo(a: Either[Int, Double]): Int = 3
+ // def foo(b: Left[T1]): Int = 6
+ // a.foo(Right(4.5d)) prints out 3 :)
false
} else true // the member structure is different foo(3, 5) vs foo(3)(5)
} \ No newline at end of file
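The isDistinguishableFrom comment above argues by example: two same-named members remain distinguishable if some parameter position accepts a value under one signature but not the other. The snippet below is a self-contained illustration of that exact example with plain, hypothetical classes (not scaladoc's symbol machinery): the Either overload stays reachable even though a Left overload exists, so neither member makes the other redundant.

object DistinguishableDemo {
  class Base {
    def foo(a: Either[Int, Double]): Int = 3
  }
  class Derived extends Base {
    def foo(b: Left[Int, Double]): Int = 6
  }

  def main(args: Array[String]): Unit = {
    val d = new Derived
    // Right(4.5) fits Either[Int, Double] but not Left[Int, Double], so the Either overload is chosen.
    println(d.foo(Right(4.5d))) // prints 3
    // Left(1) fits both, and the more specific Left overload wins.
    println(d.foo(Left(1)))     // prints 6
  }
}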
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
index ecc3273903..ef4047cebf 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
@@ -67,8 +67,8 @@ final case class Bold(text: Inline) extends Inline
final case class Underline(text: Inline) extends Inline
final case class Superscript(text: Inline) extends Inline
final case class Subscript(text: Inline) extends Inline
-final case class EntityLink(target: String, template: () => Option[TemplateEntity]) extends Inline
final case class Link(target: String, title: Inline) extends Inline
+final case class EntityLink(target: TemplateEntity) extends Inline
final case class Monospace(text: Inline) extends Inline
final case class Text(text: String) extends Inline
final case class HtmlTag(data: String) extends Inline {
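The Body.scala change above turns EntityLink from a (target, () => Option[TemplateEntity]) pair with deferred resolution into a node that carries the resolved TemplateEntity directly. A small sketch with stand-in types (not the real comment AST) of how such a link might be rendered once resolution has already happened:

object InlineLinkSketch {
  trait TemplateEntity { def qualifiedName: String }
  sealed trait Inline
  final case class Text(text: String) extends Inline
  final case class EntityLink(target: TemplateEntity) extends Inline

  def render(i: Inline): String = i match {
    case Text(t)         => t
    case EntityLink(tpl) => "[[" + tpl.qualifiedName + "]]"
  }

  def main(args: Array[String]): Unit = {
    val listTpl = new TemplateEntity { val qualifiedName = "scala.collection.immutable.List" }
    println(render(EntityLink(listTpl))) // [[scala.collection.immutable.List]]
  }
}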
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
index 7b70683db5..914275dd8d 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -108,12 +108,6 @@ abstract class Comment {
/** A description for the primary constructor */
def constructor: Option[Body]
- /** A set of diagram directives for the inheritance diagram */
- def inheritDiagram: List[String]
-
- /** A set of diagram directives for the content diagram */
- def contentDiagram: List[String]
-
override def toString =
body.toString + "\n" +
(authors map ("@author " + _.toString)).mkString("\n") +
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index a46be37d60..996223b9f9 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -97,41 +97,37 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
/* Creates comments with necessary arguments */
def createComment (
- body0: Option[Body] = None,
- authors0: List[Body] = List.empty,
- see0: List[Body] = List.empty,
- result0: Option[Body] = None,
- throws0: Map[String,Body] = Map.empty,
- valueParams0: Map[String,Body] = Map.empty,
- typeParams0: Map[String,Body] = Map.empty,
- version0: Option[Body] = None,
- since0: Option[Body] = None,
- todo0: List[Body] = List.empty,
- deprecated0: Option[Body] = None,
- note0: List[Body] = List.empty,
- example0: List[Body] = List.empty,
- constructor0: Option[Body] = None,
- source0: Option[String] = None,
- inheritDiagram0: List[String] = List.empty,
- contentDiagram0: List[String] = List.empty
+ body0: Option[Body] = None,
+ authors0: List[Body] = List.empty,
+ see0: List[Body] = List.empty,
+ result0: Option[Body] = None,
+ throws0: Map[String,Body] = Map.empty,
+ valueParams0: Map[String,Body] = Map.empty,
+ typeParams0: Map[String,Body] = Map.empty,
+ version0: Option[Body] = None,
+ since0: Option[Body] = None,
+ todo0: List[Body] = List.empty,
+ deprecated0: Option[Body] = None,
+ note0: List[Body] = List.empty,
+ example0: List[Body] = List.empty,
+ constructor0: Option[Body] = None,
+ source0: Option[String] = None
) : Comment = new Comment{
- val body = if(body0 isDefined) body0.get else Body(Seq.empty)
- val authors = authors0
- val see = see0
- val result = result0
- val throws = throws0
- val valueParams = valueParams0
- val typeParams = typeParams0
- val version = version0
- val since = since0
- val todo = todo0
- val deprecated = deprecated0
- val note = note0
- val example = example0
- val constructor = constructor0
- val source = source0
- val inheritDiagram = inheritDiagram0
- val contentDiagram = contentDiagram0
+ val body = if(body0 isDefined) body0.get else Body(Seq.empty)
+ val authors = authors0
+ val see = see0
+ val result = result0
+ val throws = throws0
+ val valueParams = valueParams0
+ val typeParams = typeParams0
+ val version = version0
+ val since = since0
+ val todo = todo0
+ val deprecated = deprecated0
+ val note = note0
+ val example = example0
+ val constructor = constructor0
+ val source = source0
}
protected val endOfText = '\u0003'
@@ -190,10 +186,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
protected val safeTagMarker = '\u000E'
- /** A Scaladoc tag not linked to a symbol and not followed by text */
- protected val SingleTag =
- new Regex("""\s*@(\S+)\s*""")
-
/** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
protected val SimpleTag =
new Regex("""\s*@(\S+)\s+(.*)""")
@@ -314,11 +306,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
- case SingleTag(name) :: ls if (!inCodeBlock) =>
- val key = SimpleTagKey(name)
- val value = "" :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
case line :: ls if (lastTagKey.isDefined) =>
val key = lastTagKey.get
val value =
@@ -334,24 +321,9 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
case Nil =>
- // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
- val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
- val contentDiagramTag = SimpleTagKey("contentDiagram")
-
- val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
- case Some(list) => list
- case None => List.empty
- }
-
- val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
- case Some(list) => list
- case None => List.empty
- }
-
- val tagsWithoutDiagram = tags.filterNot(pair => pair._1 == inheritDiagramTag || pair._1 == contentDiagramTag)
val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWiki(_, pos))} toSeq: _*)
+ mutable.Map(tags mapValues {tag => tag map (parseWiki(_, pos))} toSeq: _*)
def oneTag(key: SimpleTagKey): Option[Body] =
((bodyTags remove key): @unchecked) match {
@@ -384,23 +356,21 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
val com = createComment (
- body0 = Some(parseWiki(docBody.toString, pos)),
- authors0 = allTags(SimpleTagKey("author")),
- see0 = allTags(SimpleTagKey("see")),
- result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
- valueParams0 = allSymsOneTag(SimpleTagKey("param")),
- typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
- version0 = oneTag(SimpleTagKey("version")),
- since0 = oneTag(SimpleTagKey("since")),
- todo0 = allTags(SimpleTagKey("todo")),
- deprecated0 = oneTag(SimpleTagKey("deprecated")),
- note0 = allTags(SimpleTagKey("note")),
- example0 = allTags(SimpleTagKey("example")),
- constructor0 = oneTag(SimpleTagKey("constructor")),
- source0 = Some(clean(src).mkString("\n")),
- inheritDiagram0 = inheritDiagramText,
- contentDiagram0 = contentDiagramText
+ body0 = Some(parseWiki(docBody.toString, pos)),
+ authors0 = allTags(SimpleTagKey("author")),
+ see0 = allTags(SimpleTagKey("see")),
+ result0 = oneTag(SimpleTagKey("return")),
+ throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ valueParams0 = allSymsOneTag(SimpleTagKey("param")),
+ typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
+ version0 = oneTag(SimpleTagKey("version")),
+ since0 = oneTag(SimpleTagKey("since")),
+ todo0 = allTags(SimpleTagKey("todo")),
+ deprecated0 = oneTag(SimpleTagKey("deprecated")),
+ note0 = allTags(SimpleTagKey("note")),
+ example0 = allTags(SimpleTagKey("example")),
+ constructor0 = oneTag(SimpleTagKey("constructor")),
+ source0 = Some(clean(src).mkString("\n"))
)
for ((key, _) <- bodyTags)
@@ -716,6 +686,13 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
)
}
+ def entityLink(query: String): Inline = findTemplate(query) match {
+ case Some(tpl) =>
+ EntityLink(tpl)
+ case None =>
+ Text(query)
+ }
+
def link(): Inline = {
val SchemeUri = """([^:]+:.*)""".r
jump("[[")
@@ -740,8 +717,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
if (!qualName.contains(".") && !definitions.packageExists(qualName))
reportError(pos, "entity link to " + qualName + " should be a fully qualified name")
- // move the template resolution as late as possible
- EntityLink(qualName, () => findTemplate(qualName))
+ entityLink(qualName)
}
}
@@ -757,8 +733,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
nextChar()
}
- /**
- * Eliminates the (common) leading spaces in all lines, based on the first line
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line
* For indented pieces of code, it reduces the indent to the least whitespace prefix:
* {{{
* indented example
@@ -781,11 +757,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
while (index < code.length) {
code(index) match {
case ' ' =>
- if (wsArea)
+ if (wsArea)
crtSkip += 1
case c =>
wsArea = (c == '\n')
- maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+ maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
crtSkip = if (c == '\n') 0 else crtSkip
firstLine = if (c == '\n') false else firstLine
emptyLine = if (c == '\n') true else false
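The cleanup routine documented above ("Eliminates the (common) leading spaces in all lines, based on the first line") reduces an indented code block to its least whitespace prefix, measured from the lines after the first. A standalone sketch of that idea follows; normalizeIndent is a hypothetical helper written for illustration, not the factory's actual method, and it ignores the empty-line bookkeeping of the real code.

object CodeIndentSketch {
  def normalizeIndent(code: String): String = {
    val lines = code.split("\n", -1).toList
    // Smallest leading-space count over the non-empty lines after the first.
    val indents = lines.drop(1).filter(_.trim.nonEmpty).map(_.takeWhile(_ == ' ').length)
    val skip = if (indents.isEmpty) 0 else indents.min
    // Strip that common prefix from every line after the first.
    val stripped = lines.tail.map(l => l.drop(math.min(skip, l.takeWhile(_ == ' ').length)))
    (lines.head :: stripped).mkString("\n")
  }

  def main(args: Array[String]): Unit = {
    println(normalizeIndent("first\n    indented example\n      line two"))
  }
}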
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
deleted file mode 100644
index 28a8c7d37d..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-
-/**
- * The diagram base classes
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-abstract class Diagram {
- def nodes: List[Node]
- def edges: List[(Node, List[Node])]
- def isPackageDiagram = false
- def isClassDiagram = false
- def depthInfo: DepthInfo
-}
-
-case class PackageDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
- override def isPackageDiagram = true
- lazy val depthInfo = new PackageDiagramDepth(this)
-}
-
-/** A class diagram */
-case class ClassDiagram(thisNode: ThisNode,
- superClasses: List[/*Class*/Node],
- subClasses: List[/*Class*/Node],
- incomingImplicits: List[ImplicitNode],
- outgoingImplicits: List[ImplicitNode]) extends Diagram {
- def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
- def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
- (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
-
- override def isClassDiagram = true
- lazy val depthInfo = new DepthInfo {
- def maxDepth = 3
- def nodeDepth(node: Node) =
- if (node == thisNode) 1
- else if (superClasses.contains(node)) 0
- else if (subClasses.contains(node)) 2
- else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
- else -1
- }
-}
-
-trait DepthInfo {
- /** Gives the maximum depth */
- def maxDepth: Int
- /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
- def nodeDepth(node: Node): Int
-}
-
-abstract class Node {
- def name = tpe.name
- def tpe: TypeEntity
- def tpl: Option[TemplateEntity]
- /** shortcut to get a DocTemplateEntity */
- def doctpl: Option[DocTemplateEntity] = tpl match {
- case Some(tpl) => tpl match {
- case d: DocTemplateEntity => Some(d)
- case _ => None
- }
- case _ => None
- }
- /* shortcuts to find the node type without matching */
- def isThisNode = false
- def isNormalNode = false
- def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
- def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
- def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false
- def isOtherNode = !(isClassNode || isTraitNode || isObjectNode)
- def isImplicitNode = false
- def isOutsideNode = false
-}
-
-// different matchers, allowing you to use the pattern matcher against any node
-// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
-// case class specification -- thus a complete match would be:
-// node match {
-// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */
-// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
-// case _ => node match {
-// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */
-// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */
-// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */
-// }
-// }
-object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
-object ClassNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None }
-object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None }
-object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None }
-object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
-object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None }
-
-
-
-/** The node for the current class */
-case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity]) extends Node { override def isThisNode = true }
-
-/** The usual node */
-case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity]) extends Node { override def isNormalNode = true }
-
-/** A class or trait the thisnode can be converted to by an implicit conversion
- * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
- * since some implicit conversions convert the class to complex types that cannot be represented as a single template
- */
-case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity]) extends Node { override def isImplicitNode = true }
-
-/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
- * its relation to a class in the package (and @contentDiagram showInheritedNodes annotation) */
-case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity]) extends Node { override def isOutsideNode = true }
-
-
-// Computing and offering node depth information
-class PackageDiagramDepth(pack: PackageDiagram) extends DepthInfo {
- private[this] var _maxDepth = 0
- private[this] var _nodeDepth = Map[Node, Int]()
- private[this] var seedNodes = Set[Node]()
- private[this] val invertedEdges: Map[Node, List[Node]] =
- pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
- private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
-
- // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
- seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
-
- while (!seedNodes.isEmpty) {
- var newSeedNodes = Set[Node]()
- for (node <- seedNodes) {
- val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
- if (depth != _nodeDepth.getOrElse(node, -1)) {
- _nodeDepth += (node -> depth)
- newSeedNodes ++= invertedEdges(node)
- if (depth > _maxDepth) _maxDepth = depth
- }
- }
- seedNodes = newSeedNodes
- }
-
- val maxDepth = _maxDepth
- def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
-} \ No newline at end of file
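The removed PackageDiagramDepth above computes a depth for every node: nodes with no outgoing (parent) edges are seeded first, then depth = 1 + max(parent depths) is pushed outward along the inverted edges until nothing changes. The following is a simplified, runnable sketch of that fixed-point computation over plain string nodes; the names are hypothetical and none of the deleted diagram classes are used.

object DiagramDepthSketch {
  def nodeDepths(nodes: List[String], edges: Map[String, List[String]]): Map[String, Int] = {
    val directEdges = edges.withDefaultValue(Nil)
    // Invert child -> parents into parent -> children so depth updates can be propagated.
    val invertedEdges = edges.toList
      .flatMap { case (n, parents) => parents.map(_ -> n) }
      .groupBy(_._1).map { case (k, vs) => k -> vs.map(_._2) }
      .withDefaultValue(Nil)

    var depth = Map[String, Int]().withDefaultValue(-1)
    var seeds = nodes.filter(directEdges(_).isEmpty).toSet
    while (seeds.nonEmpty) {
      var next = Set[String]()
      for (node <- seeds) {
        val d = 1 + (-1 :: directEdges(node).map(depth)).max
        if (d != depth(node)) {
          depth += node -> d
          next ++= invertedEdges(node) // children may need a deeper value now
        }
      }
      seeds = next
    }
    depth
  }

  def main(args: Array[String]): Unit = {
    val edges = Map("AnyRef" -> List("Any"), "String" -> List("AnyRef"))
    println(nodeDepths(List("Any", "AnyRef", "String"), edges)) // Any: 0, AnyRef: 1, String: 2
  }
}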
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
deleted file mode 100644
index c06b5d50b7..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ /dev/null
@@ -1,248 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import comment.CommentFactory
-import java.util.regex.{Pattern, Matcher}
-import scala.util.matching.Regex
-
-// statistics
-import html.page.diagram.DiagramStats
-
-/**
- * This trait takes care of parsing @{inheritance, content}Diagram annotations
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-trait DiagramDirectiveParser {
- this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
-
- ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
-
- /**
- * The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed
- *
- * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from
- * all diagrams at once. So why did I choose to allow you to only control the diagrams at class level? The
- * reason is that it would break separate scaladoc compilation:
- * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B
- * A will not appear in B's diagram. But if you scaladoc only on B, A's comment will not be parsed and the
- * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the
- * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
- * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
- * file, could we? (Turns out we could, but that's another story)
- *
- * Any flaming for this decision should go to scala-internals@googlegroups.com
- */
- trait DiagramFilter {
- /** A flag to hide the diagram completely */
- def hideDiagram: Boolean
- /** Hide incoming implicit conversions (for type hierarchy diagrams) */
- def hideIncomingImplicits: Boolean
- /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
- def hideOutgoingImplicits: Boolean
- /** Hide superclasses (for type hierarchy diagrams) */
- def hideSuperclasses: Boolean
- /** Hide subclasses (for type hierarchy diagrams) */
- def hideSubclasses: Boolean
- /** Show related classes from other objects/traits/packages (for content diagrams) */
- def showInheritedNodes: Boolean
- /** Hide a node from the diagram */
- def hideNode(clazz: TemplateEntity): Boolean
- /** Hide an edge from the diagram */
- def hideEdge(clazz1: TemplateEntity, clazz2: TemplateEntity): Boolean
- }
-
- /** Main entry point into this trait: generate the filter for inheritance diagrams */
- def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
- val defaultFilter = if (template.isClass || template.isTrait) FullDiagram else NoDiagramAtAll
- if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
- else
- defaultFilter
- }
-
- /** Main entry point into this trait: generate the filter for content diagrams */
- def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
- val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
- if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
- else
- defaultFilter
- }
-
- protected var tFilter = 0l
- protected var tModel = 0l
-
- /** Show the entire diagram, no filtering */
- case object FullDiagram extends DiagramFilter {
- val hideDiagram: Boolean = false
- val hideIncomingImplicits: Boolean = false
- val hideOutgoingImplicits: Boolean = false
- val hideSuperclasses: Boolean = false
- val hideSubclasses: Boolean = false
- val showInheritedNodes: Boolean = false
- def hideNode(clazz: TemplateEntity): Boolean = false
- def hideEdge(clazz1: TemplateEntity, clazz2: TemplateEntity): Boolean = false
- }
-
- /** Hide the diagram completely, no need for special filtering */
- case object NoDiagramAtAll extends DiagramFilter {
- val hideDiagram: Boolean = true
- val hideIncomingImplicits: Boolean = true
- val hideOutgoingImplicits: Boolean = true
- val hideSuperclasses: Boolean = true
- val hideSubclasses: Boolean = true
- val showInheritedNodes: Boolean = false
- def hideNode(clazz: TemplateEntity): Boolean = true
- def hideEdge(clazz1: TemplateEntity, clazz2: TemplateEntity): Boolean = true
- }
-
- /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation
- * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */
- case class AnnotationDiagramFilter(hideDiagram: Boolean,
- hideIncomingImplicits: Boolean,
- hideOutgoingImplicits: Boolean,
- hideSuperclasses: Boolean,
- hideSubclasses: Boolean,
- showInheritedNodes: Boolean,
- hideNodesFilter: List[Pattern],
- hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
-
- def hideNode(clazz: TemplateEntity): Boolean = {
- val qualifiedName = clazz.qualifiedName
- for (hideFilter <- hideNodesFilter)
- if (hideFilter.matcher(qualifiedName).matches) {
- // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
- return true
- }
- false
- }
-
- def hideEdge(clazz1: TemplateEntity, clazz2: TemplateEntity): Boolean = {
- val clazz1Name = clazz1.qualifiedName
- val clazz2Name = clazz2.qualifiedName
- for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
- if (clazz1Filter.matcher(clazz1Name).matches &&
- clazz2Filter.matcher(clazz2Name).matches) {
- // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
- // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
- return true
- }
- }
- false
- }
- }
-
- // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier
- private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
- private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
- private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
- private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex)
- // And the composed regexes:
- private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
- private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
-
- private def makeDiagramFilter(template: DocTemplateImpl,
- directives: List[String],
- defaultFilter: DiagramFilter,
- isInheritanceDiagram: Boolean): DiagramFilter = directives match {
-
- // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
- case Nil =>
- defaultFilter
-
-    // compute the exact filters. By including the annotation, the diagram is automatically added
- case _ =>
- tFilter -= System.currentTimeMillis
- var hideDiagram0: Boolean = false
- var hideIncomingImplicits0: Boolean = false
- var hideOutgoingImplicits0: Boolean = false
- var hideSuperclasses0: Boolean = false
- var hideSubclasses0: Boolean = false
- var showInheritedNodes0: Boolean = false
- var hideNodesFilter0: List[Pattern] = Nil
- var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
-
- def warning(message: String) = {
- // we need the position from the package object (well, ideally its comment, but yeah ...)
- val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
- assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
- global.reporter.warning(sym.pos, message)
- }
-
- def preparePattern(className: String) =
- "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
-
- // separate entries:
- val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
- for (entry <- entries)
- entry match {
- case "hideDiagram" =>
- hideDiagram0 = true
- case "hideIncomingImplicits" if isInheritanceDiagram =>
- hideIncomingImplicits0 = true
- case "hideOutgoingImplicits" if isInheritanceDiagram =>
- hideOutgoingImplicits0 = true
- case "hideSuperclasses" if isInheritanceDiagram =>
- hideSuperclasses0 = true
- case "hideSubclasses" if isInheritanceDiagram =>
- hideSubclasses0 = true
- case "showInheritedNodes" if !isInheritanceDiagram =>
- showInheritedNodes0 = true
- case HideNodesRegex(last) =>
- val matcher = NodeSpecPattern.matcher(entry)
- while (matcher.find()) {
- val classPattern = Pattern.compile(preparePattern(matcher.group()))
- hideNodesFilter0 ::= classPattern
- }
- case HideEdgesRegex(last) =>
- val matcher = NodeSpecPattern.matcher(entry)
- while (matcher.find()) {
- val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
- assert(matcher.find()) // it's got to be there, just matched it!
- val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
- hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
- }
- case "" =>
- // don't need to do anything about it
- case _ =>
- warning("Could not understand diagram annotation in " + template.fullName + ": unmatched entry \"" +
- entry + "\".\n" +
- " This could be because:\n" +
- " - you forgot to separate entries by commas\n" +
- " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
- " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
- }
- val result =
- if (hideDiagram0)
- NoDiagramAtAll
- else if ((hideNodesFilter0.isEmpty) &&
- (hideEdgesFilter0.isEmpty) &&
- (hideIncomingImplicits0 == false) &&
- (hideOutgoingImplicits0 == false) &&
- (hideSuperclasses0 == false) &&
- (hideSubclasses0 == false) &&
- (showInheritedNodes0 == false) &&
- (hideDiagram0 == false))
- FullDiagram
- else
- AnnotationDiagramFilter(
- hideDiagram = hideDiagram0,
- hideIncomingImplicits = hideIncomingImplicits0,
- hideOutgoingImplicits = hideOutgoingImplicits0,
- hideSuperclasses = hideSuperclasses0,
- hideSubclasses = hideSubclasses0,
- showInheritedNodes = showInheritedNodes0,
- hideNodesFilter = hideNodesFilter0,
- hideEdgesFilter = hideEdgesFilter0)
-
- if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
- settings.printMsg(template.fullName + " filter: " + result)
- tFilter += System.currentTimeMillis
-
- result
- }
-}
\ No newline at end of file
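
A minimal, standalone sketch of the node-spec handling deleted above: preparePattern turns a quoted spec such as "scala.collection.*" into an anchored regular expression, which hideNode/hideEdge then match against qualified names. NodeSpecDemo and its main method are illustrative only, not part of the Scala sources.

// Standalone sketch (illustrative only): quoted node spec -> anchored regex.
import java.util.regex.Pattern

object NodeSpecDemo {
  def preparePattern(className: String): String =
    "^" + className.stripPrefix("\"").stripSuffix("\"")
      .replaceAll("\\.", "\\\\.")
      .replaceAll("\\*", ".*") + "$"

  def main(args: Array[String]): Unit = {
    val p = Pattern.compile(preparePattern("\"scala.collection.*\""))
    println(p.matcher("scala.collection.immutable.List").matches) // true
    println(p.matcher("scala.Option").matches)                    // false
  }
}
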
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
deleted file mode 100644
index 4ae5e7a5cb..0000000000
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ /dev/null
@@ -1,197 +0,0 @@
-package scala.tools.nsc.doc
-package model
-package diagram
-
-import model._
-import comment.CommentFactory
-import collection.mutable
-
-// statistics
-import html.page.diagram.DiagramStats
-
-/**
- * This trait takes care of generating the diagram for classes and packages
- *
- * @author Damien Obrist
- * @author Vlad Ureche
- */
-trait DiagramFactory extends DiagramDirectiveParser {
- this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
-
- /** Create the inheritance diagram for this template */
- def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
-
- tFilter = 0
- tModel = -System.currentTimeMillis
-
- // the diagram filter
- val diagramFilter = makeInheritanceDiagramFilter(tpl)
-
- val result =
- if (diagramFilter == NoDiagramAtAll)
- None
- else {
- // the main node
- val thisNode = ThisNode(tpl.ownType, Some(tpl))
-
- // superclasses
- var superclasses = List[Node]()
- tpl.parentTypes.collect { case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => p } foreach {
- t: (TemplateEntity, TypeEntity) =>
- val n = NormalNode(t._2, Some(t._1))
- superclasses ::= n
- }
- val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
-
-        // incoming implicit conversions
- lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map(tpl => ImplicitNode(tpl.ownType, Some(tpl)))
- val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
-
- // subclasses
- val subclasses = tpl.directSubClasses.flatMap {
- case d: TemplateEntity if !classExcluded(d) => List(NormalNode(d.ownType, Some(d)))
- case _ => Nil
- }
- val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
-
-        // outgoing implicit conversions
- lazy val implicitNodes = tpl.outgoingImplicitlyConvertedClasses.map(pair => ImplicitNode(pair._2, Some(pair._1)))
- val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else implicitNodes
-
- // final diagram filter
- filterDiagram(ClassDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
- }
-
- tModel += System.currentTimeMillis
- DiagramStats.addFilterTime(tFilter)
- DiagramStats.addModelTime(tModel-tFilter)
-
- result
- }
-
- /** Create the content diagram for this template */
- def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
-
- tFilter = 0
- tModel = -System.currentTimeMillis
-
- // the diagram filter
- val diagramFilter = makeContentDiagramFilter(pack)
-
- val result =
- if (diagramFilter == NoDiagramAtAll)
- None
- else {
- var mapNodes = Map[DocTemplateEntity, Node]()
- var nodesShown = Set[DocTemplateEntity]()
- var edgesAll = List[(DocTemplateEntity, List[DocTemplateEntity])]()
-
-        // classes is the entire set of classes and traits in the package; they are the superset of nodes in the diagram
-        // we collect classes, traits and objects without a companion, which are usually used as values (e.g. scala.None)
- val dnodes = pack.members collect {
- case d: DocTemplateEntity if d.isClass || d.isTrait || (d.isObject && !d.companion.isDefined) &&
- ((d.inTemplate == pack) || diagramFilter.showInheritedNodes) => d
- }
-
- // for each node, add its subclasses
- for (node <- dnodes if !classExcluded(node)) {
- val superClasses = node.parentTypes.collect {
- case (tpl: DocTemplateEntity, tpe) if tpl.inTemplate == pack && !classExcluded(tpl) => tpl
- case (tpl: DocTemplateEntity, tpe) if tpl.inTemplate != pack && !classExcluded(tpl) && diagramFilter.showInheritedNodes && (pack.members contains tpl) => tpl
- }
-
- if (!superClasses.isEmpty) {
- nodesShown += node
- nodesShown ++= superClasses
- }
-
- edgesAll ::= node -> superClasses
- mapNodes += node -> (if (node.inTemplate == pack) NormalNode(node.ownType, Some(node)) else OutsideNode(node.ownType, Some(node)))
- }
-
- if (nodesShown.isEmpty)
- None
- else {
- val nodes = dnodes.filter(nodesShown.contains(_)).map(mapNodes(_))
- val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty)
- filterDiagram(PackageDiagram(nodes, edges), diagramFilter)
- }
- }
-
- tModel += System.currentTimeMillis
- DiagramStats.addFilterTime(tFilter)
- DiagramStats.addModelTime(tModel-tFilter)
-
- result
- }
-
- /** Diagram filtering logic */
- private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
- tFilter -= System.currentTimeMillis
-
- val result =
- if (diagramFilter == FullDiagram)
- Some(diagram)
- else if (diagramFilter == NoDiagramAtAll)
- None
- else {
- // Final diagram, with the filtered nodes and edges
- diagram match {
- case ClassDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode.tpl.get) =>
- None
-
- case ClassDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
-
- def hideIncoming(node: Node): Boolean =
- if (node.tpl.isDefined) diagramFilter.hideNode(node.tpl.get) || diagramFilter.hideEdge(node.tpl.get, thisNode.tpl.get)
-            else false // hopefully we won't need to fall back here
-
- def hideOutgoing(node: Node): Boolean =
- if (node.tpl.isDefined) diagramFilter.hideNode(node.tpl.get) || diagramFilter.hideEdge(thisNode.tpl.get, node.tpl.get)
-            else false // hopefully we won't need to fall back here
-
- // println(thisNode)
- // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n"))
- // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n"))
- Some(ClassDiagram(thisNode,
- superClasses.filterNot(hideOutgoing(_)),
- subClasses.filterNot(hideIncoming(_)),
- incomingImplicits.filterNot(hideIncoming(_)),
- outgoingImplicits.filterNot(hideOutgoing(_))))
-
- case PackageDiagram(nodes0, edges0) =>
- // Filter out all edges that:
- // (1) are sources of hidden classes
- // (2) are manually hidden by the user
- // (3) are destinations of hidden classes
- val edges: List[(Node, List[Node])] =
- diagram.edges.flatMap({
- case (source@Node(_, Some(tpl1)), dests) if !diagramFilter.hideNode(tpl1) =>
- val dests2 = dests.collect({ case node@Node(_, Some(tpl2)) if (!(diagramFilter.hideEdge(tpl1, tpl2) || diagramFilter.hideNode(tpl2))) => node })
- if (dests2 != Nil)
- List((source, dests2))
- else
- Nil
- case _ => Nil
- })
-
-            // Only show the non-isolated nodes
- // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
- // TODO: Does .distinct cause any stability issues?
- val sourceNodes = edges.map(_._1)
- val sinkNodes = edges.map(_._2).flatten
- val nodes = (sourceNodes ::: sinkNodes).distinct
- Some(PackageDiagram(nodes, edges))
- }
- }
-
- tFilter += System.currentTimeMillis
-
- // eliminate all empty diagrams
- if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
- None
- else
- result
- }
-
-}
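
A simplified, standalone sketch of the edge filtering performed above for package diagrams: edges whose source or destination is hidden are dropped, and only nodes that still take part in an edge survive. Node and PackageDiagramFilterDemo below are stand-ins, not the model types used by the factory.

// Simplified stand-in types; mirrors the PackageDiagram filtering logic above.
object PackageDiagramFilterDemo {
  final case class Node(name: String)

  def filterEdges(edges: List[(Node, List[Node])],
                  hideNode: Node => Boolean,
                  hideEdge: (Node, Node) => Boolean): List[(Node, List[Node])] =
    edges.flatMap {
      case (source, dests) if !hideNode(source) =>
        val kept = dests.filterNot(d => hideNode(d) || hideEdge(source, d))
        if (kept.nonEmpty) List(source -> kept) else Nil
      case _ => Nil
    }

  def main(args: Array[String]): Unit = {
    val a = Node("A"); val b = Node("B"); val c = Node("C")
    // hiding C drops the A -> C edge and removes the B -> C entry entirely
    println(filterEdges(List(a -> List(b, c), b -> List(c)), _ == c, (_, _) => false))
    // List((Node(A),List(Node(B))))
  }
}
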
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index e2203e07b3..3797d32d8b 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -911,7 +911,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
while (in.token == SEMI) in.nextToken
- buf ++= typeDecl(modifiers(false))
+ if (in.token != EOF)
+ buf ++= typeDecl(modifiers(false))
}
accept(EOF)
atPos(pos) {
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 72e6f32af1..16761144d7 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import ast.{ TreePrinters, Trees }
+import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
import annotation.elidable
import language.postfixOps
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 04acba06e8..9b223a13ba 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -160,7 +160,7 @@ trait ScalaSettings extends AbsScalaSettings
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (util.Statistics.enabled = _)
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val refinementMethodDispatch
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index 25d835790e..52e971f1e7 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -105,7 +105,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
*/
override def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
try {
- if (root.isEffectiveRoot) // RootClass or EmptyPackageClass
+ if (root.isEffectiveRoot || !src.name.endsWith(".scala")) // RootClass or EmptyPackageClass
super.enterToplevelsFromSource(root, name, src)
else
browseTopLevel(root, src)
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 15edac16d5..0c988ceae4 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
import reflect.internal.Flags._
import reflect.internal.MissingRequirementError
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
@@ -23,6 +23,7 @@ import scala.tools.nsc.io.{ AbstractFile, MsilFile }
abstract class SymbolLoaders {
val global: Global
import global._
+ import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
@@ -236,7 +237,7 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
- val start = startTimer(classReadNanos)
+ val start = Statistics.startTimer(classReadNanos)
classfileParser.parse(classfile, root)
if (root.associatedFile eq null) {
root match {
@@ -248,7 +249,7 @@ abstract class SymbolLoaders {
debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
}
}
- stopTimer(classReadNanos, start)
+ Statistics.stopTimer(classReadNanos, start)
}
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
@@ -284,3 +285,8 @@ abstract class SymbolLoaders {
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
}
+
+object SymbolLoadersStats {
+ import reflect.internal.TypesStats.typerNanos
+ val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
+}
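
The hunks above replace the wildcard-imported util.Statistics._ helpers with qualified calls on scala.reflect.internal.util.Statistics and group the sub-timers in a per-file *Stats object. A tiny stand-in showing the same start/stop shape (TimerSketch is not the real Statistics API):

// Hand-rolled stand-in for the start/stop timer pattern above (not the real API).
object TimerSketch {
  final class Timer(val name: String) { var nanos = 0L }

  def startTimer(t: Timer): Long = System.nanoTime()
  def stopTimer(t: Timer, start: Long): Unit = t.nanos += System.nanoTime() - start

  def main(args: Array[String]): Unit = {
    val classReadNanos = new Timer("time classfilereading")
    val start = startTimer(classReadNanos)
    Thread.sleep(10) // stands in for classfileParser.parse(classfile, root)
    stopTimer(classReadNanos, start)
    println(s"${classReadNanos.name}: ${classReadNanos.nanos} ns")
  }
}
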
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index d8bf23f4fe..046b177444 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -879,7 +879,7 @@ abstract class ClassfileParser {
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
case tpnme.ScalaATTR =>
@@ -897,7 +897,7 @@ abstract class ClassfileParser {
case Some(san: AnnotationInfo) =>
val bytes =
san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString)
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
case None =>
throw new RuntimeException("Scala class file does not contain Scala annotation")
}
@@ -1013,9 +1013,16 @@ abstract class ClassfileParser {
} catch {
case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
case ex: Throwable =>
- debuglog("dropping annotation on " + sym + ", an error occured during parsing (e.g. annotation class not found)")
-
- None // ignore malformed annotations ==> t1135
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
+ // and that should never be swallowed silently.
+ warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ if (settings.debug.value) ex.printStackTrace()
+
+ None // ignore malformed annotations
}
/**
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index bc4483923a..e5119eac71 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -323,7 +323,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// statements coming from the original class need retyping in the current context
debuglog("retyping " + stat2)
- val d = new specializeTypes.Duplicator
+ val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
d.retyped(localTyper.context1.asInstanceOf[d.Context],
stat2,
genericClazz,
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 1c97eaad8b..5f66cadbc9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -107,7 +107,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpe))
+ val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
def transform(clonedType: Type): Type = clonedType match {
case MethodType(params, restpe) =>
@@ -159,7 +159,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
.changeOwner((origMeth, extensionMeth))
extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpe)),
+ gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
List(This(currentOwner)))
val extensionCall = atOwner(origMeth) {
localTyper.typedPos(rhs.pos) {
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 4b488a6437..124d350385 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -450,7 +450,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
- params.filter(p => !p.isSpecialized || !isPrimitiveValueType(env(p)))
+ params filter {
+ p =>
+ !p.isSpecialized ||
+ !env.contains(p) ||
+ !isPrimitiveValueType(env(p))
+ }
/** Produces the symbols from type parameters `syms` of the original owner,
* in the given type environment `env`. The new owner is `nowner`.
@@ -1176,7 +1181,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|| specializedTypeVars(t1).nonEmpty
|| specializedTypeVars(t2).nonEmpty)
}
-
+
env forall { case (tvar, tpe) =>
matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || {
if (warnings)
@@ -1192,10 +1197,58 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+
+ def satisfiabilityConstraints(env: TypeEnv): Option[TypeEnv] = {
+ val noconstraints = Some(emptyEnv)
+ def matches(tpe1: Type, tpe2: Type): Option[TypeEnv] = {
+ val t1 = subst(env, tpe1)
+ val t2 = subst(env, tpe2)
+ // log("---------> " + tpe1 + " matches " + tpe2)
+ // log(t1 + ", " + specializedTypeVars(t1))
+ // log(t2 + ", " + specializedTypeVars(t2))
+ // log("unify: " + unify(t1, t2, env, false, false) + " in " + env)
+ if (t1 <:< t2) noconstraints
+ else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env, false, false) -- env.keys)
+ else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env, false, false) -- env.keys)
+ else None
+ }
+
+ env.foldLeft[Option[TypeEnv]](noconstraints) {
+ case (constraints, (tvar, tpe)) =>
+ val loconstraints = matches(tvar.info.bounds.lo, tpe)
+ val hiconstraints = matches(tpe, tvar.info.bounds.hi)
+ val allconstraints = for (c <- constraints; l <- loconstraints; h <- hiconstraints) yield c ++ l ++ h
+ allconstraints
+ }
+ }
- class Duplicator extends {
+ /** This duplicator additionally performs casts of expressions if that is allowed by the `casts` map. */
+ class Duplicator(casts: Map[Symbol, Type]) extends {
val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global
- } with typechecker.Duplicators
+ } with typechecker.Duplicators {
+ private val (castfrom, castto) = casts.unzip
+ private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList)
+
+ class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
+ override def castType(tree: Tree, pt: Type): Tree = {
+ // log(" expected type: " + pt)
+ // log(" tree type: " + tree.tpe)
+ tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ // log(" tree type: " + tree.tpe)
+ val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
+ val casttpe = CastMap(tree.tpe)
+ if (casttpe <:< pt) gen.mkCast(tree, casttpe)
+ else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
+ else tree
+ } else tree
+ ntree.tpe = null
+ ntree
+ }
+ }
+
+ protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+
+ }
/** A tree symbol substituter that substitutes on type skolems.
* If a type parameter is a skolem, it looks for the original
@@ -1302,8 +1355,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+
+ def reportError[T](body: =>T)(handler: TypeError => T): T =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.msg)
+ handler(te)
+ }
- override def transform(tree: Tree): Tree = {
+ override def transform(tree: Tree): Tree =
+ reportError { transform1(tree) } {_ => tree}
+
+ def transform1(tree: Tree) = {
val symbol = tree.symbol
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
@@ -1329,14 +1393,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else None
}
- def reportError[T](body: =>T)(handler: TypeError => T): T =
- try body
- catch {
- case te: TypeError =>
- reporter.error(tree.pos, te.msg)
- handler(te)
- }
-
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -1448,31 +1504,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
+ def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
+
if (symbol.isConstructor) {
val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
- else // duplicate the original constructor
- duplicateBody(ddef, info(symbol).target)
+ else // duplicate the original constructor
+ reportTypeError(duplicateBody(ddef, info(symbol).target))
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- debuglog("Normalized member: " + symbol + ", target: " + target)
- if (target.isDeferred || conflicting(typeEnv(symbol))) {
+ val constraints = satisfiabilityConstraints(typeEnv(symbol))
+ log("constraints: " + constraints)
+ if (target.isDeferred || constraints == None) {
deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
- }
- else {
+ } else {
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target, constraints.get)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
}
@@ -1535,7 +1597,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
- val d = new Duplicator
+ val d = new Duplicator(emptyEnv)
val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
@@ -1560,12 +1622,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
super.transform(tree)
}
}
-
- private def duplicateBody(tree: DefDef, source: Symbol) = {
+
+ /** Duplicate the body of the given method `tree` to the new symbol `source`.
+ *
+ * Knowing that the method can be invoked only in the `castmap` type environment,
+   * this method will insert casts for all the expressions of types mapped in the
+ * `castmap`.
+ */
+ private def duplicateBody(tree: DefDef, source: Symbol, castmap: TypeEnv = emptyEnv) = {
val symbol = tree.symbol
val meth = addBody(tree, source)
- val d = new Duplicator
+ val d = new Duplicator(castmap)
debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
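
The satisfiabilityConstraints method added above folds per-type-parameter bound checks, each yielding an optional set of extra constraints, into a single Option[TypeEnv]; one unsatisfiable bound collapses the whole result to None. A minimal sketch of that Option-combining shape, with a string-keyed map standing in for TypeEnv:

// Sketch only; Env is a simplified stand-in for the compiler's TypeEnv.
object ConstraintFoldDemo {
  type Env = Map[String, String]

  // merge three optional constraint sets; any None makes the result None
  def merge(acc: Option[Env], lo: Option[Env], hi: Option[Env]): Option[Env] =
    for (c <- acc; l <- lo; h <- hi) yield c ++ l ++ h

  def main(args: Array[String]): Unit = {
    println(merge(Some(Map("T" -> "Int")), Some(Map.empty), Some(Map("U" -> "Long")))) // Some(Map(T -> Int, U -> Long))
    println(merge(Some(Map("T" -> "Int")), None, Some(Map.empty)))                     // None
  }
}
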
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index a77df71312..2077ab0997 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import util.Statistics._
+import reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -72,6 +72,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
+ import reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
@@ -84,13 +85,13 @@ trait Analyzer extends AnyRef
// compiler run). This is good enough for the resident compiler, which was the most affected.
undoLog.clear()
override def run() {
- val start = startTimer(typerNanos)
+ val start = Statistics.startTimer(typerNanos)
global.echoPhaseSummary(this)
currentRun.units foreach applyPhase
undoLog.clear()
// need to clear it after as well or 10K+ accumulated entries are
// uncollectable the rest of the way.
- stopTimer(typerNanos, start)
+ Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index bcf2ba6b71..60cc9e5fb8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -90,7 +90,7 @@ trait ContextErrors {
import infer.setError
object TyperErrorGen {
- implicit val context0: Context = infer.getContext
+ implicit val contextTyperErrorGen: Context = infer.getContext
def UnstableTreeError(tree: Tree) = {
def addendum = {
@@ -222,7 +222,13 @@ trait ContextErrors {
NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name")
def SuperConstrArgsThisReferenceError(tree: Tree) =
- NormalTypeError(tree, "super constructor arguments cannot reference unconstructed `this`")
+ ConstrArgsThisReferenceError("super", tree)
+
+ def SelfConstrArgsThisReferenceError(tree: Tree) =
+ ConstrArgsThisReferenceError("self", tree)
+
+ private def ConstrArgsThisReferenceError(prefix: String, tree: Tree) =
+ NormalTypeError(tree, s"$prefix constructor arguments cannot reference unconstructed `this`")
def TooManyArgumentListsForConstructor(tree: Tree) = {
issueNormalTypeError(tree, "too many argument lists for constructor invocation")
@@ -642,7 +648,7 @@ trait ContextErrors {
object InferErrorGen {
- implicit val context0 = getContext
+ implicit val contextInferErrorGen = getContext
object PolyAlternativeErrorKind extends Enumeration {
type ErrorType = Value
@@ -828,7 +834,7 @@ trait ContextErrors {
object NamerErrorGen {
- implicit val context0 = context
+ implicit val contextNamerErrorGen = context
object SymValidateErrors extends Enumeration {
val ImplicitConstr, ImplicitNotTermOrClass, ImplicitAtToplevel,
@@ -863,7 +869,7 @@ trait ContextErrors {
case CyclicReference(sym, info: TypeCompleter) =>
issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
case _ =>
- context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
+ contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index ac3c94c47a..bcf529ecd2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -100,6 +100,12 @@ trait Contexts { self: Analyzer =>
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
// template or package definition
+ @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ val saved = enclClass
+ enclClass = c
+ try a finally enclClass = saved
+ }
+
var enclMethod: Context = _ // The next outer context whose tree is a method
var variance: Int = _ // Variance relative to enclosing class
private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
@@ -638,11 +644,12 @@ trait Contexts { self: Analyzer =>
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
// debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
- val savedEnclClass = enclClass
- this.enclClass = this
- val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
- this.enclClass = savedEnclClass
- res
+ savingEnclClass(this) {
+ // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+            // it is handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
+ // remedied nonetheless.
+ collectImplicits(owner.thisType.implicitMembers, owner.thisType)
+ }
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
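
The savingEnclClass helper introduced above replaces the manual save/assign/restore of enclClass with a try/finally that restores the previous value even if the body throws. A standalone sketch of the same save-and-restore shape (the String field below stands in for the real Context):

// Sketch of the savingEnclClass pattern; enclClass here is just a String stand-in.
object SaveRestoreDemo {
  var enclClass: String = "outer"

  def savingEnclClass[A](c: String)(a: => A): A = {
    val saved = enclClass
    enclClass = c
    try a finally enclClass = saved
  }

  def main(args: Array[String]): Unit = {
    println(savingEnclClass("inner") { s"body sees enclClass = $enclClass" })
    println(s"afterwards enclClass = $enclClass") // restored to "outer"
  }
}
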
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index b7a6ea677e..63d1bd0e9f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -21,7 +21,7 @@ abstract class Duplicators extends Analyzer {
def retyped(context: Context, tree: Tree): Tree = {
resetClassOwners
- (new BodyDuplicator(context)).typed(tree)
+ (newBodyDuplicator(context)).typed(tree)
}
/** Retype the given tree in the given context. Use this method when retyping
@@ -37,15 +37,17 @@ abstract class Duplicators extends Analyzer {
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
- (new BodyDuplicator(context)).typed(tree)
+ newBodyDuplicator(context).typed(tree)
}
+ protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+
def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (new BodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
+ (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
- new BodyDuplicator(context)
+ newBodyDuplicator(context)
private def resetClassOwners() {
oldClassOwner = null
@@ -209,6 +211,11 @@ abstract class Duplicators extends Analyzer {
}
}
+ /** Optionally cast this tree into some other type, if required.
+ * Unless overridden, just returns the tree.
+ */
+ def castType(tree: Tree, pt: Type): Tree = tree
+
/** Special typer method for re-type checking trees. It expects a typed tree.
* Returns a typed tree that has fresh symbols for all definitions in the original tree.
*
@@ -319,10 +326,10 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1), mode, pt)
case This(_) =>
- // log("selection on this, plain: " + tree)
+ debuglog("selection on this, plain: " + tree)
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- val tree1 = super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ val tree1 = super.typed(ntree, mode, pt)
// log("plain this typed to: " + tree1)
tree1
/* no longer needed, because Super now contains a This(...)
@@ -358,16 +365,18 @@ abstract class Duplicators extends Analyzer {
case EmptyTree =>
// no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
tree
-
+
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
+ debuglog(" ---> " + tree)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
- tree.tpe = null
- super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ super.typed(ntree, mode, pt)
}
}
+
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index e1fb683aa9..177d1ddf19 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -88,9 +88,11 @@ trait EtaExpansion { self: Analyzer =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- val byName = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe))
- // zipAll: with repeated params, there might be more args than params
- val newArgs = args.zipAll(byName, EmptyTree, false) map { case (arg, byN) => liftout(arg, byN) }
+ val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift
+ val newArgs = mapWithIndex(args) { (arg, i) =>
+ // with repeated params, there might be more or fewer args than params
+ liftout(arg, byName(i).getOrElse(false))
+ }
treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
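
The EtaExpansion change above swaps zipAll padding for params.map(...).lift, turning the per-parameter by-name flags into a total Int => Option[Boolean] so that surplus arguments from repeated parameters default to false. A small illustration of that lift behaviour (the flag list and argument count are made up):

// Illustration only: by-name flags for a hypothetical def f(x: Int, y: => Int, zs: Int*),
// applied to more arguments than declared parameters.
object ByNameLiftDemo {
  def main(args: Array[String]): Unit = {
    val byNameFlags = List(false, true, false)
    val byName: Int => Option[Boolean] = byNameFlags.lift
    val flags = (0 until 5).toList.map(i => byName(i).getOrElse(false))
    println(flags) // List(false, true, false, false, false)
  }
}
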
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 0ea46f1de4..68782379a6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -16,7 +16,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
-import util.Statistics._
+import scala.reflect.internal.util.Statistics
import language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
@@ -29,6 +29,7 @@ trait Implicits {
import global._
import definitions._
+ import ImplicitsStats._
import typeDebug.{ ptTree, ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
@@ -71,10 +72,10 @@ trait Implicits {
)
indentTyping()
- val rawTypeStart = startCounter(rawTypeImpl)
- val findMemberStart = startCounter(findMemberImpl)
- val subtypeStart = startCounter(subtypeImpl)
- val start = startTimer(implicitNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeImpl)
+ val findMemberStart = Statistics.startCounter(findMemberImpl)
+ val subtypeStart = Statistics.startCounter(subtypeImpl)
+ val start = Statistics.startTimer(implicitNanos)
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
@@ -86,10 +87,10 @@ trait Implicits {
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
- stopTimer(implicitNanos, start)
- stopCounter(rawTypeImpl, rawTypeStart)
- stopCounter(findMemberImpl, findMemberStart)
- stopCounter(subtypeImpl, subtypeStart)
+ Statistics.stopTimer(implicitNanos, start)
+ Statistics.stopCounter(rawTypeImpl, rawTypeStart)
+ Statistics.stopCounter(findMemberImpl, findMemberStart)
+ Statistics.stopCounter(subtypeImpl, subtypeStart)
deindentTyping()
printTyping("Implicit search yielded: "+ result)
result
@@ -307,12 +308,12 @@ trait Implicits {
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
- incCounter(improvesCount)
+ Statistics.incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
improvesCache get (info1, info2) match {
- case Some(b) => incCounter(improvesCachedCount); b
+ case Some(b) => Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
improvesCache((info1, info2)) = result
@@ -376,7 +377,7 @@ trait Implicits {
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- incCounter(implicitSearchCount)
+ Statistics.incCounter(implicitSearchCount)
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
@@ -390,9 +391,10 @@ trait Implicits {
* Detect infinite search trees for implicits.
*
* @param info The given implicit info describing the implicit definition
+ * @param isLocal Is the implicit in the local scope of the call site?
* @pre `info.tpe` does not contain an error
*/
- private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
+ private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
(context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
@@ -401,7 +403,7 @@ trait Implicits {
try {
context.openImplicits = (pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked)
+ typedImplicit0(info, ptChecked, isLocal)
} catch {
case ex: DivergentImplicit =>
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
@@ -427,7 +429,7 @@ trait Implicits {
* This method is performance critical: 5-8% of typechecking time.
*/
private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = {
- val start = startTimer(matchesPtNanos)
+ val start = Statistics.startTimer(matchesPtNanos)
val result = normSubType(tp, pt) || isView && {
pt match {
case TypeRef(_, Function1.Sym, args) =>
@@ -436,7 +438,7 @@ trait Implicits {
false
}
}
- stopTimer(matchesPtNanos, start)
+ Statistics.stopTimer(matchesPtNanos, start)
result
}
private def matchesPt(info: ImplicitInfo): Boolean = (
@@ -534,8 +536,8 @@ trait Implicits {
case _ => false
}
- private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- incCounter(plausiblyCompatibleImplicits)
+ private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(plausiblyCompatibleImplicits)
printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
@@ -549,17 +551,24 @@ trait Implicits {
)
if (ptChecked || matchesPt(info))
- typedImplicit1(info)
+ typedImplicit1(info, isLocal)
else
SearchFailure
}
- private def typedImplicit1(info: ImplicitInfo): SearchResult = {
- incCounter(matchingImplicits)
+ private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(matchingImplicits)
val itree = atPos(pos.focus) {
- if (info.pre == NoPrefix) Ident(info.name)
- else {
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
+
+ if (isLocal && !isScalaDoc) {
+ // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
+ // rather than an attributed Select, to detect shadowing.
+ Ident(info.name)
+ } else {
+ assert(info.pre != NoPrefix, info)
// SI-2405 Not info.name, which might be an aliased import
val implicitMemberName = info.sym.name
Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
@@ -586,7 +595,7 @@ trait Implicits {
if (context.hasErrors)
return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
- incCounter(typedImplicits)
+ Statistics.incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
@@ -607,8 +616,8 @@ trait Implicits {
if (context.hasErrors)
fail("hasMatchingSymbol reported threw error(s)")
- else if (!hasMatchingSymbol(itree1))
- fail("candidate implicit %s is shadowed by other implicit %s".format(
+ else if (isLocal && !hasMatchingSymbol(itree1))
+ fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
@@ -669,7 +678,7 @@ trait Implicits {
fail("typing TypeApply reported errors for the implicit tree")
else {
val result = new SearchResult(itree2, subst)
- incCounter(foundImplicits)
+ Statistics.incCounter(foundImplicits)
printInference("[success] found %s for pt %s".format(result, ptInstantiated))
result
}
@@ -683,17 +692,6 @@ trait Implicits {
}
}
- // #3453: in addition to the implicit symbols that may shadow the implicit with
- // name `name`, this method tests whether there's a non-implicit symbol with name
- // `name` in scope. Inspired by logic in typedIdent.
- private def nonImplicitSynonymInScope(name: Name) = {
- // the implicit ones are handled by the `shadowed` set above
- context.scope.lookupEntry(name) match {
- case x: ScopeEntry => reallyExists(x.sym) && !x.sym.isImplicit
- case _ => false
- }
- }
-
/** Should implicit definition symbol `sym` be considered for applicability testing?
* This is the case if one of the following holds:
* - the symbol's type is initialized
@@ -737,19 +735,38 @@ trait Implicits {
/** Prune ImplicitInfos down to either all the eligible ones or the best one.
*
* @param iss list of list of infos
- * @param shadowed set in which to record names that are shadowed by implicit infos
- * If it is null, no shadowing.
+ * @param isLocal if true, `iss` represents in-scope implicits, which must respect the normal rules of
+ * shadowing. The head of the list `iss` must represent implicits from the closest
+ * enclosing scope, and so on.
*/
- class ImplicitComputation(iss: Infoss, shadowed: util.HashSet[Name]) {
+ class ImplicitComputation(iss: Infoss, isLocal: Boolean) {
+ abstract class Shadower {
+ def addInfos(infos: Infos)
+ def isShadowed(name: Name): Boolean
+ }
+ private val shadower: Shadower = {
+      /** Used to exclude implicits from outer scopes that are shadowed by same-named implicits */
+ final class LocalShadower extends Shadower {
+ val shadowed = util.HashSet[Name](512)
+ def addInfos(infos: Infos) {
+ shadowed addEntries infos.map(_.name)
+ }
+ def isShadowed(name: Name) = shadowed(name)
+ }
+ /** Used for the implicits of expected type, when no shadowing checks are needed. */
+ object NoShadower extends Shadower {
+ def addInfos(infos: Infos) {}
+ def isShadowed(name: Name) = false
+ }
+ if (isLocal) new LocalShadower else NoShadower
+ }
+
private var best: SearchResult = SearchFailure
- private def isShadowed(name: Name) = (
- (shadowed != null)
- && (shadowed(name) || nonImplicitSynonymInScope(name))
- )
+
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && isPredefMemberNamed(info.sym, nme.conforms)
- || isShadowed(info.name)
+ || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -788,9 +805,7 @@ trait Implicits {
val eligible = {
val matches = iss flatMap { is =>
val result = is filter (info => checkValid(info.sym) && survives(info))
- if (shadowed ne null)
- shadowed addEntries (is map (_.name))
-
+ shadower addInfos is
result
}
@@ -812,7 +827,7 @@ trait Implicits {
case Nil => acc
case i :: is =>
def tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, true)
+ try typedImplicit(i, ptChecked = true, isLocal)
catch divergenceHandler
tryImplicitInfo(i) match {
@@ -842,7 +857,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = mapFrom(eligible)(typedImplicit(_, false))
+ def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocal))
/** Returns the SearchResult of the best match.
*/
@@ -890,11 +905,11 @@ trait Implicits {
* @return map from infos to search results
*/
def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
- val start = startCounter(subtypeAppInfos)
- val computation = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null) { }
+ val start = Statistics.startCounter(subtypeAppInfos)
+ val computation = new ImplicitComputation(iss, isLocal) { }
val applicable = computation.findAll()
- stopCounter(subtypeAppInfos, start)
+ Statistics.stopCounter(subtypeAppInfos, start)
applicable
}
@@ -909,7 +924,7 @@ trait Implicits {
*/
def searchImplicit(implicitInfoss: Infoss, isLocal: Boolean): SearchResult =
if (implicitInfoss.forall(_.isEmpty)) SearchFailure
- else new ImplicitComputation(implicitInfoss, if (isLocal) util.HashSet[Name](128) else null) findBest()
+ else new ImplicitComputation(implicitInfoss, isLocal) findBest()
/** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to
* the implicit infos in the companion objects of these class symbols C.
@@ -1109,26 +1124,28 @@ trait Implicits {
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
*/
- private def implicitsOfExpectedType: Infoss = implicitsCache get pt match {
- case Some(implicitInfoss) =>
- incCounter(implicitCacheHits)
- implicitInfoss
- case None =>
- incCounter(implicitCacheMisses)
- val start = startTimer(subtypeETNanos)
-// val implicitInfoss = companionImplicits(pt)
- val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
-// val is1 = implicitInfoss.flatten.toSet
-// val is2 = implicitInfoss1.flatten.toSet
-// for (i <- is1)
-// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
-// for (i <- is2)
-// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
- stopTimer(subtypeETNanos, start)
- implicitsCache(pt) = implicitInfoss1
- if (implicitsCache.size >= sizeLimit)
- implicitsCache -= implicitsCache.keysIterator.next
- implicitInfoss1
+ private def implicitsOfExpectedType: Infoss = {
+ Statistics.incCounter(implicitCacheAccs)
+ implicitsCache get pt match {
+ case Some(implicitInfoss) =>
+ Statistics.incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ val start = Statistics.startTimer(subtypeETNanos)
+ // val implicitInfoss = companionImplicits(pt)
+ val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
+ // val is1 = implicitInfoss.flatten.toSet
+ // val is2 = implicitInfoss1.flatten.toSet
+ // for (i <- is1)
+ // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ // for (i <- is2)
+ // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ Statistics.stopTimer(subtypeETNanos, start)
+ implicitsCache(pt) = implicitInfoss1
+ if (implicitsCache.size >= sizeLimit)
+ implicitsCache -= implicitsCache.keysIterator.next
+ implicitInfoss1
+ }
}
private def TagSymbols = TagMaterializers.keySet
@@ -1354,30 +1371,30 @@ trait Implicits {
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = startTimer(inscopeFailNanos)
- val succstart = startTimer(inscopeSucceedNanos)
+ val failstart = Statistics.startTimer(inscopeFailNanos)
+ val succstart = Statistics.startTimer(inscopeSucceedNanos)
var result = searchImplicit(context.implicitss, true)
if (result == SearchFailure) {
- stopTimer(inscopeFailNanos, failstart)
+ Statistics.stopTimer(inscopeFailNanos, failstart)
} else {
- stopTimer(inscopeSucceedNanos, succstart)
- incCounter(inscopeImplicitHits)
+ Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ Statistics.incCounter(inscopeImplicitHits)
}
if (result == SearchFailure) {
val previousErrs = context.flushAndReturnBuffer()
- val failstart = startTimer(oftypeFailNanos)
- val succstart = startTimer(oftypeSucceedNanos)
+ val failstart = Statistics.startTimer(oftypeFailNanos)
+ val succstart = Statistics.startTimer(oftypeSucceedNanos)
result = implicitTagOrOfExpectedType(pt)
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
- stopTimer(oftypeFailNanos, failstart)
+ Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- stopTimer(oftypeSucceedNanos, succstart)
- incCounter(oftypeImplicitHits)
+ Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ Statistics.incCounter(oftypeImplicitHits)
}
}
@@ -1397,20 +1414,23 @@ trait Implicits {
def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
- def eligibleInfos(iss: Infoss, isLocal: Boolean) = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null).eligible
- val allEligibleInfos = (eligibleInfos(context.implicitss, true) ++ eligibleInfos(implicitsOfExpectedType, false)).toList
-
- allEligibleInfos flatMap { ii =>
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = {
+ val eligible = new ImplicitComputation(iss, isLocal).eligible
+ eligible.toList.flatMap {
+ (ii: ImplicitInfo) =>
// each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
// thus, start each type var off with a fresh for every typedImplicit
resetTVars()
// any previous errors should not affect us now
context.flushBuffer()
- val res = typedImplicit(ii, false)
+
+ val res = typedImplicit(ii, ptChecked = false, isLocal)
if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
else Nil
}
}
+ eligibleInfos(context.implicitss, isLocal = true) ++ eligibleInfos(implicitsOfExpectedType, isLocal = false)
+ }
}
object ImplicitNotFoundMsg {
@@ -1455,5 +1475,37 @@ trait Implicits {
}
}
}
+
+object ImplicitsStats {
+
+ import reflect.internal.TypesStats._
+
+ val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
+ val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
+ val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
+ val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
+ val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
+ val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
+ val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
+ val plausiblyCompatibleImplicits
+ = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
+ val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
+ val typedImplicits = Statistics.newSubCounter(" #typed", implicitSearchCount)
+ val foundImplicits = Statistics.newSubCounter(" #found", implicitSearchCount)
+ val improvesCount = Statistics.newSubCounter("implicit improves tests", implicitSearchCount)
+ val improvesCachedCount = Statistics.newSubCounter("#implicit improves cached ", implicitSearchCount)
+ val inscopeImplicitHits = Statistics.newSubCounter("#implicit inscope hits", implicitSearchCount)
+ val oftypeImplicitHits = Statistics.newSubCounter("#implicit oftype hits ", implicitSearchCount)
+ val implicitNanos = Statistics.newSubTimer ("time spent in implicits", typerNanos)
+ val inscopeSucceedNanos = Statistics.newSubTimer (" successful in scope", typerNanos)
+ val inscopeFailNanos = Statistics.newSubTimer (" failed in scope", typerNanos)
+ val oftypeSucceedNanos = Statistics.newSubTimer (" successful of type", typerNanos)
+ val oftypeFailNanos = Statistics.newSubTimer (" failed of type", typerNanos)
+ val subtypeETNanos = Statistics.newSubTimer (" assembling parts", typerNanos)
+ val matchesPtNanos = Statistics.newSubTimer (" matchesPT", typerNanos)
+ val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
+ val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
+}
+
class DivergentImplicit extends Exception
object DivergentImplicit extends DivergentImplicit
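The hunks above route every counter and timer through the `Statistics` object (imported from `reflect.internal.util`) and gather the implicit-search quantities in the new `ImplicitsStats` holder. As a minimal sketch of the start/stop pattern they follow (the quantity names `sketchCount` and `sketchNanos` below are hypothetical, not part of this patch):

import scala.reflect.internal.util.Statistics
import scala.reflect.internal.TypesStats.typerNanos

object SketchStats {
  // hypothetical quantities: a counter shown under the "typer" phase and a sub-timer of typerNanos
  val sketchCount = Statistics.newCounter("#sketch searches", "typer")
  val sketchNanos = Statistics.newSubTimer("time spent in sketch", typerNanos)

  // bump the counter, time the body, and always stop the timer afterwards
  def timed[T](body: => T): T = {
    val start = Statistics.startTimer(sketchNanos)
    Statistics.incCounter(sketchCount)
    try body
    finally Statistics.stopTimer(sketchNanos, start)
  }
}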
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 688dcd91ac..e99c31374e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1104,7 +1104,9 @@ trait Infer {
try {
// debuglog("TVARS "+ (tvars map (_.constr)))
// look at the argument types of the primary constructor corresponding to the pattern
- val variances = undetparams map varianceInType(ctorTp.paramTypes.headOption getOrElse ctorTp)
+ val variances =
+ if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
+ else undetparams map varianceInTypes(ctorTp.paramTypes)
val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
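As a hypothetical illustration of the change above (not part of the patch): the variances of the undetermined type parameters are now computed over all constructor parameter types with `varianceInTypes`, falling back to the constructor type itself only when the parameter list is empty, rather than silently consulting just the head parameter type.

// `T` occurs only in the second constructor parameter; under the old
// `paramTypes.headOption` logic its occurrence there was never consulted.
case class Tagged[T](id: Int, payload: T)

def firstString(x: Tagged[_]): Option[String] = x match {
  case Tagged(_, s: String) => Some(s)   // constructor-pattern inference solves T here
  case _                    => None
}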
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index ec14476d1a..d157666e47 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,7 +7,7 @@ import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.reflect.makro.util._
import java.lang.{Class => jClass}
import java.lang.reflect.{Array => jArray, Method => jMethod}
@@ -42,6 +42,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import global._
import definitions._
+ import MacrosStats._
def globalSettings = global.settings
val globalMacroCache = collection.mutable.Map[Any, Any]()
@@ -945,8 +946,8 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
this.fail(typer, tree, err.errPos, "failed to %s: %s".format(what, err.errMsg))
return expandee
}
- val start = startTimer(macroExpandNanos)
- incCounter(macroExpandCount)
+ val start = Statistics.startTimer(macroExpandNanos)
+ Statistics.incCounter(macroExpandCount)
try {
macroExpand1(typer, expandee) match {
case Success(expanded0) =>
@@ -993,7 +994,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
result
}
} finally {
- stopTimer(macroExpandNanos, start)
+ Statistics.stopTimer(macroExpandNanos, start)
}
}
@@ -1292,3 +1293,9 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
})
}.transform(expandee)
}
+
+object MacrosStats {
+ import reflect.internal.TypesStats.typerNanos
+ val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
+ val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 4e8f416b16..c466206192 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -14,7 +14,7 @@ import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
-import scala.tools.nsc.util.Statistics
+import reflect.internal.util.Statistics
/** Translate pattern matching.
*
@@ -38,6 +38,7 @@ import scala.tools.nsc.util.Statistics
*/
trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
import Statistics._
+ import PatternMatchingStats._
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -122,7 +123,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def zero: M[Nothing]
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
@@ -136,7 +136,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
}
*/
@@ -183,7 +182,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => tp
}
- val start = startTimer(patmatNanos)
+ val start = Statistics.startTimer(patmatNanos)
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
@@ -210,7 +209,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
- stopTimer(patmatNanos, start)
+ Statistics.stopTimer(patmatNanos, start)
combined
}
@@ -1694,7 +1693,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
- val start = startTimer(patmatAnaVarEq)
+ val start = Statistics.startTimer(patmatAnaVarEq)
val vars = new collection.mutable.HashSet[Var]
@@ -1766,7 +1765,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
// patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
- stopTimer(patmatAnaVarEq, start)
+ Statistics.stopTimer(patmatAnaVarEq, start)
(eqAxioms, pure)
}
@@ -1865,10 +1864,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- val start = startTimer(patmatCNF)
+ val start = Statistics.startTimer(patmatCNF)
val res = conjunctiveNormalForm(negationNormalForm(p))
- stopTimer(patmatCNF, start)
- patmatCNFSizes(res.size) += 1
+ Statistics.stopTimer(patmatCNF, start)
+ patmatCNFSizes(res.size).value += 1
// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
res
@@ -1945,7 +1944,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("dpll\n"+ cnfString(f))
- val start = startTimer(patmatAnaDPLL)
+ val start = Statistics.startTimer(patmatAnaDPLL)
val satisfiableWithModel: Model =
if (f isEmpty) EmptyModel
@@ -1983,7 +1982,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaDPLL, start)
+ Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
@@ -2005,7 +2004,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private val uniques = new collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
- class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar {
+ class Var(val path: Tree, fullTp: Type) extends AbsVar {
private[this] val id: Int = Var.nextId
// private[this] var canModify: Option[Array[StackTraceElement]] = None
@@ -2027,26 +2026,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[Set[Const]] =
- if (!checked) None
- else {
- val subConsts = enumerateSubtypes(fullTp).map{ tps =>
- tps.toSet[Type].map{ tp =>
- val domainC = TypeConst(tp)
- registerEquality(domainC)
- domainC
- }
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(fullTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
}
+ }
- val allConsts =
- if (! _considerNull) subConsts
- else {
- registerEquality(NullConst)
- subConsts map (_ + NullConst)
- }
+ val allConsts =
+ if (! _considerNull) subConsts
+ else {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ }
- observed; allConsts
- }
+ observed; allConsts
+ }
// accessing after calling considerNull will result in inconsistencies
lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
@@ -2156,6 +2153,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// the equals inherited from AnyRef does just this
}
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type) = {
+ // getOrElse to err on the safe side -- all BTS should end in Any, right?
+ val wideTp = tp.widen
+ val clsTp =
+ if (wideTp.typeSymbol.isClass) wideTp
+ else wideTp.baseTypeSeq.toList.find(_.typeSymbol.isClass).getOrElse(AnyClass.tpe)
+ // patmatDebug("Widening to class: "+ (tp, clsTp, tp.widen, tp.widen.baseTypeSeq, tp.widen.baseTypeSeq.toList.find(_.typeSymbol.isClass)))
+ clsTp
+ }
object TypeConst {
def apply(tp: Type) = {
@@ -2171,7 +2183,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
assert(!(tp =:= NullTp))
private[this] val id: Int = Const.nextTypeId
- val wideTp = tp.widen
+ val wideTp = widenToClass(tp)
override def toString = tp.toString //+"#"+ id
}
@@ -2190,10 +2202,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val tp = p.tpe.normalize
if (tp =:= NullTp) NullConst
else {
- val wideTp = {
- if (p.hasSymbol && p.symbol.isStable) tp.asSeenFrom(tp.prefix, p.symbol.owner).widen
- else tp.widen
- }
+ val wideTp = widenToClass(tp)
val narrowTp =
if (tp.isInstanceOf[SingletonType]) tp
@@ -2291,7 +2300,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
}
- val start = startTimer(patmatAnaReach)
+ val start = Statistics.startTimer(patmatAnaReach)
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
@@ -2340,7 +2349,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaReach, start)
+ Statistics.stopTimer(patmatAnaReach, start)
if (reachable) None else Some(caseIndex)
} catch {
@@ -2353,14 +2362,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// exhaustivity
- // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- // TODO: domain of feasibly enumerable built-in types (enums, char?)
+ // TODO: domain of other feasibly enumerable built-in types (char?)
def enumerateSubtypes(tp: Type): Option[List[Type]] =
tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
case BooleanClass =>
// patmatDebug("enum bool "+ tp)
Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
// TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
// patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
None
@@ -2428,7 +2442,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - back off (to avoid crying exhaustive too often) when:
// - there are guards -->
// - there are extractor calls (that we can't secretly/soundly) rewrite
- val start = startTimer(patmatAnaExhaust)
+ val start = Statistics.startTimer(patmatAnaExhaust)
var backoff = false
object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
@@ -2503,7 +2517,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
- stopTimer(patmatAnaExhaust, start)
+ Statistics.stopTimer(patmatAnaExhaust, start)
pruned
} catch {
case e : CNFBudgetExceeded =>
@@ -3186,3 +3200,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
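A hedged illustration of the new `UnitClass` and `ModuleClassSymbol` cases in `enumerateSubtypes` above: `Unit` now enumerates to its single value type, and a singleton object's type enumerates to just itself, so matches over such domains can take part in the exhaustivity analysis. The example is not from the patch:

def describe(u: Unit): String = u match {
  case () => "the unit value"   // Unit has exactly one value, so this match covers the whole domain
}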
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 26cf246ed7..119bb0852c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -766,7 +766,16 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
for (member <- clazz.info.decls)
if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
- unit.error(member.pos, member.toString() + " overrides nothing");
+
+ val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ nonMatching match {
+ case Nil =>
+ issueError("")
+ case ms =>
+ val superSigs = ms.map(m => m.defStringSeenAs(clazz.tpe memberType m)).mkString("\n")
+ issueError(s".\nNote: the super classes of ${member.owner} contain the following, non final members named ${member.name}:\n${superSigs}")
+ }
member resetFlag (OVERRIDE | ABSOVERRIDE) // Any Override
}
}
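A hypothetical example of source that triggers the richer "overrides nothing" diagnostic added above; the note lists the non-final members of the same name found in the superclasses (message wording approximated):

class Base { def show(x: Int): String = x.toString }

class Sub extends Base {
  // error: method show overrides nothing.
  // Note: the super classes of class Sub contain the following, non final
  // members named show: def show(x: Int): String
  override def show(x: Long): String = x.toString
}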
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index daae69590f..f67cec730b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -124,7 +124,15 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
"unless it is overridden by a member declared `abstract' and `override'");
+ } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
+ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
+ val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
+ intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach {
+ absSym =>
+ unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
+ }
}
+
if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner))
ensureAccessor(sel)
else sel
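An illustrative SI-4989-style scenario for the check added above (not taken from the patch, message approximated): an intermediate class redeclares an inherited concrete method as abstract, so a plain super call further down the hierarchy is rejected at compile time instead of misbehaving at run time.

class A                    { def show(): String = "A" }
abstract class B extends A { override def show(): String }              // redeclared abstract
class C extends B          { override def show(): String = super.show() }
// rejected (roughly): method show in class A cannot be directly accessed
// from class C because class B redeclares it as abstract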
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2bdae4164a..d251109dc4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -16,8 +16,7 @@ import scala.collection.mutable
import scala.reflect.internal.util.BatchSourceFile
import mutable.ListBuffer
import symtab.Flags._
-import util.Statistics
-import util.Statistics._
+import reflect.internal.util.Statistics
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -31,6 +30,7 @@ trait Typers extends Modes with Adaptations with Tags {
import global._
import definitions._
+ import TypersStats._
import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
@@ -707,10 +707,16 @@ trait Typers extends Modes with Adaptations with Tags {
def silent[T](op: Typer => T,
reportAmbiguousErrors: Boolean = context.ambiguousErrors,
newtree: Tree = context.tree): SilentResult[T] = {
- val rawTypeStart = startCounter(rawTypeFailed)
- val findMemberStart = startCounter(findMemberFailed)
- val subtypeStart = startCounter(subtypeFailed)
- val failedSilentStart = startTimer(failedSilentNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeFailed)
+ val findMemberStart = Statistics.startCounter(findMemberFailed)
+ val subtypeStart = Statistics.startCounter(subtypeFailed)
+ val failedSilentStart = Statistics.startTimer(failedSilentNanos)
+ def stopStats() = {
+ Statistics.stopCounter(rawTypeFailed, rawTypeStart)
+ Statistics.stopCounter(findMemberFailed, findMemberStart)
+ Statistics.stopCounter(subtypeFailed, subtypeStart)
+ Statistics.stopTimer(failedSilentNanos, failedSilentStart)
+ }
try {
if (context.reportErrors ||
reportAmbiguousErrors != context.ambiguousErrors ||
@@ -724,8 +730,10 @@ trait Typers extends Modes with Adaptations with Tags {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) SilentTypeError(context1.errBuffer.head)
- else SilentResultValue(result)
+ if (context1.hasErrors) {
+ stopStats()
+ SilentTypeError(context1.errBuffer.head)
+ } else SilentResultValue(result)
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
withSavedContext(context){
@@ -739,10 +747,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ex: TypeError =>
// fallback in case TypeError is still thrown
// @H this happens for example in cps annotation checker
- stopCounter(rawTypeFailed, rawTypeStart)
- stopCounter(findMemberFailed, findMemberStart)
- stopCounter(subtypeFailed, subtypeStart)
- stopTimer(failedSilentNanos, failedSilentStart)
+ stopStats()
SilentTypeError(TypeErrorWrapper(ex))
}
}
@@ -1828,7 +1833,7 @@ trait Typers extends Modes with Adaptations with Tags {
val params = fn.tpe.params
val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
+ assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -1841,16 +1846,13 @@ trait Typers extends Modes with Adaptations with Tags {
val pending = ListBuffer[AbsTypeError]()
// an object cannot be allowed to pass a reference to itself to a superconstructor
// because of initialization issues; bug #473
- for (arg <- superArgs ; tree <- arg) {
- val sym = tree.symbol
- if (sym != null && (sym.info.baseClasses contains clazz)) {
- if (sym.isModule)
- pending += SuperConstrReferenceError(tree)
- tree match {
- case This(qual) =>
- pending += SuperConstrArgsThisReferenceError(tree)
- case _ => ()
- }
+ foreachSubTreeBoundTo(superArgs, clazz) { tree =>
+ if (tree.symbol.isModule)
+ pending += SuperConstrReferenceError(tree)
+ tree match {
+ case This(qual) =>
+ pending += SuperConstrArgsThisReferenceError(tree)
+ case _ => ()
}
}
@@ -1884,7 +1886,39 @@ trait Typers extends Modes with Adaptations with Tags {
pending.foreach(ErrorUtils.issueTypeError)
}
- /** Check if a structurally defined method violates implementation restrictions.
+ // Check for SI-4842.
+ private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) {
+ val pending = ListBuffer[AbsTypeError]()
+ ddef.rhs match {
+ case Block(stats, expr) =>
+ val selfConstructorCall = stats.headOption.getOrElse(expr)
+ foreachSubTreeBoundTo(List(selfConstructorCall), clazz) {
+ case tree @ This(qual) =>
+ pending += SelfConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ case _ =>
+ }
+ pending.foreach(ErrorUtils.issueTypeError)
+ }
+
+ /**
+ * Run the provided function for each subtree of `trees` that
+ * is bound to a symbol with `clazz` as a base class.
+ *
+ * @param f This function can assume that `tree.symbol` is non-null.
+ */
+ private def foreachSubTreeBoundTo[A](trees: List[Tree], clazz: Symbol)(f: Tree => Unit): Unit =
+ for {
+ tree <- trees
+ subTree <- tree
+ } {
+ val sym = subTree.symbol
+ if (sym != null && sym.info.baseClasses.contains(clazz))
+ f(subTree)
+ }
+
+ /** Check if a structurally defined method violates implementation restrictions.
* A method cannot be called if it is a non-private member of a refinement type
* and if its parameter's types are any of:
* - the self-type of the refinement
@@ -2002,11 +2036,14 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isPrimaryConstructor && meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
- // At this point in AnyVal there is no supercall, which will blow up
- // in computeParamAliases; there's nothing to be computed for Anyval anyway.
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
+ // At this point in AnyVal there is no supercall, which will blow up
+ // in computeParamAliases; there's nothing to be computed for AnyVal anyway.
+ if (meth.isPrimaryConstructor)
computeParamAliases(meth.owner, vparamss1, rhs1)
- }
+ else
+ checkSelfConstructorArgs(ddef, meth.owner)
+ }
if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass)
rhs1 = checkDead(rhs1)
@@ -3066,66 +3103,67 @@ trait Typers extends Modes with Adaptations with Tags {
val otpe = fun.tpe
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ //
+ def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
+ case MethodType(param :: _, _) =>
+ (Nil, param.tpe)
+ case PolyType(tparams, restpe) =>
+ createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
+ // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
+ case OverloadedType(_, _) =>
+ OverloadedUnapplyError(fun)
+ (Nil, ErrorType)
+ case _ =>
+ UnapplyWithSingleArgError(fun)
+ (Nil, ErrorType)
+ }
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
+ val unapp = unapplyMember(otpe)
+ val unappType = otpe.memberType(unapp)
+ val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
+ val arg = Ident(argDummy) setType pt
val uncheckedTypeExtractor =
if (unappType.paramTypes.nonEmpty)
extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
else None
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
+ if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
+ //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
- val typer1 = newTyper(unapplyContext)
+ val typer1 = newTyper(unapplyContext)
val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ arg.tpe = pattp.substSym(freeVars, skolems)
+ argDummy setInfo arg.tpe
+ }
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ // setType null is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
- if (fun1.tpe.isErroneous) {
- duplErrTree
- } else {
- val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
- val formals1 = formalTypes(formals0, args.length)
- if (sameLength(formals1, args)) {
- val args1 = typedArgs(args, mode, formals0, formals1)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
- val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ if (fun1.tpe.isErroneous) duplErrTree
+ else {
+ val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
+ val formals1 = formalTypes(formals0, args.length)
+
+ if (!sameLength(formals1, args)) duplErrorTree(WrongNumberArgsPatternError(tree, fun))
+ else {
+ val args1 = typedArgs(args, mode, formals0, formals1)
+ // This used to be the following (failing) assert:
+ // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
+ // I modified as follows. See SI-1048.
+ val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+
+ val itype = glb(List(pt1, arg.tpe))
+ arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
// if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
@@ -3133,9 +3171,8 @@ trait Typers extends Modes with Adaptations with Tags {
// also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- } else
- duplErrorTree(WrongNumberArgsPatternError(tree, fun))
- }
+ }
+ }
}
def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
@@ -3481,9 +3518,9 @@ trait Typers extends Modes with Adaptations with Tags {
def isCapturedExistential(sym: Symbol) =
(sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = startTimer(isReferencedNanos)
+ val start = Statistics.startTimer(isReferencedNanos)
try !isReferencedFrom(context, sym)
- finally stopTimer(isReferencedNanos, start)
+ finally Statistics.stopTimer(isReferencedNanos, start)
}
def packCaptured(tpe: Type): Type = {
@@ -4109,10 +4146,10 @@ trait Typers extends Modes with Adaptations with Tags {
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = startTimer(failedApplyNanos)
+ val start = Statistics.startTimer(failedApplyNanos)
def onError(typeError: AbsTypeError): Tree = {
- stopTimer(failedApplyNanos, start)
+ Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
@@ -4175,8 +4212,8 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
- val appStart = startTimer(failedApplyNanos)
- val opeqStart = startTimer(failedOpEqNanos)
+ val appStart = Statistics.startTimer(failedApplyNanos)
+ val opeqStart = Statistics.startTimer(failedOpEqNanos)
def onError(reportError: => Tree): Tree = {
fun match {
@@ -4184,14 +4221,14 @@ trait Typers extends Modes with Adaptations with Tags {
if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
- stopTimer(failedOpEqNanos, opeqStart)
+ Statistics.stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args)
} else {
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
case _ =>
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
}
@@ -4200,7 +4237,7 @@ trait Typers extends Modes with Adaptations with Tags {
if ((mode & EXPRmode) != 0) tree else context.tree) match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- incCounter(typedApplyCount)
+ Statistics.incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
case mt: MethodType => mt.isImplicit
case _ => false
@@ -4825,6 +4862,16 @@ trait Typers extends Modes with Adaptations with Tags {
case Try(block, catches, finalizer) =>
var block1 = typed(block, pt)
var catches1 = typedCases(catches, ThrowableClass.tpe, pt)
+
+ for (cdef <- catches1 if cdef.guard.isEmpty) {
+ def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ cdef.pat match {
+ case Bind(name, Ident(_)) => warn(name)
+ case Ident(name) => warn(name)
+ case _ =>
+ }
+ }
+
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
@@ -4930,7 +4977,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedTypeApply(tree, mode, fun1, args1)
case Apply(Block(stats, expr), args) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt)
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
case Apply(fun, args) =>
typedApply(fun, args) match {
@@ -4976,7 +5023,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedSelect(qual1, nme.CONSTRUCTOR)
case Select(qual, name) =>
- incCounter(typedSelectCount)
+ Statistics.incCounter(typedSelectCount)
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
@@ -5008,7 +5055,7 @@ trait Typers extends Modes with Adaptations with Tags {
else tree1
case Ident(name) =>
- incCounter(typedIdentCount)
+ Statistics.incCounter(typedIdentCount)
if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
(name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
tree setType makeFullyDefined(pt)
@@ -5083,15 +5130,9 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
+ val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass))
+ Statistics.incCounter(visitsByType, tree.getClass)
try {
- if (Statistics.enabled) {
- val t = currentTime()
- if (pendingTreeTypes.nonEmpty) {
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- }
- typerTime = t
- pendingTreeTypes = tree.getClass :: pendingTreeTypes
- }
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
tree.tpe = null
@@ -5145,14 +5186,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
-
- if (Statistics.enabled) {
- val t = currentTime()
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- visitsByType(pendingTreeTypes.head) += 1
- typerTime = t
- pendingTreeTypes = pendingTreeTypes.tail
- }
+ Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -5328,3 +5362,23 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+object TypersStats {
+ import reflect.internal.TypesStats._
+ import reflect.internal.BaseTypeSeqsStats._
+ val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
+ val typedSelectCount = Statistics.newCounter("#typechecked selections")
+ val typedApplyCount = Statistics.newCounter("#typechecked applications")
+ val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
+ val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
+ val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
+ val byTypeStack = Statistics.newTimerStack()
+}
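An example (not part of the patch) of code that trips the new catch-all warning added in the `Try` case above: a bare variable or wildcard pattern with no guard or type annotation.

def parse(s: String): Int =
  try s.toInt
  catch {
    case e => -1   // warning: This catches all Throwables. If this is really
                   // intended, use `case e : Throwable` to clear this warning.
  }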
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
deleted file mode 100644
index 087111a7ba..0000000000
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-class Statistics extends scala.reflect.internal.util.Statistics {
-
- var nodeByType = new ClassCounts
-
- var microsByType = new ClassCounts
- var visitsByType = new ClassCounts
- var pendingTreeTypes: List[Class[_]] = List()
- var typerTime: Long = 0L
-
- val typedApplyCount = new Counter
- val typedIdentCount = new Counter
- val typedSelectCount = new Counter
- val typerNanos = new Timer
- val classReadNanos = new Timer
-
- val failedApplyNanos = new Timer
- val failedOpEqNanos = new Timer
- val failedSilentNanos = new Timer
-
- val implicitSearchCount = new Counter
- val implicitNanos = new Timer
- val oftypeImplicitHits = new Counter
- val inscopeImplicitHits = new Counter
-
- val triedImplicits = new Counter
- val plausiblyCompatibleImplicits = new Counter
- val matchingImplicits = new Counter
- val typedImplicits = new Counter
- val foundImplicits = new Counter
-
- val inscopeSucceedNanos = new Timer
- val inscopeFailNanos = new Timer
- val oftypeSucceedNanos = new Timer
- val oftypeFailNanos = new Timer
- val implicitCacheHits = new Counter
- val implicitCacheMisses = new Counter
- val improvesCount = new Counter
- val improvesCachedCount = new Counter
- val subtypeAppInfos = new SubCounter(subtypeCount)
- val subtypeImprovCount = new SubCounter(subtypeCount)
- val subtypeETNanos = new Timer
- val matchesPtNanos = new Timer
- val isReferencedNanos = new Timer
- val ctr1 = new Counter
- val ctr2 = new Counter
- val ctr3 = new Counter
- val ctr4 = new Counter
- val counter1: SubCounter = new SubCounter(subtypeCount)
- val counter2: SubCounter = new SubCounter(subtypeCount)
- val timer1: Timer = new Timer
- val timer2: Timer = new Timer
-
- val macroExpandCount = new Counter
- val macroExpandNanos = new Timer
-
- val patmatNanos = new Timer
- val patmatAnaDPLL = new Timer
- val patmatAnaVarEq = new Timer
- val patmatCNF = new Timer
- val patmatAnaExhaust = new Timer
- val patmatAnaReach = new Timer
- val patmatCNFSizes = new collection.mutable.HashMap[Int, Int] withDefaultValue 0
-}
-
-object Statistics extends Statistics
-
-abstract class StatisticsInfo {
-
- import Statistics._
-
- val global: Global
- import global._
-
- var phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
-
- def countNodes(tree: Tree, counts: ClassCounts) {
- for (t <- tree) counts(t.getClass) += 1
- }
-
- def showRelative(base: Long)(value: Long) =
- value+showPercent(value, base)
-
- def showRelTyper(timer: Timer) =
- timer+showPercent(timer.nanos, typerNanos.nanos)
-
- def showRelPatmat(timer: Timer) =
- timer+showPercent(timer.nanos, patmatNanos.nanos)
-
- def showCounts[T](counts: scala.collection.mutable.Map[T, Int]) =
- counts.toSeq.sortWith(_._2 > _._2).map {
- case (cls: Class[_], cnt) =>
- cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
- case (o, cnt) =>
- o.toString +": "+cnt
- }
-
- def print(phase: Phase) = if (phasesShown contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#created tree nodes : " + nodeCount)
- inform("#created tree nodes by type: "+showCounts(nodeByType))
- if (phase.name != "parser") {
- val counts = new ClassCounts
- for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
- inform("#retained nodes : " + counts.values.sum)
- inform("#retained nodes by type : " + showCounts(counts))
- inform("#typechecked identifiers : " + typedIdentCount)
- inform("#typechecked selections : " + typedSelectCount)
- inform("#typechecked applications: " + typedApplyCount)
- inform("#raw type creations : " + rawTypeCount)
- inform(" of which in failed : " + rawTypeFailed)
- inform(" of which in implicits : " + rawTypeImpl)
- inform("#unique types : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform(" of which type symbols : " + typeSymbolCount)
- inform(" of which class symbols : " + classSymbolCount)
- inform("#base type seqs : " + baseTypeSeqCount)
- inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
- inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
- inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
- inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
- inform("#findMember ops : " + findMemberCount)
- inform(" of which in failed : " + findMemberFailed)
- inform(" of which in implicits : " + findMemberImpl)
- inform("#notfound member : " + noMemberCount)
- inform("#multiple member : " + multMemberCount)
- inform("#asSeenFrom ops : " + asSeenFromCount)
- inform("#subtype : " + subtypeCount)
- inform(" of which in failed : " + subtypeFailed)
- inform(" of which in implicits : " + subtypeImpl)
- inform(" of which in app impl : " + subtypeAppInfos)
- inform(" of which in improv : " + subtypeImprovCount)
- inform("#sametype : " + sametypeCount)
- inform("#toplevel lub : " + lubCount)
- inform("#all lub : " + nestedLubCount)
- inform("ms type-flow-analysis: " + analysis.timer.millis)
-
- if (phase.name == "typer") {
- inform("time spent typechecking : " + showRelTyper(typerNanos))
- inform("time classfilereading : " + showRelTyper(classReadNanos))
- inform("time spent in implicits : " + showRelTyper(implicitNanos))
- inform(" successful in scope : " + showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : " + showRelTyper(inscopeFailNanos))
- inform(" successful of type : " + showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : " + showRelTyper(oftypeFailNanos))
- inform(" assembling parts : " + showRelTyper(subtypeETNanos))
- inform(" matchesPT : " + showRelTyper(matchesPtNanos))
- inform("implicit cache hits : " + showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : " + showRelTyper(failedSilentNanos))
- inform(" failed apply : " + showRelTyper(failedApplyNanos))
- inform(" failed op= : " + showRelTyper(failedOpEqNanos))
- inform("time spent ref scanning : " + showRelTyper(isReferencedNanos))
- inform("time spent in lubs : " + showRelTyper(lubNanos))
- inform("micros by tree node : " + showCounts(microsByType))
- inform("#visits by tree node : " + showCounts(visitsByType))
- val average = new ClassCounts
- for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : " + showCounts(average))
- inform("time spent in <:< : " + showRelTyper(subtypeNanos))
- inform("time spent in findmember : " + showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : " + showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
- inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached : " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
- inform("#macro expansions : " + macroExpandCount)
- inform("#time spent in macroExpand : " + showRelTyper(macroExpandNanos))
- }
-
- if (ctr1 != null) inform("#ctr1 : " + ctr1)
- if (ctr2 != null) inform("#ctr2 : " + ctr2)
- if (ctr3 != null) inform("#ctr3 : " + ctr3)
- if (ctr4 != null) inform("#ctr4 : " + ctr4)
- if (counter1 != null) inform("#counter1 : " + counter1)
- if (counter2 != null) inform("#counter2 : " + counter2)
- if (timer1 != null) inform("#timer1 : " + timer1)
- if (timer2 != null) inform("#timer2 : " + timer2)
- //for (t <- uniques.iterator) println("unique: "+t)
-
- if (phase.name == "patmat") {
- inform("time spent in patmat : " + patmatNanos )
- inform(" of which DPLL : " + showRelPatmat(patmatAnaDPLL ))
- inform("of which in CNF conversion : " + showRelPatmat(patmatCNF ))
- inform(" CNF size counts : " + showCounts(patmatCNFSizes ))
- inform("of which variable equality : " + showRelPatmat(patmatAnaVarEq ))
- inform(" of which in exhaustivity : " + showRelPatmat(patmatAnaExhaust))
- inform("of which in unreachability : " + showRelPatmat(patmatAnaReach ))
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
new file mode 100644
index 0000000000..f6a1ae1414
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -0,0 +1,38 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package util
+
+import reflect.internal.util.Statistics
+
+abstract class StatisticsInfo {
+
+ val global: Global
+ import global._
+ import reflect.internal.TreesStats.nodeByType
+
+ val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
+
+ val retainedCount = Statistics.newCounter("#retained tree nodes")
+ val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter(""))
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ retainedCount.value = 0
+ for (c <- retainedByType.keys)
+ retainedByType(c).value = 0
+ for (u <- currentRun.units; t <- u.body) {
+ retainedCount.value += 1
+ retainedByType(t.getClass).value += 1
+ }
+
+ val quants =
+ if (phase.name == "parser") Seq(treeNodeCount, nodeByType, retainedCount, retainedByType)
+ else Statistics.allQuantities
+
+ for (q <- quants if q.showAt(phase.name)) inform(q.line)
+ }
+} \ No newline at end of file
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 862b19d0a4..a20ff1667b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -4,6 +4,7 @@ package scala.tools.selectivecps
import scala.tools.nsc.Global
import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
@@ -356,8 +357,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* for a tree. All this should do is add annotations. */
override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ import scala.util.control._
if (!cpsEnabled) {
- if (hasCpsParamTypes(tpe))
+ if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
return tpe
}
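The change above wraps `hasCpsParamTypes` in `scala.util.control.Exception.failAsValue`, so a `MissingRequirementError` (thrown when the continuations classes are missing from the classpath) turns into `false` instead of crashing the checker. A small stand-alone sketch of that combinator, using a hypothetical string parse rather than compiler internals:

import scala.util.control.Exception

// evaluate the body; if one of the listed exception types is thrown, yield the fallback value
val n: Int = Exception.failAsValue(classOf[NumberFormatException])(-1)("not a number".toInt)
// n == -1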
diff --git a/src/eclipse/README b/src/eclipse/README
new file mode 100644
index 0000000000..58dbd83815
--- /dev/null
+++ b/src/eclipse/README
@@ -0,0 +1,23 @@
+Eclipse project files
+=====================
+
+Import all projects into Eclipse by choosing File/Import Existing Projects
+and navigating to src/eclipse. Check all projects and click OK.
+
+IMPORTANT
+=========
+
+You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
+Preferences/General/Workspace/Linked Resources. The value should be the absolute
+path to your scala checkout. All paths in project files are relative to this one,
+so nothing will work until you define it.
+
+DETAILS
+=======
+
+The compiler project depends on the library, reflect, asm and fjbg projects. The
+builder will take care of the correct ordering, and changes in one project will
+be picked up by the dependent projects.
+
+The output directory is set to build/quick, so the runner scripts in quick
+work as they are (run an ant build once to generate them) \ No newline at end of file
diff --git a/src/eclipse/asm/.classpath b/src/eclipse/asm/.classpath
new file mode 100644
index 0000000000..03d9e9788d
--- /dev/null
+++ b/src/eclipse/asm/.classpath
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="asm-quick-bin"/>
+</classpath>
diff --git a/src/eclipse/asm/.project b/src/eclipse/asm/.project
new file mode 100644
index 0000000000..c9051389af
--- /dev/null
+++ b/src/eclipse/asm/.project
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>asm</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>src</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/asm</locationURI>
+ </link>
+ <link>
+ <name>asm-quick-bin</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/asm/classes</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
new file mode 100644
index 0000000000..3e2f55f48a
--- /dev/null
+++ b/src/eclipse/fjbg/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="fjbg"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="libs-classes-fjbg"/>
+</classpath>
diff --git a/project.SAMPLE b/src/eclipse/fjbg/.project
index 0034c397ed..8acea9f5fe 100644
--- a/project.SAMPLE
+++ b/src/eclipse/fjbg/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>scala</name>
+ <name>fjbg</name>
<comment></comment>
<projects>
</projects>
@@ -15,4 +15,16 @@
<nature>org.scala-ide.sdt.core.scalanature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
+ <linkedResources>
+ <link>
+ <name>fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ </link>
+ <link>
+ <name>libs-classes-fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ </link>
+ </linkedResources>
</projectDescription>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
new file mode 100644
index 0000000000..2a764d5142
--- /dev/null
+++ b/src/eclipse/reflect/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="reflect"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-reflect"/>
+</classpath>
diff --git a/src/eclipse/reflect/.project b/src/eclipse/reflect/.project
new file mode 100644
index 0000000000..1e5cbb4ed9
--- /dev/null
+++ b/src/eclipse/reflect/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>reflect</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/reflect</locationURI>
+ </link>
+ <link>
+ <name>reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/reflect</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
new file mode 100644
index 0000000000..ff3b63f3ca
--- /dev/null
+++ b/src/eclipse/scala-compiler/.classpath
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry kind="output" path="build-quick-compiler"/>
+</classpath>
diff --git a/src/eclipse/scala-compiler/.project b/src/eclipse/scala-compiler/.project
new file mode 100644
index 0000000000..cf8a68c8b6
--- /dev/null
+++ b/src/eclipse/scala-compiler/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-compiler</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/compiler</locationURI>
+ </link>
+ <link>
+ <name>compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/compiler</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
new file mode 100644
index 0000000000..a3a4933d34
--- /dev/null
+++ b/src/eclipse/scala-library/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-lib"/>
+</classpath>
diff --git a/src/eclipse/scala-library/.project b/src/eclipse/scala-library/.project
new file mode 100644
index 0000000000..049cf75e0b
--- /dev/null
+++ b/src/eclipse/scala-library/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-library</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/library</locationURI>
+ </link>
+ <link>
+ <name>library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/library</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index eadacd9209..e475865391 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -9,6 +9,8 @@
package scala.collection
import scala.reflect.ClassTag
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -552,4 +554,21 @@ trait GenTraversableOnce[+A] extends Any {
* containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+
+ /** Converts this $coll to a Vector.
+ * $willNotTerminateInf
+ * @return a vector containing all elements of this $coll.
+ */
+ def toVector: Vector[A]
+
+ /** Converts this $coll into another by copying all elements.
+ * @tparam Col The collection type to build.
+ * @return a new collection containing all elements of this $coll.
+ *
+ * @usecase def convertTo[Col[_]]: Col[A]
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a new collection containing all elements of this $coll.
+ */
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
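
The two members above only declare the interface; as a rough usage sketch (the values are illustrative, not part of the patch), any strict collection gains:

    val xs = List(1, 2, 3)
    xs.toVector                                     // Vector(1, 2, 3)
    xs.convertTo[scala.collection.immutable.Set]    // Set(1, 2, 3), built through the implicit CanBuildFrom
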
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index c842475590..e0c8b21d09 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -44,7 +44,7 @@ trait IterableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[TraversableViewLike].Transformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index b2bbc8d888..5f369de3b7 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -11,6 +11,8 @@ package scala.collection
import mutable.ArrayBuffer
import annotation.migration
import immutable.Stream
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -1138,6 +1140,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index f64045c9f6..73f5dda11c 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -40,7 +40,7 @@ trait SeqViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends super[IterableViewLike].AbstractTransformed[B] with Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 1bef703053..f9e95230ea 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -83,6 +83,14 @@ self =>
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
}
+ /** Adds a number of elements provided by a traversable object
+ * and returns a new collection with the added elements.
+ *
+ * @param xs the traversable object.
+ */
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
}
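
A sketch of what this override buys (example values are illustrative): concatenating onto a sorted map no longer widens the static result type to a plain Map, because the fold re-adds each pair to the sorted representation.

    import scala.collection.SortedMap

    val m: SortedMap[Int, String]  = SortedMap(3 -> "c", 1 -> "a")
    val m2: SortedMap[Int, String] = m ++ List(2 -> "b")
    m2.keys.toList                                  // List(1, 2, 3), still in key order
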
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 3716a318d9..e5861f5760 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -616,6 +616,13 @@ trait TraversableLike[+A, +Repr] extends Any
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
+ // Override to provide size hint.
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b.sizeHint(this)
+ b ++= thisCollection
+ b.result
+ }
/** Converts this $coll to a string.
*
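
The override above is purely an optimisation of the generic conversion: a strict collection knows its size, so the builder can pre-allocate before the copy. Roughly the same pattern written by hand (the helper name is illustrative):

    import scala.collection.mutable.ArrayBuffer

    def copyToBuffer[A](xs: Traversable[A]): ArrayBuffer[A] = {
      val b = ArrayBuffer.newBuilder[A]
      b.sizeHint(xs)                                // pre-allocate using the known size
      b ++= xs
      b.result
    }
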
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 386ce2d95a..8dc6184d88 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,6 +9,7 @@
package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import generic.CanBuildFrom
import annotation.unchecked.{ uncheckedVariance => uV }
import language.{implicitConversions, higherKinds}
import reflect.ClassTag
@@ -239,17 +240,25 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = (new ListBuffer[A] ++= seq).toList
+ def toList: List[A] = convertTo[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+ def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+ def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+
+ def toVector: Vector[A] = convertTo[Vector]
+
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b ++= seq
+ b.result
+ }
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
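
With the default convertTo in place, the conversions above become thin wrappers around a single builder loop. A minimal sketch of the observable behaviour (values are illustrative):

    val xs = List(1 -> "a", 2 -> "b")
    xs.toVector                                     // Vector((1,"a"), (2,"b")), via convertTo[Vector]
    xs.toSet                                        // Set((1,"a"), (2,"b")), via convertTo[immutable.Set]
    Iterator(1, 2, 3).toIndexedSeq                  // consumes the iterator once while building the IndexedSeq
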
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index eb2091a5f3..bf4f8205d6 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -117,7 +117,7 @@ trait TraversableViewLike[+A,
}
/** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
- private[collection] abstract class AbstractTransformed[+B] extends Transformed[B]
+ private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
trait EmptyView extends Transformed[Nothing] with super.EmptyView
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 08e9125bd8..2d8217551a 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -473,7 +473,11 @@ extends CNodeBase[K, V] {
private def computeSize(ct: TrieMap[K, V]): Int = {
var i = 0
var sz = 0
- val offset = math.abs(util.Random.nextInt()) % array.length
+ val offset =
+ if (array.length > 0)
+ //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
+ scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ else 0
while (i < array.length) {
val pos = (i + offset) % array.length
array(pos) match {
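
The point of the change above is that the shared scala.util.Random updates a single atomic seed, so concurrent computeSize calls contend on it, while ThreadLocalRandom gives each thread its own generator. A minimal sketch of the call in isolation (the length is illustrative):

    import scala.concurrent.forkjoin.ThreadLocalRandom

    val len    = 16
    val offset = if (len > 0) ThreadLocalRandom.current.nextInt(0, len) else 0
    // offset lies in [0, len) and is drawn without touching any shared state
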
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index 039a57041c..e895c94599 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object IntMapUtils extends BitOperations.Int {
def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
- def join[T](p1 : Int, t1 : IntMap[T], p2 : Int, t2 : IntMap[T]) : IntMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) IntMap.Bin(p, m, t1, t2)
- else IntMap.Bin(p, m, t2, t1);
+ else IntMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) : IntMap[T] = (left, right) match {
- case (left, IntMap.Nil) => left;
- case (IntMap.Nil, right) => right;
- case (left, right) => IntMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match {
+ case (left, IntMap.Nil) => left
+ case (IntMap.Nil, right) => right
+ case (left, right) => IntMap.Bin(prefix, mask, left, right)
}
}
@@ -50,9 +50,9 @@ object IntMap {
}
def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
- def apply[T](elems : (Int, T)*) : IntMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def apply[T](elems: (Int, T)*): IntMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends IntMap[Nothing] {
// Important! Without this equals method in place, an infinite
@@ -66,15 +66,15 @@ object IntMap {
}
}
- private[immutable] case class Tip[+T](key : Int, value : T) extends IntMap[T]{
+ private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
def withValue[S](s: S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]];
- else IntMap.Tip(key, s);
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
+ else IntMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) extends IntMap[T]{
- def bin[S](left : IntMap[S], right : IntMap[S]) : IntMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]];
- else IntMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
+ def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
+ else IntMap.Bin[S](prefix, mask, left, right)
}
}
@@ -83,60 +83,60 @@ object IntMap {
import IntMap._
// Iterator over a non-empty IntMap.
-private[immutable] abstract class IntMapIterator[V, T](it : IntMap[V]) extends AbstractIterator[T] {
+private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate recursion over the tree. However
// because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
// one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 33.
- var index = 0;
- var buffer = new Array[AnyRef](33);
+ var index = 0
+ var buffer = new Array[AnyRef](33)
def pop = {
- index -= 1;
- buffer(index).asInstanceOf[IntMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[IntMap[V]]
}
- def push(x : IntMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: IntMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : IntMap.Tip[V]) : T;
+ def valueOf(tip: IntMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop match {
case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case IntMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t@IntMap.Tip(_, _) => valueOf(t);
+ case t@IntMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
// and don't return an IntMapIterator for IntMap.Nil.
- case IntMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case IntMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class IntMapEntryIterator[V](it : IntMap[V]) extends IntMapIterator[V, (Int, V)](it){
- def valueOf(tip : IntMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) {
+ def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class IntMapValueIterator[V](it : IntMap[V]) extends IntMapIterator[V, V](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.value
+private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.value
}
-private[immutable] class IntMapKeyIterator[V](it : IntMap[V]) extends IntMapIterator[V, Int](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.key
+private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.key
}
import IntMap._
@@ -145,7 +145,7 @@ import IntMap._
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
* by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
- * Note: This class is as of 2.8 largely superseded by HashMap.
+ * '''Note:''' This class is as of 2.8 largely superseded by HashMap.
*
* @tparam T type of the values associated with integer keys.
*
@@ -155,17 +155,16 @@ import IntMap._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-sealed abstract class IntMap[+T]
-extends AbstractMap[Int, T]
+sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
with Map[Int, T]
with MapLike[Int, T, IntMap[T]] {
- override def empty: IntMap[T] = IntMap.Nil;
+ override def empty: IntMap[T] = IntMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Int, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Int, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -173,109 +172,112 @@ extends AbstractMap[Int, T]
*
* @return an iterator over pairs of integer keys and corresponding values.
*/
- def iterator : Iterator[(Int, T)] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapEntryIterator(this);
+ def iterator: Iterator[(Int, T)] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Int, T)) => U) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
- case IntMap.Tip(key, value) => f((key, value));
- case IntMap.Nil => {};
+ override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+ case IntMap.Tip(key, value) => f((key, value))
+ case IntMap.Nil =>
}
- override def keysIterator : Iterator[Int] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapKeyIterator(this);
+ override def keysIterator: Iterator[Int] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapKeyIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachKey(f : Int => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case IntMap.Tip(key, _) => f(key);
- case IntMap.Nil => {}
+ final def foreachKey(f: Int => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case IntMap.Tip(key, _) => f(key)
+ case IntMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapValueIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the values of the map. The same as `values.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case IntMap.Tip(_, value) => f(value);
- case IntMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case IntMap.Tip(_, value) => f(value)
+ case IntMap.Nil =>
}
override def stringPrefix = "IntMap"
- override def isEmpty = this == IntMap.Nil;
+ override def isEmpty = this == IntMap.Nil
- override def filter(f : ((Int, T)) => Boolean) : IntMap[T] = this match {
+ override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case IntMap.Tip(key, value) =>
if (f((key, value))) this
- else IntMap.Nil;
- case IntMap.Nil => IntMap.Nil;
+ else IntMap.Nil
+ case IntMap.Nil => IntMap.Nil
}
- def transform[S](f : (Int, T) => S) : IntMap[S] = this match {
- case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t@IntMap.Tip(key, value) => t.withValue(f(key, value));
- case IntMap.Nil => IntMap.Nil;
+ def transform[S](f: (Int, T) => S): IntMap[S] = this match {
+ case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@IntMap.Tip(key, value) => t.withValue(f(key, value))
+ case IntMap.Nil => IntMap.Nil
}
- final override def size : Int = this match {
- case IntMap.Nil => 0;
- case IntMap.Tip(_, _) => 1;
- case IntMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case IntMap.Nil => 0
+ case IntMap.Tip(_, _) => 1
+ case IntMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Int) : Option[T] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case IntMap.Nil => None;
+ final def get(key: Int): Option[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case IntMap.Nil => None
}
- final override def getOrElse[S >: T](key : Int, default : =>S) : S = this match {
- case IntMap.Nil => default;
- case IntMap.Tip(key2, value) => if (key == key2) value else default;
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Int, default: => S): S = this match {
+ case IntMap.Nil => default
+ case IntMap.Tip(key2, value) => if (key == key2) value else default
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Int) : T = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case IntMap.Nil => sys.error("key not found");
+ final override def apply(key: Int): T = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case IntMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Int, value : S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
- else IntMap.Bin(prefix, mask, left, right.updated(key, value));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, value);
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ override def updated[S >: T](key: Int, value: S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
+ else IntMap.Bin(prefix, mask, left, right.updated(key, value))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, value)
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
/**
@@ -284,7 +286,7 @@ extends AbstractMap[Int, T]
* Equivalent to:
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value))
* }
* }}}
@@ -295,24 +297,26 @@ extends AbstractMap[Int, T]
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Int, value : S, f : (T, S) => S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, f(value2, value));
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, f(value2, value))
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
- def - (key : Int) : IntMap[T] = this match {
+ def - (key: Int): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case IntMap.Tip(key2, _) =>
- if (key == key2) IntMap.Nil;
- else this;
- case IntMap.Nil => IntMap.Nil;
+ if (key == key2) IntMap.Nil
+ else this
+ case IntMap.Nil => IntMap.Nil
}
/**
@@ -324,7 +328,7 @@ extends AbstractMap[Int, T]
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Int, T) => Option[S]) : IntMap[S] = this match {
+ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
case IntMap.Bin(prefix, mask, left, right) =>
val newleft = left.modifyOrRemove(f)
val newright = right.modifyOrRemove(f)
@@ -350,25 +354,25 @@ extends AbstractMap[Int, T]
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{
+ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{
case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
+ if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x));
- case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (IntMap.Nil, x) => x;
- case (x, IntMap.Nil) => x;
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+ case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (IntMap.Nil, x) => x
+ case (x, IntMap.Nil) => x
}
/**
@@ -382,13 +386,13 @@ extends AbstractMap[Int, T]
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : IntMap[S], f : (Int, T, S) => R) : IntMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
if (!hasMatch(p2, p1, m1)) IntMap.Nil
else if (zero(p2, m1)) l1.intersectionWith(that, f)
else r1.intersectionWith(that, f)
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
if (!hasMatch(p1, p2, m2)) IntMap.Nil
else if (zero(p1, m2)) this.intersectionWith(l2, f)
@@ -413,15 +417,16 @@ extends AbstractMap[Int, T]
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : IntMap[R]) : IntMap[T] = this.intersectionWith(that, (key : Int, value : T, value2 : R) => value);
+ def intersection[R](that: IntMap[R]): IntMap[T] =
+ this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
- def ++[S >: T](that : IntMap[S]) =
+ def ++[S >: T](that: IntMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
/**
* The entry with the lowest key value considered in unsigned order.
*/
- final def firstKey : Int = this match {
+ final def firstKey: Int = this match {
case Bin(_, _, l, r) => l.firstKey
case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
@@ -430,7 +435,7 @@ extends AbstractMap[Int, T]
/**
* The entry with the highest key value considered in unsigned order.
*/
- final def lastKey : Int = this match {
+ final def lastKey: Int = this match {
case Bin(_, _, l, r) => r.lastKey
case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
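
A short usage sketch of the IntMap-specific operations whose code and documentation are cleaned up above (values are illustrative):

    import scala.collection.immutable.IntMap

    val a = IntMap(1 -> 10, 2 -> 20)
    val b = IntMap(2 -> 5, 3 -> 30)
    a.updateWith(2, 1, (old: Int, v: Int) => old + v)          // IntMap(1 -> 10, 2 -> 21)
    a.unionWith(b, (k: Int, x: Int, y: Int) => x + y)          // IntMap(1 -> 10, 2 -> 25, 3 -> 30)
    a.intersectionWith(b, (k: Int, x: Int, y: Int) => x - y)   // IntMap(2 -> 15)
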
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 8a316f37de..002027b162 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -18,17 +18,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object LongMapUtils extends BitOperations.Long {
def branchMask(i: Long, j: Long) = highestOneBit(i ^ j)
- def join[T](p1 : Long, t1 : LongMap[T], p2 : Long, t2 : LongMap[T]) : LongMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) LongMap.Bin(p, m, t1, t2)
- else LongMap.Bin(p, m, t2, t1);
+ else LongMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) : LongMap[T] = (left, right) match {
- case (left, LongMap.Nil) => left;
- case (LongMap.Nil, right) => right;
- case (left, right) => LongMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match {
+ case (left, LongMap.Nil) => left
+ case (LongMap.Nil, right) => right
+ case (left, right) => LongMap.Bin(prefix, mask, left, right)
}
}
@@ -49,29 +49,29 @@ object LongMap {
def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
}
- def empty[T] : LongMap[T] = LongMap.Nil;
- def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
- def apply[T](elems : (Long, T)*) : LongMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def empty[T]: LongMap[T] = LongMap.Nil
+ def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value)
+ def apply[T](elems: (Long, T)*): LongMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends LongMap[Nothing] {
// Important, don't remove this! See IntMap for explanation.
override def equals(that : Any) = that match {
- case (that : AnyRef) if (this eq that) => true;
- case (that : LongMap[_]) => false; // The only empty LongMaps are eq Nil
- case that => super.equals(that);
+ case (that: AnyRef) if (this eq that) => true
+ case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil
+ case that => super.equals(that)
}
- };
+ }
- private[immutable] case class Tip[+T](key : Long, value : T) extends LongMap[T]{
- def withValue[S](s : S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]];
- else LongMap.Tip(key, s);
+ private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+ else LongMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) extends LongMap[T]{
- def bin[S](left : LongMap[S], right : LongMap[S]) : LongMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]];
- else LongMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+ def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+ else LongMap.Bin[S](prefix, mask, left, right)
}
}
}
@@ -79,64 +79,62 @@ object LongMap {
import LongMap._
// Iterator over a non-empty LongMap.
-private[immutable] abstract class LongMapIterator[V, T](it : LongMap[V]) extends AbstractIterator[T] {
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate recursion over the tree. However
// because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
// one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 65
- var index = 0;
- var buffer = new Array[AnyRef](65);
+ var index = 0
+ var buffer = new Array[AnyRef](65)
def pop() = {
- index -= 1;
- buffer(index).asInstanceOf[LongMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[LongMap[V]]
}
- def push(x : LongMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: LongMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
push(it);
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : LongMap.Tip[V]) : T;
+ def valueOf(tip: LongMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop() match {
case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case LongMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t@LongMap.Tip(_, _) => valueOf(t);
+ case t@LongMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
// and don't return a LongMapIterator for LongMap.Nil.
- case LongMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case LongMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class LongMapEntryIterator[V](it : LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
- def valueOf(tip : LongMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
+ def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class LongMapValueIterator[V](it : LongMap[V]) extends LongMapIterator[V, V](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.value;
+private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.value
}
-private[immutable] class LongMapKeyIterator[V](it : LongMap[V]) extends LongMapIterator[V, Long](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.key;
+private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.key
}
-import LongMap._;
-
/**
* Specialised immutable map structure for long keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
@@ -157,12 +155,12 @@ extends AbstractMap[Long, T]
with Map[Long, T]
with MapLike[Long, T, LongMap[T]] {
- override def empty: LongMap[T] = LongMap.Nil;
+ override def empty: LongMap[T] = LongMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Long, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Long, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -171,22 +169,22 @@ extends AbstractMap[Long, T]
* @return an iterator over pairs of long keys and corresponding values.
*/
def iterator: Iterator[(Long, T)] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapEntryIterator(this);
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Long, T)) => U) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
+ override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
case LongMap.Tip(key, value) => f((key, value));
- case LongMap.Nil => {};
+ case LongMap.Nil =>
}
- override def keysIterator : Iterator[Long] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapKeyIterator(this);
+ override def keysIterator: Iterator[Long] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapKeyIterator(this)
}
/**
@@ -195,15 +193,15 @@ extends AbstractMap[Long, T]
*
* @param f The loop body
*/
- final def foreachKey(f : Long => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case LongMap.Tip(key, _) => f(key);
- case LongMap.Nil => {}
+ final def foreachKey(f: Long => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case LongMap.Tip(key, _) => f(key)
+ case LongMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapValueIterator(this)
}
/**
@@ -212,67 +210,70 @@ extends AbstractMap[Long, T]
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case LongMap.Tip(_, value) => f(value);
- case LongMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case LongMap.Tip(_, value) => f(value)
+ case LongMap.Nil =>
}
override def stringPrefix = "LongMap"
- override def isEmpty = this == LongMap.Nil;
+ override def isEmpty = this == LongMap.Nil
- override def filter(f : ((Long, T)) => Boolean) : LongMap[T] = this match {
+ override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) =>
if (f((key, value))) this
- else LongMap.Nil;
- case LongMap.Nil => LongMap.Nil;
+ else LongMap.Nil
+ case LongMap.Nil => LongMap.Nil
}
- def transform[S](f : (Long, T) => S) : LongMap[S] = this match {
- case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t@LongMap.Tip(key, value) => t.withValue(f(key, value));
- case LongMap.Nil => LongMap.Nil;
+ def transform[S](f: (Long, T) => S): LongMap[S] = this match {
+ case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@LongMap.Tip(key, value) => t.withValue(f(key, value))
+ case LongMap.Nil => LongMap.Nil
}
- final override def size : Int = this match {
- case LongMap.Nil => 0;
- case LongMap.Tip(_, _) => 1;
- case LongMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case LongMap.Nil => 0
+ case LongMap.Tip(_, _) => 1
+ case LongMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Long) : Option[T] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case LongMap.Nil => None;
+ final def get(key: Long): Option[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case LongMap.Nil => None
}
- final override def getOrElse[S >: T](key : Long, default : =>S) : S = this match {
- case LongMap.Nil => default;
- case LongMap.Tip(key2, value) => if (key == key2) value else default;
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
+ case LongMap.Nil => default
+ case LongMap.Tip(key2, value) => if (key == key2) value else default
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Long) : T = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case LongMap.Nil => sys.error("key not found");
+ final override def apply(key: Long): T = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case LongMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Long, value : S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
- else LongMap.Bin(prefix, mask, left, right.updated(key, value));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, value);
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
+ else LongMap.Bin(prefix, mask, left, right.updated(key, value))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, value)
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
/**
@@ -281,7 +282,7 @@ extends AbstractMap[Long, T]
* Equivalent to
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value))
* }
* }}}
@@ -292,24 +293,26 @@ extends AbstractMap[Long, T]
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Long, value : S, f : (T, S) => S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, f(value2, value));
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, f(value2, value))
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
- def -(key : Long) : LongMap[T] = this match {
+ def -(key: Long): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case LongMap.Tip(key2, _) =>
- if (key == key2) LongMap.Nil;
- else this;
- case LongMap.Nil => LongMap.Nil;
+ if (key == key2) LongMap.Nil
+ else this
+ case LongMap.Nil => LongMap.Nil
}
/**
@@ -321,21 +324,21 @@ extends AbstractMap[Long, T]
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Long, T) => Option[S]) : LongMap[S] = this match {
+ def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val newleft = left.modifyOrRemove(f);
- val newright = right.modifyOrRemove(f);
- if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]];
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) => f(key, value) match {
- case None => LongMap.Nil;
+ case None => LongMap.Nil
case Some(value2) =>
//hack to preserve sharing
if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
- else LongMap.Tip(key, value2);
+ else LongMap.Tip(key, value2)
}
- case LongMap.Nil => LongMap.Nil;
+ case LongMap.Nil => LongMap.Nil
}
/**
@@ -346,25 +349,25 @@ extends AbstractMap[Long, T]
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{
+ def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{
case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
- else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join[S](p1, this, p2, that); // TODO: remove [S] when SI-5548 is fixed
+ if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)); // TODO: remove [S] when SI-5548 is fixed
- case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (LongMap.Nil, x) => x;
- case (x, LongMap.Nil) => x;
+ case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when SI-5548 is fixed
+ case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (LongMap.Nil, x) => x
+ case (x, LongMap.Nil) => x
}
/**
@@ -378,27 +381,27 @@ extends AbstractMap[Long, T]
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : LongMap[S], f : (Long, T, S) => R) : LongMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match {
case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) LongMap.Nil;
- else if (zero(p2, m1)) l1.intersectionWith(that, f);
- else r1.intersectionWith(that, f);
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ if (!hasMatch(p2, p1, m1)) LongMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
- if (!hasMatch(p1, p2, m2)) LongMap.Nil;
- else if (zero(p1, m2)) this.intersectionWith(l2, f);
- else this.intersectionWith(r2, f);
+ if (!hasMatch(p1, p2, m2)) LongMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
}
case (LongMap.Tip(key, value), that) => that.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value, value2));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value, value2))
}
case (_, LongMap.Tip(key, value)) => this.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value2, value));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value2, value))
}
- case (_, _) => LongMap.Nil;
+ case (_, _) => LongMap.Nil
}
/**
@@ -409,9 +412,10 @@ extends AbstractMap[Long, T]
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : LongMap[R]) : LongMap[T] = this.intersectionWith(that, (key : Long, value : T, value2 : R) => value);
+ def intersection[R](that: LongMap[R]): LongMap[T] =
+ this.intersectionWith(that, (key: Long, value: T, value2: R) => value)
- def ++[S >: T](that : LongMap[S]) =
+ def ++[S >: T](that: LongMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
}
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0f28c4997b..4b573511d1 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -43,7 +43,7 @@ object RedBlackTree {
}
def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
- def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v))
+ def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
case (Some(from), Some(until)) => this.range(tree, from, until)
@@ -122,17 +122,18 @@ object RedBlackTree {
else
mkTree(isBlack, x, xv, a, r)
}
- private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
RedTree(k, v, null, null)
} else {
val cmp = ordering.compare(k, tree.key)
- if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right)
- else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v))
- else mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right)
+ else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite))
+ else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
}
- // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
- // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
+ /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ * http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
if (isRedTree(tr)) {
@@ -216,7 +217,7 @@ object RedBlackTree {
if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
val newLeft = doFrom(tree.left, from)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -224,7 +225,7 @@ object RedBlackTree {
if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
val newRight = doTo(tree.right, to)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -232,7 +233,7 @@ object RedBlackTree {
if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
val newRight = doUntil(tree.right, until)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -242,8 +243,8 @@ object RedBlackTree {
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
else rebalance(tree, newLeft, newRight)
}
@@ -254,7 +255,7 @@ object RedBlackTree {
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -264,7 +265,7 @@ object RedBlackTree {
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -275,8 +276,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value)
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
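
The new overwrite flag controls what upd does when it reaches an equal key: as the TreeMap and TreeSet hunks below show, TreeMap passes true so the stored value is replaced, while TreeSet passes false so inserting an element that is already present reuses the existing node. Seen through the public API (values are illustrative):

    import scala.collection.immutable.{ TreeMap, TreeSet }

    TreeMap(1 -> "a") + (1 -> "b")       // overwrite = true: the value becomes "b"
    TreeSet("a") + "a"                   // overwrite = false: the underlying tree node is reused unchanged
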
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 4c1a5f2e03..51bc76efc3 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -131,7 +131,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value))
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -171,7 +171,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(!RB.contains(tree, key))
- new TreeMap(RB.update(tree, key, value))
+ new TreeMap(RB.update(tree, key, value, true))
}
def - (key:A): TreeMap[A, B] =
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 882e828c5b..697da2bc4b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -112,7 +112,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ()))
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +122,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
*/
def insert(elem: A): TreeSet[A] = {
assert(!RB.contains(tree, elem))
- newSet(RB.update(tree, elem, ()))
+ newSet(RB.update(tree, elem, (), false))
}
/** Creates a new `TreeSet` with the entry removed.
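Usage sketch (not from this patch): the extra boolean passed to upd/update evidently controls whether an existing binding is replaced -- TreeMap.updated passes true so a bound key gets the new value, while TreeSet.+ passes false so an element the Ordering already considers present is kept as-is. The case-insensitive ordering below is only an illustration of that difference:

    import scala.collection.immutable.{TreeMap, TreeSet}

    // TreeMap.updated: the new value replaces the old one for an existing key
    val m = TreeMap(1 -> "old").updated(1, "new")
    println(m(1))                                   // new

    // TreeSet.+ : an element already present (per the Ordering) is kept
    val caseInsensitive: Ordering[String] = Ordering.by[String, String](_.toLowerCase)
    val s = TreeSet("Foo")(caseInsensitive) + "FOO"
    println(s)                                      // TreeSet(Foo): still the original element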
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1395a8f52d..d100bf93df 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -77,6 +77,8 @@ override def companion: GenericCompanion[Vector] = Vector
override def par = new ParVector(this)
+ override def toVector: Vector[A] = this
+
override def lengthCompare(len: Int): Int = length - len
private[collection] final def initIterator[B >: A](s: VectorIterator[B]) {
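Usage sketch (not from this patch): with the override above, calling toVector on something that is already a Vector is the identity and allocates nothing:

    val v = Vector(1, 2, 3)
    assert(v.toVector eq v)   // same instance, no copy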
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 01636eb54e..7a595f211d 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -64,7 +64,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
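Usage sketch (not from this patch): dropping the unused To type parameter does not change how flatten is called; the element evidence still comes from the implicit view and the ClassTag:

    val grid: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))
    val flat: Array[Int] = grid.flatten   // Array(1, 2, 3, 4)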
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 4150cf9eba..5643e070f8 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -49,7 +49,8 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
with Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A, LinkedEntry[A, B]]
- with Serializable {
+ with Serializable
+{
override def empty = LinkedHashMap.empty[A, B]
override def size = tableSize
@@ -107,7 +108,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next
}
+
+ protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
+ protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
+
+ protected class DefaultKeySet extends super.DefaultKeySet {
+ override def empty = LinkedHashSet.empty
+ }
+
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet
+
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
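Usage sketch (not from this patch): the point of the new FilteredKeys/MappedValues/DefaultKeySet overrides is that views over a LinkedHashMap stay in the linked world -- their empty is again a LinkedHashMap/LinkedHashSet -- and iteration keeps insertion order. A sketch against the 2.10 collection API:

    import scala.collection.mutable.LinkedHashMap

    val m = LinkedHashMap("b" -> 2, "a" -> 1, "c" -> 3)
    println(m.keySet.toList)                       // List(b, a, c): insertion order
    println(m.filterKeys(_ != "a").keys.toList)    // List(b, c): still insertion order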
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a447f1b5e4..815253b537 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -841,7 +841,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -850,7 +850,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
+
+ override def toVector: Vector[T] = convertTo[Vector]
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
+ toParCollection[T, Col[T]](() => cbf().asCombiner)
+ } else seq.convertTo(cbf)
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
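Usage sketch (not from this patch): convertTo builds the requested collection with a parallel combiner when the implicit CanBuildFrom yields one, and otherwise falls back to the sequential seq.convertTo. Method names are those at this commit and may differ in released versions:

    import scala.collection.parallel.immutable.ParSet

    val pc = (1 to 1000).par
    val ps = pc.convertTo[ParSet]   // ParSet's builder is a combiner: built in parallel
    val v  = pc.toVector            // Vector's builder is not: sequential fallback via seq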
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index d8c42d74b0..349f4fa44c 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -34,6 +34,7 @@ extends collection/*.immutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ece663a1d..e4099f1809 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -62,6 +62,8 @@ extends ParSeq[T]
override def seq: Vector[T] = vector
+ override def toVector: Vector[T] = vector
+
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
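Usage sketch (not from this patch): a ParVector wraps a Vector, so the override above hands that vector back directly instead of rebuilding it:

    import scala.collection.parallel.immutable.ParVector

    val pv = ParVector(1, 2, 3)
    assert(pv.toVector eq pv.seq)   // the wrapped Vector itself, no copy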
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 700d21d0bb..b5747a31cf 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -29,7 +29,8 @@ import scala.collection.GenIterable
trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+ with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with Mutable {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
//protected[this] override def newBuilder = ParIterable.newBuilder[T]
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 292014706d..5a6d95c2ed 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -53,8 +53,6 @@ class SyncVar[A] {
value
}
- /** Waits for this SyncVar to become defined and returns
- * the result */
def take(): A = synchronized {
try get
finally unsetVal()
@@ -66,8 +64,7 @@ class SyncVar[A] {
* the SyncVar.
*
* @param timeout the amount of milliseconds to wait, 0 means forever
- * @return the value or a throws an exception if the timeout occurs
- * @throws NoSuchElementException on timeout
+ * @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
*/
def take(timeout: Long): A = synchronized {
try get(timeout).get
@@ -75,28 +72,25 @@ class SyncVar[A] {
}
// TODO: this method should be private
- // [Heather] the reason why: it doesn't take into consideration
+ // [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
@deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
def set(x: A): Unit = setVal(x)
- /** Places a value in the SyncVar. If the SyncVar already has a stored value,
- * it waits until another thread takes it */
def put(x: A): Unit = synchronized {
while (isDefined) wait()
setVal(x)
}
- /** Checks whether a value is stored in the synchronized variable */
def isSet: Boolean = synchronized {
isDefined
}
// TODO: this method should be private
- // [Heather] the reason why: it doesn't take into consideration
+ // [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, unset has been
- // deprecated in order to eventually be able to make "unsetting" private
+ // deprecated in order to eventually be able to make "unsetting" private
@deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
def unset(): Unit = synchronized {
isDefined = false
@@ -104,7 +98,7 @@ class SyncVar[A] {
notifyAll()
}
- // `setVal` exists so as to retroactively deprecate `set` without
+ // `setVal` exists so as to retroactively deprecate `set` without
// deprecation warnings where we use `set` internally. The
// implementation of `set` was moved to `setVal` to achieve this
private def setVal(x: A): Unit = synchronized {
@@ -113,13 +107,13 @@ class SyncVar[A] {
notifyAll()
}
- // `unsetVal` exists so as to retroactively deprecate `unset` without
+ // `unsetVal` exists so as to retroactively deprecate `unset` without
// deprecation warnings where we use `unset` internally. The
// implementation of `unset` was moved to `unsetVal` to achieve this
private def unsetVal(): Unit = synchronized {
isDefined = false
value = None
- notifyAll()
+ notifyAll()
}
}
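Usage sketch (not from this patch): the put/take pair behaves as a blocking handshake -- put waits while a value is present, and take returns the value and leaves the variable empty:

    import scala.concurrent.SyncVar

    val box = new SyncVar[Int]
    new Thread(new Runnable { def run() { box.put(42) } }).start()
    println(box.take())   // 42; box is empty again, a second take() would block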
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index f753dfbcbb..860e7bac28 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -52,7 +52,7 @@ trait ClassTag[T] extends Equals with Serializable {
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = if (runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
+ def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
/** case class accessories */
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
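Usage sketch (not from this patch): with the added null check, a ClassTag used as an extractor simply fails to match null instead of throwing a NullPointerException. firstOfType is a hypothetical helper:

    import scala.reflect.ClassTag

    def firstOfType[T](xs: List[Any])(implicit ct: ClassTag[T]): Option[T] =
      xs.collectFirst { case ct(x) => x }

    firstOfType[String](List(1, null, "hi"))   // Some("hi"); the null element is skipped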
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 461eaa2e9e..490a9e8c03 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -451,7 +451,7 @@ class Base extends Universe { self =>
}
}
- def show(tree: Tree) = s"<tree ${tree.getClass}>"
+ def treeToString(tree: Tree) = s"<tree ${tree.getClass}>"
trait TermTree extends Tree
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
index 298d229570..2814450ae3 100644
--- a/src/library/scala/reflect/base/Trees.scala
+++ b/src/library/scala/reflect/base/Trees.scala
@@ -28,11 +28,11 @@ trait Trees { self: Universe =>
def isType: Boolean
/** Obtains string representation of a tree */
- override def toString: String = show(this)
+ override def toString: String = treeToString(this)
}
/** Obtains string representation of a tree */
- def show(tree: Tree): String
+ protected def treeToString(tree: Tree): String
/** Tree is the basis for scala's abstract syntax. The nodes are
* implemented as case classes, and the parameters which initialize
diff --git a/src/library/scala/reflect/compat.scala b/src/library/scala/reflect/compat.scala
deleted file mode 100644
index fc0e5fbf9c..0000000000
--- a/src/library/scala/reflect/compat.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-// [Eugene++] delete this once we merge with trunk and have a working IDE
-
-package scala.reflect {
- trait ArrayTag[T]
- trait ErasureTag[T]
- trait ConcreteTypeTag[T]
-}
-
-package scala.reflect.api {
- trait TypeTags {
- trait TypeTag[T]
- trait ConcreteTypeTag[T]
- }
-}
-
-package scala {
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait reflect_compat {
- lazy val mirror: BaseUniverse = ???
- }
-}
-
-package scala.reflect {
- import language.experimental.macros
- import scala.reflect.base.{Universe => BaseUniverse}
-
- trait internal_compat {
- private[scala] def materializeArrayTag[T](u: BaseUniverse): ArrayTag[T] = ???
- private[scala] def materializeErasureTag[T](u: BaseUniverse): ErasureTag[T] = ???
- private[scala] def materializeConcreteTypeTag[T](u: BaseUniverse): ConcreteTypeTag[T] = ???
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/makro/internal/package.scala b/src/library/scala/reflect/makro/internal/package.scala
index d31a0f0d14..78cb0ffb10 100644
--- a/src/library/scala/reflect/makro/internal/package.scala
+++ b/src/library/scala/reflect/makro/internal/package.scala
@@ -9,7 +9,7 @@ import scala.reflect.base.{Universe => BaseUniverse}
//
// todo. once we have implicit macros for tag generation, we can remove these anchors
// [Eugene++] how do I hide this from scaladoc?
-package object internal extends scala.reflect.internal_compat {
+package object internal {
private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = macro ???
private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = macro ???
private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = macro ???
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 0ee58df2cd..2ebc82875e 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,6 +1,6 @@
package scala
-package object reflect extends reflect_compat {
+package object reflect {
lazy val basis: base.Universe = new base.Base
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index d7f5a57f50..accda5b8f7 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -41,8 +41,8 @@ class Breaks {
}
}
- trait TryBlock {
- def catchBreak(onBreak: => Unit): Unit
+ sealed trait TryBlock[T] {
+ def catchBreak(onBreak: =>T): T
}
/**
@@ -57,8 +57,8 @@ class Breaks {
* }
* }}}
*/
- def tryBreakable(op: => Unit) = new TryBlock {
- def catchBreak(onBreak: => Unit) = try {
+ def tryBreakable[T](op: =>T) = new TryBlock[T] {
+ def catchBreak(onBreak: =>T) = try {
op
} catch {
case ex: BreakControl =>
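Usage sketch (not from this patch): with TryBlock parameterized in T, tryBreakable can now yield a value and catchBreak supplies the result for the break case. sumOrFail is a hypothetical example:

    import scala.util.control.Breaks._

    // Sum the list, but give up and answer -1 as soon as a negative number shows up.
    def sumOrFail(xs: List[Int]): Int =
      tryBreakable {
        var sum = 0
        for (x <- xs) {
          if (x < 0) break()
          sum += x
        }
        sum
      } catchBreak {
        -1
      }

    sumOrFail(List(1, 2, 3))    // 6
    sumOrFail(List(1, -2, 3))   // -1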
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 8cbe3064ef..64afb1f10f 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -24,8 +24,9 @@ package scala.util.control
* try {
* // Body might throw arbitrarily
* } catch {
- * case ce : ControlThrowable => throw ce // propagate
- * case t : Exception => log(t) // log and suppress
+ * case c: ControlThrowable => throw c // propagate
+ * case t: Exception => log(t) // log and suppress
+ * }
* }}}
*
* @author Miles Sabin
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index 2223a6db0f..2aa9a99054 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -336,7 +336,6 @@ import ILGenerator._
emitSpecialLabel(Label.Try)
val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
excStack.push(Label.Try, endExc)
- return endExc
}
/** Begins a catch block. */
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
index de5354d4a0..142f2baea5 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -81,9 +81,9 @@ abstract class ScaladocModelTest extends DirectTest {
private[this] var settings: Settings = null
// create a new scaladoc compiler
- private[this] def newDocFactory: DocFactory = {
+ def newDocFactory: DocFactory = {
settings = new Settings(_ => ())
- settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
+ settings.reportModel = false // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
new file mode 100644
index 0000000000..7f4ff8a7fb
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -0,0 +1,94 @@
+package scala.reflect
+package api
+
+import java.io.{ PrintWriter, StringWriter }
+
+trait Printers { self: Universe =>
+
+ trait TreePrinter {
+ def print(args: Any*)
+ protected var printTypes = false
+ protected var printIds = false
+ protected var printKinds = false
+ def withTypes: this.type = { printTypes = true; this }
+ def withoutTypes: this.type = { printTypes = false; this }
+ def withIds: this.type = { printIds = true; this }
+ def withoutIds: this.type = { printIds = false; this }
+ def withKinds: this.type = { printKinds = true; this }
+ def withoutKinds: this.type = { printKinds = false; this }
+ }
+
+ case class BooleanFlag(val value: Option[Boolean])
+ object BooleanFlag {
+ import language.implicitConversions
+ implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
+ implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+ }
+
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String = {
+ val buffer = new StringWriter()
+ val writer = new PrintWriter(buffer)
+ var printer = mkPrinter(writer)
+ printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
+ printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
+ printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printer.print(what)
+ writer.flush()
+ buffer.toString
+ }
+
+ /** By default trees are printed with `show` */
+ override protected def treeToString(tree: Tree) = show(tree)
+
+ /** Renders a prettified representation of a tree.
+ * Typically it looks very close to the Scala code it represents.
+ * This function is used in Tree.toString.
+ */
+ def show(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `show(tree)` means.
+ */
+ def newTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders internal structure of a tree.
+ */
+ def showRaw(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newRawTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `showRaw(tree)` means.
+ */
+ def newRawTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders a prettified representation of a symbol.
+ */
+ def show(sym: Symbol): String = sym.toString
+
+ /** Renders internal structure of a symbol.
+ */
+ def showRaw(sym: Symbol): String = render(sym, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a type.
+ */
+ def show(tpe: Type): String = tpe.toString
+
+ /** Renders internal structure of a type.
+ */
+ def showRaw(tpe: Type): String = render(tpe, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a name.
+ */
+ def show(name: Name): String
+
+ /** Renders internal structure of a name.
+ */
+ def showRaw(name: Name): String = name.toString
+
+ /** Renders a prettified representation of a flag set.
+ */
+ def show(flags: FlagSet): String
+
+ /** Renders internal structure of a flag set.
+ */
+ def showRaw(flags: FlagSet): String = flags.toString
+}
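Usage sketch (not from this patch): Tree.toString now routes through the protected treeToString hook, which this trait overrides with show; showRaw prints the case-class structure, and the BooleanFlag parameters toggle extra detail. The sketch assumes the 2.10 runtime universe as the concrete Universe (entry points may differ slightly at this commit):

    import scala.reflect.runtime.universe._

    val tree = reify { List(1, 2, 3).map(_ + 1) }.tree
    println(show(tree))                        // pretty-printed, close to the source
    println(showRaw(tree, printIds = true))    // Apply(Select(...), ...) with symbol ids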
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 1d266dc778..eb9921a31a 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -116,11 +116,28 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isValue: Boolean
+ /** Does this symbol denote a stable value? */
+ def isStable: Boolean
+
/** Does this symbol represent a mutable value?
* If yes, `isTerm` and `isValue` are also guaranteed to be true.
*/
def isVariable: Boolean
+ /** Does this symbol represent a getter or a setter?
+ */
+ def isAccessor: Boolean
+
+ /** Does this symbol represent a getter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isGetter: Boolean
+
+ /** Does this symbol represent a setter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isSetter: Boolean
+
/** Does this symbol represent the definition of a package?
* If yes, `isTerm` is also guaranteed to be true.
*/
@@ -177,6 +194,25 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isErroneous : Boolean
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
+ */
+ def isLocatable: Boolean
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ */
+ def isStatic: Boolean
+
/** The type signature of this symbol seen as a member of given type `site`.
*/
def typeSignatureIn(site: Type): Type
diff --git a/src/reflect/scala/reflect/api/TreePrinters.scala b/src/reflect/scala/reflect/api/TreePrinters.scala
deleted file mode 100644
index 08a08e7b90..0000000000
--- a/src/reflect/scala/reflect/api/TreePrinters.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.reflect
-package api
-
-import java.io.{ PrintWriter, StringWriter }
-
-trait TreePrinters { self: Universe =>
-
- trait TreePrinter {
- def print(args: Any*)
- protected var typesPrinted = false
- protected var uniqueIds = false
- def withTypesPrinted: this.type = { typesPrinted = true; this }
- def withUniqueIds: this.type = { uniqueIds = true; this }
- }
-
- def show(tree: Tree): String = show(tree, newTreePrinter)
-
- def show(tree: Tree, mkPrinter: PrintWriter => TreePrinter): String = {
- val buffer = new StringWriter()
- val writer = new PrintWriter(buffer)
- val printer = mkPrinter(writer)
- printer.print(tree)
- writer.flush()
- buffer.toString
- }
-
- def showRaw(tree: Tree): String = show(tree, new RawTreePrinter(_))
-
- /** Hook to define what `show(tree)` means.
- */
- def newTreePrinter(out: PrintWriter): TreePrinter
-
- // emits more or less verbatim representation of the provided tree
- // [Eugene] todo. needs to be refined
- // http://groups.google.com/group/scala-user/browse_thread/thread/de5a5be2e083cf8e
- class RawTreePrinter(out: PrintWriter) extends TreePrinter {
- def print(args: Any*): Unit = args foreach {
- case EmptyTree =>
- print("EmptyTree")
- case tree @ TypeTree() =>
- print("TypeTree()")
- if (tree.tpe != null)
- print(".setType(", tree.tpe, ")")
- else if (tree.original != null)
- print(".setOriginal(", tree.original, ")")
- case Literal(Constant(s: String)) =>
- print("Literal(Constant(\"" + s + "\"))")
- case tree: Tree =>
- print(tree.productPrefix+"(")
- val it = tree.productIterator
- while (it.hasNext) {
- it.next() match {
- case name: Name if uniqueIds && tree.hasSymbol && tree.symbol != NoSymbol =>
- print(tree.symbol.name, "#", tree.symbol.id)
- case arg =>
- print(arg)
- }
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- if (typesPrinted)
- print(".setType(", tree.tpe, ")")
- case list: List[_] =>
- print("List(")
- val it = list.iterator
- while (it.hasNext) {
- print(it.next())
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
- parts += mods.flagString
- if (mods.privateWithin.toString.nonEmpty)
- parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
- if (mods.annotations.nonEmpty)
- parts += mods.annotations map showRaw mkString ("List(", ", ", ")")
- print(parts mkString ("Modifiers(", ", ", ")"))
- case name: Name =>
- if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
- print(name.toString)
- print("\")")
- case arg =>
- out.print(arg)
- }
- }
-}
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index b62a92cbd7..b797c71f6d 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -121,7 +121,7 @@ trait Types extends base.Types { self: Universe =>
* class C extends p.D[Int]
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
- * }}}
+ * }}}
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type
@@ -171,6 +171,15 @@ trait Types extends base.Types { self: Universe =>
*/
def widen: Type
+ /** Map to a singleton type which is a subtype of this type.
+ * The fallback implemented here gives:
+ * {{{
+ * T.narrow = (T {}).this.type
+ * }}}
+ * Overridden where we know more about where types come from.
+ */
+ def narrow: Type
+
/** The string discriminator of this type; useful for debugging */
def kind: String
}
@@ -365,4 +374,3 @@ trait Types extends base.Types { self: Universe =>
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type
}
-
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 002cd2e673..85d8adc44f 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -9,7 +9,7 @@ abstract class Universe extends base.Universe
with FlagSets
with Names
with Trees
- with TreePrinters
+ with Printers
with Constants
with Positions
with Mirrors
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 5f78671012..fa758edf05 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -8,7 +8,7 @@ package internal
// todo implement in terms of BitSet
import scala.collection.{ mutable, immutable }
import math.max
-import util.Statistics._
+import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
@@ -28,6 +28,7 @@ import util.Statistics._
trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
+ import BaseTypeSeqsStats._
protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
@@ -38,8 +39,8 @@ trait BaseTypeSeqs {
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
- incCounter(baseTypeSeqCount)
- incCounter(baseTypeSeqLenTotal, elems.length)
+ Statistics.incCounter(baseTypeSeqCount)
+ Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
/** The number of types in the sequence */
def length: Int = elems.length
@@ -231,3 +232,8 @@ trait BaseTypeSeqs {
val CyclicInheritance = new Throwable
}
+
+object BaseTypeSeqsStats {
+ val baseTypeSeqCount = Statistics.newCounter("#base type seqs")
+ val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount)
+}
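Usage sketch (not from this patch): the pattern introduced here -- counters are declared once in a plain stats object and call sites go through the Statistics facade, which stays a cheap no-op while statistics are disabled. MyStats and trackedOp are hypothetical names:

    import scala.reflect.internal.util.Statistics

    object MyStats {
      val trackedOpCount = Statistics.newCounter("#tracked ops")
    }

    def trackedOp(): Unit = {
      Statistics.incCounter(MyStats.trackedOpCount)   // no-op unless statistics are enabled
      // ... the actual work ...
    }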
diff --git a/src/reflect/scala/reflect/internal/TreePrinters.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 6d035c8b9d..82a8c42f7c 100644
--- a/src/reflect/scala/reflect/internal/TreePrinters.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -11,7 +11,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
-trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
+trait Printers extends api.Printers { self: SymbolTable =>
//nsc import treeInfo.{ IsTrue, IsFalse }
@@ -62,8 +62,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
protected val indentStep = 2
protected var indentString = " " // 40
- typesPrinted = settings.printtypes.value
- uniqueIds = settings.uniqid.value
+ printTypes = settings.printtypes.value
+ printIds = settings.uniqid.value
+ printKinds = settings.Yshowsymkinds.value
protected def doPrintPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
@@ -320,7 +321,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Function(vparams, body) =>
print("("); printValueParams(vparams); print(" => ", body, ")")
- if (uniqueIds && tree.symbol != null) print("#"+tree.symbol.id)
+ if (printIds && tree.symbol != null) print("#"+tree.symbol.id)
case Assign(lhs, rhs) =>
print(lhs, " = ", rhs)
@@ -429,7 +430,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
printColumn(whereClauses, " forSome { ", ";", "}")
// SelectFromArray is no longer visible in reflect.internal.
-// eliminated until we figure out what we will do with both TreePrinters and
+// eliminated until we figure out what we will do with both Printers and
// SelectFromArray.
// case SelectFromArray(qualifier, name, _) =>
// print(qualifier); print(".<arr>"); print(symName(tree, name))
@@ -437,7 +438,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (typesPrinted && tree.isTerm && !tree.isEmpty) {
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -475,4 +476,167 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
def close = { /* do nothing */ }
def flush = { /* do nothing */ }
}
+
+ // provides footnotes for types
+ private var typeCounter = 0
+ private val typeMap = collection.mutable.WeakHashMap[Type, Int]()
+
+ def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
+ def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
+ def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+
+ // emits more or less verbatim representation of the provided tree
+ class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
+ private var depth = 0
+ private var footnotes = collection.mutable.Map[Int, Type]()
+ private var printingFootnotes = false
+ private var printTypesInFootnotes = true
+
+ def print(args: Any*): Unit = {
+ if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
+ printTypesInFootnotes = false
+
+ depth += 1
+ args foreach {
+ case EmptyTree =>
+ print("EmptyTree")
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ print("emptyValDef")
+ case Literal(Constant(value)) =>
+ def print(s: String) = this.print("Literal(Constant(" + s + "))")
+ value match {
+ case s: String => print("\"" + s + "\"")
+ case null => print(null)
+ case _ => print(value.toString)
+ }
+ case tree: Tree =>
+ val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
+ val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ printProduct(
+ tree,
+ preamble = _ => {
+ print(tree.productPrefix)
+ if (printTypes && tree.tpe != null) print(tree.tpe)
+ },
+ body = {
+ case name: Name =>
+ if (isError) {
+ if (isError) print("<")
+ print(name)
+ if (isError) print(": error>")
+ } else if (hasSymbol) {
+ tree match {
+ case _: Ident | _: Select | _: SelectFromTypeTree => print(tree.symbol)
+ case _ => print(tree.symbol.name)
+ }
+ } else {
+ print(name)
+ }
+ case arg =>
+ print(arg)
+ },
+ postamble = {
+ case tree @ TypeTree() if tree.original != null => print(".setOriginal(", tree.original, ")")
+ case _ => // do nothing
+ })
+ case sym: Symbol =>
+ if (sym.isStatic && (sym.isClass || sym.isModule)) print(sym.fullName)
+ else print(sym.name)
+ if (printIds) print("#", sym.id)
+ if (printKinds) print("#", sym.abbreviatedKindString)
+ case NoType =>
+ print("NoType")
+ case NoPrefix =>
+ print("NoPrefix")
+ case tpe: Type if printTypesInFootnotes && !printingFootnotes =>
+ val index = typeMap.getOrElseUpdate(tpe, { typeCounter += 1; typeCounter })
+ footnotes(index) = tpe
+ print("[", index, "]")
+ case mods: Modifiers =>
+ print("Modifiers(")
+ if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
+ if (mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) { print(", "); print(mods.privateWithin) }
+ if (mods.annotations.nonEmpty) { print(", "); print(mods.annotations); }
+ print(")")
+ case name: Name =>
+ print(show(name))
+ case list: List[_] =>
+ print("List")
+ printIterable(list)
+ case product: Product =>
+ printProduct(product)
+ case arg =>
+ out.print(arg)
+ }
+ depth -= 1
+ if (depth == 0 && footnotes.nonEmpty && !printingFootnotes) {
+ printingFootnotes = true
+ out.println()
+ val typeIndices = footnotes.keys.toList.sorted
+ typeIndices.zipWithIndex foreach {
+ case (typeIndex, i) =>
+ print("[" + typeIndex + "] ")
+ print(footnotes(typeIndex))
+ if (i < typeIndices.length - 1) out.println()
+ }
+ }
+ }
+
+ def printProduct(
+ p: Product,
+ preamble: Product => Unit = p => print(p.productPrefix),
+ body: Any => Unit = print(_),
+ postamble: Product => Unit = p => print("")): Unit =
+ {
+ preamble(p)
+ printIterable(p.productIterator.toList, body = body)
+ postamble(p)
+ }
+
+ def printIterable(
+ iterable: List[_],
+ preamble: => Unit = print(""),
+ body: Any => Unit = print(_),
+ postamble: => Unit = print("")): Unit =
+ {
+ preamble
+ print("(")
+ val it = iterable.iterator
+ while (it.hasNext) {
+ body(it.next)
+ print(if (it.hasNext) ", " else "")
+ }
+ print(")")
+ postamble
+ }
+ }
+
+ def show(name: Name): String = name match {
+ // base.StandardNames
+ case tpnme.EMPTY => "tpnme.EMPTY"
+ case tpnme.ROOT => "tpnme.ROOT"
+ case tpnme.EMPTY_PACKAGE_NAME => "tpnme.EMPTY_PACKAGE_NAME"
+ case tpnme.WILDCARD => "tpnme.WILDCARD"
+ case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
+ case nme.NO_NAME => "nme.NO_NAME"
+ // api.StandardNames
+ case tpnme.ERROR => "tpnme.ERROR"
+ case nme.ERROR => "nme.ERROR"
+ case nme.EMPTY => "nme.EMPTY"
+ case tpnme.PACKAGE => "tpnme.PACKAGE"
+ case nme.PACKAGE => "nme.PACKAGE"
+ case _ =>
+ val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ prefix + name.toString + "\")"
+ }
+
+ def show(flags: FlagSet): String = {
+ if (flags == NoFlags) nme.NoFlags.toString
+ else {
+ val s_flags = new collection.mutable.ListBuffer[String]
+ for (i <- 0 to 63 if (flags containsAll (1L << i)))
+ s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
+ s_flags mkString " | "
+ }
+ }
}
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index cadd76b1ba..5ae8f22c64 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -28,7 +28,7 @@ abstract class SymbolTable extends makro.Universe
with AnnotationInfos
with AnnotationCheckers
with Trees
- with TreePrinters
+ with Printers
with Positions
with TypeDebugging
with Importers
@@ -40,7 +40,7 @@ abstract class SymbolTable extends makro.Universe
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
- val treeBuild = gen
+ lazy val treeBuild = gen
def log(msg: => AnyRef): Unit
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
@@ -129,11 +129,15 @@ abstract class SymbolTable extends makro.Universe
// sigh, this has to be public or atPhase doesn't inline.
var phStack: List[Phase] = Nil
- private var ph: Phase = NoPhase
- private var per = NoPeriod
+ private[this] var ph: Phase = NoPhase
+ private[this] var per = NoPeriod
final def atPhaseStack: List[Phase] = phStack
- final def phase: Phase = ph
+ final def phase: Phase = {
+ if (Statistics.hotEnabled)
+ Statistics.incCounter(SymbolTableStats.phaseCounter)
+ ph
+ }
def atPhaseStackMessage = atPhaseStack match {
case Nil => ""
@@ -330,3 +334,7 @@ abstract class SymbolTable extends makro.Universe
*/
def isCompilerUniverse = false
}
+
+object SymbolTableStats {
+ val phaseCounter = Statistics.newCounter("#phase calls")
+}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 288eb63332..a3893a0236 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -8,18 +8,17 @@ package internal
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
-import util.Statistics._
+import util.Statistics
import Flags._
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
+ import SymbolsStats._
protected var ids = 0
val emptySymbolArray = new Array[Symbol](0)
- def symbolCount = ids // statistics
-
protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -646,6 +645,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def flags: Long = {
+ if (Statistics.hotEnabled) Statistics.incCounter(flagsCount)
val fs = _rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
@@ -666,7 +666,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isClassLocalToConstructor = false
final def isDerivedValueClass =
- isClass && !hasFlag(PACKAGE | TRAIT) &&
+ isClass && !hasFlag(PACKAGE | TRAIT) &&
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
@@ -937,7 +937,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
- def owner: Symbol = rawowner
+ def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
+ rawowner
+ }
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
@@ -2325,7 +2329,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawname: TermName = initName
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
def name_=(name: Name) {
if (name != rawname) {
log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
@@ -2494,11 +2501,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def companionClass =
flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
- override def owner = (
+ override def owner = {
+ Statistics.incCounter(ownerCount)
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
- )
- override def name: TermName = (
+ }
+ override def name: TermName = {
+ Statistics.incCounter(nameCount)
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
@@ -2506,7 +2515,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
}
implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
@@ -2577,7 +2586,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// cloneSymbolImpl still abstract in TypeSymbol.
def rawname = _rawname
- def name = _rawname
+ def name = {
+ Statistics.incCounter(nameCount)
+ _rawname
+ }
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
@@ -2713,7 +2725,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- incCounter(typeSymbolCount)
+ Statistics.incCounter(typeSymbolCount)
}
implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
@@ -2889,10 +2901,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thisTypeCache
}
- override def owner: Symbol =
+ override def owner: Symbol = {
+ Statistics.incCounter(ownerCount)
if (needsFlatClasses) rawowner.owner else rawowner
+ }
- override def name: TypeName = (
+ override def name: TypeName = {
+ Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
@@ -2900,7 +2915,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
flatname
}
else rawname
- )
+ }
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2929,7 +2944,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def children = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
- incCounter(classSymbolCount)
+ Statistics.incCounter(classSymbolCount)
}
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
@@ -3188,4 +3203,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
}
+
+ Statistics.newView("#symbols")(ids)
+}
+
+object SymbolsStats {
+ val typeSymbolCount = Statistics.newCounter("#type symbols")
+ val classSymbolCount = Statistics.newCounter("#class symbols")
+ val flagsCount = Statistics.newCounter("#flags ops")
+ val ownerCount = Statistics.newCounter("#owner ops")
+ val nameCount = Statistics.newCounter("#name ops")
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 75bb0e6d49..dd13dd4c4c 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -9,6 +9,7 @@ package internal
import Flags._
import base.Attachments
import collection.mutable.{ListBuffer, LinkedHashSet}
+import util.Statistics
trait Trees extends api.Trees { self: SymbolTable =>
@@ -18,6 +19,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
+ Statistics.incCounter(TreesStats.nodeByType, getClass)
+
@inline final def pos: Position = rawatt.pos
def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
def setPos(newpos: Position): this.type = { pos = newpos; this }
@@ -809,7 +812,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- // Belongs in TreeInfo but then I can't reach it from TreePrinters.
+ // Belongs in TreeInfo but then I can't reach it from Printers.
def isReferenceToScalaMember(t: Tree, Id: Name) = t match {
case Ident(Id) => true
case Select(Ident(nme.scala_), Id) => true
@@ -1592,4 +1595,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
+
+ val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount)
+}
+
+object TreesStats {
+ // statistics
+ val nodeByType = Statistics.newByClass("#created tree nodes by type")(Statistics.newCounter(""))
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 8a4394bf1d..4cf2cceb81 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -13,8 +13,7 @@ import mutable.ListBuffer
import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
-import util.Statistics._
-import language.postfixOps
+import util.Statistics
/* A standard type pattern match:
case ErrorType =>
@@ -73,9 +72,7 @@ import language.postfixOps
trait Types extends api.Types { self: SymbolTable =>
import definitions._
-
- //statistics
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size
+ import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
@@ -681,8 +678,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
- incCounter(asSeenFromCount)
- val start = startTimer(asSeenFromNanos)
+ Statistics.incCounter(asSeenFromCount)
+ val start = Statistics.pushTimer(typeOpsStack, asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -690,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
- stopTimer(asSeenFromNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
}
@@ -828,26 +825,26 @@ trait Types extends api.Types { self: SymbolTable =>
}
def stat_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
def weak_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- stopTimer(subtypeNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
result
}
@@ -1020,8 +1017,8 @@ trait Types extends api.Types { self: SymbolTable =>
// See (t0851) for a situation where this happens.
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
- incCounter(findMemberCount)
- val start = startTimer(findMemberNanos)
+ Statistics.incCounter(findMemberCount)
+ val start = Statistics.pushTimer(typeOpsStack, findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1048,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1094,13 +1091,13 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- stopTimer(findMemberNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
- if (member == NoSymbol) incCounter(noMemberCount)
+ if (member == NoSymbol) Statistics.incCounter(noMemberCount)
member
} else {
- incCounter(multMemberCount)
+ Statistics.incCounter(multMemberCount)
baseClasses.head.newOverloaded(this, members.toList)
}
}
@@ -1185,7 +1182,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- incCounter(singletonBaseTypeSeqCount)
+ Statistics.incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
@@ -1536,12 +1533,18 @@ trait Types extends api.Types { self: SymbolTable =>
val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
- incCounter(compoundBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
- else
- compoundBaseTypeSeq(tpe)
+ Statistics.incCounter(compoundBaseTypeSeqCount)
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache =
+ if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
// [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
// when compiling with
// scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
@@ -2392,9 +2395,14 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- incCounter(typerefBaseTypeSeqCount)
- tpe.baseTypeSeqCache = undetBaseTypeSeq
- tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ Statistics.incCounter(typerefBaseTypeSeqCount)
+ val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ } finally {
+ Statistics.popTimer(typeOpsStack, start)
+ }
}
}
if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
@@ -3702,7 +3710,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
- incCounter(rawTypeCount)
+ Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
uniqueRunId = currentRunId
@@ -5104,7 +5112,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** Do `tp1` and `tp2` denote equivalent types? */
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- incCounter(sametypeCount)
+ Statistics.incCounter(sametypeCount)
subsametypeRecursions += 1
undoLog undoUnless {
isSameType1(tp1, tp2)
@@ -6308,14 +6316,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => NothingClass.tpe
case List(t) => t
case _ =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6431,7 +6439,7 @@ trait Types extends api.Types { self: SymbolTable =>
indent = indent + " "
assert(indent.length <= 100)
}
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = lub0(ts)
if (printLubs) {
indent = indent stripSuffix " "
@@ -6456,14 +6464,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => AnyClass.tpe
case List(t) => t
case ts0 =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.pushTimer(typeOpsStack, lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6578,7 +6586,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
// if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = glb0(ts)
// if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
@@ -6871,4 +6879,29 @@ trait Types extends api.Types { self: SymbolTable =>
implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
implicit val TypeTagg = ClassTag[Type](classOf[Type])
+
+ Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
+}
+
+object TypesStats {
+ import BaseTypeSeqsStats._
+ val rawTypeCount = Statistics.newCounter ("#raw type creations")
+ val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
+ val subtypeCount = Statistics.newCounter ("#subtype ops")
+ val sametypeCount = Statistics.newCounter ("#sametype ops")
+ val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
+ val nestedLubCount = Statistics.newCounter ("#all lubs/glbs")
+ val findMemberCount = Statistics.newCounter ("#findMember ops")
+ val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
+ val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
+ val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
+ val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
+ val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val typeOpsStack = Statistics.newTimerStack()
}
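Usage sketch (not from this patch): the stackable timers above are used by pushing on entry and popping in a finally block, so nested operations charge their time to the innermost timer on typeOpsStack. myOpNanos and myOp are hypothetical names mirroring the call sites in the hunks above:

    import scala.reflect.internal.util.Statistics
    import scala.reflect.internal.TypesStats

    // hypothetical timer, stacked under the typechecker's top-level timer
    val myOpNanos = Statistics.newStackableTimer("time spent in myOp", TypesStats.typerNanos)

    def myOp[T](body: => T): T = {
      val start = Statistics.pushTimer(TypesStats.typeOpsStack, myOpNanos)
      try body
      finally Statistics.popTimer(TypesStats.typeOpsStack, start)
    }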
diff --git a/src/reflect/scala/reflect/internal/util/StatBase.scala b/src/reflect/scala/reflect/internal/util/StatBase.scala
deleted file mode 100644
index b033ff98bc..0000000000
--- a/src/reflect/scala/reflect/internal/util/StatBase.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package scala.reflect.internal.util
-
-class StatBase {
-
- private var _enabled = false
-
- def enabled = _enabled
- def enabled_=(cond: Boolean) = {
- if (cond && !_enabled) {
- val test = new Timer()
- val start = System.nanoTime()
- var total = 0L
- for (i <- 1 to 10000) {
- val time = System.nanoTime()
- total += System.nanoTime() - time
- }
- val total2 = System.nanoTime() - start
- println("Enabling statistics, measuring overhead = "+
- total/10000.0+"ns to "+total2/10000.0+"ns per timer")
- _enabled = true
- }
- }
-
- def currentTime() =
- if (_enabled) System.nanoTime() else 0L
-
- def showPercent(x: Double, base: Double) =
- if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
-
- def incCounter(c: Counter) {
- if (_enabled) c.value += 1
- }
-
- def incCounter(c: Counter, delta: Int) {
- if (_enabled) c.value += delta
- }
-
- def startCounter(sc: SubCounter): IntPair =
- if (_enabled) sc.start() else null
-
- def stopCounter(sc: SubCounter, start: IntPair) {
- if (_enabled) sc.stop(start)
- }
-
- def startTimer(tm: Timer): LongPair =
- if (_enabled) tm.start() else null
-
- def stopTimer(tm: Timer, start: LongPair) {
- if (_enabled) tm.stop(start)
- }
-
- case class IntPair(x: Int, y: Int)
- case class LongPair(x: Long, y: Long)
-
- class Counter {
- var value: Int = 0
- override def toString = value.toString
- }
-
- class SubCounter(c: Counter) {
- var value: Int = 0
- def start(): IntPair =
- if (_enabled) IntPair(value, c.value) else null
- def stop(prev: IntPair) {
- if (_enabled) {
- val IntPair(value0, cvalue0) = prev
- value = value0 + c.value - cvalue0
- }
- }
- override def toString =
- value+showPercent(value, c.value)
- }
-
- class Timer {
- var nanos: Long = 0
- var timings = 0
- def start(): LongPair =
- if (_enabled) {
- timings += 1
- LongPair(nanos, System.nanoTime())
- } else null
- def stop(prev: LongPair) {
- if (_enabled) {
- val LongPair(nanos0, start) = prev
- nanos = nanos0 + System.nanoTime() - start
- timings += 1
- }
- }
- override def toString = (timings/2)+" spans, "+nanos.toString+"ns"
- }
-
- import Predef.Class
-
- class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
- override def default(key: Class[_]) = 0
- }
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index de0830aa3a..f69530c40d 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,37 +1,261 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
package scala.reflect.internal.util
-class Statistics extends StatBase {
- val singletonBaseTypeSeqCount = new Counter
- val compoundBaseTypeSeqCount = new Counter
- val typerefBaseTypeSeqCount = new Counter
- val findMemberCount = new Counter
- val noMemberCount = new Counter
- val multMemberCount = new Counter
- val findMemberNanos = new Timer
- val asSeenFromCount = new Counter
- val asSeenFromNanos = new Timer
- val subtypeCount = new Counter
- val subtypeNanos = new Timer
- val sametypeCount = new Counter
- val rawTypeCount = new Counter
- val rawTypeFailed = new SubCounter(rawTypeCount)
- val findMemberFailed = new SubCounter(findMemberCount)
- val subtypeFailed = new SubCounter(subtypeCount)
- val rawTypeImpl = new SubCounter(rawTypeCount)
- val findMemberImpl = new SubCounter(findMemberCount)
- val subtypeImpl = new SubCounter(subtypeCount)
- val baseTypeSeqCount = new Counter
- val baseTypeSeqLenTotal = new Counter
- val typeSymbolCount = new Counter
- val classSymbolCount = new Counter
- val lubCount = new Counter
- val nestedLubCount = new Counter
- val lubNanos = new Timer
-}
+import collection.mutable
+
+object Statistics {
+
+ type TimerSnapshot = (Long, Long)
+
+ /** If enabled, increment counter by one */
+ @inline final def incCounter(c: Counter) {
+ if (_enabled && c != null) c.value += 1
+ }
+
+ /** If enabled, increment counter by given delta */
+ @inline final def incCounter(c: Counter, delta: Int) {
+ if (_enabled && c != null) c.value += delta
+ }
+
+ /** If enabled, increment counter in map `ctrs` at index `key` by one */
+ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) =
+ if (_enabled && ctrs != null) ctrs(key).value += 1
+
+ /** If enabled, start subcounter. While active it will track all increments of
+ * its base counter.
+ */
+ @inline final def startCounter(sc: SubCounter): (Int, Int) =
+ if (_enabled && sc != null) sc.start() else null
+
+ /** If enabled, stop subcounter from tracking its base counter. */
+ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) {
+ if (_enabled && sc != null) sc.stop(start)
+ }
+
+ /** If enabled, start timer */
+ @inline final def startTimer(tm: Timer): TimerSnapshot =
+ if (_enabled && tm != null) tm.start() else null
+
+ /** If enabled, stop timer */
+ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) {
+ if (_enabled && tm != null) tm.stop(start)
+ }
+
+ /** If enabled, push and start a new timer in timer stack */
+ @inline final def pushTimer(timers: TimerStack, timer: StackableTimer): TimerSnapshot =
+ if (_enabled && timers != null) timers.push(timer) else null
+
+ /** If enabled, stop and pop timer from timer stack */
+ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) {
+ if (_enabled && timers != null) timers.pop(prev)
+ }
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases)
+
+ /** Create a new relative counter that shows as `prefix` and is active
+ * in the same phases as its base counter. Relative counters print as percentages
+ * of their base counters.
+ */
+ def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr)
+
+ /** Create a new subcounter that shows as `prefix` and is active
+ * in the same phases as its base counter. Subcounters can track
+ * increments of their base counters and print as percentages
+ * of their base counters.
+ */
+ def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr)
+
+ /** Create a new timer that shows as `prefix` and is active in given phases */
+ def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases)
+
+ /** Create a new subtimer that shows as `prefix` and is active
+ * in the same phases as its base timer. Subtimers can track
+ * increments of their base timers and print as percentages
+ * of their base timers.
+ */
+ def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+
+ /** Create a new stackable timer that shows as `prefix` and is active
+ * in the same phases as its base timer. Stackable timers are subtimers
+ * that can be stacked in a timer stack, and that print aggregate as well
+ * as specific durations.
+ */
+ def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
+
+ /** Create a new view that shows as `prefix` and is active in given phases.
+ * The view always reflects the current value of `quant` as a quantity.
+ */
+ def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases, quant)
-object Statistics extends Statistics
+ /** Create a new quantity map that shows as `prefix` and is active in given phases.
+ */
+ def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue)
+ /** Same as newQuantMap, where the key type is fixed to be Class[_] */
+ def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
+
+ /** Create a new timer stack */
+ def newTimerStack() = new TimerStack()
+
+ def allQuantities: Iterable[Quantity] =
+ for ((_, q) <- qs if q.underlying == q;
+ r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
+
+ private def showPercent(x: Double, base: Double) =
+ if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+
+ /** The base trait for quantities.
+ * Quantities with non-empty prefix are printed in the statistics info.
+ */
+ trait Quantity {
+ if (prefix.nonEmpty) {
+ val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+ qs(key) = this
+ }
+ val prefix: String
+ val phases: Seq[String]
+ def underlying: Quantity = this
+ def showAt(phase: String) = phases.isEmpty || (phases contains phase)
+ def line = f"$prefix%-30s: ${this}"
+ val children = new mutable.ListBuffer[Quantity]
+ }
+
+ trait SubQuantity extends Quantity {
+ protected def underlying: Quantity
+ underlying.children += this
+ }
+
+ class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] {
+ var value: Int = 0
+ def compare(that: Counter): Int =
+ if (this.value < that.value) -1
+ else if (this.value > that.value) 1
+ else 0
+ override def toString = value.toString
+ }
+
+ class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity {
+ override def toString = quant.toString
+ }
+
+ private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ override def toString =
+ if (value == 0) "0"
+ else {
+ assert(underlying.value != 0, prefix+"/"+underlying.line)
+ f"${value.toFloat / underlying.value}%2.1f"
+ }
+ }
+
+ class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ def start() = (value, underlying.value)
+ def stop(prev: (Int, Int)) {
+ val (value0, uvalue0) = prev
+ value = value0 + underlying.value - uvalue0
+ }
+ override def toString =
+ value + showPercent(value, underlying.value)
+ }
+
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
+ var nanos: Long = 0
+ var timings = 0
+ def start() = {
+ (nanos, System.nanoTime())
+ }
+ def stop(prev: TimerSnapshot) {
+ val (nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ timings += 1
+ }
+ protected def show(ns: Long) = s"${ns/1000000}ms"
+ override def toString = s"$timings spans, ${show(nanos)}"
+ }
+
+ class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos)
+ }
+
+ class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] {
+ var specificNanos: Long = 0
+ def compare(that: StackableTimer): Int =
+ if (this.specificNanos < that.specificNanos) -1
+ else if (this.specificNanos > that.specificNanos) 1
+ else 0
+ override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
+ }
+
+ /** A mutable map quantity where missing elements are automatically inserted
+ * on access by executing `initValue`.
+ */
+ class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
+ extends scala.collection.mutable.HashMap[K, V] with Quantity {
+ override def default(key: K) = {
+ val elem = initValue
+ this(key) = elem
+ elem
+ }
+ override def toString =
+ this.toSeq.sortWith(_._2 > _._2).map {
+ case (cls: Class[_], elem) =>
+ s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem"
+ case (key, elem) =>
+ s"$key: $elem"
+ }.mkString(", ")
+ }
+
+ /** A stack of timers, all active, where a timer's specific "clock"
+ * is stopped as long as it is buried by some other timer in the stack, but
+ * its aggregate clock keeps on ticking.
+ */
+ class TimerStack {
+ private var elems: List[(StackableTimer, Long)] = Nil
+ /** Start given timer and push it onto the stack */
+ def push(t: StackableTimer): TimerSnapshot = {
+ elems = (t, 0L) :: elems
+ t.start()
+ }
+ /** Stop and pop top timer in stack
+ */
+ def pop(prev: TimerSnapshot) = {
+ val (nanos0, start) = prev
+ val duration = System.nanoTime() - start
+ val (topTimer, nestedNanos) :: rest = elems
+ topTimer.nanos = nanos0 + duration
+ topTimer.specificNanos += duration - nestedNanos
+ topTimer.timings += 1
+ elems = rest match {
+ case (outerTimer, outerNested) :: elems1 =>
+ (outerTimer, outerNested + duration) :: elems1
+ case Nil =>
+ Nil
+ }
+ }
+ }
+
+ private var _enabled = false
+ private val qs = new mutable.HashMap[String, Quantity]
+
+ def enabled = _enabled
+ def enabled_=(cond: Boolean) = {
+ if (cond && !_enabled) {
+ val test = new Timer("", Nil)
+ val start = System.nanoTime()
+ var total = 0L
+ for (i <- 1 to 10000) {
+ val time = System.nanoTime()
+ total += System.nanoTime() - time
+ }
+ val total2 = System.nanoTime() - start
+ println("Enabling statistics, measuring overhead = "+
+ total/10000.0+"ns to "+total2/10000.0+"ns per timer")
+ _enabled = true
+ }
+ }
+
+ /** Replace the right-hand side with `enabled` and rebuild to also count tiny but
+ * super-hot methods such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
+}
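
Taken together, the new API replaces the instance-based StatBase/Statistics pair deleted above with a single facade of named, phase-aware quantities. The following self-contained sketch (all prefixes, the "parser" phase, and the values are made up for illustration) exercises counters, subcounters, views, a by-class map, and the allQuantities report; it assumes only that scala-reflect is on the classpath.

import scala.reflect.internal.util.Statistics

object StatisticsSketch {
  // Hypothetical quantities; prefixes and the "parser" phase are illustrative only.
  val parseCount   = Statistics.newCounter("#parsed units", "parser")
  val failedParses = Statistics.newSubCounter("  of which failed", parseCount)
  val unitsView    = Statistics.newView("#units in memory") { 42 }
  val byClass      = Statistics.newByClass("instances by class")(Statistics.newCounter(""))

  def main(args: Array[String]): Unit = {
    Statistics.enabled = true
    for (_ <- 1 to 10) Statistics.incCounter(parseCount)
    Statistics.incCounter(byClass, classOf[String])

    // Subcounters track increments of their base counter between start and stop.
    val snap = Statistics.startCounter(failedParses)
    Statistics.incCounter(parseCount)
    Statistics.stopCounter(failedParses, snap)

    Statistics.allQuantities foreach (q => println(q.line))
  }
}
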
diff --git a/src/reflect/scala/reflect/makro/Universe.scala b/src/reflect/scala/reflect/makro/Universe.scala
index ffc4042a0a..98046be555 100644
--- a/src/reflect/scala/reflect/makro/Universe.scala
+++ b/src/reflect/scala/reflect/makro/Universe.scala
@@ -15,25 +15,6 @@ abstract class Universe extends scala.reflect.api.Universe {
// [Eugene++ to Martin] should we also add mutability methods here (similarly to what's done below for trees)?
// I'm talking about `setAnnotations` and friends
-
- /** Can this symbol be loaded by a reflective mirror?
- *
- * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
- * Such annotations (also called "pickles") are applied on top-level classes and include information
- * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
- * are typically unreachable and information about them gets lost.
- *
- * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
- * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
- */
- def isLocatable: Boolean
-
- /** Is this symbol static (i.e. with no outer instance)?
- * Q: When exactly is a sym marked as STATIC?
- * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
- */
- def isStatic: Boolean
}
// Tree extensions ---------------------------------------------------------------
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 7ce71166c9..629df76178 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -12,14 +12,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def picklerPhase = SomePhase
- type TreeGen = internal.TreeGen
-
- override type Position = scala.reflect.internal.util.Position
-
- override val gen = new TreeGen { val global: self.type = self }
-
- override val treeBuild = gen
-
lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 28e116b479..8c43cdaafe 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -178,9 +178,10 @@ object Arbitrary {
import java.math.MathContext._
val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
val bdGen = for {
- mc <- mcGen
- scale <- arbInt.arbitrary
x <- arbBigInt.arbitrary
+ mc <- mcGen
+ limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
+ scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
} yield BigDecimal(x, scale, mc)
Arbitrary(bdGen)
}
@@ -197,24 +198,37 @@ object Arbitrary {
}
/** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] =
+ implicit lazy val arbProp: Arbitrary[Prop] = {
+ import Prop._
+ val undecidedOrPassed = forAll { b: Boolean =>
+ b ==> true
+ }
Arbitrary(frequency(
- (5, Prop.proved),
- (4, Prop.falsified),
- (2, Prop.undecided),
- (1, Prop.exception(null))
+ (4, falsified),
+ (4, passed),
+ (3, proved),
+ (3, undecidedOrPassed),
+ (2, undecided),
+ (1, exception(null))
))
+ }
/** Arbitrary instance of test params */
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
- minSuccTests <- choose(10,150)
- maxDiscTests <- choose(100,500)
+ minSuccTests <- choose(10,200)
+ maxDiscardRatio <- choose(0.2f,10f)
minSize <- choose(0,500)
sizeDiff <- choose(0,500)
maxSize <- choose(minSize, minSize + sizeDiff)
ws <- choose(1,4)
- } yield Test.Params(minSuccTests,maxDiscTests,minSize,maxSize,workers = ws))
+ } yield Test.Params(
+ minSuccessfulTests = minSuccTests,
+ maxDiscardRatio = maxDiscardRatio,
+ minSize = minSize,
+ maxSize = maxSize,
+ workers = ws
+ ))
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
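
The reordered BigDecimal generator above computes `limit` so that `scale` stays at least `limit` above Int.MinValue; this appears to guard against scale underflow when a bounded MathContext rounds `x` down to its precision, since each dropped digit lowers the effective scale by one. A standalone illustration of the computation with made-up values (object name and inputs are hypothetical):

import java.math.MathContext

object ScaleLimitSketch extends App {
  // Illustrative values mirroring the generator logic in the hunk above.
  val x     = BigInt("12345678901234567890")                       // 20 digits
  val mc    = MathContext.DECIMAL64                                // precision 16
  val limit = math.max(x.abs.toString.length - mc.getPrecision, 0) // 20 - 16 = 4
  println(limit)
  // Rounding x to 16 significant digits lowers the scale by up to 4, so drawing
  // scale from [Int.MinValue + limit, Int.MaxValue] keeps it from underflowing.
}
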
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 908bce2a81..8959211f09 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 112dda28a7..5ad82c513d 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -46,13 +46,6 @@ trait Commands extends Prop {
def run(s: State): Any
def nextState(s: State): State
- /** @deprecated Use <code>preConditions += ...</code> instead. */
- @deprecated("Use 'preConditions += ...' instead.", "1.6")
- def preCondition_=(f: State => Boolean) = {
- preConditions.clear
- preConditions += f
- }
-
/** Returns all preconditions merged into a single function */
def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
@@ -62,20 +55,6 @@ trait Commands extends Prop {
* conditions to the precondition list */
val preConditions = new collection.mutable.ListBuffer[State => Boolean]
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,Any) => Prop) = {
- postConditions.clear
- postConditions += ((s0,s1,r) => f(s0,r))
- }
-
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,State,Any) => Prop) = {
- postConditions.clear
- postConditions += f
- }
-
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
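
With the assignment-style setters removed, the surviving API is the `preConditions`/`postConditions` buffers plus the merged `preCondition`/`postCondition` functions shown above. The merging behaviour can be reproduced in isolation; this standalone sketch mirrors the `preConditions` half without depending on ScalaCheck (names and sample predicates are made up):

import scala.collection.mutable.ListBuffer

object PreconditionMergeSketch extends App {
  type State = Int
  val preConditions = new ListBuffer[State => Boolean]
  // Same shape as Commands#preCondition: all registered predicates must hold.
  def preCondition: State => Boolean = s => preConditions.toList.forall(_.apply(s))

  preConditions += (_ >= 0)
  preConditions += (_ < 100)
  println(preCondition(42))   // true
  println(preCondition(-1))   // false
}
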
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index c3af6c83a3..93f1dc222e 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -37,31 +37,6 @@ object ConsoleReporter {
* the given verbosity */
def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
- @deprecated("(v1.8)", "1.8")
- def propReport(s: Int, d: Int) = {
- if(d == 0) printf("\rPassed %s tests\r", s)
- else printf("\rPassed %s tests; %s discarded\r", s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def propReport(pName: String, s: Int, d: Int) = {
- if(d == 0) printf("\r %s: Passed %s tests\r", pName, s)
- else printf("\r %s: Passed %s tests; %s discarded\r", pName, s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def testReport(res: Test.Result) = {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + pretty(res, Params(0))
- printf("\r%s\n", format(s, "", "", 75))
- res
- }
-
- @deprecated("(v1.8)", "1.8")
- def testStatsEx(res: Test.Result): Unit = testStatsEx("", res)
-
def testStatsEx(msg: String, res: Test.Result) = {
lazy val m = if(msg.length == 0) "" else msg + ": "
res.status match {
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index a253b040cd..64bb61c2d3 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -59,6 +59,12 @@ object Choose {
}
}
+case class FiniteGenRes[+T](
+ r: T
+)
+
+sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
+
/** Class that represents a generator. */
sealed trait Gen[+T] {
@@ -150,13 +156,6 @@ sealed trait Gen[+T] {
/** Returns a new property that holds if and only if both this
* and the given generator generates the same result, or both
- * generators generate no result.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead", "1.7")
- def ===[U](g: Gen[U]): Prop = this == g
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
* generators generate no result. */
def ==[U](g: Gen[U]) = Prop(prms =>
(this(prms.genPrms), g(prms.genPrms)) match {
@@ -221,11 +220,6 @@ object Gen {
}
}
- /* Default generator parameters
- * @deprecated Use <code>Gen.Params()</code> instead */
- @deprecated("Use Gen.Params() instead", "1.8")
- val defaultParams = Params()
-
/* Generator factory method */
def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
def apply(p: Gen.Params) = g(p)
@@ -310,20 +304,6 @@ object Gen {
x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
} yield x
- /** Chooses one of the given values, with a weighted random distribution.
- * @deprecated Use <code>frequency</code> with constant generators
- * instead. */
- @deprecated("Use 'frequency' with constant generators instead.", "1.6")
- def elementsFreq[T](vs: (Int, T)*): Gen[T] =
- frequency(vs.map { case (w,v) => (w, value(v)) } : _*)
-
- /** A generator that returns a random element from a list
- * @deprecated Use <code>oneOf</code> with constant generators instead. */
- @deprecated("Use 'oneOf' with constant generators instead.", "1.6")
- def elements[T](xs: T*): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0,xs.length-1)
- } yield xs(i)
-
//// List Generators ////
@@ -368,12 +348,6 @@ object Gen {
* <code>containerOfN[List,T](n,g)</code>. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
- /** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>.
- * @deprecated Use the method <code>listOfN</code> instead. */
- @deprecated("Use 'listOfN' instead.", "1.6")
- def vectorOf[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
/** A generator that picks a random number of elements from a list */
def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
@@ -438,16 +412,6 @@ object Gen {
//// Number Generators ////
- /* Generates positive integers
- * @deprecated Use <code>posNum[Int]</code> instead */
- @deprecated("Use posNum[Int] instead", "1.7")
- def posInt: Gen[Int] = sized(max => choose(1, max))
-
- /* Generates negative integers
- * @deprecated Use <code>negNum[Int]</code> instead */
- @deprecated("Use negNum[Int] instead", "1.7")
- def negInt: Gen[Int] = sized(max => choose(-max, -1))
-
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
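
The deprecation messages in the removed block point at the surviving combinators: `frequency`/`oneOf` over constant generators, `listOfN`, and `posNum`/`negNum`. A sketch of those replacement forms follows; the generator names are made up, and Prop.forAll is assumed to be the usual ScalaCheck entry point rather than anything introduced by this hunk.

import org.scalacheck.{Gen, Prop}

object GenMigrationSketch {
  val digit: Gen[Int] =
    Gen.oneOf(Gen.value(0), Gen.value(1), Gen.value(2))        // was: elements(0, 1, 2)
  val biased: Gen[String] =
    Gen.frequency((3, Gen.value("a")), (1, Gen.value("b")))    // was: elementsFreq((3,"a"), (1,"b"))
  val fiveDigits: Gen[List[Int]] = Gen.listOfN(5, digit)       // was: vectorOf(5, digit)
  val positive: Gen[Int] = Gen.posNum[Int]                     // was: posInt
  val negative: Gen[Int] = Gen.negNum[Int]                     // was: negInt

  val allPositive: Prop = Prop.forAll(positive)(n => n > 0)
}
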
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index f59ac315c7..c40e4aa718 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -49,6 +49,8 @@ object Pretty {
implicit def prettyAny(t: Any) = Pretty { p => t.toString }
+ implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
+
implicit def prettyList(l: List[Any]) = Pretty { p =>
l.map("\""+_+"\"").mkString("List(", ", ", ")")
}
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index 3ae9f22234..dfd85a832a 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -5,12 +5,13 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
import util.{FreqMap,Buildable}
import scala.collection._
+import scala.annotation.tailrec
/** A property is a generator that generates a property result */
trait Prop {
@@ -102,15 +103,6 @@ trait Prop {
}
}
- /** Returns a new property that holds if and only if both this
- * and the given property generates a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead.", "1.7")
- def ===(p: Prop): Prop = this == p
-
override def toString = "Prop"
/** Put a label on the property to make test reports clearer */
@@ -201,7 +193,7 @@ object Prop {
case (_,Undecided) => r
case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, this.status)
+ case (Proof,_) => merge(this, r, r.status)
case (True,True) => merge(this, r, True)
}
@@ -337,15 +329,12 @@ object Prop {
/** A property that depends on the generator size */
def sizedProp(f: Int => Prop): Prop = Prop { prms =>
+ // provedToTrue since if the property is proved for
+ // one size, it shouldn't be regarded as proved for
+ // all sizes.
provedToTrue(f(prms.genPrms.size)(prms))
}
- /** Implication
- * @deprecated Use the implication operator of the Prop class instead
- */
- @deprecated("Use the implication operator of the Prop class instead", "1.7")
- def ==>(b: => Boolean, p: => Prop): Prop = (b: Prop) ==> p
-
/** Implication with several conditions */
def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
secure(if(f.isDefinedAt(x)) f(x) else undecided)
@@ -758,4 +747,17 @@ object Prop {
a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
+ /** Ensures that the property expression passed in completes within the given space of time. */
+ def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
+ @tailrec private def attempt(prms: Params, endTime: Long): Result = {
+ val result = wrappedProp.apply(prms)
+ if (System.currentTimeMillis > endTime) {
+ (if (result.failure) result else Result(False)).label("Timeout")
+ } else {
+ if (result.success) result
+ else attempt(prms, endTime)
+ }
+ }
+ def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
+ }
}
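
The new `within` combinator re-evaluates the wrapped property until it yields a successful result or the deadline passes, labelling the outcome "Timeout" in the latter case. A minimal usage sketch; the property and the millisecond bound are illustrative:

import org.scalacheck.Prop
import org.scalacheck.Prop.forAll

object WithinSketch {
  val quick: Prop   = forAll { n: Int => n + 0 == n }
  // Require the property to reach a successful result within 1000 ms.
  val bounded: Prop = Prop.within(1000)(quick)
}
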
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 8a5b3febc9..26059231d6 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index a077f21573..ae15bd9616 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 48b0a151a1..4368184823 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -19,12 +19,18 @@ object Test {
/** Test parameters */
case class Params(
minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = 500,
+
+ /** @deprecated Use maxDiscardRatio instead. */
+ @deprecated("Use maxDiscardRatio instead.", "1.10")
+ maxDiscardedTests: Int = -1,
+
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
+ testCallback: TestCallback = new TestCallback {},
+ maxDiscardRatio: Float = 5,
+ customClassLoader: Option[ClassLoader] = None
)
/** Test statistics */
@@ -90,7 +96,7 @@ object Test {
import prms._
if(
minSuccessfulTests <= 0 ||
- maxDiscardedTests < 0 ||
+ maxDiscardRatio <= 0 ||
minSize < 0 ||
maxSize < minSize ||
workers <= 0
@@ -106,12 +112,13 @@ object Test {
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
- object OptMaxDiscarded extends IntOpt {
- val default = Test.Params().maxDiscardedTests
- val names = Set("maxDiscardedTests", "d")
+ object OptMaxDiscardRatio extends FloatOpt {
+ val default = Test.Params().maxDiscardRatio
+ val names = Set("maxDiscardRatio", "r")
val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property"
+ "The maximum ratio between discarded and succeeded tests " +
+ "allowed before ScalaCheck stops testing a property. At " +
+ "least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
val default = Test.Params().minSize
@@ -135,45 +142,54 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMinSize,
+ OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
optMap => Test.Params(
- optMap(OptMinSuccess),
- optMap(OptMaxDiscarded),
- optMap(OptMinSize),
- optMap(OptMaxSize),
- Test.Params().rng,
- optMap(OptWorkers),
- ConsoleReporter(optMap(OptVerbosity))
+ minSuccessfulTests = optMap(OptMinSuccess),
+ maxDiscardRatio = optMap(OptMaxDiscardRatio),
+ minSize = optMap(OptMinSize),
+ maxSize = optMap(OptMaxSize),
+ rng = Test.Params().rng,
+ workers = optMap(OptWorkers),
+ testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
* the test results. */
- def check(prms: Params, p: Prop): Result = {
+ def check(params: Params, p: Prop): Result = {
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val mdr =
+ if(params.maxDiscardedTests < 0) params.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+ val prms = params.copy( maxDiscardRatio = mdr)
+
import prms._
- import actors.Futures.future
+ import scala.actors.Futures.future
assertParams(prms)
if(workers > 1)
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- val iterations = minSuccessfulTests / workers
- val sizeStep = (maxSize-minSize) / (minSuccessfulTests: Float)
+ val iterations = math.ceil(minSuccessfulTests / (workers: Double))
+ val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerdIdx: Int) = future {
- var n = 0
- var d = 0
- var size = minSize + (workerdIdx*sizeStep*iterations)
+ def worker(workerIdx: Int) = future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ var n = 0 // passed tests
+ var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
- val propPrms = Prop.Params(Gen.Params(size.round, prms.rng), fm)
+ val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -184,35 +200,48 @@ object Test {
propRes.status match {
case Prop.Undecided =>
d += 1
- testCallback.onPropEval("", workerdIdx, n, d)
- if(d >= maxDiscardedTests) res = Result(Exhausted, n, d, fm)
+ testCallback.onPropEval("", workerIdx, n, d)
+ // The below condition is kind of hacky. We have to have
+ // some margin, otherwise workers might stop testing too
+ // early because they have been exhausted, but the overall
+ // test has not.
+ if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
+ res = Result(Exhausted, n, d, fm)
case Prop.True =>
n += 1
- testCallback.onPropEval("", workerdIdx, n, d)
+ testCallback.onPropEval("", workerIdx, n, d)
case Prop.Proof =>
n += 1
res = Result(Proved(propRes.args), n, d, fm)
- case Prop.False => res =
- Result(Failed(propRes.args, propRes.labels), n, d, fm)
- case Prop.Exception(e) => res =
- Result(PropException(propRes.args, e, propRes.labels), n, d, fm)
+ stop = true
+ case Prop.False =>
+ res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
+ stop = true
+ case Prop.Exception(e) =>
+ res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
+ stop = true
}
}
- size += sizeStep
}
- if(res != null) stop = true
- else res = Result(Passed, n, d, fm)
- res
+ if (res == null) {
+ if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
+ else Result(Exhausted, n, d, fm)
+ } else res
}
- def mergeResults(r1: () => Result, r2: () => Result) = r1() match {
- case Result(Passed, s1, d1, fm1, t) => r2() match {
- case Result(Passed, s2, d2, fm2, t) if d1+d2 >= maxDiscardedTests =>
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, t)
- case Result(st, s2, d2, fm2, t) =>
- () => Result(st, s1+s2, d1+d2, fm1++fm2, t)
+ def mergeResults(r1: () => Result, r2: () => Result) = {
+ val Result(st1, s1, d1, fm1, _) = r1()
+ val Result(st2, s2, d2, fm2, _) = r2()
+ if (st1 != Passed && st1 != Exhausted)
+ () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
+ else if (st2 != Passed && st2 != Exhausted)
+ () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
+ else {
+ if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
+ () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
+ else
+ () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
}
- case r => () => r
}
val start = System.currentTimeMillis
@@ -237,78 +266,4 @@ object Test {
(name,res)
}
-
- // Deprecated methods //
-
- /** Default testing parameters
- * @deprecated Use <code>Test.Params()</code> instead */
- @deprecated("Use Test.Params() instead", "1.8")
- val defaultParams = Params()
-
- /** Property evaluation callback. Takes number of passed and
- * discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type PropEvalCallback = (Int,Int) => Unit
-
- /** Property evaluation callback. Takes property name, and number of passed
- * and discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type NamedPropEvalCallback = (String,Int,Int) => Unit
-
- /** Test callback. Takes property name, and test results. */
- @deprecated("(v1.8)", "1.8")
- type TestResCallback = (String,Result) => Unit
-
- /** @deprecated (v1.8) Use <code>check(prms.copy(testCallback = myCallback), p)</code> instead. */
- @deprecated("Use check(prms.copy(testCallback = myCallback), p) instead", "1.8")
- def check(prms: Params, p: Prop, propCallb: PropEvalCallback): Result = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(s,d)
- }
- check(prms copy (testCallback = testCallback), p)
- }
-
- /** Tests a property and prints results to the console. The
- * <code>maxDiscarded</code> parameter specifies how many
- * discarded tests that should be allowed before ScalaCheck
- * @deprecated (v1.8) Use <code>check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop, maxDiscarded: Int): Result =
- check(Params(maxDiscardedTests = maxDiscarded, testCallback = ConsoleReporter()), p)
-
- /** Tests a property and prints results to the console
- * @deprecated (v1.8) Use <code>check(Params(testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop): Result = check(Params(testCallback = ConsoleReporter()), p)
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results. <code>f</code> is a function which is called each
- * time a property is evaluted. <code>g</code> is a function called each
- * time a property has been fully tested.
- * @deprecated (v1.8) Use <code>checkProperties(prms.copy(testCallback = myCallback), ps)</code> instead. */
- @deprecated("Use checkProperties(prms.copy(testCallback = myCallback), ps) instead.", "1.8")
- def checkProperties(ps: Properties, prms: Params,
- propCallb: NamedPropEvalCallback, testCallb: TestResCallback
- ): Seq[(String,Result)] = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(n,s,d)
- override def onTestResult(n: String, r: Result) = testCallb(n,r)
- }
- checkProperties(prms copy (testCallback = testCallback), ps)
- }
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results.
- * @deprecated (v1.8) Use checkProperties(prms, ps) instead */
- @deprecated("Use checkProperties(prms, ps) instead", "1.8")
- def checkProperties(ps: Properties, prms: Params): Seq[(String,Result)] =
- checkProperties(ps, prms, (n,s,d) => (), (n,s) => ())
-
- /** Tests all properties with default testing parameters, and returns
- * the test results. The results are also printed on the console during
- * testing.
- * @deprecated (v1.8) Use <code>checkProperties(Params(), ps)</code> instead. */
- @deprecated("Use checkProperties(Params(), ps) instead.", "1.8")
- def checkProperties(ps: Properties): Seq[(String,Result)] =
- checkProperties(Params(), ps)
}
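
Callers now size the discard budget as a ratio rather than an absolute count. Below is a sketch of driving the reworked check with named parameters; the values are illustrative, and the deprecated maxDiscardedTests field is only consulted when it is explicitly set to a non-negative value.

import org.scalacheck.{Prop, Test}

object TestParamsSketch extends App {
  val prop = Prop.forAll { n: Int => n == n }
  val params = Test.Params(
    minSuccessfulTests = 200,
    maxDiscardRatio    = 5f   // give up roughly when discards exceed 5x the successes
  )
  val result = Test.check(params, prop)
  println(result.passed)
}
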
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 5c960c3ba8..221b8a61c3 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -31,7 +31,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit t: ClassTag[T]) =
+ implicit def buildableArray[T](implicit cm: ClassTag[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index a63e4ba10e..16ac1940b2 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -26,6 +26,7 @@ trait CmdLineParser extends Parsers {
}
trait Flag extends Opt[Unit]
trait IntOpt extends Opt[Int]
+ trait FloatOpt extends Opt[Float]
trait StrOpt extends Opt[String]
class OptMap {
@@ -68,11 +69,17 @@ trait CmdLineParser extends Parsers {
case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
})
+ private val floatVal: Parser[Float] = accept("float", {
+ case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
+ => s.toFloat
+ })
+
private case class OptVal[T](o: Opt[T], v: T)
private val optVal: Parser[OptVal[Any]] = opt into {
case o: Flag => success(OptVal(o, ()))
case o: IntOpt => intVal ^^ (v => OptVal(o, v))
+ case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
case o: StrOpt => strVal ^^ (v => OptVal(o, v))
}
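
The new FloatOpt values are recognised by the floatVal parser above, whose character-class pattern admits only unsigned plain decimals. A standalone check of that pattern (copied verbatim from the hunk) against a few sample arguments; the object name and sample strings are made up:

object FloatOptPatternSketch extends App {
  val pattern = "[0987654321]+\\.?[0987654321]*"
  for (s <- Seq("5", "2.5", "2.", "0.2", "-1", "1e3", ".5"))
    println(f"$s%-4s matches: ${s.matches(pattern)}")
  // "5", "2.5", "2." and "0.2" match; "-1", "1e3" and ".5" do not, so options such
  // as maxDiscardRatio only accept non-negative plain decimal literals.
}
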
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index 902c148d67..c7474d3b87 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index 4cc83a4172..317b0ccd10 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
index 5a6191fb62..3f47dc23fd 100644
--- a/test/benchmarking/ParCtrie-size.scala
+++ b/test/benchmarking/ParCtrie-size.scala
@@ -2,16 +2,18 @@
-import collection.parallel.mutable.ParCtrie
+import collection.parallel.mutable.ParTrieMap
object Size extends testing.Benchmark {
val length = sys.props("length").toInt
val par = sys.props("par").toInt
- var parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ var parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ //collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ val ts = new collection.parallel.ForkJoinTaskSupport(new concurrent.forkjoin.ForkJoinPool(par))
+ parctrie.tasksupport = ts
def run = {
parctrie.size
@@ -21,7 +23,8 @@ object Size extends testing.Benchmark {
override def tearDown() {
iteration += 1
- if (iteration % 4 == 0) parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ if (iteration % 4 == 0) parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
+ parctrie.tasksupport = ts
}
}
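
The benchmark now configures parallelism per collection through a ForkJoinTaskSupport instead of the global defaultForkJoinPool setter. A trimmed-down standalone version of that wiring; the collection size and parallelism level are arbitrary, and it assumes a 2.10-era standard library where scala.concurrent.forkjoin.ForkJoinPool exists.

import scala.collection.parallel.ForkJoinTaskSupport
import scala.collection.parallel.mutable.ParTrieMap
import scala.concurrent.forkjoin.ForkJoinPool

object TaskSupportSketch extends App {
  val par = 4
  val parctrie = ParTrieMap((0 until 1000) zip (0 until 1000): _*)
  // Per-collection task support replaces the removed global parallelism setter.
  parctrie.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(par))
  println(parctrie.size)
}
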
diff --git a/test/files/continuations-neg/trycatch2.scala b/test/files/continuations-neg/trycatch2.scala
index d61419169b..d329a3b530 100644
--- a/test/files/continuations-neg/trycatch2.scala
+++ b/test/files/continuations-neg/trycatch2.scala
@@ -12,7 +12,7 @@ object Test {
fatal[Int]
cpsIntStringInt
} catch {
- case ex =>
+ case ex: Throwable =>
cpsIntStringInt
}
@@ -20,7 +20,7 @@ object Test {
fatal[Int]
cpsIntStringInt
} catch {
- case ex =>
+ case ex: Throwable =>
cpsIntStringInt
}
@@ -30,4 +30,4 @@ object Test {
println(reset { foo2; "3" })
}
-} \ No newline at end of file
+}
diff --git a/test/files/jvm/actmig-PinS.check b/test/files/jvm/actmig-PinS.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS.scala b/test/files/jvm/actmig-PinS.scala
new file mode 100644
index 0000000000..39f8f04b3b
--- /dev/null
+++ b/test/files/jvm/actmig-PinS.scala
@@ -0,0 +1,112 @@
+import scala.actors._
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+import scala.actors.Actor._
+
+/* PinS, Listing 32.1: A simple actor
+ */
+object SillyActor extends Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor extends Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver extends Actor {
+ import java.net.{InetAddress, UnknownHostException}
+
+ def act() {
+ react {
+ case (name: String, actor: Actor) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): Actor = actor {
+ while (true) {
+ receive {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): Actor = actor {
+ receive {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+
+ actor {
+ self.trapExit = true
+ self.link(SillyActor)
+ SillyActor.start()
+ react {
+ case Exit(SillyActor, _) =>
+ self.link(SeriousActor)
+ SeriousActor.start()
+ react {
+ case Exit(SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]
+ // PinS, page 694
+ val seriousActor2 = actor {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ self.link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/actmig-PinS_1.check b/test/files/jvm/actmig-PinS_1.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_1.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_1.scala b/test/files/jvm/actmig-PinS_1.scala
new file mode 100644
index 0000000000..1fb50567b9
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_1.scala
@@ -0,0 +1,135 @@
+import scala.actors._
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "akka.actor.default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends Actor {
+
+ def act() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "akka.actor.default-stash-dispatcher"))
+}
+
+class SeriousActor extends Actor {
+ def act() {
+ // used to make this test deterministic
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver extends Actor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def act() {
+ react {
+ case (name: String, actor: Actor) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ while (true) {
+ receive {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ receive {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ trapExit = true
+ link(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ react {
+ case Exit(_: SillyActor, _) =>
+ link(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ react {
+ case Exit(_: SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() =>
+ new Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+ }
+ , "akka.actor.default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+}
diff --git a/test/files/jvm/actmig-PinS_2.check b/test/files/jvm/actmig-PinS_2.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_2.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_2.scala b/test/files/jvm/actmig-PinS_2.scala
new file mode 100644
index 0000000000..46277efd43
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_2.scala
@@ -0,0 +1,155 @@
+import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Props, Exit }
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "default-stash-dispatcher"))
+}
+
+class SeriousActor extends StashingActor {
+ def receive = { case _ => println("Nop") }
+ override def act() {
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver {
+ val ref = MigrationSystem.actorOf(Props(() => new NameResolver, "default-stash-dispatcher"))
+}
+
+class NameResolver extends StashingActor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ react {
+ case (name: String, actor: ActorRef) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() =>
+ new StashingActor {
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ loop {
+ react {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() =>new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ react {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ trapExit = true
+ link(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ react {
+ case Exit(_: SillyActor, _) =>
+ link(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ react {
+ case Exit(_: SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() =>{
+ new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+ }, "default-stash-dispatcher"))
+}
diff --git a/test/files/jvm/actmig-PinS_3.check b/test/files/jvm/actmig-PinS_3.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_3.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_3.scala b/test/files/jvm/actmig-PinS_3.scala
new file mode 100644
index 0000000000..321e99b1c2
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_3.scala
@@ -0,0 +1,161 @@
+import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Terminated, Props }
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends StashingActor {
+ def receive = { case _ => println("Why are you not dead"); context.stop(self) }
+
+ override def preStart() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+ context.stop(self)
+ }
+
+ override def postStop() {
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "default-stash-dispatcher"))
+}
+
+class SeriousActor extends StashingActor {
+ def receive = { case _ => println("Nop") }
+ override def preStart() {
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ context.stop(self)
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver {
+ val ref = MigrationSystem.actorOf(Props(() => new NameResolver, "default-stash-dispatcher"))
+}
+
+class NameResolver extends StashingActor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def receive = {
+ case (name: String, actor: ActorRef) =>
+ actor ! getIp(name)
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ context.stop(self) // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { // how to handle receive
+ case 'stop =>
+ context.stop(self)
+ case msg =>
+ println("received message: " + msg)
+ }
+ }, "default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int => // I only want Ints
+ unstashAll()
+ println("Got an Int: " + x)
+ context.stop(self)
+ case _ => stash()
+ }
+ }, "default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new StashingActor {
+ val silly = SillyActor.ref
+
+ override def preStart() {
+ context.watch(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ }
+
+ def receive = {
+ case Terminated(`silly`) =>
+ unstashAll()
+ val serious = SeriousActor.ref
+ context.watch(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ context.become {
+ case Terminated(`serious`) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() => {
+ new StashingActor {
+
+ def receive = { case _ => context.stop(self) }
+
+ override def preStart() = {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ context.stop(self)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ context.watch(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ context.become {
+ case Terminated(_) =>
+ unstashAll()
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ context.unbecome()
+ context.unbecome()
+ context.stop(self)
+ case m =>
+ println("Stash 1 " + m)
+ stash(m)
+ }
+ case m =>
+ println("Stash 2 " + m)
+ stash(m)
+ }
+ case m =>
+ println("Stash 3 " + m)
+ stash(m)
+ }
+ }, "default-stash-dispatcher"))
+} \ No newline at end of file
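
The migrated test above keeps reusing one idiom: watch a collaborator, stash every message that arrives before its Terminated notification, then unstashAll and move on. A minimal sketch of just that idiom, using the same migration-kit API imported at the top of the file (WatcherSketch and its parameter name are hypothetical):

  import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Terminated, Props }

  object WatcherSketch {
    def watching(child: ActorRef): ActorRef =
      MigrationSystem.actorOf(Props(() => new StashingActor {
        override def preStart() { context.watch(child) }
        def receive = {
          case Terminated(`child`) =>
            unstashAll()        // release whatever was buffered while waiting
            context.stop(self)
          case _ => stash()     // buffer everything until the child terminates
        }
      }, "default-stash-dispatcher"))
  }
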
diff --git a/test/files/jvm/actmig-hierarchy.check b/test/files/jvm/actmig-hierarchy.check
new file mode 100644
index 0000000000..317e9677c3
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/jvm/actmig-hierarchy_1.check b/test/files/jvm/actmig-hierarchy_1.check
new file mode 100644
index 0000000000..317e9677c3
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy_1.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/jvm/actmig-instantiation.check b/test/files/jvm/actmig-instantiation.check
new file mode 100644
index 0000000000..4c13d5c0a1
--- /dev/null
+++ b/test/files/jvm/actmig-instantiation.check
@@ -0,0 +1,8 @@
+OK error: java.lang.RuntimeException: In order to create StashingActor one must use actorOf.
+OK error: java.lang.RuntimeException: Only one actor can be created per actorOf call.
+0
+100
+200
+300
+400
+500
diff --git a/test/files/jvm/actmig-loop-react.check b/test/files/jvm/actmig-loop-react.check
new file mode 100644
index 0000000000..54cbe942c0
--- /dev/null
+++ b/test/files/jvm/actmig-loop-react.check
@@ -0,0 +1,15 @@
+do task
+do task
+do task
+do task
+working
+scala got exception
+working
+akka got exception
+do task 1
+do string I am a String
+do task 42
+after react
+do task 1
+do string I am a String
+do task 42
diff --git a/test/files/jvm/actmig-loop-react.scala b/test/files/jvm/actmig-loop-react.scala
new file mode 100644
index 0000000000..d714b26594
--- /dev/null
+++ b/test/files/jvm/actmig-loop-react.scala
@@ -0,0 +1,188 @@
+import scala.actors.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+
+object Test {
+ val finishedLWCR, finishedTNR, finishedEH = Promise[Boolean]
+ val finishedLWCR1, finishedTNR1, finishedEH1 = Promise[Boolean]
+
+ def testLoopWithConditionReact() = {
+ // Snippet showing composition of receives
+ // Loop with Condition Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ c = false
+ finishedLWCR1.success(true)
+ }
+ }
+ }
+ }
+
+ myActor.start()
+ myActor ! 1
+ myActor ! 42
+
+ Await.ready(finishedLWCR1.future, 5 seconds)
+
+ // Loop with Condition Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ finishedLWCR.success(true)
+ context.stop(self)
+ }
+ }
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+ myAkkaActor ! 42
+ }
+
+ def testNestedReact() = {
+ // Snippet showing composition of receives
+ // Nested React Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ c = false
+ finishedTNR1.success(true)
+ } else
+ react {
+ case y: String =>
+ println("do string " + y)
+ }
+ println("after react")
+ }
+ }
+ }
+ myActor.start()
+
+ myActor ! 1
+ myActor ! "I am a String"
+ myActor ! 42
+
+ Await.ready(finishedTNR1.future, 5 seconds)
+
+ // Nested React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ finishedTNR.success(true)
+ context.stop(self)
+ } else
+ context.become(({
+ case y: String =>
+ println("do string " + y)
+ }: Receive).andThen(x => {
+ unstashAll()
+ context.unbecome()
+ }).orElse { case x => stash() })
+ }
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! 1
+ myAkkaActor ! "I am a String"
+ myAkkaActor ! 42
+
+ }
+
+ def exceptionHandling() = {
+ // Stashing actor with act and exception handler
+ val myActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { case _ => println("Dummy method.") }
+ override def act() = {
+ loop {
+ react {
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH1.success(true)
+ exit()
+ }
+ }
+ }
+
+ override def exceptionHandler = {
+ case x: Exception => println("scala got exception")
+ }
+
+ }, "default-stashing-dispatcher"))
+
+ myActor ! "work"
+ myActor ! "fail"
+ myActor ! "die"
+
+ Await.ready(finishedEH1.future, 5 seconds)
+ // Stashing actor in Akka style
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ def receive = PFCatch({
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH.success(true)
+ context.stop(self)
+ }, { case x: Exception => println("akka got exception") })
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! "work"
+ myAkkaActor ! "fail"
+ myAkkaActor ! "die"
+ }
+
+ def main(args: Array[String]) = {
+ testLoopWithConditionReact()
+ Await.ready(finishedLWCR.future, 5 seconds)
+ exceptionHandling()
+ Await.ready(finishedEH.future, 5 seconds)
+ testNestedReact()
+ Await.ready(finishedTNR.future, 5 seconds)
+ }
+
+}
+
+// As per Jim Mcbeath's blog (http://jim-mcbeath.blogspot.com/2008/07/actor-exceptions.html)
+class PFCatch(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit])
+ extends PartialFunction[Any, Unit] {
+
+ def apply(x: Any) = {
+ try {
+ f(x)
+ } catch {
+ case e: Exception if handler.isDefinedAt(e) => handler(e)
+ }
+ }
+
+ def isDefinedAt(x: Any) = f.isDefinedAt(x)
+}
+
+object PFCatch {
+ def apply(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit]) = new PFCatch(f, handler)
+}
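
Since PFCatch is an ordinary PartialFunction[Any, Unit], it can be exercised outside an actor as well; a small sketch reusing the class defined above (object name and messages are illustrative):

  object PFCatchSketch extends App {
    val guarded = PFCatch(
      { case "fail" => throw new Exception("failed")
        case msg    => println("handled: " + msg) },
      { case e: Exception => println("caught: " + e.getMessage) })

    guarded("work") // prints "handled: work"
    guarded("fail") // the exception is routed to the handler: prints "caught: failed"
  }
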
diff --git a/test/files/jvm/actmig-public-methods.check b/test/files/jvm/actmig-public-methods.check
new file mode 100644
index 0000000000..bb6530c926
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods.check
@@ -0,0 +1,6 @@
+None
+Some(bang qmark after 1)
+bang
+bang qmark after 0
+bang qmark in future after 0
+typed bang qmark in future after 0
diff --git a/test/files/jvm/actmig-public-methods_1.check b/test/files/jvm/actmig-public-methods_1.check
new file mode 100644
index 0000000000..bb6530c926
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods_1.check
@@ -0,0 +1,6 @@
+None
+Some(bang qmark after 1)
+bang
+bang qmark after 0
+bang qmark in future after 0
+typed bang qmark in future after 0
diff --git a/test/files/jvm/actmig-public-methods_1.scala b/test/files/jvm/actmig-public-methods_1.scala
new file mode 100644
index 0000000000..7e5bc24210
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods_1.scala
@@ -0,0 +1,88 @@
+import scala.collection.mutable.ArrayBuffer
+import scala.actors.Actor._
+import scala.actors._
+import scala.util._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.concurrent.util.Duration
+import scala.actors.pattern._
+
+object Test {
+ val NUMBER_OF_TESTS = 6
+
+ // used for sorting non-deterministic output
+ val buff = ArrayBuffer[String]()
+ val latch = new CountDownLatch(NUMBER_OF_TESTS)
+ val toStop = ArrayBuffer[ActorRef]()
+
+ def append(v: String) = synchronized {
+ buff += v
+ }
+
+ def main(args: Array[String]) = {
+
+ val respActor = MigrationSystem.actorOf(Props(() => actor {
+ loop {
+ react {
+ case (x: String, time: Long) =>
+ Thread.sleep(time)
+ reply(x + " after " + time)
+ case str: String =>
+ append(str)
+ latch.countDown()
+ case x =>
+ exit()
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ toStop += respActor
+
+ respActor ! "bang"
+
+ implicit val timeout = Timeout(Duration(500, TimeUnit.MILLISECONDS))
+ val msg = ("bang qmark", 0L)
+ val res1 = respActor.?(msg)(Timeout(Duration.Inf))
+ append(res1().toString)
+ latch.countDown()
+
+ val msg1 = ("bang qmark", 1L)
+ val res2 = respActor.?(msg1)(Timeout(Duration(500, TimeUnit.MILLISECONDS)))
+ append((res2() match {
+ case x: AskTimeoutException => None
+ case v => Some(v)
+ }).toString)
+ latch.countDown()
+
+ // this one should time out
+ val msg11 = ("bang qmark", 500L)
+ val res21 = respActor.?(msg11)(Timeout(Duration(1, TimeUnit.MILLISECONDS)))
+ append((res21() match {
+ case x: AskTimeoutException => None
+ case v => Some(v)
+ }).toString)
+ latch.countDown()
+
+ val msg2 = ("bang qmark in future", 0L)
+ val fut1 = respActor.?(msg2)(Duration.Inf)
+ append(fut1().toString())
+ latch.countDown()
+
+ val handler: PartialFunction[Any, String] = {
+ case x: String => x.toString
+ }
+
+ val msg3 = ("typed bang qmark in future", 0L)
+ val fut2 = (respActor.?(msg3)(Duration.Inf))
+ append(Futures.future { handler.apply(fut2()) }().toString)
+ latch.countDown()
+
+ // output
+ latch.await(200, TimeUnit.MILLISECONDS)
+ if (latch.getCount() > 0) {
+ println("Error: Tasks have not finished!!!")
+ }
+
+ buff.sorted.foreach(println)
+ toStop.foreach(_ ! PoisonPill)
+ }
+}
diff --git a/test/files/jvm/actmig-react-receive.check b/test/files/jvm/actmig-react-receive.check
new file mode 100644
index 0000000000..cc2a426e68
--- /dev/null
+++ b/test/files/jvm/actmig-react-receive.check
@@ -0,0 +1,16 @@
+do before
+do task
+do after
+do before
+do task
+do after
+do before
+do task
+do in between
+do string
+do after
+do before
+do task
+do in between
+do string
+do after
diff --git a/test/files/jvm/actmig-react-receive.scala b/test/files/jvm/actmig-react-receive.scala
new file mode 100644
index 0000000000..8464a2af79
--- /dev/null
+++ b/test/files/jvm/actmig-react-receive.scala
@@ -0,0 +1,111 @@
+import scala.actors.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object Test {
+ val finishedRS, finishedRS1, finishedRSC, finishedRSC1 = Promise[Boolean]
+ def testComposition() = {
+ // Snippet showing composition of receives
+ // React Snippet - before
+ val myActor = actor {
+ // do before
+ println("do before")
+ receive {
+ case x: Int =>
+ // do task
+ println("do task")
+ }
+ println("do in between")
+ receive {
+ case x: String =>
+ // do string now
+ println("do string")
+ }
+ println("do after")
+ finishedRSC1.success(true)
+ }
+ myActor.start()
+ myActor ! 1
+ myActor ! "1"
+ Await.ready(finishedRSC1.future, 5 seconds)
+
+ // React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ override def preStart() = {
+ println("do before")
+ }
+
+ def receive = ({
+ case x: Int =>
+ // do task
+ println("do task")
+ }: Receive) andThen { v =>
+ context.become {
+ case x: String =>
+ // do string
+ println("do string")
+ context.stop(self)
+ }
+ println("do in between")
+ }
+
+ override def postStop() = {
+ println("do after")
+ finishedRSC.success(true)
+ }
+
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+ myAkkaActor ! "1"
+ Await.ready(finishedRSC.future, 5 seconds)
+ }
+
+ def main(args: Array[String]) = {
+ // React Snippet - before
+ val myActor = actor {
+ // do before
+ println("do before")
+ receive {
+ case x: Int =>
+ // do task
+ println("do task")
+ }
+ println("do after")
+ finishedRS1.success(true)
+ }
+ myActor.start()
+ myActor ! 1
+
+ Await.ready(finishedRS1.future, 5 seconds)
+
+ // React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ override def preStart() = {
+ println("do before")
+ }
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task")
+ context.stop(self)
+ }
+
+ override def postStop() = {
+ println("do after")
+ finishedRS.success(true)
+ }
+
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+
+ Await.ready(finishedRS.future, 5 seconds)
+ // Starting composition test
+ testComposition()
+
+ }
+}
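
The composition used in testComposition, a Receive post-composed with a side effect via andThen, is plain PartialFunction machinery and can be tried on its own; a standalone sketch (names illustrative):

  object ComposeSketch extends App {
    val doTask: PartialFunction[Any, Unit] = { case x: Int => println("do task") }
    val composed = doTask andThen { _ => println("do in between") }
    composed(1)      // prints "do task" then "do in between"
    // composed("1") would throw a MatchError: only Ints are handled by doTask
  }
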
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
index 2be7479415..e6ed543d73 100644
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ b/test/files/lib/scalacheck.jar.desired.sha1
@@ -1 +1 @@
-f8cd51e0f78e30b3ac444b741b0b2249ac8248bb ?scalacheck.jar
+b6f4dbb29f0c2ec1eba682414f60d52fea84f703 ?scalacheck.jar
diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check
new file mode 100644
index 0000000000..ab3d28777d
--- /dev/null
+++ b/test/files/neg/catch-all.check
@@ -0,0 +1,10 @@
+catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
+ try { "warn" } catch { case _ => }
+ ^
+catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+ try { "warn" } catch { case x => }
+ ^
+catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
+ ^
+three errors found
diff --git a/test/files/neg/catch-all.flags b/test/files/neg/catch-all.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/catch-all.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/catch-all.scala b/test/files/neg/catch-all.scala
new file mode 100644
index 0000000000..35a6d7af91
--- /dev/null
+++ b/test/files/neg/catch-all.scala
@@ -0,0 +1,19 @@
+object CatchAll {
+ try { "warn" } catch { case _ => }
+
+ try { "warn" } catch { case x => }
+
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
+
+ try { "okay" } catch { case _: Throwable => }
+
+ try { "okay" } catch { case _: Exception => }
+
+ try { "okay" } catch { case okay: Throwable => }
+
+ try { "okay" } catch { case okay: Exception => }
+
+ try { "okay" } catch { case _ if "".isEmpty => }
+
+ "okay" match { case _ => "" }
+}
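
For reference, the accepted rewrite that the warnings above point to is simply the explicit form already used by the "okay" cases: name Throwable in the pattern. A one-line sketch of the fix for the first warned line:

  object CatchAllFixed {
    // was:  try { "warn" } catch { case _ => }            (warns, fatal under -Xfatal-warnings)
    // now:  try { "warn" } catch { case _: Throwable => } (accepted)
    try { "warn" } catch { case _: Throwable => }
  }
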
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
index 4fc06b5ceb..1b9487982f 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree.check
+++ b/test/files/neg/macro-invalidret-nonuniversetree.check
@@ -1,7 +1,7 @@
Macros_Test_2.scala:2: error: macro implementation has wrong shape:
required: (c: scala.reflect.makro.Context): c.Expr[Any]
- found : (c: scala.reflect.makro.Context): reflect.mirror.Literal
-type mismatch for return type: reflect.mirror.Literal does not conform to c.Expr[Any]
+ found : (c: scala.reflect.makro.Context): reflect.basis.Literal
+type mismatch for return type: reflect.basis.Literal does not conform to c.Expr[Any]
def foo = macro Impls.foo
^
one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
index 86b7c8d8d0..da0eb0ac83 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.makro.{Context => Ctx}
object Impls {
- def foo(c: Ctx) = scala.reflect.mirror.Literal(scala.reflect.mirror.Constant(42))
+ def foo(c: Ctx) = scala.reflect.basis.Literal(scala.reflect.basis.Constant(42))
}
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
new file mode 100644
index 0000000000..714816fd62
--- /dev/null
+++ b/test/files/neg/t2442.check
@@ -0,0 +1,9 @@
+t2442.scala:4: error: match may not be exhaustive.
+It would fail on the following input: THREE
+ def f(e: MyEnum) = e match {
+ ^
+t2442.scala:11: error: match may not be exhaustive.
+It would fail on the following input: BLUE
+ def g(e: MySecondEnum) = e match {
+ ^
+two errors found
diff --git a/test/files/neg/t2442.flags b/test/files/neg/t2442.flags
new file mode 100644
index 0000000000..32cf036c3d
--- /dev/null
+++ b/test/files/neg/t2442.flags
@@ -0,0 +1 @@
+-Xexperimental -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t2442/MyEnum.java b/test/files/neg/t2442/MyEnum.java
new file mode 100644
index 0000000000..3ffbbb31b8
--- /dev/null
+++ b/test/files/neg/t2442/MyEnum.java
@@ -0,0 +1,3 @@
+public enum MyEnum {
+ ONE, TWO, THREE;
+} \ No newline at end of file
diff --git a/test/files/neg/t2442/MySecondEnum.java b/test/files/neg/t2442/MySecondEnum.java
new file mode 100644
index 0000000000..0f841286de
--- /dev/null
+++ b/test/files/neg/t2442/MySecondEnum.java
@@ -0,0 +1,6 @@
+public enum MySecondEnum {
+ RED(1), BLUE(2) { public void foo() {} };
+ MySecondEnum(int i) {}
+
+ public void foo() {}
+} \ No newline at end of file
diff --git a/test/files/neg/t2442/t2442.scala b/test/files/neg/t2442/t2442.scala
new file mode 100644
index 0000000000..b0a0f3cd41
--- /dev/null
+++ b/test/files/neg/t2442/t2442.scala
@@ -0,0 +1,15 @@
+class Test {
+ import MyEnum._
+
+ def f(e: MyEnum) = e match {
+ case ONE => println("one")
+ case TWO => println("two")
+ // missing case --> exhaustivity warning!
+ }
+
+ import MySecondEnum._
+ def g(e: MySecondEnum) = e match {
+ case RED => println("red")
+ // missing case --> exhaustivity warning!
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check
new file mode 100644
index 0000000000..cfe0a93e00
--- /dev/null
+++ b/test/files/neg/t4270.check
@@ -0,0 +1,4 @@
+t4270.scala:5: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t4270.scala b/test/files/neg/t4270.scala
new file mode 100644
index 0000000000..2c7c71d8c2
--- /dev/null
+++ b/test/files/neg/t4270.scala
@@ -0,0 +1,6 @@
+object Test1 {
+ object A { implicit val x: Int = 1 }
+ import A.x
+ def x: Int = 0
+ implicitly[Int]
+}
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
new file mode 100644
index 0000000000..7bd8ff78f9
--- /dev/null
+++ b/test/files/neg/t4541.check
@@ -0,0 +1,7 @@
+t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int]
+ Access to protected method data not permitted because
+ prefix type Sparse[Int] does not conform to
+ class Sparse$mcI$sp where the access take place
+ that.data
+ ^
+one error found
diff --git a/test/files/neg/t4541.scala b/test/files/neg/t4541.scala
new file mode 100644
index 0000000000..744af1c288
--- /dev/null
+++ b/test/files/neg/t4541.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+@SerialVersionUID(1L)
+final class Sparse[@specialized(Int) T](d: Array[T]) extends Serializable {
+ protected var data: Array[T] = d
+ def set(that: Sparse[T]) = {
+ that.data
+ }
+}
+
+
+
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
new file mode 100644
index 0000000000..8a52fd97f4
--- /dev/null
+++ b/test/files/neg/t4541b.check
@@ -0,0 +1,7 @@
+t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int]
+ Access to protected method data not permitted because
+ prefix type SparseArray[Int] does not conform to
+ class SparseArray$mcI$sp where the access take place
+ use(that.data.clone)
+ ^
+one error found
diff --git a/test/files/neg/t4541b.scala b/test/files/neg/t4541b.scala
new file mode 100644
index 0000000000..7a21ffc156
--- /dev/null
+++ b/test/files/neg/t4541b.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+@SerialVersionUID(1L)
+final class SparseArray[@specialized(Int) T](private var data: Array[T]) extends Serializable {
+ def use(inData: Array[T]) = {
+ data = inData;
+ }
+
+ def set(that: SparseArray[T]) = {
+ use(that.data.clone)
+ }
+}
+
diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check
new file mode 100644
index 0000000000..b53bbdbd15
--- /dev/null
+++ b/test/files/neg/t4842.check
@@ -0,0 +1,7 @@
+t4842.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+two errors found
diff --git a/test/files/neg/t4842.scala b/test/files/neg/t4842.scala
new file mode 100644
index 0000000000..c6244efda7
--- /dev/null
+++ b/test/files/neg/t4842.scala
@@ -0,0 +1,7 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
+
+class TypeArg[X](val x: X)(a: AnyRef) {
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+}
diff --git a/test/files/neg/t4989.check b/test/files/neg/t4989.check
new file mode 100644
index 0000000000..814507fc3f
--- /dev/null
+++ b/test/files/neg/t4989.check
@@ -0,0 +1,7 @@
+t4989.scala:14: error: method print in class A cannot be directly accessed from class C because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+t4989.scala:18: error: method print in class A cannot be directly accessed from trait T because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+two errors found
diff --git a/test/files/neg/t4989.scala b/test/files/neg/t4989.scala
new file mode 100644
index 0000000000..e7ff80ed74
--- /dev/null
+++ b/test/files/neg/t4989.scala
@@ -0,0 +1,68 @@
+abstract class A0 {
+ def print(): String
+}
+
+class A extends A0 {
+ def print(): String = "A"
+}
+
+abstract class B extends A {
+ def print() : String
+}
+
+class C extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+trait T extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+class D extends A {
+ override def print(): String = super.print() // okay
+}
+
+
+// it's okay to do this when traits are in the mix, as the
+// suitable super accessor methods are used.
+object ConcreteMethodAndIntermediaryAreTraits {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ def print(): String = super.print() // okay
+ }
+}
+
+object IntermediaryIsTrait {
+ class T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
+
+object ConcreteMethodIsTrait {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ abstract class T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
new file mode 100644
index 0000000000..96eb1fd364
--- /dev/null
+++ b/test/files/neg/t5148.check
@@ -0,0 +1,2 @@
+error: bad reference while unpickling Imports.class: term memberHandlers not found in scala.tools.nsc.interpreter.IMain
+one error found
diff --git a/test/files/neg/t5148.scala b/test/files/neg/t5148.scala
new file mode 100644
index 0000000000..fca64e57df
--- /dev/null
+++ b/test/files/neg/t5148.scala
@@ -0,0 +1,4 @@
+package scala.tools.nsc
+package interpreter
+
+class IMain extends Imports
diff --git a/test/files/neg/t5334_1.check b/test/files/neg/t5334_1.check
deleted file mode 100644
index eca854964a..0000000000
--- a/test/files/neg/t5334_1.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5334_1.scala:5: error: implementation restriction: cannot reify block of type C that involves a type declared inside the block being reified. consider casting the return value to a suitable type
- reify {
- ^
-one error found
diff --git a/test/files/neg/t5334_2.check b/test/files/neg/t5334_2.check
deleted file mode 100644
index e21f0d5967..0000000000
--- a/test/files/neg/t5334_2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5334_2.scala:5: error: implementation restriction: cannot reify block of type List[(C, C)] that involves a type declared inside the block being reified. consider casting the return value to a suitable type
- reify {
- ^
-one error found
diff --git a/test/files/neg/t5376.check b/test/files/neg/t5376.check
new file mode 100644
index 0000000000..0376163c35
--- /dev/null
+++ b/test/files/neg/t5376.check
@@ -0,0 +1,11 @@
+t5376.scala:12: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+t5376.scala:22: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+two errors found
diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala
new file mode 100644
index 0000000000..8da3868566
--- /dev/null
+++ b/test/files/neg/t5376.scala
@@ -0,0 +1,24 @@
+object Test {
+ object O1 { implicit def f(s: String): Int = 1 }
+ object O2 { implicit def f(s: String): Int = 2 }
+ object O3 { def f(s: String): Int = 3 }
+
+ // Import two implicits with the same name in the same scope.
+ def m1 = {
+ import O1._
+ import O2._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+
+ // Import one implicit and one non-implicit method with the
+ // same name in the same scope.
+ def m2 = {
+ import O1._
+ import O3._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check
index 1b89c59587..4350696bc8 100644
--- a/test/files/neg/t5429.check
+++ b/test/files/neg/t5429.check
@@ -46,14 +46,18 @@ t5429.scala:38: error: overriding method emptyArg in class A of type ()Int;
object emptyArg has incompatible type
override object emptyArg // fail
^
-t5429.scala:39: error: object oneArg overrides nothing
+t5429.scala:39: error: object oneArg overrides nothing.
+Note: the super classes of class C contain the following, non final members named oneArg:
+def oneArg(x: String): Int
override object oneArg // fail
^
t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any;
object lazyvalue must be declared lazy to override a concrete lazy value
override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
^
-t5429.scala:46: error: object oneArg overrides nothing
+t5429.scala:46: error: object oneArg overrides nothing.
+Note: the super classes of class C0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override object oneArg // fail
^
t5429.scala:50: error: overriding value value in class A of type Int;
@@ -76,7 +80,9 @@ t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any;
value lazyvalue must be declared lazy to override a concrete lazy value
override val lazyvalue = 0 // fail (non-lazy)
^
-t5429.scala:61: error: value oneArg overrides nothing
+t5429.scala:61: error: value oneArg overrides nothing.
+Note: the super classes of class D0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override val oneArg = 15 // fail
^
t5429.scala:65: error: overriding value value in class A of type Int;
@@ -103,7 +109,9 @@ t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any;
method lazyvalue needs to be a stable, immutable value
override def lazyvalue = 2 // fail
^
-t5429.scala:76: error: method oneArg overrides nothing
+t5429.scala:76: error: method oneArg overrides nothing.
+Note: the super classes of class E0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override def oneArg = 15 // fail
^
t5429.scala:80: error: overriding value value in class A of type Int;
@@ -126,7 +134,9 @@ t5429.scala:87: error: overriding value value in class A0 of type Any;
lazy value value cannot override a concrete non-lazy value
override lazy val value = 0 // fail (strict over lazy)
^
-t5429.scala:91: error: value oneArg overrides nothing
+t5429.scala:91: error: value oneArg overrides nothing.
+Note: the super classes of class F0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override lazy val oneArg = 15 // fail
^
34 errors found
diff --git a/test/files/neg/t5617.check b/test/files/neg/t5617.check
new file mode 100644
index 0000000000..79cc3a1e32
--- /dev/null
+++ b/test/files/neg/t5617.check
@@ -0,0 +1,8 @@
+t5617.scala:12: error: method foo overrides nothing.
+Note: the super classes of trait C contain the following, non final members named foo:
+def foo(u: Unit): Int
+def foo(x: Boolean): Int
+def foo(i: Int)(b: String): Int
+ override def foo(s: String): Int
+ ^
+one error found
diff --git a/test/files/neg/t5617.scala b/test/files/neg/t5617.scala
new file mode 100644
index 0000000000..41541b5e90
--- /dev/null
+++ b/test/files/neg/t5617.scala
@@ -0,0 +1,14 @@
+trait A {
+ def foo(i: Int)(b: String): Int
+ def foo(u: Unit): Int // not reported
+ def foo(x: Float): Int // not reported
+}
+trait B[X] extends A {
+ def foo(x: X): Int
+ def foo(u: Unit): Int
+ final def foo(x: Float): Int = 0 // not reported
+}
+trait C extends B[Boolean] {
+ override def foo(s: String): Int
+ val foo = 0 // not reported
+}
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
new file mode 100644
index 0000000000..89d766fe34
--- /dev/null
+++ b/test/files/neg/t5761.check
@@ -0,0 +1,16 @@
+t5761.scala:4: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:8: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]()) // no crash
+ ^
+t5761.scala:9: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:13: error: not found: type Tread
+ new Tread("sth") { }.run()
+ ^
+four errors found
diff --git a/test/files/neg/t5761.scala b/test/files/neg/t5761.scala
new file mode 100644
index 0000000000..040c4eb75a
--- /dev/null
+++ b/test/files/neg/t5761.scala
@@ -0,0 +1,16 @@
+class D[-A](x: A) { }
+
+object Test1 {
+ println(new D[Int]{}) // crash
+}
+
+object Test2 {
+ println(new D[Int]()) // no crash
+ println(new D[Int]{}) // crash
+}
+
+object Test3 {
+ new Tread("sth") { }.run()
+}
+
+
diff --git a/test/files/neg/t5839.check b/test/files/neg/t5839.check
new file mode 100644
index 0000000000..d4b125bd1e
--- /dev/null
+++ b/test/files/neg/t5839.check
@@ -0,0 +1,6 @@
+t5839.scala:5: error: type mismatch;
+ found : (x: String => String)Int <and> (x: Int)Int
+ required: Int => String
+ val x: String = goo(foo _)
+ ^
+one error found
diff --git a/test/files/neg/t5839.scala b/test/files/neg/t5839.scala
new file mode 100644
index 0000000000..d3a5d4b25f
--- /dev/null
+++ b/test/files/neg/t5839.scala
@@ -0,0 +1,7 @@
+object Test {
+ def goo[T](x: Int => T): T = x(1)
+ implicit def f(x: Int): String = ""
+ def foo(x: Int): Int = x + 1
+ val x: String = goo(foo _)
+ def foo(x: String => String) = 1
+}
diff --git a/test/files/pos/rangepos-anonapply.flags b/test/files/pos/rangepos-anonapply.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/rangepos-anonapply.scala b/test/files/pos/rangepos-anonapply.scala
new file mode 100644
index 0000000000..2f3e4ad6cd
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.scala
@@ -0,0 +1,9 @@
+class Test {
+ trait PropTraverser {
+ def apply(x: Int): Unit = {}
+ }
+
+ def gather(x: Int) {
+ (new PropTraverser {})(x)
+ }
+}
diff --git a/test/files/pos/spec-params-new.scala b/test/files/pos/spec-params-new.scala
index 661e686f0e..959ce1591c 100644
--- a/test/files/pos/spec-params-new.scala
+++ b/test/files/pos/spec-params-new.scala
@@ -31,4 +31,4 @@ class Foo[@specialized A: ClassTag] {
val xs = new Array[A](1)
xs(0) = x
}
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t4842.scala b/test/files/pos/t4842.scala
new file mode 100644
index 0000000000..17ff684833
--- /dev/null
+++ b/test/files/pos/t4842.scala
@@ -0,0 +1,26 @@
+class Foo (x: AnyRef) {
+ def this() = {
+ this(new { } ) // okay
+ }
+}
+
+
+class Blerg (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+}
+
+
+class Outer {
+ class Inner (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+
+ def this(x: Boolean) = {
+ this(new { println(Outer.this) } ) // okay
+ }
+ }
+}
+
diff --git a/test/files/pos/t5899.flags b/test/files/pos/t5899.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5899.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala
new file mode 100644
index 0000000000..b16f1f84fe
--- /dev/null
+++ b/test/files/pos/t5899.scala
@@ -0,0 +1,19 @@
+import scala.tools.nsc._
+
+trait Foo {
+ val global: Global
+ import global.{Name, Symbol, nme}
+
+ case class Bippy(name: Name)
+
+ def f(x: Bippy, sym: Symbol): Int = {
+ // no warning (!) for
+ // val Stable = sym.name.toTermName
+
+ val Stable = sym.name
+ Bippy(Stable) match {
+ case Bippy(nme.WILDCARD) => 1
+ case Bippy(Stable) => 2 // should not be considered unreachable
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t5910.java b/test/files/pos/t5910.java
new file mode 100644
index 0000000000..e007a1fbb5
--- /dev/null
+++ b/test/files/pos/t5910.java
@@ -0,0 +1,2 @@
+class Foo {
+};;;;;;; \ No newline at end of file
diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala
new file mode 100644
index 0000000000..90e7d84646
--- /dev/null
+++ b/test/files/pos/t5953.scala
@@ -0,0 +1,16 @@
+import scala.collection.{ mutable, immutable, generic, GenTraversableOnce }
+
+package object foo {
+ @inline implicit class TravOps[A, CC[A] <: GenTraversableOnce[A]](val coll: CC[A]) extends AnyVal {
+ def build[CC2[X]](implicit cbf: generic.CanBuildFrom[Nothing, A, CC2[A]]): CC2[A] = {
+ cbf() ++= coll.toIterator result
+ }
+ }
+}
+
+package foo {
+ object Test {
+ def f1[T](xs: Traversable[T]) = xs.convertTo[immutable.Vector]
+ def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector]
+ }
+}
diff --git a/test/files/pos/t5968.flags b/test/files/pos/t5968.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5968.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala
new file mode 100644
index 0000000000..0093f84fc0
--- /dev/null
+++ b/test/files/pos/t5968.scala
@@ -0,0 +1,8 @@
+object X {
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ // SI-5986 spurious exhaustivity warning here
+ }
+}
+
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index e813ce119b..9d4674d7c7 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -3,7 +3,7 @@ reload: CrashOnLoad.scala
askTypeCompletion at CrashOnLoad.scala(6,12)
================================================================================
[response] aksTypeCompletion at (6,12)
-retrieved 124 members
+retrieved 126 members
[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -25,6 +25,7 @@ retrieved 124 members
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
[accessible: true] `method contains(elem: Any)Boolean`
+[accessible: true] `method convertTo[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
@@ -109,6 +110,7 @@ retrieved 124 members
[accessible: true] `method toStream=> scala.collection.immutable.Stream[B]`
[accessible: true] `method toString()String`
[accessible: true] `method toTraversable=> Traversable[B]`
+[accessible: true] `method toVector=> Vector[B]`
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
new file mode 100644
index 0000000000..5e43d25f7e
--- /dev/null
+++ b/test/files/run/collection-conversions.check
@@ -0,0 +1,126 @@
+-- Testing iterator ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Vector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing List ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Buffer ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParVector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParArray ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Set ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing SetView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing BufferView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK \ No newline at end of file
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
new file mode 100644
index 0000000000..6d90caee4b
--- /dev/null
+++ b/test/files/run/collection-conversions.scala
@@ -0,0 +1,64 @@
+import collection._
+import mutable.Buffer
+import parallel.immutable.ParVector
+import parallel.mutable.ParArray
+import reflect.ClassTag
+
+object Test {
+
+ def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = {
+ print(" :" + msg +": ")
+ val isArray = obj match {
+ case x: Array[Int] => true
+ case _ => false
+ }
+ val expectedEquals =
+ if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq
+ else obj == expected
+ val tagEquals = tag == tag2
+ if(expectedEquals && tagEquals) print("OK")
+ else print("FAILED")
+ if(!expectedEquals) print(", " + obj + " != " + expected)
+ if(!tagEquals) print(", " + tag + " != " + tag2)
+ println("")
+ }
+
+ val testVector = Vector(1,2,3)
+ val testBuffer = Buffer(1,2,3)
+ val testGenSeq = GenSeq(1,2,3)
+ val testSeq = Seq(1,2,3)
+ val testStream = Stream(1,2,3)
+ val testArray = Array(1,2,3)
+ val testParVector = ParVector(1,2,3)
+ val testParArray = ParArray(1,2,3)
+
+ def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
+ val tmp = col
+ println("-- Testing " + name + " ---")
+ printResult("[Direct] Vector ", col.toVector, testVector)
+ printResult("[Copy] Vector ", col.convertTo[Vector], testVector)
+ printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
+ printResult("[Copy] Buffer ", col.convertTo[Buffer], testBuffer)
+ printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
+ printResult("[Copy] GenSeq ", col.convertTo[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.convertTo[Seq], testSeq)
+ printResult("[Direct] Stream ", col.toStream, testStream)
+ printResult("[Copy] Stream ", col.convertTo[Stream], testStream)
+ printResult("[Direct] Array ", col.toArray, testArray)
+ printResult("[Copy] Array ", col.convertTo[Array], testArray)
+ printResult("[Copy] ParVector", col.convertTo[ParVector], testParVector)
+ printResult("[Copy] ParArray ", col.convertTo[ParArray], testParArray)
+ }
+
+ def main(args: Array[String]): Unit = {
+ testConversion("iterator", (1 to 3).iterator)
+ testConversion("Vector", Vector(1,2,3))
+ testConversion("List", List(1,2,3))
+ testConversion("Buffer", Buffer(1,2,3))
+ testConversion("ParVector", ParVector(1,2,3))
+ testConversion("ParArray", ParArray(1,2,3))
+ testConversion("Set", Set(1,2,3))
+ testConversion("SetView", Set(1,2,3).view)
+ testConversion("BufferView", Buffer(1,2,3).view)
+ }
+}
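
In isolation, the two conversion paths the test distinguishes look like this; a small sketch assuming the convertTo method exercised above (object name and values are illustrative):

  object ConvertSketch extends App {
    val xs = List(1, 2, 3)
    println(xs.toVector)                                   // "Direct" path: Vector(1, 2, 3)
    println(xs.convertTo[scala.collection.mutable.Buffer]) // "Copy" path via CanBuildFrom: ArrayBuffer(1, 2, 3)
  }
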
diff --git a/test/files/run/macro-reify-splice-splice/Macros_1.scala b/test/files/run/macro-reify-splice-splice/Macros_1.scala
index 030a0a217e..4f1b600f63 100644
--- a/test/files/run/macro-reify-splice-splice/Macros_1.scala
+++ b/test/files/run/macro-reify-splice-splice/Macros_1.scala
@@ -1,5 +1,4 @@
import scala.reflect.makro.{Context => Ctx}
-import scala.reflect.{mirror => mr}
object Macros {
def foo = macro Impls.foo
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
new file mode 100644
index 0000000000..83055f2b70
--- /dev/null
+++ b/test/files/run/showraw_mods.check
@@ -0,0 +1 @@
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(java.lang.Object)), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_mods.scala b/test/files/run/showraw_mods.scala
new file mode 100644
index 0000000000..a10e4821dc
--- /dev/null
+++ b/test/files/run/showraw_mods.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree = reify{trait C { private[this] val x = 2; var y = x; lazy val z = y }}
+ println(showRaw(tree.tree))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
new file mode 100644
index 0000000000..82724cae44
--- /dev/null
+++ b/test/files/run/showraw_tree.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree.scala b/test/files/run/showraw_tree.scala
new file mode 100644
index 0000000000..3624a24d6a
--- /dev/null
+++ b/test/files/run/showraw_tree.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree))
+ println(showRaw(tree2.tree))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
new file mode 100644
index 0000000000..c6dbd6f1ce
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#1903), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#1908), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.scala b/test/files/run/showraw_tree_ids.scala
new file mode 100644
index 0000000000..b56b8b4476
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printIds = true))
+ println(showRaw(tree2.tree, printIds = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
new file mode 100644
index 0000000000..a12e21c611
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.scala b/test/files/run/showraw_tree_kinds.scala
new file mode 100644
index 0000000000..0ca5a387da
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printKinds = true))
+ println(showRaw(tree2.tree, printKinds = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
new file mode 100644
index 0000000000..02e7aeed7c
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#1913), List())
+[1] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List())
+[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#2231), List())
+[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
+[5] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List())
diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala
new file mode 100644
index 0000000000..cb2c2bfb0f
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
new file mode 100644
index 0000000000..60176c7192
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
+[4] TypeRef(ThisType(java.lang), java.lang.String, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(ThisType(java.lang), java.lang.String, List())
+[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_typed.scala b/test/files/run/showraw_tree_types_typed.scala
new file mode 100644
index 0000000000..d7ccc84ea3
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
new file mode 100644
index 0000000000..82724cae44
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_untyped.scala b/test/files/run/showraw_tree_types_untyped.scala
new file mode 100644
index 0000000000..4df2eb66b2
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printTypes = true))
+ println(showRaw(tree2.tree, printTypes = true))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
new file mode 100644
index 0000000000..0b409554a0
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#1913#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List())
+[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#2231#CTOR), List())
+[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
+[5] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List())
diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala
new file mode 100644
index 0000000000..dfd7abde52
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true))
+}
\ No newline at end of file
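The showraw tests above vary only in whether the tree is typechecked first and in which flags are passed to `showRaw`. A minimal sketch of the flag combinations they exercise (not part of the patch; the `ShowRawDemo` name is made up):

import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

object ShowRawDemo extends App {
  val tb   = runtimeMirror(getClass.getClassLoader).mkToolBox()
  val tree = reify(new collection.immutable.HashMap[String, String]).tree

  println(showRaw(tree, printTypes = true))          // untyped tree: no type footnotes yet

  val typed = tb.typeCheck(tree)
  println(showRaw(typed, printTypes = true))                    // [n] footnotes list the type of each node
  println(showRaw(typed, printIds = true, printTypes = true))   // additionally prints symbol ids (#nnnn)
  println(showRaw(typed, printIds = true, printKinds = true, printTypes = true)) // and symbol kinds (#CLS, #PK, ...)
}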
diff --git a/test/files/run/t3326.check b/test/files/run/t3326.check
new file mode 100644
index 0000000000..d0e11cebf7
--- /dev/null
+++ b/test/files/run/t3326.check
@@ -0,0 +1,8 @@
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
\ No newline at end of file
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
new file mode 100644
index 0000000000..f70cb01504
--- /dev/null
+++ b/test/files/run/t3326.scala
@@ -0,0 +1,74 @@
+
+
+
+import scala.math.Ordering
+
+
+
+/** The heart of the problem - we want to retain the ordering when
+ * using `++` on sorted maps.
+ *
+ * There are two `++` overloads: a generic one in the traversables and
+ * a map-specific one in `MapLike`, which knows about the ordering.
+ *
+ * The problem here is that the expected return type of the expression
+ * in which `++` appears drives the decision of which overload is
+ * selected.
+ * `collection.SortedMap` did not have `++` overridden to return
+ * `SortedMap`, but `immutable.Map` instead.
+ * This is why `collection.SortedMap` used to fall back to the generic
+ * `TraversableLike.++`, which knows nothing about the ordering.
+ *
+ * To keep `collection.SortedMap` from falling back to the more generic
+ * `TraversableLike.++`, we override the `MapLike.++` overload in
+ * `collection.SortedMap` to return the proper type, `SortedMap`.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ testCollectionSorted()
+ testImmutableSorted()
+ }
+
+ def testCollectionSorted() {
+ import collection._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+
+ def testImmutableSorted() {
+ import collection.immutable._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+}
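A minimal sketch of the behaviour t3326 pins down (not part of the patch; names are made up): with the `SortedMap`-returning `++` overload, the static result type stays `SortedMap` and the custom ordering survives the concatenation.

import scala.collection.SortedMap

object SortedMapConcatDemo extends App {
  implicit val rev: Ordering[Int] = Ordering.Int.reverse
  val a = SortedMap(1 -> "World", 2 -> "Hello")
  val b = SortedMap(4 -> "Bar", 5 -> "Foo")
  // The map-specific overload is selected and returns a SortedMap, so the
  // reverse ordering is kept; falling back to the generic TraversableLike.++
  // would have produced a plain Map and lost it.
  val c: SortedMap[Int, String] = a ++ b
  println(c) // Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
}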
diff --git a/test/files/run/t4809.scala b/test/files/run/t4809.scala
new file mode 100644
index 0000000000..b30d80562f
--- /dev/null
+++ b/test/files/run/t4809.scala
@@ -0,0 +1,34 @@
+
+
+import scala.util.control.Breaks._
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val x = tryBreakable {
+ break
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(x == 3, x)
+
+ val y = tryBreakable {
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(y == 2, y)
+
+ val z = tryBreakable {
+ break
+ 1.0
+ } catchBreak {
+ 2
+ }
+ assert(z == 2.0, z)
+ }
+
+}
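The last assertion in t4809 is the interesting one: the result type of `tryBreakable { ... } catchBreak { ... }` is the least upper bound of the two blocks, so mixing `Double` and `Int` yields a `Double`. A small usage sketch (not part of the patch; the helper name is made up):

import scala.util.control.Breaks._

object TryBreakableDemo extends App {
  def sumUntilNegative(xs: List[Int]): Double =
    tryBreakable {
      var s = 0.0
      for (x <- xs) { if (x < 0) break; s += x }
      s            // no break: the block's own Double result
    } catchBreak {
      -1           // break happened: the Int widens into the Double lub
    }

  println(sumUntilNegative(List(1, 2, 3)))  // 6.0
  println(sumUntilNegative(List(1, -2, 3))) // -1.0
}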
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
new file mode 100644
index 0000000000..ef0493b275
--- /dev/null
+++ b/test/files/run/t4935.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
new file mode 100644
index 0000000000..ac14fe5dbd
--- /dev/null
+++ b/test/files/run/t4935.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
new file mode 100644
index 0000000000..18631e2041
--- /dev/null
+++ b/test/files/run/t4935.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t4954.scala b/test/files/run/t4954.scala
new file mode 100644
index 0000000000..b4916e651d
--- /dev/null
+++ b/test/files/run/t4954.scala
@@ -0,0 +1,45 @@
+
+
+import collection._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3, "four" -> 4, "five" -> 5)
+ val expected = List("one", "two", "three", "four", "five")
+ assert(m.keys.iterator.toList == expected)
+ assert(m.keys.drop(0).iterator.toList == expected)
+ assert(m.keys.drop(1).iterator.toList == expected.drop(1))
+ assert(m.keys.drop(2).iterator.toList == expected.drop(2))
+ assert(m.keys.drop(3).iterator.toList == expected.drop(3))
+ assert(m.keys.drop(4).iterator.toList == expected.drop(4))
+ assert(m.keys.drop(5).iterator.toList == expected.drop(5))
+
+ val expvals = List(1, 2, 3, 4, 5)
+ assert(m.values.iterator.toList == expvals)
+ assert(m.values.drop(0).iterator.toList == expvals)
+ assert(m.values.drop(1).iterator.toList == expvals.drop(1))
+ assert(m.values.drop(2).iterator.toList == expvals.drop(2))
+ assert(m.values.drop(3).iterator.toList == expvals.drop(3))
+ assert(m.values.drop(4).iterator.toList == expvals.drop(4))
+ assert(m.values.drop(5).iterator.toList == expvals.drop(5))
+
+ val pred = (x: String) => x.length < 6
+ val filtered = m.filterKeys(pred)
+ assert(filtered.drop(0).keys.toList == expected.filter(pred))
+ assert(filtered.drop(1).keys.toList == expected.filter(pred).drop(1))
+ assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2))
+ assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3))
+ assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4))
+
+ val mapped = m.mapValues(-_)
+ assert(mapped.drop(0).keys.toList == expected)
+ assert(mapped.drop(1).keys.toList == expected.drop(1))
+ assert(mapped.drop(2).keys.toList == expected.drop(2))
+ assert(mapped.drop(3).keys.toList == expected.drop(3))
+ assert(mapped.drop(4).keys.toList == expected.drop(4))
+ assert(mapped.drop(5).keys.toList == expected.drop(5))
+ }
+
+}
diff --git a/test/files/run/t5284.check b/test/files/run/t5284.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t5284.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5284.scala b/test/files/run/t5284.scala
new file mode 100644
index 0000000000..ba0845fb8e
--- /dev/null
+++ b/test/files/run/t5284.scala
@@ -0,0 +1,25 @@
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which accepts an instance of type `T` - we know we can
+ * safely cast `T` to `W` whenever the type bounds on `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val a = Blarg(Array(1, 2, 3))
+ println(a.m((x: Int) => x + 1))
+ }
+}
+
+
+object Blarg {
+ def apply[T: Manifest](a: Array[T]) = new Blarg(a)
+}
+
+
+class Blarg[@specialized(Int) T: Manifest](val a: Array[T]) {
+ def m[@specialized(Int) W >: T, @specialized(Int) S](f: W => S) = f(a(0))
+}
diff --git a/test/files/run/t5284b.check b/test/files/run/t5284b.check
new file mode 100644
index 0000000000..98d9bcb75a
--- /dev/null
+++ b/test/files/run/t5284b.check
@@ -0,0 +1 @@
+17
diff --git a/test/files/run/t5284b.scala b/test/files/run/t5284b.scala
new file mode 100644
index 0000000000..a9282a895f
--- /dev/null
+++ b/test/files/run/t5284b.scala
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which expects a type `T` - we know we can
+ * safely cast `W` to `T` whenever the type bounds on `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(17))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val id: T => T = x => x
+
+ def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
+}
diff --git a/test/files/run/t5284c.check b/test/files/run/t5284c.check
new file mode 100644
index 0000000000..00750edc07
--- /dev/null
+++ b/test/files/run/t5284c.check
@@ -0,0 +1 @@
+3
diff --git a/test/files/run/t5284c.scala b/test/files/run/t5284c.scala
new file mode 100644
index 0000000000..383b84c2cc
--- /dev/null
+++ b/test/files/run/t5284c.scala
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+/** Here we have a compound type `List[W]` used in
+ * a position where `List[T]` is expected. The cast
+ * emitted in the normalized `bar` is safe because the
+ * normalized `bar` can only be called if the type
+ * bounds hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(List(1, 2, 3)))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val len: List[T] => Int = xs => xs.length
+
+ def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
+}
diff --git a/test/files/run/t5914.check b/test/files/run/t5914.check
new file mode 100644
index 0000000000..818e321255
--- /dev/null
+++ b/test/files/run/t5914.check
@@ -0,0 +1 @@
+correct
diff --git a/test/files/run/t5914.scala b/test/files/run/t5914.scala
new file mode 100644
index 0000000000..45d8815738
--- /dev/null
+++ b/test/files/run/t5914.scala
@@ -0,0 +1,23 @@
+import scala.reflect.ClassTag
+
+trait Trees {
+ class Tree
+ implicit val ttTag: ClassTag[TypeTree]
+ type TypeTree <: Tree
+ val TypeTree: TypeTreeExtractor
+ abstract class TypeTreeExtractor {
+ def unapply(t: TypeTree): Option[String]
+ }
+ def test(tree: Tree) =
+ tree match {
+ case TypeTree(_) => println("lolwut")
+ case null => println("correct")
+ }
+}
+
+object Test extends App with Trees {
+ val ttTag = implicitly[ClassTag[TypeTree]]
+ case class TypeTree(meh: String) extends Tree
+ object TypeTree extends TypeTreeExtractor
+ test(null) // should not crash
+}
\ No newline at end of file
diff --git a/test/files/run/t5966.check b/test/files/run/t5966.check
new file mode 100644
index 0000000000..bfe8358a77
--- /dev/null
+++ b/test/files/run/t5966.check
@@ -0,0 +1,3 @@
+(o()_)("") = List()
+(o("a1")_)("") = WrappedArray(a1)
+(o("a1", "a2")_)("") = WrappedArray(a1, a2)
diff --git a/test/files/run/t5966.scala b/test/files/run/t5966.scala
new file mode 100644
index 0000000000..bbe1a6e874
--- /dev/null
+++ b/test/files/run/t5966.scala
@@ -0,0 +1,9 @@
+object o { def apply(i: AnyRef*)(j: String) = i }
+
+object Test {
+ def main(args: Array[String]) {
+ println("(o()_)(\"\") = " + (o()_)(""))
+ println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
+ println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
+ }
+}
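The parenthesised calls in t5966 can also be read as ordinary partial applications; a small sketch (not part of the patch; names are made up) of what `(o(...)_)` expands to:

object PartialApplyDemo extends App {
  object o { def apply(i: AnyRef*)(j: String) = i }

  // Supplying the varargs list and eta-expanding over the second parameter
  // list yields a plain function from String to the captured varargs.
  val f: String => Seq[AnyRef] = o("a1", "a2") _
  println(f(""))       // WrappedArray(a1, a2)
  println((o() _)("")) // List(), even with an empty varargs list
}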
diff --git a/test/files/run/t5971.check b/test/files/run/t5971.check
new file mode 100644
index 0000000000..0c36a1ff02
--- /dev/null
+++ b/test/files/run/t5971.check
@@ -0,0 +1,4 @@
+r,b
+r
+a,b
+r,a,b
\ No newline at end of file
diff --git a/test/files/run/t5971.scala b/test/files/run/t5971.scala
new file mode 100644
index 0000000000..dbd9beebb3
--- /dev/null
+++ b/test/files/run/t5971.scala
@@ -0,0 +1,23 @@
+
+
+
+
+
+/** When using the `AbstractTransformed` abstract inner class in views in
+ * order to force the generation of bridges, one must take care to push the
+ * corresponding collection trait (such as `Iterable` or `Seq`) as far as
+ * possible to the left in the linearization order -- otherwise, overridden
+ * methods from these traits can override the already overridden methods in
+ * the view. This was the case with `takeWhile`.
+ * Mind blowing, I know.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ println("bar".view.reverse.filter(_ > 'a').mkString(","))
+ println("bar".view.reverse.take(1).mkString(","))
+ println("bar".view.reverse.dropWhile(_ > 'a').mkString(","))
+ println("bar".view.reverse.takeWhile(_ => true).mkString(","))
+ }
+
+}
diff --git a/test/files/run/t5986.check b/test/files/run/t5986.check
new file mode 100644
index 0000000000..4101770c6d
--- /dev/null
+++ b/test/files/run/t5986.check
@@ -0,0 +1,15 @@
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
\ No newline at end of file
diff --git a/test/files/run/t5986.scala b/test/files/run/t5986.scala
new file mode 100644
index 0000000000..8cf7086f98
--- /dev/null
+++ b/test/files/run/t5986.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.collection._
+
+
+
+/** A sorted set should not replace an existing element when an equal
+ * element is added to the set.
+ */
+object Test {
+
+ class Foo(val name: String, val n: Int) {
+ override def equals(obj: Any): Boolean = obj match { case other: Foo => name == other.name; case _ => false }
+ override def hashCode = name.##
+ override def toString = "Foo(" + name + ", " + n + ")"
+ }
+
+ implicit val ordering: Ordering[Foo] = Ordering.fromLessThan[Foo] { (a, b) => a.name.compareTo(b.name) < 0 }
+
+ def check[S <: Set[Foo]](set: S) {
+ def output(s: Set[Foo]) = println(s.toList.sorted.mkString(","))
+ output(set + new Foo("bar", 2))
+ output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4)))
+ output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4)))
+ }
+
+ def main(args: Array[String]) {
+ check(Set(new Foo("bar", 1)))
+ check(immutable.Set(new Foo("bar", 1)))
+ check(mutable.Set(new Foo("bar", 1)))
+ check(immutable.SortedSet(new Foo("bar", 1)))
+ check(mutable.SortedSet(new Foo("bar", 1)))
+ }
+
+}
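A minimal distillation of the invariant t5986 checks (not part of the patch; `Elem` is a made-up stand-in for `Foo`): because equality and the ordering only look at `name`, adding an equal element must keep the one already in the set.

import scala.collection.immutable.SortedSet

object NoReplaceDemo extends App {
  class Elem(val name: String, val n: Int) {
    override def equals(other: Any) = other match { case e: Elem => name == e.name; case _ => false }
    override def hashCode = name.##
    override def toString = "Elem(" + name + ", " + n + ")"
  }
  implicit val ord: Ordering[Elem] = Ordering.by((e: Elem) => e.name)

  val s = SortedSet(new Elem("bar", 1))
  // "bar" is already present, so the addition is a no-op and n = 1 survives.
  println(s + new Elem("bar", 2)) // TreeSet(Elem(bar, 1))
}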
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index 0145d3321f..4e8480d72e 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -3,7 +3,6 @@ import org.scalacheck.Arbitrary.{arbitrary, arbThrowable}
import org.scalacheck.Gen.oneOf
import org.scalacheck.util.StdRand
import org.scalacheck.Prop._
-import org.scalacheck.ConsoleReporter.{testReport, propReport}
import org.scalacheck.Test.{Params, check}
import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
index e4b356c889..e2609fa200 100644
--- a/test/files/scalacheck/redblacktree.scala
+++ b/test/files/scalacheck/redblacktree.scala
@@ -121,7 +121,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
case Some((key, _)) => key.init.mkString + "MN"
@@ -144,7 +144,7 @@ package scala.collection.immutable.redblacktree {
override type ModifyParm = Int
override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
- case (key, _) => update(tree, key, newValue)
+ case (key, _) => update(tree, key, newValue, true)
} getOrElse tree
property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
diff --git a/test/files/neg/t5334_1.scala b/test/pending/run/t5334_1.scala
index b75badb145..b75badb145 100644
--- a/test/files/neg/t5334_1.scala
+++ b/test/pending/run/t5334_1.scala
diff --git a/test/files/neg/t5334_2.scala b/test/pending/run/t5334_2.scala
index e082e3b8e3..e082e3b8e3 100644
--- a/test/files/neg/t5334_2.scala
+++ b/test/pending/run/t5334_2.scala
diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala
deleted file mode 100644
index 6ed51366cb..0000000000
--- a/test/scaladoc/resources/implicits-ambiguating-res.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the
- * class' own members
- *
- * {{{
- * scala> class A { def foo(t: String) = 4 }
- * defined class A
- *
- * scala> class B { def foo(t: Any) = 5 }
- * defined class B
- *
- * scala> implicit def AtoB(a:A) = new B
- * AtoB: (a: A)B
- *
- * scala> val a = new A
- * a: A = A@28f553e3
- *
- * scala> a.foo("T")
- * res1: Int = 4
- *
- * scala> a.foo(4)
- * res2: Int = 5
- * }}}
- */
-package scala.test.scaladoc.implicits.ambiguating
-import language.implicitConversions // according to SIP18
-
-/** - conv1-5 should be ambiguous
- * - conv6-7 should not be ambiguous
- * - conv8 should be ambiguous
- * - conv9 should be ambiguous
- * - conv10 and conv11 should not be ambiguous */
-class A[T]
-/** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
-class B extends A[Int]
-/** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
-class C extends A[Double]
- /** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
-class D extends A[AnyRef]
-
-class X[T] {
- def conv1: AnyRef = ???
- def conv2: T = ???
- def conv3(l: Int): AnyRef = ???
- def conv4(l: AnyRef): AnyRef = ???
- def conv5(l: AnyRef): String = ???
- def conv6(l: String)(m: String): AnyRef = ???
- def conv7(l: AnyRef)(m: AnyRef): AnyRef = ???
- def conv8(l: AnyRef): AnyRef = ???
- def conv9(l: String): AnyRef = ???
- def conv10(l: T): T = ???
- def conv11(l: T): T = ???
-}
-
-class Z[T] {
- def conv1: AnyRef = ???
- def conv2: T = ???
- def conv3(p: Int): AnyRef = ???
- def conv4(p: AnyRef): String = ???
- def conv5(p: AnyRef): AnyRef = ???
- def conv6(p: String, q: String): AnyRef = ???
- def conv7(p: AnyRef, q: AnyRef): AnyRef = ???
- def conv8(p: String): AnyRef = ???
- def conv9(p: AnyRef): AnyRef = ???
- def conv10(p: Int): T = ???
- def conv11(p: String): T = ???
-}
-
-object A {
- implicit def AtoX[T](a: A[T]) = new X[T]
- implicit def AtoZ[T](a: A[T]) = new Z[T]
-}
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
index d6c0332c10..65d7bdf67c 100644
--- a/test/scaladoc/resources/implicits-base-res.scala
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -16,9 +16,8 @@ trait MyNumeric[R]
* def convToManifestA(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
* def convToMyNumericA(x: T) // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
* def convToNumericA(x: T) // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
- * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
- * def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints
+ * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
* def convToTraversableOps(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
* // should not be abstract!
* }}}
@@ -53,10 +52,9 @@ object A {
* def convToManifestA(x: Double) // pimpA7: no constraints
* def convToMyNumericA(x: Double) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
* def convToNumericA(x: Double) // pimpA1: no constraintsd
- * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints
* def convToTraversableOps(x: Double) // pimpA7: no constraints
- * // should not be abstract!
+ * // should not be abstract!
* }}}
*/
class B extends A[Double]
@@ -70,8 +68,7 @@ object B extends A
* def convToIntA(x: Int) // pimpA2: no constraints
* def convToMyNumericA(x: Int) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
* def convToNumericA(x: Int) // pimpA1: no constraints
- * def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
- * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints
* }}}
*/
class C extends A[Int]
@@ -84,8 +81,7 @@ object C extends A
* {{{
* def convToMyNumericA(x: String) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
* def convToNumericA(x: String) // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
- * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints
* }}}
*/
class D extends A[String]
diff --git a/test/scaladoc/resources/implicits-elimination-res.scala b/test/scaladoc/resources/implicits-elimination-res.scala
index 5f7135c9e8..b23667440c 100644
--- a/test/scaladoc/resources/implicits-elimination-res.scala
+++ b/test/scaladoc/resources/implicits-elimination-res.scala
@@ -2,13 +2,13 @@
* Testing scaladoc implicits elimination
*/
package scala.test.scaladoc.implicits.elimination {
-
+
import language.implicitConversions // according to SIP18
/** No conversion, as B doesn't bring any member */
class A
class B { class C; trait V; type T; }
- object A {
- implicit def toB(a: A): B = null
+ object A {
+ implicit def toB(a: A): B = null
}
}
diff --git a/test/scaladoc/run/diagrams-base.scala b/test/scaladoc/run/diagrams-base.scala
deleted file mode 100644
index 38bed06502..0000000000
--- a/test/scaladoc/run/diagrams-base.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.model.diagram._
-import scala.tools.partest.ScaladocModelTest
-
-object Test extends ScaladocModelTest {
-
- override def code = """
- package scala.test.scaladoc.diagrams
-
- import language.implicitConversions
-
- trait A
- trait B
- trait C
- class E extends A with B with C
- object E { implicit def eToT(e: E) = new T }
-
- class F extends E
- class G extends E
- private class H extends E /* since it's private, it won't go into the diagram */
- class T { def t = true }
-
- class X
- object X { implicit def xToE(x: X) = new E}
- class Y extends X
- class Z
- object Z { implicit def zToE(z: Z) = new E}
- """
-
- // diagrams must be started. In case there's an error with dot, it should not report anything
- def scaladocSettings = "-diagrams -implicits"
-
- def testModel(rootPackage: Package) = {
- // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
- import access._
-
- val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
- val E = base._class("E")
- val diag = E.inheritanceDiagram.get
-
- // there must be a single this node
- assert(diag.nodes.filter(_.isThisNode).length == 1)
-
- // 1. check class E diagram
- assert(diag.isClassDiagram)
-
- val (incoming, outgoing) = diag.edges.partition(!_._1.isThisNode)
- assert(incoming.length == 5)
- assert(outgoing.head._2.length == 4)
-
- val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
- assert(outgoingSuperclass.length == 3)
- assert(outgoingImplicit.length == 1)
-
- val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
- assert(incomingSubclass.length == 2)
- assert(incomingImplicit.length == 3)
-
- val classDiag = diag.asInstanceOf[ClassDiagram]
- assert(classDiag.incomingImplicits.length == 3)
- assert(classDiag.outgoingImplicits.length == 1)
-
- // 2. check package diagram
- // NOTE: Z should be eliminated because it's isolated
- val packDiag = base.contentDiagram.get
- assert(packDiag.isPackageDiagram)
- assert(packDiag.nodes.length == 8) // check singular object removal
- assert(packDiag.edges.length == 4)
- assert(packDiag.edges.foldLeft(0)(_ + _._2.length) == 6)
-
- // TODO: Should check numbering
- }
-}
\ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-determinism.check b/test/scaladoc/run/diagrams-determinism.check
deleted file mode 100644
index 619c56180b..0000000000
--- a/test/scaladoc/run/diagrams-determinism.check
+++ /dev/null
@@ -1 +0,0 @@
-Done.
diff --git a/test/scaladoc/run/diagrams-determinism.scala b/test/scaladoc/run/diagrams-determinism.scala
deleted file mode 100644
index 6c8db05d78..0000000000
--- a/test/scaladoc/run/diagrams-determinism.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.model.diagram._
-import scala.tools.partest.ScaladocModelTest
-
-object Test extends ScaladocModelTest {
-
- override def code = """
- package scala.test.scaladoc.diagrams
-
- trait A
- trait B extends A
- trait C extends B
- trait D extends C with A
- trait E extends C with A with D
- """
-
- // diagrams must be started. In case there's an error with dot, it should not report anything
- def scaladocSettings = "-diagrams -implicits"
-
- def testModel(rootPackage: Package) = {
- // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
- import access._
-
- def diagramString(rootPackage: Package) = {
- val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
- val A = base._trait("A")
- val B = base._trait("B")
- val C = base._trait("C")
- val D = base._trait("D")
- val E = base._trait("E")
-
- base.contentDiagram.get.toString + "\n" +
- A.inheritanceDiagram.get.toString + "\n" +
- B.inheritanceDiagram.get.toString + "\n" +
- C.inheritanceDiagram.get.toString + "\n" +
- D.inheritanceDiagram.get.toString + "\n" +
- E.inheritanceDiagram.get.toString
- }
-
- // 1. check that several runs produce the same output
- val run0 = diagramString(rootPackage)
- val run1 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
- val run2 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
- val run3 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
-
- // any variance in the order of the diagram elements should crash the following tests:
- assert(run0 == run1)
- assert(run1 == run2)
- assert(run2 == run3)
-
- // 2. check the order in the diagram: this node, subclasses, and then implicit conversions
- def assertRightOrder(diagram: Diagram) = {
- for ((node, subclasses) <- diagram.edges)
- assert(subclasses == subclasses.filter(_.isThisNode) :::
- subclasses.filter(_.isNormalNode) :::
- subclasses.filter(_.isImplicitNode))
- }
-
- val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
- assertRightOrder(base.contentDiagram.get)
- assertRightOrder(base._trait("A").inheritanceDiagram.get)
- assertRightOrder(base._trait("B").inheritanceDiagram.get)
- assertRightOrder(base._trait("C").inheritanceDiagram.get)
- assertRightOrder(base._trait("D").inheritanceDiagram.get)
- assertRightOrder(base._trait("E").inheritanceDiagram.get)
- }
-}
\ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-filtering.check b/test/scaladoc/run/diagrams-filtering.check
deleted file mode 100644
index 619c56180b..0000000000
--- a/test/scaladoc/run/diagrams-filtering.check
+++ /dev/null
@@ -1 +0,0 @@
-Done.
diff --git a/test/scaladoc/run/diagrams-filtering.scala b/test/scaladoc/run/diagrams-filtering.scala
deleted file mode 100644
index dfde5cac52..0000000000
--- a/test/scaladoc/run/diagrams-filtering.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.model.diagram._
-import scala.tools.partest.ScaladocModelTest
-
-object Test extends ScaladocModelTest {
-
- override def code = """
- package scala.test.scaladoc
-
- /** @contentDiagram hideNodes "scala.test.*.A" "java.*", hideEdges ("*G" -> "*E") */
- package object diagrams {
- def foo = 4
- }
-
- package diagrams {
- import language.implicitConversions
-
- /** @inheritanceDiagram hideIncomingImplicits, hideNodes "*E" */
- trait A
- trait AA extends A
- trait B
- trait AAA extends B
-
- /** @inheritanceDiagram hideDiagram */
- trait C
- trait AAAA extends C
-
- /** @inheritanceDiagram hideEdges("*E" -> "*A") */
- class E extends A with B with C
- class F extends E
- /** @inheritanceDiagram hideNodes "*G" "G" */
- class G extends E
- private class H extends E /* since it's private, it won't go into the diagram */
- class T { def t = true }
- object E {
- implicit def eToT(e: E) = new T
- implicit def eToA(e: E) = new A { }
- }
- }
- """
-
- // diagrams must be started. In case there's an error with dot, it should not report anything
- def scaladocSettings = "-diagrams -implicits"
-
- def testModel(rootPackage: Package) = {
- // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
- import access._
-
- // base package
- // Assert we have 7 nodes and 6 edges
- val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
- val packDiag = base.contentDiagram.get
- assert(packDiag.nodes.length == 6)
- assert(packDiag.edges.map(_._2.length).sum == 5)
-
- // trait A
- // Assert we have just 2 nodes and 1 edge
- val A = base._trait("A")
- val ADiag = A.inheritanceDiagram.get
- assert(ADiag.nodes.length == 2)
- assert(ADiag.edges.map(_._2.length).sum == 1)
-
- // trait C
- val C = base._trait("C")
- assert(!C.inheritanceDiagram.isDefined)
-
- // trait G
- val G = base._trait("G")
- assert(!G.inheritanceDiagram.isDefined)
-
- // trait E
- val E = base._class("E")
- val EDiag = E.inheritanceDiagram.get
-
- // there must be a single this node
- assert(EDiag.nodes.filter(_.isThisNode).length == 1)
-
- // 1. check class E diagram
- val (incoming, outgoing) = EDiag.edges.partition(!_._1.isThisNode)
- assert(incoming.length == 2) // F and G
- assert(outgoing.head._2.length == 3) // B, C and T
-
- val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
- assert(outgoingSuperclass.length == 2) // B and C
- assert(outgoingImplicit.length == 1) // T
-
- val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
- assert(incomingSubclass.length == 2) // F and G
- assert(incomingImplicit.length == 0)
-
- assert(EDiag.nodes.length == 6) // E, B and C, F and G and the implicit conversion to T
- }
-}
\ No newline at end of file
diff --git a/test/scaladoc/run/implicits-ambiguating.check b/test/scaladoc/run/implicits-ambiguating.check
deleted file mode 100644
index 619c56180b..0000000000
--- a/test/scaladoc/run/implicits-ambiguating.check
+++ /dev/null
@@ -1 +0,0 @@
-Done.
diff --git a/test/scaladoc/run/implicits-ambiguating.scala b/test/scaladoc/run/implicits-ambiguating.scala
deleted file mode 100644
index 19e0f72b7b..0000000000
--- a/test/scaladoc/run/implicits-ambiguating.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.partest.ScaladocModelTest
-
-object Test extends ScaladocModelTest {
-
- // test a file instead of a piece of code
- override def resourceFile = "implicits-ambiguating-res.scala"
-
- // start implicits
- def scaladocSettings = "-implicits"
-
- def testModel(root: Package) = {
- // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
- import access._
-
- // SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
- val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._object("ambiguating")
- var conv1: ImplicitConversion = null
- var conv2: ImplicitConversion = null
-
-//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- val A = base._class("A")
-
- conv1 = A._conversion(base._object("A").qualifiedName + ".AtoX")
- conv2 = A._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv1.members.length == 11)
- assert(conv2.members.length == 11)
- assert(conv1.constraints.length == 0)
- assert(conv2.constraints.length == 0)
-
- /** - conv1-5 should be ambiguous
- * - conv6-7 should not be ambiguous
- * - conv8 should be ambiguous
- * - conv9 should be ambiguous
- * - conv10 and conv11 should not be ambiguous */
- def check1to9(cls: String): Unit = {
- for (conv <- (1 to 5).map("conv" + _)) {
- assert(conv1._member(conv).byConversion.get.isAmbiguous, cls + " - AtoX." + conv + " is ambiguous")
- assert(conv2._member(conv).byConversion.get.isAmbiguous, cls + " - AtoZ." + conv + " is ambiguous")
- }
- for (conv <- (6 to 7).map("conv" + _)) {
- assert(!conv1._member(conv).byConversion.get.isAmbiguous, cls + " - AtoX." + conv + " is not ambiguous")
- assert(!conv2._member(conv).byConversion.get.isAmbiguous, cls + " - AtoZ." + conv + " is not ambiguous")
- }
- assert(conv1._member("conv8").byConversion.get.isAmbiguous, cls + " - AtoX.conv8 is ambiguous")
- assert(conv2._member("conv8").byConversion.get.isAmbiguous, cls + " - AtoZ.conv8 is ambiguous")
- assert(conv1._member("conv9").byConversion.get.isAmbiguous, cls + " - AtoX.conv9 is ambiguous")
- assert(conv2._member("conv9").byConversion.get.isAmbiguous, cls + " - AtoZ.conv9 is ambiguous")
- }
- check1to9("A")
- assert(!conv1._member("conv10").byConversion.get.isAmbiguous, "A - AtoX.conv10 is not ambiguous")
- assert(!conv2._member("conv10").byConversion.get.isAmbiguous, "A - AtoZ.conv10 is not ambiguous")
- assert(!conv1._member("conv11").byConversion.get.isAmbiguous, "A - AtoX.conv11 is not ambiguous")
- assert(!conv2._member("conv11").byConversion.get.isAmbiguous, "A - AtoZ.conv11 is not ambiguous")
-
-//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- val B = base._class("B")
-
- conv1 = B._conversion(base._object("A").qualifiedName + ".AtoX")
- conv2 = B._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv1.members.length == 11)
- assert(conv2.members.length == 11)
- assert(conv1.constraints.length == 0)
- assert(conv2.constraints.length == 0)
-
- /** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
- check1to9("B")
- assert(conv1._member("conv10").byConversion.get.isAmbiguous, "B - AtoX.conv10 is ambiguous")
- assert(conv2._member("conv10").byConversion.get.isAmbiguous, "B - AtoZ.conv10 is ambiguous")
- assert(!conv1._member("conv11").byConversion.get.isAmbiguous, "B - AtoX.conv11 is not ambiguous")
- assert(!conv2._member("conv11").byConversion.get.isAmbiguous, "B - AtoZ.conv11 is not ambiguous")
-
-//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- val C = base._class("C")
-
- conv1 = C._conversion(base._object("A").qualifiedName + ".AtoX")
- conv2 = C._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv1.members.length == 11)
- assert(conv2.members.length == 11)
- assert(conv1.constraints.length == 0)
- assert(conv2.constraints.length == 0)
-
- /** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
- check1to9("C")
- assert(!conv1._member("conv10").byConversion.get.isAmbiguous, "C - AtoX.conv10 is not ambiguous")
- assert(!conv2._member("conv10").byConversion.get.isAmbiguous, "C - AtoZ.conv10 is not ambiguous")
- assert(!conv1._member("conv11").byConversion.get.isAmbiguous, "C - AtoX.conv11 is not ambiguous")
- assert(!conv2._member("conv11").byConversion.get.isAmbiguous, "C - AtoZ.conv11 is not ambiguous")
-
-//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- val D = base._class("D")
-
- conv1 = D._conversion(base._object("A").qualifiedName + ".AtoX")
- conv2 = D._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv1.members.length == 11)
- assert(conv2.members.length == 11)
- assert(conv1.constraints.length == 0)
- assert(conv2.constraints.length == 0)
-
- /** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
- check1to9("D")
- assert(!conv1._member("conv10").byConversion.get.isAmbiguous, "D - AtoX.conv10 is not ambiguous")
- assert(!conv2._member("conv10").byConversion.get.isAmbiguous, "D - AtoZ.conv10 is not ambiguous")
- assert(conv1._member("conv11").byConversion.get.isAmbiguous, "D - AtoX.conv11 is ambiguous")
- assert(conv2._member("conv11").byConversion.get.isAmbiguous, "D - AtoZ.conv11 is ambiguous")
- }
-}
\ No newline at end of file
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
index ce2d025511..06d017ed70 100644
--- a/test/scaladoc/run/implicits-base.scala
+++ b/test/scaladoc/run/implicits-base.scala
@@ -22,12 +22,8 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
- // def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
- conv = A._conversion(A.qualifiedName + ".pimpA0")
- assert(conv.members.length == 1)
- assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
- assert(conv._member("convToPimpedA").resultType.name == "T")
+ // the method pimped on by pimpA0 should be shadowed by the method in class A
+ assert(A._conversions(A.qualifiedName + ".pimpA0").isEmpty)
// def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
conv = A._conversion(A.qualifiedName + ".pimpA1")
@@ -57,7 +53,6 @@ object Test extends ScaladocModelTest {
conv = A._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
// def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
@@ -81,16 +76,10 @@ object Test extends ScaladocModelTest {
val B = base._class("B")
// these conversions should not affect B
+ assert(B._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
- // def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
- conv = B._conversion(A.qualifiedName + ".pimpA0")
- assert(conv.members.length == 1)
- assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
- assert(conv._member("convToPimpedA").resultType.name == "Double")
-
// def convToNumericA: Double // pimpA1: no constraintsd
conv = B._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -107,7 +96,6 @@ object Test extends ScaladocModelTest {
conv = B._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
// def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
@@ -131,17 +119,11 @@ object Test extends ScaladocModelTest {
val C = base._class("C")
// these conversions should not affect C
+ assert(C._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
- // def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
- conv = C._conversion(A.qualifiedName + ".pimpA0")
- assert(conv.members.length == 1)
- assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
- assert(conv._member("convToPimpedA").resultType.name == "Int")
-
// def convToNumericA: Int // pimpA1: no constraints
conv = C._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -158,7 +140,6 @@ object Test extends ScaladocModelTest {
conv = C._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
// def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
@@ -172,18 +153,12 @@ object Test extends ScaladocModelTest {
val D = base._class("D")
// these conversions should not affect D
+ assert(D._conversions(A.qualifiedName + ".pimpA0").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
- // def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
- conv = D._conversion(A.qualifiedName + ".pimpA0")
- assert(conv.members.length == 1)
- assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
- assert(conv._member("convToPimpedA").resultType.name == "String")
-
// def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
conv = D._conversion(A.qualifiedName + ".pimpA1")
assert(conv.members.length == 1)
@@ -194,7 +169,6 @@ object Test extends ScaladocModelTest {
conv = D._conversion(A.qualifiedName + ".pimpA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(conv._member("convToPimpedA").byConversion.get.isShadowed)
assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
// def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
diff --git a/test/scaladoc/run/diagrams-base.check b/test/scaladoc/run/implicits-elimination.check
index 619c56180b..619c56180b 100644
--- a/test/scaladoc/run/diagrams-base.check
+++ b/test/scaladoc/run/implicits-elimination.check
diff --git a/test/scaladoc/run/implicits-elimination.scala b/test/scaladoc/run/implicits-elimination.scala
new file mode 100644
index 0000000000..ed37b9cd90
--- /dev/null
+++ b/test/scaladoc/run/implicits-elimination.scala
@@ -0,0 +1,23 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-elimination-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // SEE THE test/resources/implicits-elimination-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("elimination")
+ val A = base._class("A")
+
+ assert(A._conversions(A.qualifiedName + ".toB").isEmpty)
+ }
+}
diff --git a/test/scaladoc/run/implicits-shadowing.scala b/test/scaladoc/run/implicits-shadowing.scala
index f8a016fac9..7835223d21 100644
--- a/test/scaladoc/run/implicits-shadowing.scala
+++ b/test/scaladoc/run/implicits-shadowing.scala
@@ -22,8 +22,12 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
conv = A._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 11)
- assert(conv.members.forall(_.byConversion.get.isShadowed))
+ assert(conv.members.length == 5)
+ conv._member("conv5")
+ conv._member("conv8")
+ conv._member("conv9")
+ conv._member("conv10")
+ conv._member("conv11")
assert(conv.constraints.length == 0)
//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -31,8 +35,11 @@ object Test extends ScaladocModelTest {
val B = base._class("B")
conv = B._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 11)
- assert(conv.members.forall(_.byConversion.get.isShadowed))
+ assert(conv.members.length == 4)
+ conv._member("conv5")
+ conv._member("conv8")
+ conv._member("conv9")
+ conv._member("conv11")
assert(conv.constraints.length == 0)
//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -40,8 +47,12 @@ object Test extends ScaladocModelTest {
val C = base._class("C")
conv = C._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 11)
- assert(conv.members.forall(_.byConversion.get.isShadowed))
+ assert(conv.members.length == 5)
+ conv._member("conv5")
+ conv._member("conv8")
+ conv._member("conv9")
+ conv._member("conv10")
+ conv._member("conv11")
assert(conv.constraints.length == 0)
//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -49,8 +60,11 @@ object Test extends ScaladocModelTest {
val D = base._class("D")
conv = D._conversion(base._object("A").qualifiedName + ".AtoZ")
- assert(conv.members.length == 11)
- assert(conv.members.forall(_.byConversion.get.isShadowed))
+ assert(conv.members.length == 4)
+ conv._member("conv5")
+ conv._member("conv8")
+ conv._member("conv9")
+ conv._member("conv10")
assert(conv.constraints.length == 0)
}
-}
+}
\ No newline at end of file
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index b576ba5544..68ca68efdd 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -5,12 +5,10 @@ import scala.tools.nsc.Global
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.comment._
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.model.diagram._
class Factory(val g: Global, val s: doc.Settings)
extends doc.model.ModelFactory(g, s) {
- thisFactory: Factory with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with doc.model.TreeFactory =>
+ thisFactory: Factory with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory =>
def strip(c: Comment): Option[Inline] = {
c.body match {
@@ -31,7 +29,7 @@ object Test extends Properties("CommentFactory") {
val settings = new doc.Settings((str: String) => {})
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
val g = new Global(settings, reporter)
- (new Factory(g, settings) with ModelFactoryImplicitSupport with DiagramFactory with CommentFactory with doc.model.TreeFactory)
+ (new Factory(g, settings) with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory)
}
def parse(src: String, dst: Inline) = {