-rw-r--r--  .gitignore | 2
-rw-r--r--  bridge/src/main/scala/xsbt/ScaladocInterface.scala | 72
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala | 79
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java | 63
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala | 49
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala | 34
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala | 191
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala | 27
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala | 199
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala | 32
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala | 115
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala | 94
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala | 82
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala | 28
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala | 25
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala | 344
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala | 846
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala | 84
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala | 224
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/entities.scala | 115
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/factories.scala | 183
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/internal.scala | 89
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/java.scala | 223
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/json.scala | 93
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/parsers.scala | 98
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/references.scala | 20
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala | 92
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala | 125
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala | 25
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/mutate.scala | 65
-rw-r--r--  dottydoc/test/BaseTest.scala | 57
-rw-r--r--  dottydoc/test/ConstructorTest.scala | 211
-rw-r--r--  dottydoc/test/PackageStructure.scala | 89
-rw-r--r--  dottydoc/test/SimpleComments.scala | 29
-rw-r--r--  dottydoc/test/WhitelistedStdLib.scala | 45
-rw-r--r--  project/Build.scala | 10
-rw-r--r--  project/plugins.sbt | 2
-rw-r--r--  src/dotty/tools/dotc/ast/Trees.scala | 7
-rw-r--r--  src/dotty/tools/dotc/config/ScalaSettings.scala | 64
-rw-r--r--  src/dotty/tools/dotc/config/Settings.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/Contexts.scala | 28
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala | 23
-rw-r--r--  src/dotty/tools/dotc/parsing/Scanners.scala | 4
-rw-r--r--  src/dotty/tools/dotc/typer/FrontEnd.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/Namer.scala | 2
-rw-r--r--  test/test/DottyDocParsingTests.scala | 90
46 files changed, 4315 insertions(+), 70 deletions(-)
diff --git a/.gitignore b/.gitignore
index c9f12e986..17eba0468 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
*.DS_Store
*.class
*.log
+*.swp
*~
*.swp
@@ -37,6 +38,7 @@ scala-scala
# Ignore output files but keep the directory
out/
+build/
!out/.keep
# Ignore build-file
diff --git a/bridge/src/main/scala/xsbt/ScaladocInterface.scala b/bridge/src/main/scala/xsbt/ScaladocInterface.scala
new file mode 100644
index 000000000..3ad9c7941
--- /dev/null
+++ b/bridge/src/main/scala/xsbt/ScaladocInterface.scala
@@ -0,0 +1,72 @@
+/* sbt -- Simple Build Tool
+ * Copyright 2008, 2009 Mark Harrah
+ */
+package xsbt
+
+import xsbti.Logger
+import dotty.tools.dottydoc.api.scala.Dottydoc
+import java.net.URL
+
+class ScaladocInterface {
+ def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) =
+ (new DottydocRunner(args, log, delegate)).run()
+}
+
+class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) extends Dottydoc {
+ def run(): Unit = getOutputFolder(args).map { outputFolder =>
+ val index = createIndex(args)
+ val resources = getResources(args)
+ val template = getTemplate(resources)
+
+ template.fold(writeJson(index, outputFolder)) { tpl =>
+ buildDocs(outputFolder, tpl, resources, index)
+ }
+ } getOrElse {
+ delegate.log(
+ NoPosition,
+ "No output folder set for API documentation (\"-d\" parameter should be passed to the documentation tool)",
+ xsbti.Severity.Error
+ )
+ }
+
+ private[this] val NoPosition = new xsbti.Position {
+ val line = xsbti.Maybe.nothing[Integer]
+ val lineContent = ""
+ val offset = xsbti.Maybe.nothing[Integer]
+ val sourcePath = xsbti.Maybe.nothing[String]
+ val sourceFile = xsbti.Maybe.nothing[java.io.File]
+ val pointer = xsbti.Maybe.nothing[Integer]
+ val pointerSpace = xsbti.Maybe.nothing[String]
+ }
+
+ private def getStringSetting(name: String): Option[String] =
+ args find (_.startsWith(name)) map (_.drop(name.length))
+
+ private def getOutputFolder(args: Array[String]): Option[String] =
+ args sliding(2) find { case Array(x, _) => x == "-d" } map (_.tail.head.trim)
+
+ private def getTemplate(resources: List[URL]): Option[URL] =
+ resources.find(_.getFile.endsWith("template.html"))
+
+ private def getResources(args: Array[String]): List[URL] = {
+ val cp = args sliding (2) find { case Array(x, _) => x == "-classpath" } map (_.tail.head.trim) getOrElse ""
+
+ cp.split(":").find(_.endsWith("dottydoc-client.jar")).map { resourceJar =>
+ import java.util.jar.JarFile
+ val jarEntries = (new JarFile(resourceJar)).entries
+ var entries: List[URL] = Nil
+
+ while (jarEntries.hasMoreElements) {
+ val entry = jarEntries.nextElement()
+
+ if (!entry.isDirectory()) {
+ val path = s"jar:file:$resourceJar!/${entry.getName}"
+ val url = new URL(path)
+ entries = url :: entries
+ }
+ }
+
+ entries
+ } getOrElse (Nil)
+ }
+}
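
A minimal sketch of the argument handling used by `DottydocRunner` above: settings are read by scanning consecutive argument pairs with `sliding(2)`. This is a standalone approximation, not part of the patch, and the argument values are illustrative only.

{{{
// Standalone approximation of getOutputFolder: take the value that
// follows "-d" in the raw argument array.
def outputFolder(args: Array[String]): Option[String] =
  args.sliding(2).collectFirst { case Array("-d", out) => out.trim }

outputFolder(Array("-d", "out/api", "-classpath", "lib/dottydoc-client.jar"))
// => Some("out/api")
}}}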
diff --git a/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala
new file mode 100644
index 000000000..2d4c7abcf
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala
@@ -0,0 +1,79 @@
+package dotty.tools
+package dottydoc
+
+import core._
+import core.transform._
+import dotc.config.CompilerCommand
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts._
+import dotc.core.Phases.Phase
+import dotc.typer.FrontEnd
+import dotc.{ CompilationUnit, Compiler, Driver, Run }
+import io.PlainFile
+import model.Package
+import model.json._
+
+import _root_.java.util.{ Map => JMap }
+
+/** Custom Compiler with phases for the documentation tool
+ *
+ * The idea here is to structure `dottydoc` around the new infrastructure. As
+ * such, dottydoc will itself be a compiler. It will, however, produce a format
+ * that can be used by other tools or web-browsers.
+ *
+ * Example:
+ * 1. Use the existing FrontEnd to typecheck the code being fed to dottydoc
+ * 2. Create an AST that is serializable
+ * 3. Serialize to JS object
+ */
+class DocCompiler extends Compiler {
+ override def phases: List[List[Phase]] = List(
+ List(new DocFrontEnd),
+ List(new DocImplicitsPhase),
+ List(new DocASTPhase),
+ List(DocMiniTransformations(new LinkReturnTypes,
+ new LinkParamListTypes,
+ new LinkImplicitlyAddedTypes,
+ new LinkSuperTypes,
+ new AlternateConstructors,
+ new SortMembers))
+ )
+}
+
+class DocFrontEnd extends FrontEnd {
+ override protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ unit.isJava
+}
+
+abstract class DocDriver extends Driver {
+ import scala.collection.JavaConverters._
+
+ override def setup(args: Array[String], rootCtx: Context): (List[String], Context) = {
+ val ctx = rootCtx.fresh
+ val summary = CompilerCommand.distill(args)(ctx)
+
+ ctx.setSettings(summary.sstate)
+ ctx.setSetting(ctx.settings.YkeepComments, true)
+
+ val fileNames = CompilerCommand.checkUsage(summary, sourcesRequired)(ctx)
+ (fileNames, ctx)
+ }
+
+ override def newCompiler(implicit ctx: Context): Compiler = new DocCompiler
+
+ def compiledDocs(args: Array[String]): collection.Map[String, Package] = {
+ val (fileNames, ctx) = setup(args, initCtx.fresh)
+ doCompile(newCompiler(ctx), fileNames)(ctx)
+
+ ctx.docbase.packages[Package]
+ }
+
+ def compiledDocsJava(args: Array[String]): JMap[String, Package] =
+ compiledDocs(args).asJava
+
+ def indexToJson(index: collection.Map[String, Package]): String =
+ index.json
+
+ def indexToJsonJava(index: JMap[String, Package]): String =
+ indexToJson(index.asScala)
+}
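
Since `DocCompiler` expresses the documentation pipeline as ordinary phase lists, it can be extended like any other `Compiler`. A hypothetical sketch (not part of the patch), assuming the same imports as `DottyDoc.scala` above:

{{{
// Append an extra mini-transformation group after the default pipeline;
// SortMembers is reused here purely for illustration.
class ExtendedDocCompiler extends DocCompiler {
  override def phases: List[List[Phase]] =
    super.phases :+ List(DocMiniTransformations(new SortMembers))
}
}}}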
diff --git a/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java
new file mode 100644
index 000000000..1bdfe0488
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java
@@ -0,0 +1,63 @@
+package dotty.tools.dottydoc.api.java;
+
+import dotty.tools.dottydoc.DocDriver;
+import dotty.tools.dottydoc.model.Package;
+import dotty.tools.dottydoc.util.OutputWriter;
+import java.util.Map;
+import java.util.List;
+import java.net.URL;
+
+/**
+ * The Dottydoc API is fairly simple. The tool creates an index by calling
+ * "createIndex" with the same argument list you would pass to the compiler, e.g.:
+ *
+ * {{{
+ * String[] array = {
+ * "-language:Scala2"
+ * };
+ *
+ * Map<String, Package> index = createIndex(array);
+ * }}}
+ *
+ * Once the index has been generated, the tool can also build a documentation
+ * API given a Mustache template and a flat resources structure (i.e. absolute
+ * paths to each resource, which will be put in the same directory).
+ *
+ * {{{
+ * buildDocs("path/to/output/dir", templateURL, resources, index);
+ * }}}
+ *
+ * The tool can also generate JSON from the created index using "toJson(index)"
+ * or directly using "createJsonIndex"
+ */
+public class Dottydoc extends DocDriver {
+
+ /** Creates index from compiler arguments */
+ public Map<String, Package> createIndex(String[] args) {
+ return compiledDocsJava(args);
+ }
+
+ /** Creates JSON from compiler arguments */
+ public String createJsonIndex(String[] args) {
+ return indexToJsonJava(createIndex(args));
+ }
+
+ public String toJson(Map<String, Package> index) {
+ return indexToJsonJava(index);
+ }
+
+  /** Creates documentation from the given parameters */
+ public void buildDocs(
+ String outputDir,
+ URL template,
+ List<URL> resources,
+ Map<String, Package> index
+ ) {
+ new OutputWriter().writeJava(index, outputDir, template, resources);
+ }
+
+ /** Writes JSON to an output directory as "index.json" */
+ public void writeJson(Map<String, Package> index, String outputDir) {
+ new OutputWriter().writeJsonJava(index, outputDir);
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala
new file mode 100644
index 000000000..15db81a95
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala
@@ -0,0 +1,49 @@
+package dotty.tools.dottydoc.api.scala
+
+import dotty.tools.dottydoc.DocDriver
+import dotty.tools.dottydoc.model.Package
+import dotty.tools.dottydoc.util.OutputWriter
+
+import scala.collection.Map
+import java.net.URL
+
+/**
+ * The Dottydoc API is fairly simple. The tool creates an index by calling
+ * "createIndex" with the same argument list you would pass to the compiler, e.g.:
+ *
+ * {{{
+ * val array: Array[String] = Array(
+ * "-language:Scala2"
+ * )
+ *
+ * val index: Map[String, Package] = createIndex(array)
+ * }}}
+ *
+ * Once the index has been generated, the tool can also build a documentation
+ * API given a Mustache template and a flat resources structure (i.e. absolute
+ * paths to each resource, which will be put in the same directory).
+ *
+ * {{{
+ * buildDocs("path/to/output/dir", templateURL, resources, index)
+ * }}}
+ *
+ * The tool can also generate JSON from the created index using "indexToJson"
+ * or directly using "createJsonIndex"
+ */
+trait Dottydoc extends DocDriver {
+ /** Creates index from compiler arguments */
+ def createIndex(args: Array[String]): Map[String, Package] =
+ compiledDocs(args)
+
+ /** Creates JSON from compiler arguments */
+ def createJsonIndex(args: Array[String]): String =
+ indexToJson(compiledDocs(args))
+
+  /** Creates documentation from the given parameters */
+ def buildDocs(outDir: String, template: URL, resources: List[URL], index: Map[String, Package]) =
+ new OutputWriter().write(index, outDir, template, resources)
+
+ /** Writes JSON to an output directory as "index.json" */
+ def writeJson(index: Map[String, Package], outputDir: String) =
+ new OutputWriter().writeJson(index, outputDir)
+}
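
A minimal usage sketch of this trait, assuming a hypothetical entry point and an illustrative output directory:

{{{
import dotty.tools.dottydoc.api.scala.Dottydoc

object DocMain extends Dottydoc {
  def main(args: Array[String]): Unit = {
    val index = createIndex(args)   // compiles the sources and builds the doc AST
    writeJson(index, "out/docs")    // emits out/docs/index.json
  }
}
}}}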
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala
new file mode 100644
index 000000000..53c96fc87
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala
@@ -0,0 +1,34 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+
+/** This DocMiniPhase adds the alternate constructors, currently defined as
+ * methods with the name `<init>`, to the Entity#constructors list
+ */
+class AlternateConstructors extends DocMiniPhase {
+ def partitionMembers(ent: Entity with Constructors with Members): (List[List[ParamList]], List[Entity]) = {
+ val (constructors, members) = ent.members.partition(x => x.name == "<init>")
+
+ val paramLists: List[List[ParamList]] = constructors.collect {
+ case df: Def => df.paramLists
+ }
+
+ (ent.constructors ++ paramLists, members)
+ }
+
+ override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
+ val (constructors, members) = partitionMembers(cls)
+ cls.copy(members = members, constructors = constructors)
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ val (constructors, members) = partitionMembers(cc)
+ cc.copy(members = members, constructors = constructors)
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala
new file mode 100644
index 000000000..7744752ce
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala
@@ -0,0 +1,191 @@
+package dotty.tools
+package dottydoc
+package core
+
+/** Dotty and Dottydoc imports */
+import dotc.ast.Trees._
+import dotc.CompilationUnit
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Phases.Phase
+import dotc.core.Symbols.{ Symbol, NoSymbol }
+
+class DocASTPhase extends Phase {
+ import model._
+ import model.factories._
+ import model.internal._
+ import model.parsers.WikiParser
+ import model.comment.Comment
+ import dotty.tools.dotc.core.Flags
+ import dotty.tools.dotc.ast.tpd._
+ import util.traversing._
+ import util.internal.setters._
+
+ def phaseName = "docphase"
+
+ private[this] val commentParser = new WikiParser
+
+ /** Saves the commentParser function for later evaluation, for when the AST has been filled */
+ def track(symbol: Symbol, ctx: Context, parent: Symbol = NoSymbol)(op: => Entity) = {
+ val entity = op
+
+ if (entity != NonEntity)
+ commentParser += (entity, symbol, parent, ctx)
+
+ entity
+ }
+
+ /** Build documentation hierarchy from existing tree */
+ def collect(tree: Tree, prev: List[String] = Nil)(implicit ctx: Context): Entity = track(tree.symbol, ctx) {
+ val implicitConversions = ctx.docbase.defs(tree.symbol)
+
+ def collectList(xs: List[Tree], ps: List[String]): List[Entity] =
+ xs.map(collect(_, ps)).filter(_ != NonEntity)
+
+ def collectEntityMembers(xs: List[Tree], ps: List[String]) =
+ collectList(xs, ps).asInstanceOf[List[Entity with Members]]
+
+ def collectMembers(tree: Tree, ps: List[String] = prev)(implicit ctx: Context): List[Entity] = {
+ val defs = (tree match {
+ case t: Template => collectList(t.body, ps)
+ case _ => Nil
+ })
+
+ defs ++ implicitConversions.flatMap(membersFromSymbol)
+ }
+
+ def membersFromSymbol(sym: Symbol): List[Entity] = {
+ val defs = sym.info.bounds.hi.membersBasedOnFlags(Flags.Method, Flags.Synthetic | Flags.Private)
+ .filterNot(_.symbol.owner.name.show == "Any")
+ .map { meth =>
+ track(meth.symbol, ctx, tree.symbol) {
+ DefImpl(
+ meth.symbol.name.show,
+ Nil,
+ path(meth.symbol),
+ returnType(meth.info),
+ typeParams(meth.symbol),
+ paramLists(meth.info),
+ implicitlyAddedFrom = Some(returnType(meth.symbol.owner.info))
+ )
+ }
+ }.toList
+
+ val vals = sym.info.fields.filterNot(_.symbol.is(Flags.Private | Flags.Synthetic)).map { value =>
+ track(value.symbol, ctx, tree.symbol) {
+ ValImpl(
+ value.symbol.name.show,
+ Nil, path(value.symbol),
+ returnType(value.info),
+ implicitlyAddedFrom = Some(returnType(value.symbol.owner.info))
+ )
+ }
+ }
+
+ defs ++ vals
+ }
+
+
+ tree match {
+ /** package */
+ case pd @ PackageDef(pid, st) =>
+ val newPath = prev :+ pid.name.toString
+ addEntity(PackageImpl(newPath.mkString("."), collectEntityMembers(st, newPath), newPath))
+
+ /** trait */
+ case t @ TypeDef(n, rhs) if t.symbol.is(Flags.Trait) =>
+ val name = n.decode.toString
+ val newPath = prev :+ name
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ TraitImpl(name, collectMembers(rhs), flags(t), newPath, typeParams(t.symbol), traitParameters(t.symbol), superTypes(t))
+
+    /** objects are named in the format "Object$", so drop the trailing '$' */
+ case o @ TypeDef(n, rhs) if o.symbol.is(Flags.Module) =>
+ val name = n.decode.toString.dropRight(1)
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ ObjectImpl(name, collectMembers(rhs, prev :+ name), flags(o), prev :+ (name + "$"), superTypes(o))
+
+ /** class / case class */
+ case c @ TypeDef(n, rhs) if c.symbol.isClass =>
+ val name = n.decode.toString
+ val newPath = prev :+ name
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ (name, collectMembers(rhs), flags(c), newPath, typeParams(c.symbol), constructors(c.symbol), superTypes(c), None) match {
+ case x if c.symbol.is(Flags.CaseClass) => CaseClassImpl.tupled(x)
+ case x => ClassImpl.tupled(x)
+ }
+
+ /** def */
+ case d: DefDef =>
+ DefImpl(d.name.decode.toString, flags(d), path(d.symbol), returnType(d.tpt.tpe), typeParams(d.symbol), paramLists(d.symbol.info))
+
+ /** val */
+ case v: ValDef if !v.symbol.is(Flags.ModuleVal) =>
+ ValImpl(v.name.decode.toString, flags(v), path(v.symbol), returnType(v.tpt.tpe))
+
+ case x => {
+ //dottydoc.println(s"Found unwanted entity: $x (${x.pos},\n${x.show}")
+ NonEntity
+ }
+ }
+ }
+
+ var packages: Map[String, Package] = Map.empty
+
+ def addEntity(p: Package): Package = {
+ def mergedChildren(x1s: List[Entity], x2s: List[Entity]): List[Entity] = {
+ val (packs1, others1) = x1s.partition(_.kind == "package")
+ val (packs2, others2) = x2s.partition(_.kind == "package")
+
+ val others = others1 ::: others2
+ val packs = (packs1 ::: packs2).groupBy(_.path).map(_._2.head)
+
+ (others ++ packs).sortBy(_.name)
+ }
+
+ val path = p.path.mkString(".")
+ val newPack = packages.get(path).map {
+ case ex: PackageImpl =>
+ if (!ex.comment.isDefined) ex.comment = p.comment
+ ex.members = mergedChildren(ex.members, p.members)
+ ex
+ }.getOrElse(p)
+
+ packages = packages + (path -> newPack)
+ newPack
+ }
+
+ private[this] var totalRuns = 0
+ private[this] var currentRun = 0
+
+ override def run(implicit ctx: Context): Unit = {
+ currentRun += 1
+ println(s"Compiling ($currentRun/$totalRuns): ${ctx.compilationUnit.source.file.name}")
+ collect(ctx.compilationUnit.tpdTree) // Will put packages in `packages` var
+ }
+
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ // (1) Create package structure for all `units`, this will give us a complete structure
+ totalRuns = units.length
+ val compUnits = super.runOn(units)
+
+ // (2) Set parents of entities, needed for linking
+ for {
+ parent <- packages.values
+ child <- parent.children
+ } setParent(child, to = parent)
+
+ // (3) Create documentation template from docstrings, with internal links
+ println("Generating documentation, this might take a while...")
+ commentParser.parse(packages)
+
+ // (4) Clear caches
+ commentParser.clear()
+
+ // (5) Update Doc AST in ctx.base
+ for (kv <- packages) ctx.docbase.packages += kv
+
+ // Return super's result
+ compUnits
+ }
+}
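
The interesting part of `addEntity` is the merge step: when several compilation units contribute members to the same package path, sub-packages are deduplicated by path and the result is re-sorted. A standalone approximation of `mergedChildren`, assuming the `Entity` interface from `model/entities.scala` (not shown in this excerpt):

{{{
// Deduplicate packages by path, keep all other members, sort by name.
def merged(xs: List[Entity], ys: List[Entity]): List[Entity] = {
  val (packs, others) = (xs ::: ys).partition(_.kind == "package")
  (others ::: packs.groupBy(_.path).map(_._2.head).toList).sortBy(_.name)
}
}}}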
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala
new file mode 100644
index 000000000..f322d7a5a
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala
@@ -0,0 +1,27 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotty.tools.dotc.transform.TreeTransforms.{ MiniPhaseTransform, TransformerInfo }
+import dotty.tools.dotc.core.Flags
+import dotc.core.Contexts.Context
+
+class DocImplicitsPhase extends MiniPhaseTransform { thisTransformer =>
+ import dotty.tools.dotc.ast.tpd._
+
+ def phaseName = "addImplicitsPhase"
+
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (
+ tree.symbol.is(Flags.Implicit) && // has to have an implicit flag
+ tree.symbol.owner.isStaticOwner && // owner has to be static (e.g. top-level `object`)
+ tree.vparamss.length > 0 &&
+ tree.vparamss(0).length == 1 // should only take one arg, since it has to be a transformation
+ ) {
+ val convertee = tree.vparamss(0)(0).symbol.info.widenDealias.finalResultType.typeSymbol // the pimped type (i.e. `class`)
+ ctx.docbase.addDef(convertee, tree.symbol.info.widenDealias.finalResultType.typeSymbol)
+ }
+
+ tree
+ }
+}
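
The guard above only records implicit conversions that are defined in a static owner and take exactly one value parameter. A hypothetical source-level example of what would and would not be picked up:

{{{
// Illustration only; these definitions are not part of the patch.
class RichString(s: String)

object StringOps {
  // recorded: docbase maps String -> RichString
  implicit def richString(s: String): RichString = new RichString(s)

  // ignored: two value parameters, so it is not a plain conversion
  implicit def zip(a: String, b: String): RichString = ???
}
}}}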
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala
new file mode 100644
index 000000000..2690ac7b7
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala
@@ -0,0 +1,199 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.CompilationUnit
+import dotc.core.Contexts.Context
+import dotc.core.Phases.Phase
+import model._
+import model.internal._
+
+object transform {
+ /**
+ * The idea behind DocMiniTransformations is to fuse transformations to the
+   * doc AST, much like `MiniPhaseTransform` in dotty core - but with a much
+   * simpler implementation
+ *
+ * Usage
+ * -----
+ *
+ * Create a `DocMiniPhase` which overrides the relevant method:
+ *
+ * {{{
+ * override def transformDef(implicit ctx: Context) = {
+ * case x if shouldTransform(x) => x.copy(newValue = ...)
+ * }
+ * }}}
+ *
+ * On each node in the AST, the appropriate method in `DocMiniPhase` will be
+   * called in the order in which they are supplied in
+   * `DocMiniTransformations`.
+ *
+ * There won't be a match-error as `transformX` is composed with an
+ * `identity` function.
+ *
+ * The transformations in `DocMiniTransformations` will apply transformations
+ * to all nodes - this means that you do _not_ need to transform children in
+ * `transformPackage`, because `transformX` will be called for the relevant
+   * children. If you want to add children to `Package`, you need to do that in
+   * `transformPackage`; these additions will be persisted.
+ */
+ abstract class DocMiniTransformations(transformations: List[DocMiniPhase]) extends Phase {
+
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ for {
+ rootName <- rootPackages
+ pack = ctx.docbase.packages[Package](rootName)
+ transformed = performPackageTransform(pack)
+ } yield ctx.docbase.packages(rootName) = transformed
+ super.runOn(units)
+ }
+
+ private def rootPackages(implicit ctx: Context): List[String] = {
+ var currentDepth = Int.MaxValue
+ var packs = List.empty[String]
+
+ for (key <- ctx.docbase.packages.keys) {
+ val keyDepth = key.split("\\.").length
+ packs =
+ if (keyDepth < currentDepth) {
+ currentDepth = keyDepth
+ key :: Nil
+ } else if (keyDepth == currentDepth) {
+ key :: packs
+ } else packs
+ }
+ packs
+ }
+
+ private def performPackageTransform(pack: Package)(implicit ctx: Context): Package = {
+ def transformEntity[E <: Entity](e: E, f: DocMiniPhase => E => E)(createNew: E => E): E = {
+ val transformedEntity = transformations.foldLeft(e) { case (oldE, transf) =>
+ f(transf)(oldE)
+ }
+ createNew(transformedEntity)
+ }
+
+ def traverse(ent: Entity): Entity = ent match {
+ case p: Package => transformEntity(p, _.packageTransformation) { p =>
+ val newPackage = PackageImpl(
+ p.name,
+ p.members.map(traverse),
+ p.path,
+ p.comment
+ )
+
+ // Update reference in context to newPackage
+ ctx.docbase.packages[Package] += (newPackage.path.mkString(".") -> newPackage)
+
+ newPackage
+ }
+ case c: Class => transformEntity(c, _.classTransformation) { cls =>
+ ClassImpl(
+ cls.name,
+ cls.members.map(traverse),
+ cls.modifiers,
+ cls.path,
+ cls.typeParams,
+ cls.constructors,
+ cls.superTypes,
+ cls.comment
+ )
+ }
+ case cc: CaseClass => transformEntity(cc, _.caseClassTransformation) { cc =>
+ CaseClassImpl(
+ cc.name,
+ cc.members.map(traverse),
+ cc.modifiers,
+ cc.path,
+ cc.typeParams,
+ cc.constructors,
+ cc.superTypes,
+ cc.comment
+ )
+ }
+ case trt: Trait => transformEntity(trt, _.traitTransformation) { trt =>
+ TraitImpl(
+ trt.name,
+ trt.members.map(traverse),
+ trt.modifiers,
+ trt.path,
+ trt.typeParams,
+ trt.traitParams,
+ trt.superTypes,
+ trt.comment
+ )
+ }
+ case obj: Object => transformEntity(obj, _.objectTransformation) { obj =>
+ ObjectImpl(
+ obj.name,
+ obj.members.map(traverse),
+ obj.modifiers,
+ obj.path,
+ obj.superTypes,
+ obj.comment
+ )
+ }
+ case df: Def => transformEntity(df, _.defTransformation) { df =>
+ DefImpl(
+ df.name,
+ df.modifiers,
+ df.path,
+ df.returnValue,
+ df.typeParams,
+ df.paramLists,
+ df.comment,
+ df.implicitlyAddedFrom
+ )
+ }
+ case vl: Val => transformEntity(vl, _.valTransformation) { vl =>
+ ValImpl(
+ vl.name,
+ vl.modifiers,
+ vl.path,
+ vl.returnValue,
+ vl.comment,
+ vl.implicitlyAddedFrom
+ )
+ }
+ }
+
+ traverse(pack).asInstanceOf[Package]
+ }
+
+ override def run(implicit ctx: Context): Unit = ()
+ }
+
+ object DocMiniTransformations {
+ private var previousPhase = 0
+ def apply(transformations: DocMiniPhase*) =
+ new DocMiniTransformations(transformations.toList) {
+ val packages = Map.empty[String, Package]
+
+ def phaseName = s"MiniTransformation${ previousPhase += 1 }"
+ }
+ }
+
+ trait DocMiniPhase { phase =>
+ private def identity[E]: PartialFunction[E, E] = {
+ case id => id
+ }
+
+ // Partial functions instead????
+ def transformPackage(implicit ctx: Context): PartialFunction[Package, Package] = identity
+ def transformClass(implicit ctx: Context): PartialFunction[Class, Class] = identity
+ def transformCaseClass(implicit ctx: Context): PartialFunction[CaseClass, CaseClass] = identity
+ def transformTrait(implicit ctx: Context): PartialFunction[Trait, Trait] = identity
+ def transformObject(implicit ctx: Context): PartialFunction[Object, Object] = identity
+ def transformDef(implicit ctx: Context): PartialFunction[Def, Def] = identity
+ def transformVal(implicit ctx: Context): PartialFunction[Val, Val] = identity
+
+ private[transform] def packageTransformation(p: Package)(implicit ctx: Context) = (transformPackage orElse identity)(p)
+ private[transform] def classTransformation(cls: Class)(implicit ctx: Context) = (transformClass orElse identity)(cls)
+ private[transform] def caseClassTransformation(cc: CaseClass)(implicit ctx: Context) = (transformCaseClass orElse identity)(cc)
+ private[transform] def traitTransformation(trt: Trait)(implicit ctx: Context) = (transformTrait orElse identity)(trt)
+ private[transform] def objectTransformation(obj: Object)(implicit ctx: Context) = (transformObject orElse identity)(obj)
+ private[transform] def defTransformation(df: Def)(implicit ctx: Context) = (transformDef orElse identity)(df)
+ private[transform] def valTransformation(vl: Val)(implicit ctx: Context) = (transformVal orElse identity)(vl)
+ }
+}
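
A minimal end-to-end sketch of the usage described in the comment at the top of this file, assuming the same imports as `SortMembersPhase.scala` and that `DefImpl` is the case-class implementation of `Def` from `model/internal.scala`:

{{{
// Hypothetical mini-phase that upper-cases every def name; it is fused
// with SortMembers so both run in a single traversal of the doc AST.
class UpperCaseDefs extends DocMiniPhase {
  override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
    df.copy(name = df.name.toUpperCase)
  }
}

val fused = DocMiniTransformations(new UpperCaseDefs, new SortMembers)
}}}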
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala
new file mode 100644
index 000000000..c8de532bb
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala
@@ -0,0 +1,32 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+
+/** This DocMiniPhase sorts the members of all classes, traits, objects and packages */
+class SortMembers extends DocMiniPhase {
+ override def transformPackage(implicit ctx: Context) = { case p: PackageImpl =>
+ p.copy(members = p.members.sortBy(_.name))
+ }
+
+ override def transformClass(implicit ctx: Context) = { case c: ClassImpl =>
+ c.copy(members = c.members.sortBy(_.name))
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ cc.copy(members = cc.members.sortBy(_.name))
+ }
+
+ override def transformTrait(implicit ctx: Context) = { case t: TraitImpl =>
+ t.copy(members = t.members.sortBy(_.name))
+ }
+
+ override def transformObject(implicit ctx: Context) = { case o: ObjectImpl =>
+ o.copy(members = o.members.sortBy(_.name))
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala
new file mode 100644
index 000000000..ae07effa9
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala
@@ -0,0 +1,115 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+import dotc.util.Positions.NoPosition
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+import model.comment._
+import model.references._
+import BodyParsers._
+import util.MemberLookup
+import util.traversing._
+import util.internal.setters._
+
+class LinkReturnTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
+ val returnValue = linkReference(df, df.returnValue, ctx.docbase.packages[Package].toMap)
+ df.copy(returnValue = returnValue)
+ }
+
+ override def transformVal(implicit ctx: Context) = { case vl: ValImpl =>
+ val returnValue = linkReference(vl, vl.returnValue, ctx.docbase.packages[Package].toMap)
+ vl.copy(returnValue = returnValue)
+ }
+}
+
+class LinkParamListTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
+ val newParamLists = for {
+ ParamListImpl(list, isImplicit) <- df.paramLists
+ newList = list.map(linkReference(df, _, ctx.docbase.packages[Package].toMap))
+ } yield ParamListImpl(newList.asInstanceOf[List[NamedReference]], isImplicit)
+
+ df.copy(paramLists = newParamLists)
+ }
+}
+
+class LinkSuperTypes extends DocMiniPhase with TypeLinker {
+ def linkSuperTypes(ent: Entity with SuperTypes)(implicit ctx: Context): List[MaterializableLink] =
+ ent.superTypes.collect {
+ case UnsetLink(title, query) =>
+ val packages = ctx.docbase.packages[Package].toMap
+ val entityLink = makeEntityLink(ent, packages, Text(title), NoPosition, query).link
+ handleEntityLink(title, entityLink, ent)
+ }
+
+ override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
+ cls.copy(superTypes = linkSuperTypes(cls))
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ cc.copy(superTypes = linkSuperTypes(cc))
+ }
+
+ override def transformTrait(implicit ctx: Context) = { case trt: TraitImpl =>
+ trt.copy(superTypes = linkSuperTypes(trt))
+ }
+
+ override def transformObject(implicit ctx: Context) = { case obj: ObjectImpl =>
+ obj.copy(superTypes = linkSuperTypes(obj))
+ }
+}
+
+class LinkImplicitlyAddedTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = {
+ case df: DefImpl if df.implicitlyAddedFrom.isDefined =>
+ val implicitlyAddedFrom = linkReference(df, df.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap)
+ df.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom))
+ }
+
+ override def transformVal(implicit ctx: Context) = {
+ case vl: ValImpl if vl.implicitlyAddedFrom.isDefined =>
+ val implicitlyAddedFrom = linkReference(vl, vl.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap)
+ vl.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom))
+ }
+}
+
+trait TypeLinker extends MemberLookup {
+ def handleEntityLink(title: String, lt: LinkTo, ent: Entity): MaterializableLink = lt match {
+ case Tooltip(str) => NoLink(title, str)
+ case LinkToExternal(_, url) => MaterializedLink(title, url)
+ case LinkToEntity(target) => MaterializedLink(title, util.traversing.relativePath(ent, target))
+ }
+
+ def linkReference(ent: Entity, ref: Reference, packs: Map[String, Package]): Reference = {
+ def linkRef(ref: Reference) = linkReference(ent, ref, packs)
+
+ ref match {
+ case ref @ TypeReference(_, UnsetLink(t, query), tps) =>
+ val inlineToHtml = InlineToHtml(ent)
+ val title = t
+
+ val target = handleEntityLink(title, makeEntityLink(ent, packs, Text(t), NoPosition, query).link, ent)
+ val tpTargets = tps.map(linkReference(ent, _, packs))
+ ref.copy(tpeLink = target, paramLinks = tpTargets)
+ case ref @ OrTypeReference(left, right) =>
+ ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
+ case ref @ AndTypeReference(left, right) =>
+ ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
+ case ref @ NamedReference(_, rf, _, _) =>
+ ref.copy(ref = linkRef(rf))
+ case ref @ FunctionReference(args, rv) =>
+ ref.copy(args = args.map(linkReference(ent, _, packs)), returnValue = linkReference(ent, rv, packs))
+ case ref @ TupleReference(args) =>
+ ref.copy(args = args.map(linkRef))
+ case ref @ BoundsReference(low, high) =>
+ ref.copy(low = linkRef(low), high = linkRef(high))
+ case _ =>
+ ref
+ }
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala
new file mode 100644
index 000000000..29fe48de3
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala
@@ -0,0 +1,94 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import scala.collection._
+
+/** A body of text. A comment has a single body, which is composed of
+ * at least one block. Inside every body is exactly one summary (see
+ * [[scala.tools.nsc.doc.model.comment.Summary]]). */
+final case class Body(blocks: Seq[Block]) {
+
+ /** The summary text of the comment body. */
+ lazy val summary: Option[Body] = {
+ def summaryInBlock(block: Block): Seq[Inline] = block match {
+ case Title(text, _) => summaryInInline(text)
+ case Paragraph(text) => summaryInInline(text)
+ case UnorderedList(items) => items flatMap summaryInBlock
+ case OrderedList(items, _) => items flatMap summaryInBlock
+ case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
+ case _ => Nil
+ }
+ def summaryInInline(text: Inline): Seq[Inline] = text match {
+ case Summary(text) => List(text)
+ case Chain(items) => items flatMap summaryInInline
+ case Italic(text) => summaryInInline(text)
+ case Bold(text) => summaryInInline(text)
+ case Underline(text) => summaryInInline(text)
+ case Superscript(text) => summaryInInline(text)
+ case Subscript(text) => summaryInInline(text)
+ case Link(_, title) => summaryInInline(title)
+ case _ => Nil
+ }
+ (blocks flatMap summaryInBlock).toList match {
+ case Nil => None
+ case inline :: Nil => Some(Body(Seq(Paragraph(inline))))
+ case inlines => Some(Body(Seq(Paragraph(Chain(inlines)))))
+ }
+ }
+}
+
+/** A block-level element of text, such as a paragraph or code block. */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** A section of text inside a block, possibly with formatting. */
+sealed abstract class Inline
+
+final case class Chain(items: Seq[Inline]) extends Inline
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class Monospace(text: Inline) extends Inline
+final case class Text(text: String) extends Inline
+abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
+object EntityLink {
+ def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
+ def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
+}
+final case class HtmlTag(data: String) extends Inline {
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+ private val (isEnd, tagName) = data match {
+ case Pattern(s1, s2) =>
+ (! s1.isEmpty, Some(s2.toLowerCase))
+ case _ =>
+ (false, None)
+ }
+
+ def canClose(open: HtmlTag) = {
+ isEnd && tagName == open.tagName
+ }
+
+ private val TagsNotToClose = Set("br", "img")
+ def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
+}
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
+
+sealed trait LinkTo
+final case class LinkToExternal(name: String, url: String) extends LinkTo
+final case class Tooltip(name: String) extends LinkTo
+
+/** Linking directly to entities is not picklable because of cyclic references */
+final case class LinkToEntity(entity: Entity) extends LinkTo
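
A small sketch of how `Body.summary` picks out the first `Summary` node, using a hand-built body in place of one produced by the wiki parser:

{{{
val body = Body(Seq(Paragraph(Chain(Seq(
  Summary(Text("Sorts the members.")),
  Text(" The sort is stable for equal names."))))))

body.summary
// => Some(Body(Seq(Paragraph(Text("Sorts the members.")))))
}}}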
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala
new file mode 100644
index 000000000..8c1fa8d49
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala
@@ -0,0 +1,82 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+object BodyParsers {
+
+ implicit class BodyToHtml(val body: Body) extends AnyVal {
+ def toHtml(origin: Entity): String = {
+ val inlineToHtml = InlineToHtml(origin)
+
+ def bodyToHtml(body: Body): String =
+ (body.blocks map blockToHtml).mkString
+
+ def blockToHtml(block: Block): String = block match {
+ case Title(in, 1) => s"<h1>${inlineToHtml(in)}</h1>"
+ case Title(in, 2) => s"<h2>${inlineToHtml(in)}</h2>"
+ case Title(in, 3) => s"<h3>${inlineToHtml(in)}</h3>"
+ case Title(in, _) => s"<h4>${inlineToHtml(in)}</h4>"
+ case Paragraph(in) => s"<p>${inlineToHtml(in)}</p>"
+ case Code(data) => s"""<pre><code class="scala">$data</code></pre>"""
+ case UnorderedList(items) =>
+ s"<ul>${listItemsToHtml(items)}</ul>"
+ case OrderedList(items, listStyle) =>
+ s"<ol class=${listStyle}>${listItemsToHtml(items)}</ol>"
+ case DefinitionList(items) =>
+ s"<dl>${items map { case (t, d) => s"<dt>${inlineToHtml(t)}</dt><dd>${blockToHtml(d)}</dd>" } }</dl>"
+ case HorizontalRule() =>
+ "<hr/>"
+ }
+
+ def listItemsToHtml(items: Seq[Block]) =
+ items.foldLeft(""){ (list, item) =>
+ item match {
+ case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI
+ list + s"<li>${blockToHtml(item)}</li>"
+ case Paragraph(inline) =>
+ list + s"<li>${inlineToHtml(inline)}</li>" // LIs are blocks, no need to use Ps
+ case block =>
+ list + s"<li>${blockToHtml(block)}</li>"
+ }
+ }
+
+ bodyToHtml(body)
+ }
+ }
+
+ case class InlineToHtml(origin: Entity) {
+ def apply(inline: Inline) = toHtml(inline)
+
+ def relativePath(target: Entity) =
+ util.traversing.relativePath(origin, target)
+
+ def toHtml(inline: Inline): String = inline match {
+ case Chain(items) => (items map toHtml).mkString
+ case Italic(in) => s"<i>${toHtml(in)}</i>"
+ case Bold(in) => s"<b>${toHtml(in)}</b>"
+ case Underline(in) => s"<u>${toHtml(in)}</u>"
+ case Superscript(in) => s"<sup>${toHtml(in)}</sup>"
+ case Subscript(in) => s"<sub>${toHtml(in) }</sub>"
+ case Link(raw, title) => s"""<a href=$raw target="_blank">${toHtml(title)}</a>"""
+ case Monospace(in) => s"<code>${toHtml(in)}</code>"
+ case Text(text) => text
+ case Summary(in) => toHtml(in)
+ case HtmlTag(tag) => tag
+ case EntityLink(target, link) => enityLinkToHtml(target, link)
+ }
+
+ def enityLinkToHtml(target: Inline, link: LinkTo) = link match {
+ case Tooltip(_) => toHtml(target)
+ case LinkToExternal(n, url) => s"""<a href="$url">$n</a>"""
+ case LinkToEntity(t: Entity) => t match {
+ // Entity is a package member
+ case e: Entity with Members =>
+ s"""<a href="${relativePath(t)}">${toHtml(target)}</a>"""
+ // Entity is a Val / Def
+ case x => x.parent.fold(toHtml(target)) { xpar =>
+ s"""<a href="${relativePath(xpar)}#${x.name}">${toHtml(target)}</a>"""
+ }
+ }
+ }
+ }
+}
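
A sketch of the rendering done by `BodyToHtml`, where `origin` stands for whichever entity owns the comment (hypothetical here, only needed to resolve relative links):

{{{
import BodyParsers._

val html = Body(Seq(
  Title(Text("Usage"), 1),
  Paragraph(Bold(Text("fast"))),
  Code("val x = 1")
)).toHtml(origin)
// => <h1>Usage</h1><p><b>fast</b></p><pre><code class="scala">val x = 1</code></pre>
}}}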
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala
new file mode 100644
index 000000000..c4f6ccf5d
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala
@@ -0,0 +1,28 @@
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+case class Comment (
+ body: String,
+ short: String,
+ authors: List[String],
+ see: List[String],
+ result: Option[String],
+ throws: Map[String, String],
+ valueParams: Map[String, String],
+ typeParams: Map[String, String],
+ version: Option[String],
+ since: Option[String],
+ todo: List[String],
+ deprecated: Option[String],
+ note: List[String],
+ example: List[String],
+ constructor: Option[String],
+ group: Option[String],
+ groupDesc: Map[String, String],
+ groupNames: Map[String, String],
+ groupPrio: Map[String, String],
+ /** List of conversions to hide - containing e.g: `scala.Predef.FloatArrayOps` */
+ hideImplicitConversions: List[String]
+)
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala
new file mode 100644
index 000000000..27b0ff977
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala
@@ -0,0 +1,25 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+trait CommentCleaner {
+ import Regexes._
+
+ def clean(comment: String): List[String] = {
+ def cleanLine(line: String): String = {
+ // Remove trailing whitespaces
+ TrailingWhitespace.replaceAllIn(line, "") match {
+ case CleanCommentLine(ctl) => ctl
+ case tl => tl
+ }
+ }
+ val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
+ val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
+ val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
+ val markedTagComment =
+ SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+ _root_.java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
+ })
+ markedTagComment.lines.toList map (cleanLine(_))
+ }
+}
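
A sketch of how the cleaner is typically invoked, assuming an object that mixes in the trait; the exact per-line result depends on the regexes in `Regexes` (defined elsewhere in this patch):

{{{
object Cleaner extends CommentCleaner

// Hypothetical raw docstring, as the compiler stores it:
val lines: List[String] =
  Cleaner.clean("/** Sorts members.\n  * @note the sort is stable\n  */")
}}}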
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala
new file mode 100644
index 000000000..32a0d8128
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala
@@ -0,0 +1,344 @@
+/*
+ * Port of DocComment.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Symbols._
+import dotc.core.Flags
+import dotc.util.Positions._
+
+import scala.collection.mutable
+
+trait CommentExpander {
+ import CommentUtils._
+
+ def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val parent = if (site != NoSymbol) site else sym
+ defineVariables(parent)
+ expandedDocComment(sym, parent)
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ *
+ * @param sym The symbol for which doc comment is returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
+ // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
+ val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
+ else site
+ expandVariables(cookedDocComment(sym, docStr), sym, parent)
+ }
+
+ private def template(raw: String): String = {
+ val sections = tagIndex(raw)
+
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
+ }
+
+ def defines(raw: String): List[String] = {
+ val sections = tagIndex(raw)
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ defines map { case (start, end) => raw.substring(start, end) }
+ }
+
+ private def replaceInheritDocToInheritdoc(docStr: String): String =
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+
+ /** The cooked doc comment of an overridden symbol */
+ protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
+ allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")
+
+ private val cookedDocComments = mutable.HashMap[Symbol, String]()
+
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
+ * missing sections of an inherited doc comment.
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the doc comment of the overridden version is copied instead.
+ */
+ def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
+ var ownComment =
+ if (docStr.length == 0) ctx.docbase.docstring(sym).map(c => template(c.chrs)).getOrElse("")
+ else template(docStr)
+ ownComment = replaceInheritDocToInheritdoc(ownComment)
+
+ superComment(sym) match {
+ case None =>
+ // SI-8210 - The warning would be false negative when this symbol is a setter
+ if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
+ dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
+ }
+ })
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
+ def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+
+ if (copyFirstPara) {
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
+ copied = 3
+ tocopy = 3
+ }
+
+ def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
+ case Some((start, end)) =>
+ if (end > tocopy) tocopy = end
+ case None =>
+ srcSec match {
+ case Some((start1, end1)) => {
+ out append dst.substring(copied, tocopy).trim
+ out append "\n"
+ copied = tocopy
+ out append src.substring(start1, end1).trim
+ }
+ case None =>
+ }
+ }
+
+ //TODO: enable this once you know how to get `sym.paramss`
+ /*
+ for (params <- sym.paramss; param <- params)
+ mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
+ for (tparam <- sym.typeParams)
+ mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
+
+ mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+ mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
+ */
+
+ if (out.length == 0) dst
+ else {
+ out append dst.substring(copied)
+ out.toString
+ }
+ }
+
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
+ "<invalid inheritdoc annotation>"
+ }
+
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
+ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val expandLimit = 10
+
+ def expandInternal(str: String, depth: Int): String = {
+ if (depth >= expandLimit)
+ throw new ExpansionLimitExceeded(str)
+
+ val out = new StringBuilder
+ var copied, idx = 0
+ // excluding variables written as \$foo so we can use them when
+ // necessary to document things like Symbol#decode
+ def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
+ while (idx < str.length) {
+ if ((str charAt idx) != '$' || isEscaped)
+ idx += 1
+ else {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ def replaceWith(repl: String) {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ variableName(str.substring(vstart + 1, idx)) match {
+ case "super" =>
+ superComment(sym) foreach { sc =>
+ val superSections = tagIndex(sc)
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ }
+ case "" => idx += 1
+ case vname =>
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None =>
+ dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
+ }
+ }
+ }
+ }
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandInternal(out.toString, depth + 1)
+ }
+ }
+
+ // We suppressed expanding \$ throughout the recursion, and now we
+ // need to replace \$ with $ so it looks as intended.
+ expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
+ }
+
+ def defineVariables(sym: Symbol)(implicit ctx: Context) = {
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ val raw = ctx.docbase.docstring(sym).map(_.chrs).getOrElse("")
+ defs(sym) ++= defines(raw).map {
+ str => {
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments
+ */
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
+
+ /** Lookup definition of variable.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
+ case NoSymbol => None
+ case _ =>
+ val searchList =
+ if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
+ else site.info.baseClasses
+
+ searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
+ }
+ }
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ */
+ def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
+ ctx.docbase.docstring(sym).map(_.pos).getOrElse(NoPosition)
+
+ /** A version which doesn't consider self types, as a temporary measure:
+ * an infinite loop has broken out between superComment and cookedDocComment
+ * since r23926.
+ */
+ private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
+ if (!sym.owner.isClass) Nil
+ else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
+ //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
+ }
+
+ class ExpansionLimitExceeded(str: String) extends Exception
+}
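
A worked illustration of the `@define` variable expansion this trait implements; the docstrings are hypothetical and only sketch the intended behaviour:

{{{
// Given a site symbol whose docstring defines a variable:
//   /** @define coll mutable sequence */
// and a member whose docstring refers to it:
//   /** Returns the size of the $coll. */
// then, after defineVariables(site), expandedDocComment(member, site)
// yields:
//   /** Returns the size of the mutable sequence. */
}}}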
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala
new file mode 100644
index 000000000..9685b6934
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala
@@ -0,0 +1,846 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import dotty.tools.dotc.util.Positions._
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.Context
+import scala.collection.mutable
+import dotty.tools.dotc.config.Printers.dottydoc
+import scala.util.matching.Regex
+
+trait CommentParser extends util.MemberLookup {
+ import Regexes._
+ import model.internal._
+
+ case class FullComment (
+ body: Body,
+ authors: List[Body],
+ see: List[Body],
+ result: Option[Body],
+ throws: Map[String, Body],
+ valueParams: Map[String, Body],
+ typeParams: Map[String, Body],
+ version: Option[Body],
+ since: Option[Body],
+ todo: List[Body],
+ deprecated: Option[Body],
+ note: List[Body],
+ example: List[Body],
+ constructor: Option[Body],
+ group: Option[Body],
+ groupDesc: Map[String, Body],
+ groupNames: Map[String, Body],
+ groupPrio: Map[String, Body],
+ hideImplicitConversions: List[Body],
+ shortDescription: List[Body]
+ ) {
+
+ /**
+ * Transform this CommentParser.FullComment to a Comment using the supplied
+ * Body transformer
+ */
+ def toComment(transform: Body => String) = Comment(
+ transform(body),
+ short =
+ if (shortDescription.nonEmpty) shortDescription.map(transform).mkString
+ else body.summary.map(transform).getOrElse(""),
+ authors.map(transform),
+ see.map(transform),
+ result.map(transform),
+ throws.map { case (k, v) => (k, transform(v)) },
+ valueParams.map { case (k, v) => (k, transform(v)) },
+ typeParams.map { case (k, v) => (k, transform(v)) },
+ version.map(transform),
+ since.map(transform),
+ todo.map(transform),
+ deprecated.map(transform),
+ note.map(transform),
+ example.map(transform),
+ constructor.map(transform),
+ group.map(transform),
+ groupDesc.map { case (k, v) => (k, transform(v)) },
+ groupNames.map { case (k, v) => (k, transform(v)) },
+ groupPrio.map { case (k, v) => (k, transform(v)) },
+ hideImplicitConversions.map(transform)
+ )
+ }
+
+ /** Parses a raw comment string into a `FullComment` object.
+ * @param entity the entity to which the comment belongs
+ * @param packages all packages parsed by the Scaladoc tool, used for lookup
+ * @param comment a cleaned comment (as a list of lines) to be parsed
+ * @param src the raw comment source string.
+ * @param pos the position of the comment in source.
+ */
+ def parse(
+ entity: Entity,
+ packages: Map[String, Package],
+ comment: List[String],
+ src: String,
+ pos: Position,
+ site: Symbol = NoSymbol
+ )(implicit ctx: Context): FullComment = {
+
+ /** Parses a comment (in the form of a list of lines) to a `FullComment`
+ * instance, recursively on lines. To do so, it splits the whole comment
+ * into main body and tag bodies, then runs the `WikiParser` on each body
+ * before creating the comment instance.
+ *
+ * @param docBody The body of the comment parsed until now.
+ * @param tags All tags parsed until now.
+ * @param lastTagKey The last parsed tag, or `None` if the tag section
+ * hasn't started. Lines that are not tagged are part
+ * of the previous tag or, if none exists, of the body.
+ * @param remaining The lines that must still recursively be parsed.
+ * @param inCodeBlock Whether the next line is part of a code block (in
+ * which no tags must be read).
+ */
+ def parseComment (
+ docBody: StringBuilder,
+ tags: Map[TagKey, List[String]],
+ lastTagKey: Option[TagKey],
+ remaining: List[String],
+ inCodeBlock: Boolean
+ ): FullComment = remaining match {
+
+ case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false)
+ else if (!before.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false)
+ else if (!after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true)
+ case None =>
+ parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
+ }
+
+ case CodeBlockEndRegex(before, marker, after) :: ls => {
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
+ else if (!before.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true)
+ else if (!after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false)
+ case None =>
+ parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
+ }
+ }
+
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => {
+ val key = SymbolTagKey(name, sym)
+ val value = body :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => {
+ val key = SimpleTagKey(name)
+ val value = body :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) => {
+ val key = SimpleTagKey(name)
+ val value = "" :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case line :: ls if (lastTagKey.isDefined) => {
+ val newtags = if (!line.isEmpty) {
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ tags + (key -> value)
+ } else tags
+ parseComment(docBody, newtags, lastTagKey, ls, inCodeBlock)
+ }
+
+ case line :: ls => {
+ if (docBody.length > 0) docBody append endOfLine
+ docBody append line
+ parseComment(docBody, tags, lastTagKey, ls, inCodeBlock)
+ }
+
+ case Nil => {
+ // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
+ val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
+ val contentDiagramTag = SimpleTagKey("contentDiagram")
+
+ val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val stripTags = List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
+ val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
+
+ val bodyTags: mutable.Map[TagKey, List[Body]] =
+ mutable.Map((tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(entity, packages, _, pos, site))}).toSeq: _*)
+
+ def oneTag(key: SimpleTagKey, filterEmpty: Boolean = true): Option[Body] =
+ ((bodyTags remove key): @unchecked) match {
+ case Some(r :: rs) if !(filterEmpty && r.blocks.isEmpty) =>
+ if (!rs.isEmpty) dottydoc.println(s"$pos: only one '@${key.name}' tag is allowed")
+ Some(r)
+ case _ => None
+ }
+
+ def allTags[B](key: SimpleTagKey): List[Body] =
+ (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty).reverse
+
+ def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): Map[String, Body] = {
+ val keys: Seq[SymbolTagKey] =
+ bodyTags.keys.toSeq flatMap {
+ case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+ case stk: SimpleTagKey if (stk.name == key.name) =>
+ dottydoc.println(s"$pos: tag '@${stk.name}' must be followed by a symbol name")
+ None
+ case _ => None
+ }
+ val pairs: Seq[(String, Body)] =
+ for (key <- keys) yield {
+ val bs = (bodyTags remove key).get
+ if (bs.length > 1)
+ dottydoc.println(s"$pos: only one '@${key.name}' tag for symbol ${key.symbol} is allowed")
+ (key.symbol, bs.head)
+ }
+ Map.empty[String, Body] ++ (if (filterEmpty) pairs.filterNot(_._2.blocks.isEmpty) else pairs)
+ }
+
+ def linkedExceptions: Map[String, Body] = {
+ val m = allSymsOneTag(SimpleTagKey("throws"), filterEmpty = false)
+
+ m.map { case (targetStr,body) =>
+ val link = lookup(entity, packages, targetStr, pos)
+ val newBody = body match {
+ case Body(List(Paragraph(Chain(content)))) =>
+ val descr = Text(" ") +: content
+ val entityLink = EntityLink(Monospace(Text(targetStr)), link)
+ Body(List(Paragraph(Chain(entityLink +: descr))))
+ case _ => body
+ }
+ (targetStr, newBody)
+ }
+ }
+
+ val cmt = FullComment(
+ body = parseWikiAtSymbol(entity, packages, docBody.toString, pos, site),
+ authors = allTags(SimpleTagKey("author")),
+ see = allTags(SimpleTagKey("see")),
+ result = oneTag(SimpleTagKey("return")),
+ throws = linkedExceptions,
+ valueParams = allSymsOneTag(SimpleTagKey("param")),
+ typeParams = allSymsOneTag(SimpleTagKey("tparam")),
+ version = oneTag(SimpleTagKey("version")),
+ since = oneTag(SimpleTagKey("since")),
+ todo = allTags(SimpleTagKey("todo")),
+ deprecated = oneTag(SimpleTagKey("deprecated"), filterEmpty = false),
+ note = allTags(SimpleTagKey("note")),
+ example = allTags(SimpleTagKey("example")),
+ constructor = oneTag(SimpleTagKey("constructor")),
+ group = oneTag(SimpleTagKey("group")),
+ groupDesc = allSymsOneTag(SimpleTagKey("groupdesc")),
+ groupNames = allSymsOneTag(SimpleTagKey("groupname")),
+ groupPrio = allSymsOneTag(SimpleTagKey("groupprio")),
+ hideImplicitConversions = allTags(SimpleTagKey("hideImplicitConversion")),
+ shortDescription = allTags(SimpleTagKey("shortDescription"))
+ )
+
+ for ((key, _) <- bodyTags)
+ dottydoc.println(s"$pos: Tag '@${key.name}' is not recognised")
+
+ cmt
+ }
+ }
+
+ parseComment(new StringBuilder(comment.size), Map.empty, None, comment, inCodeBlock = false)
+ }
+
+ /** A key used for a tag map. The key is built from the name of the tag and
+ * from the linked symbol if the tag has one.
+ * Equality on tag keys is structural. */
+ private sealed abstract class TagKey {
+ def name: String
+ }
+
+ private final case class SimpleTagKey(name: String) extends TagKey
+ private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+
+ /** Something that should not have happened, happened, and Scaladoc should exit. */
+ private def oops(msg: String): Nothing =
+ throw new IllegalArgumentException("program logic: " + msg)
+
+ /** Parses a string containing wiki syntax into a `Comment` object.
+ * Note that the string is assumed to be clean:
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWikiAtSymbol(
+ entity: Entity,
+ packages: Map[String, Package],
+ string: String,
+ pos: Position,
+ site: Symbol
+ )(implicit ctx: Context): Body = new WikiParser(entity, packages, string, pos, site).document()
+
+ /** Original wikiparser from NSC
+ * @author Ingo Maier
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet
+ */
+ protected final class WikiParser(
+ entity: Entity,
+ packages: Map[String, Package],
+ val buffer: String,
+ pos: Position,
+ site: Symbol
+ )(implicit ctx: Context) extends CharReader(buffer) { wiki =>
+ var summaryParsed = false
+
+ def document(): Body = {
+ val blocks = new mutable.ListBuffer[Block]
+ while (char != endOfText)
+ blocks += block()
+ Body(blocks.toList)
+ }
+
+ /* BLOCKS */
+
+ /** {{{ block ::= code | title | hrule | listBlock | para }}} */
+ def block(): Block = {
+ if (checkSkipInitWhitespace("{{{"))
+ code()
+ else if (checkSkipInitWhitespace('='))
+ title()
+ else if (checkSkipInitWhitespace("----"))
+ hrule()
+ else if (checkList)
+ listBlock
+ else {
+ para()
+ }
+ }
+
+ /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+ * Characters used to build lists and their constructors */
+ protected val listStyles = Map[String, (Seq[Block] => Block)](
+ "- " -> ( UnorderedList(_) ),
+ "1. " -> ( OrderedList(_,"decimal") ),
+ "I. " -> ( OrderedList(_,"upperRoman") ),
+ "i. " -> ( OrderedList(_,"lowerRoman") ),
+ "A. " -> ( OrderedList(_,"upperAlpha") ),
+ "a. " -> ( OrderedList(_,"lowerAlpha") )
+ )
+
+ /** Checks if the current line is formed with more than one space and one of the listStyles */
+ def checkList =
+ (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+ /** {{{
+ * nListBlock ::= nLine { mListBlock }
+ * nLine ::= nSpc listStyle para '\n'
+ * }}}
+ * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
+ def listBlock(): Block = {
+
+ /** Consumes one list item block and returns it, or None if the block is
+ * not a list or belongs to a different list. */
+ def listLine(indent: Int, style: String): Option[Block] =
+ if (countWhitespace > indent && checkList)
+ Some(listBlock)
+ else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+ None
+ else {
+ jumpWhitespace()
+ jump(style)
+ val p = Paragraph(inline(isInlineEnd = false))
+ blockEnded("end of list line ")
+ Some(p)
+ }
+
+ /** Consumes all list item blocks (possibly with nested lists) of the
+ * same list and returns the list block. */
+ def listLevel(indent: Int, style: String): Block = {
+ val lines = mutable.ListBuffer.empty[Block]
+ var line: Option[Block] = listLine(indent, style)
+ while (line.isDefined) {
+ lines += line.get
+ line = listLine(indent, style)
+ }
+ val constructor = listStyles(style)
+ constructor(lines)
+ }
+
+ val indent = countWhitespace
+ val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
+ listLevel(indent, style)
+ }
+
+ def code(): Block = {
+ jumpWhitespace()
+ jump("{{{")
+ val str = readUntil("}}}")
+ if (char == endOfText)
+ reportError(pos, "unclosed code block")
+ else
+ jump("}}}")
+ blockEnded("code block")
+ Code(normalizeIndentation(str))
+ }
+
+ /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
+ def title(): Block = {
+ jumpWhitespace()
+ val inLevel = repeatJump('=')
+ val text = inline(check("=" * inLevel))
+ val outLevel = repeatJump('=', inLevel)
+ if (inLevel != outLevel)
+ reportError(pos, "unbalanced or unclosed heading")
+ blockEnded("heading")
+ Title(text, inLevel)
+ }
+
+ /** {{{ hrule ::= "----" { '-' } '\n' }}} */
+ def hrule(): Block = {
+ jumpWhitespace()
+ repeatJump('-')
+ blockEnded("horizontal rule")
+ HorizontalRule()
+ }
+
+ /** {{{ para ::= inline '\n' }}} */
+ def para(): Block = {
+ val p =
+ if (summaryParsed)
+ Paragraph(inline(isInlineEnd = false))
+ else {
+ val s = summary()
+ val r =
+ if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false))
+ summaryParsed = true
+ Paragraph(Chain(r))
+ }
+ while (char == endOfLine && char != endOfText)
+ nextChar()
+ p
+ }
+
+ /* INLINES */
+
+ val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
+ val CLOSE_TAG = "^</([A-Za-z]+)>$".r
+ private def readHTMLFrom(begin: HtmlTag): String = {
+ val list = mutable.ListBuffer.empty[String]
+ val stack = mutable.ListBuffer.empty[String]
+
+ begin.close match {
+ case Some(HtmlTag(CLOSE_TAG(s))) =>
+ stack += s
+ case _ =>
+ return ""
+ }
+
+ do {
+ val str = readUntil { char == safeTagMarker || char == endOfText }
+ nextChar()
+
+ list += str
+
+ str match {
+ case OPEN_TAG(s, _, standalone) => {
+ if (standalone != "/") {
+ stack += s
+ }
+ }
+ case CLOSE_TAG(s) => {
+ if (s == stack.last) {
+ stack.remove(stack.length-1)
+ }
+ }
+ case _ => ;
+ }
+ } while (stack.length > 0 && char != endOfText)
+
+ list mkString ""
+ }
+
+ def inline(isInlineEnd: => Boolean): Inline = {
+
+ def inline0(): Inline = {
+ if (char == safeTagMarker) {
+ val tag = htmlTag()
+ HtmlTag(tag.data + readHTMLFrom(tag))
+ }
+ else if (check("'''")) bold()
+ else if (check("''")) italic()
+ else if (check("`")) monospace()
+ else if (check("__")) underline()
+ else if (check("^")) superscript()
+ else if (check(",,")) subscript()
+ else if (check("[[")) link()
+ else {
+ val str = readUntil {
+ char == safeTagMarker ||
+ check("''") ||
+ char == '`' ||
+ check("__") ||
+ char == '^' ||
+ check(",,") ||
+ check("[[") ||
+ isInlineEnd ||
+ checkParaEnded ||
+ char == endOfLine
+ }
+ Text(str)
+ }
+ }
+
+ val inlines: List[Inline] = {
+ val iss = mutable.ListBuffer.empty[Inline]
+ iss += inline0()
+ while (!isInlineEnd && !checkParaEnded) {
+ val skipEndOfLine = if (char == endOfLine) {
+ nextChar()
+ true
+ } else {
+ false
+ }
+
+ val current = inline0()
+ (iss.last, current) match {
+ case (Text(t1), Text(t2)) if skipEndOfLine =>
+ iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
+ case (i1, i2) if skipEndOfLine =>
+ iss ++= List(Text(endOfLine.toString), i2)
+ case _ => iss += current
+ }
+ }
+ iss.toList
+ }
+
+ inlines match {
+ case Nil => Text("")
+ case i :: Nil => i
+ case is => Chain(is)
+ }
+
+ }
+
+ def htmlTag(): HtmlTag = {
+ jump(safeTagMarker)
+ val read = readUntil(safeTagMarker)
+ if (char != endOfText) jump(safeTagMarker)
+ HtmlTag(read)
+ }
+
+ def bold(): Inline = {
+ jump("'''")
+ val i = inline(check("'''"))
+ jump("'''")
+ Bold(i)
+ }
+
+ def italic(): Inline = {
+ jump("''")
+ val i = inline(check("''"))
+ jump("''")
+ Italic(i)
+ }
+
+ def monospace(): Inline = {
+ jump("`")
+ val i = inline(check("`"))
+ jump("`")
+ Monospace(i)
+ }
+
+ def underline(): Inline = {
+ jump("__")
+ val i = inline(check("__"))
+ jump("__")
+ Underline(i)
+ }
+
+ def superscript(): Inline = {
+ jump("^")
+ val i = inline(check("^"))
+ if (jump("^")) {
+ Superscript(i)
+ } else {
+ Chain(Seq(Text("^"), i))
+ }
+ }
+
+ def subscript(): Inline = {
+ jump(",,")
+ val i = inline(check(",,"))
+ jump(",,")
+ Subscript(i)
+ }
+
+ def summary(): Inline = {
+ val i = inline(checkSentenceEnded())
+ Summary(
+ if (jump("."))
+ Chain(List(i, Text(".")))
+ else
+ i
+ )
+ }
+
+ def link(): Inline = {
+ val SchemeUri = """([a-z]+:.*)""".r
+ jump("[[")
+ val parens = 2 + repeatJump('[')
+ val stop = "]" * parens
+ val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) }
+ val title =
+ if (!check(stop)) Some({
+ jumpWhitespaceOrNewLine()
+ inline(check(stop))
+ })
+ else None
+ jump(stop)
+
+ (target, title) match {
+ case (SchemeUri(uri), optTitle) =>
+ Link(uri, optTitle getOrElse Text(uri))
+ case (qualName, optTitle) =>
+ makeEntityLink(entity, packages, optTitle getOrElse Text(target), pos, target)
+ }
+ }
+
+ /* UTILITY */
+
+ /** {{{ eol ::= { whitespace } '\n' }}} */
+ def blockEnded(blockType: String): Unit = {
+ if (char != endOfLine && char != endOfText) {
+ reportError(pos, "no additional content on same line after " + blockType)
+ jumpUntil(endOfLine)
+ }
+ while (char == endOfLine)
+ nextChar()
+ }
+
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the least indented non-empty line.
+ * For indented pieces of code, it reduces the indent to the least whitespace prefix:
+ * {{{
+ * indented example
+ * another indented line
+ * if (condition)
+ * then do something;
+ * ^ this is the least whitespace prefix
+ * }}}
+ */
+ def normalizeIndentation(_code: String): String = {
+
+ val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n'
+ val lines = code.split("\n")
+
+ // maxSkip - size of the longest common whitespace prefix of non-empty lines
+ val nonEmptyLines = lines.filter(_.trim.nonEmpty)
+ val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min
+
+ // remove common whitespace prefix
+ lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n")
+ }
+
+ def checkParaEnded(): Boolean = {
+ (char == endOfText) ||
+ ((char == endOfLine) && {
+ val poff = offset
+ nextChar() // read EOL
+ val ok = {
+ checkSkipInitWhitespace(endOfLine) ||
+ checkSkipInitWhitespace('=') ||
+ checkSkipInitWhitespace("{{{") ||
+ checkList ||
+ checkSkipInitWhitespace('\u003D')
+ }
+ offset = poff
+ ok
+ })
+ }
+
+ def checkSentenceEnded(): Boolean = {
+ (char == '.') && {
+ val poff = offset
+ nextChar() // read '.'
+ val ok = char == endOfText || char == endOfLine || isWhitespace(char)
+ offset = poff
+ ok
+ }
+ }
+
+ def reportError(pos: Position, message: String) =
+ dottydoc.println(s"$pos: $message")
+ }
+
+ protected sealed class CharReader(buffer: String) { reader =>
+
+ var offset: Int = 0
+ def char: Char =
+ if (offset >= buffer.length) endOfText else buffer charAt offset
+
+ final def nextChar() =
+ offset += 1
+
+ final def check(chars: String): Boolean = {
+ val poff = offset
+ val ok = jump(chars)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(c: Char): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val ok = jump(c)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(chars: String): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val (ok0, chars0) =
+ if (chars.charAt(0) == ' ')
+ (offset > poff, chars substring 1)
+ else
+ (true, chars)
+ val ok = ok0 && jump(chars0)
+ offset = poff
+ ok
+ }
+
+ def countWhitespace: Int = {
+ var count = 0
+ val poff = offset
+ while (isWhitespace(char) && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ offset = poff
+ count
+ }
+
+ /* Jumpers */
+
+ /** Jumps a character and consumes it
+ * @return true only if the correct character has been jumped */
+ final def jump(ch: Char): Boolean = {
+ if (char == ch) {
+ nextChar()
+ true
+ }
+ else false
+ }
+
+ /** Jumps all the characters in chars, consuming them in the process.
+ * @return true only if the correct characters have been jumped
+ */
+ final def jump(chars: String): Boolean = {
+ var index = 0
+ while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
+ nextChar()
+ index += 1
+ }
+ index == chars.length
+ }
+
+ final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
+ var count = 0
+ while (jump(c) && count < max)
+ count += 1
+ count
+ }
+
+ final def jumpUntil(ch: Char): Int = {
+ var count = 0
+ while (char != ch && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ final def jumpUntil(pred: => Boolean): Int = {
+ var count = 0
+ while (!pred && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+
+ def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char))
+
+
+ /* Readers */
+ final def readUntil(c: Char): String = {
+ withRead {
+ while (char != c && char != endOfText) {
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(chars: String): String = {
+ assert(chars.length > 0)
+ withRead {
+ val c = chars.charAt(0)
+ while (!check(chars) && char != endOfText) {
+ nextChar()
+ while (char != c && char != endOfText)
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(pred: => Boolean): String = {
+ withRead {
+ while (char != endOfText && !pred) {
+ nextChar()
+ }
+ }
+ }
+
+ private def withRead(read: => Unit): String = {
+ val start = offset
+ read
+ buffer.substring(start, offset)
+ }
+
+ /* Chars classes */
+ def isWhitespace(c: Char) = c == ' ' || c == '\t'
+
+ def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n'
+ }
+}
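The WikiParser above is driven by the CharReader cursor at the bottom of the file; a hedged, self-contained sketch of that scanning style (simplified, not the real class):

  class MiniReader(buffer: String) {
    private val endOfText = '\u0003'
    private var offset = 0

    def char: Char = if (offset >= buffer.length) endOfText else buffer.charAt(offset)

    // consume the string `s` if the input starts with it at the current offset
    def jump(s: String): Boolean = {
      var i = 0
      while (i < s.length && char == s.charAt(i)) { offset += 1; i += 1 }
      i == s.length
    }

    // read characters until the delimiter `s` (or the end of input) is reached
    def readUntil(s: String): String = {
      val start = offset
      while (char != endOfText && !buffer.startsWith(s, offset)) offset += 1
      buffer.substring(start, offset)
    }
  }

  object MiniReaderDemo {
    def main(args: Array[String]): Unit = {
      val r = new MiniReader("{{{ val x = 1 }}} rest")
      if (r.jump("{{{")) println(r.readUntil("}}}").trim) // prints: val x = 1
    }
  }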
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala
new file mode 100644
index 000000000..2d75b0c66
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala
@@ -0,0 +1,84 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import scala.util.matching.Regex
+
+object Regexes {
+ val TrailingWhitespace = """\s+$""".r
+
+ /** The body of a line, dropping the (optional) start star-marker,
+ * one leading whitespace and all trailing whitespace
+ */
+ val CleanCommentLine =
+ new Regex("""(?:\s*\*\s?)?(.*)""")
+
+ /** Dangerous HTML tags that should be replaced by something safer,
+ * such as wiki syntax, or that should be dropped
+ */
+ val DangerousTags =
+ new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
+
+ /** Javadoc tags that should be replaced by something useful, such as wiki
+ * syntax, or that should be dropped. */
+ val JavadocTags =
+ new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""")
+
+ /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
+ def javadocReplacement(mtch: Regex.Match): String = {
+ mtch.group(1) match {
+ case "code" => "<code>" + mtch.group(2) + "</code>"
+ case "docRoot" => ""
+ case "link" => "`[[" + mtch.group(2) + "]]`"
+ case "linkplain" => "[[" + mtch.group(2) + "]]"
+ case "literal" => "`" + mtch.group(2) + "`"
+ case "value" => "`" + mtch.group(2) + "`"
+ case _ => ""
+ }
+ }
+
+ /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
+ * if it cannot be salvaged. */
+ def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "p" | "div" => "\n\n"
+ case "h1" => "\n= "
+ case "/h1" => " =\n"
+ case "h2" => "\n== "
+ case "/h2" => " ==\n"
+ case "h3" => "\n=== "
+ case "/h3" => " ===\n"
+ case "h4" | "h5" | "h6" => "\n==== "
+ case "/h4" | "/h5" | "/h6" => " ====\n"
+ case "li" => "\n * - "
+ case _ => ""
+ }
+
+ /** Safe HTML tags that can be kept. */
+ val SafeTags =
+ new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+
+ val safeTagMarker = '\u000E'
+ val endOfLine = '\u000A'
+ val endOfText = '\u0003'
+
+ /** A Scaladoc tag not linked to a symbol and not followed by text */
+ val SingleTagRegex =
+ new Regex("""\s*@(\S+)\s*""")
+
+ /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
+ val SimpleTagRegex =
+ new Regex("""\s*@(\S+)\s+(.*)""")
+
+ /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
+ * of the symbol, and the rest of the line. */
+ val SymbolTagRegex =
+ new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
+
+ /** The start of a Scaladoc code block */
+ val CodeBlockStartRegex =
+ new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
+
+ /** The end of a Scaladoc code block */
+ val CodeBlockEndRegex =
+ new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
+}
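A rough illustration of how these patterns split a Scaladoc tag line; the regexes are copied from Regexes above, while the demo object itself is hypothetical:

  object TagRegexDemo {
    val SymbolTagRegex =
      """\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""".r
    val SimpleTagRegex = """\s*@(\S+)\s+(.*)""".r

    def main(args: Array[String]): Unit = {
      "@param x the value to wrap" match {
        case SymbolTagRegex(name, sym, body) => println(s"$name / $sym / $body")
        case _ => println("no match")
      } // prints: param / x / the value to wrap

      "@return the wrapped value" match {
        case SimpleTagRegex(name, body) => println(s"$name / $body")
        case _ => println("no match")
      } // prints: return / the wrapped value
    }
  }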
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala
new file mode 100644
index 000000000..e5307bd3c
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala
@@ -0,0 +1,224 @@
+/*
+ * Port of DocStrings.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+import scala.reflect.internal.Chars._
+
+object CommentUtils {
+
+ /** Returns index of string `str` following `start` skipping longest
+ * sequence of whitespace characters (but no newlines)
+ */
+ def skipWhitespace(str: String, start: Int): Int =
+ if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipIdent(str: String, start: Int): Int =
+ if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping the `@` character
+ * and the following sequence of identifier characters.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
+ /** Returns index of string `str` after `start` skipping longest
+ * sequence of space and tab characters, possibly also containing
+ * a single `*` character or the `/``**` sequence.
+ * @pre start == str.length || str(start) == `\n`
+ */
+ def skipLineLead(str: String, start: Int): Int =
+ if (start == str.length) start
+ else {
+ val idx = skipWhitespace(str, start + 1)
+ if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
+ else idx
+ }
+
+ /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
+ */
+ def skipToEol(str: String, start: Int): Int =
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ else start
+
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
+ * which satisfies predicate `p`.
+ */
+ def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
+ val idx = skipLineLead(str, skipToEol(str, start))
+ if (idx < str.length && !p(idx)) findNext(str, idx)(p)
+ else idx
+ }
+
+ /** Returns all indices following `start` that start a line (i.e. after skipLineLead)
+ * and satisfy predicate `p`.
+ */
+ def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
+ val idx = findNext(str, start)(p)
+ if (idx == str.length) List()
+ else idx :: findAll(str, idx)(p)
+ }
+
+ /** Produces a string index, which is a list of `sections`, i.e
+ * pairs of start/end positions of all tagged sections in the string.
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
+ * characters which terminate the comment.
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
+ */
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
+ case List() => List()
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ }
+ }
+
+ /**
+ * Merge sections following a usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.indexWhere(str.startsWith("@usecase", _)) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
+ commentSections ::: usecaseSections
+ case _ =>
+ idxs
+ }
+ }
+
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.startsWith("@inheritdoc", _))
+
+ /** Does the given `section` interval start with the given `tag`?
+ */
+ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
+ startsWithTag(str, section._1, tag)
+
+ def startsWithTag(str: String, start: Int, tag: String): Boolean =
+ str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
+
+ /** The first start tag of a list of tag intervals,
+ * or the end of the whole comment string - 2 if list is empty
+ */
+ def startTag(str: String, sections: List[(Int, Int)]) = sections match {
+ case Nil => str.length - 2
+ case (start, _) :: _ => start
+ }
+
+ /** A map from parameter names to start/end indices describing all parameter
+ * sections in `str` tagged with `tag`, where `sections` is the index of `str`.
+ */
+ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections if startsWithTag(str, section, tag)) yield {
+ val start = skipWhitespace(str, section._1 + tag.length)
+ str.substring(start, skipIdent(str, start)) -> section
+ }
+ }
+
+ /** Optionally start and end index of the group section in `str`, or `None`
+ * if `str` does not have a @group. */
+ def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@group"))
+
+
+ /** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @return.
+ */
+ def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@return"))
+
+ /** Extracts variable name from a string, stripping any pair of surrounding braces */
+ def variableName(str: String): String =
+ if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
+ str.substring(1, str.length - 1)
+ else
+ str
+
+ /** Returns index following variable, or start index if no variable was recognized
+ */
+ def skipVariable(str: String, start: Int): Int = {
+ var idx = start
+ if (idx < str.length && (str charAt idx) == '{') {
+ do idx += 1
+ while (idx < str.length && (str charAt idx) != '}')
+ if (idx < str.length) idx + 1 else start
+ } else {
+ while (idx < str.length && isVarPart(str charAt idx))
+ idx += 1
+ idx
+ }
+ }
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an identifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ val (beg, _) = section
+ assert(str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+
+ val start = skipWhitespace(str, skipTag(str, beg))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ val (beg, end) = section
+ if (str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
+ else
+ (skipWhitespace(str, skipTag(str, beg)), end)
+ }
+
+ /** Cleanup section text */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, result.length - 1)
+ result
+ }
+
+}
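A standalone sketch of the section-index idea behind `tagIndex`: pair the start of every tag with the start of the next one (or the end of the string). Simplified for illustration; the real implementation also skips line leads and merges usecase/inheritdoc sections:

  object SectionIndexSketch {
    def sections(str: String): List[(Int, Int)] = {
      // naive: every '@' starts a section that runs to the next '@' or end of string
      val starts = (0 until str.length).filter(str.charAt(_) == '@').toList
      starts.zip(starts.drop(1) :+ str.length)
    }

    def main(args: Array[String]): Unit = {
      val raw = "Adds an element. @param elem the element @return the new collection"
      sections(raw).foreach { case (s, e) => println(raw.substring(s, e).trim) }
      // prints:
      // @param elem the element
      // @return the new collection
    }
  }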
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/entities.scala b/dottydoc/src/dotty/tools/dottydoc/model/entities.scala
new file mode 100644
index 000000000..76792070c
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/entities.scala
@@ -0,0 +1,115 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+trait Entity {
+ def name: String
+
+ /** Path from root, i.e. `scala.Option$` */
+ def path: List[String]
+
+ def comment: Option[Comment]
+
+ def kind: String
+
+ def parent: Entity
+
+ /** All parents from package level, i.e. Package to Object to Member etc. */
+ def parents: List[Entity] = parent match {
+ case NonEntity => Nil
+ case e => e :: e.parents
+ }
+
+ /** Applies `f` to entity if != `NonEntity` */
+ def fold[A](nonEntity: A)(f: Entity => A) = this match {
+ case NonEntity => nonEntity
+ case x => f(x)
+ }
+}
+
+trait SuperTypes {
+ def superTypes: List[MaterializableLink]
+}
+
+trait Members {
+ def members: List[Entity]
+}
+
+trait Modifiers {
+ def modifiers: List[String]
+
+ val isPrivate: Boolean =
+ modifiers.contains("private")
+}
+
+trait TypeParams {
+ def typeParams: List[String]
+}
+
+trait ReturnValue {
+ def returnValue: Reference
+}
+
+trait ParamList {
+ def list: List[NamedReference]
+ def isImplicit: Boolean
+}
+
+trait Constructors {
+ def constructors: List[List[ParamList]]
+}
+
+trait ImplicitlyAddedEntity extends Entity {
+ def implicitlyAddedFrom: Option[Reference]
+}
+
+trait Package extends Entity with Members {
+ val kind = "package"
+
+ def children: List[Entity with Members]
+}
+
+trait Class extends Entity with Modifiers with TypeParams with Constructors with SuperTypes with Members {
+ val kind = "class"
+}
+
+trait CaseClass extends Entity with Modifiers with TypeParams with Constructors with SuperTypes with Members {
+ override val kind = "case class"
+}
+
+trait Trait extends Entity with Modifiers with TypeParams with SuperTypes with Members {
+ def traitParams: List[ParamList]
+ override val kind = "trait"
+}
+
+trait Object extends Entity with Modifiers with SuperTypes with Members {
+ override val kind = "object"
+}
+
+trait Def extends Entity with Modifiers with TypeParams with ReturnValue with ImplicitlyAddedEntity {
+ val kind = "def"
+ def paramLists: List[ParamList]
+}
+
+trait Val extends Entity with Modifiers with ReturnValue with ImplicitlyAddedEntity {
+ val kind = "val"
+}
+
+trait Var extends Entity with Modifiers with ReturnValue {
+ val kind = "var"
+}
+
+trait NonEntity extends Entity {
+ val name = ""
+ val comment = None
+ val path = Nil
+ val kind = ""
+ val parent = NonEntity
+}
+
+final case object NonEntity extends NonEntity
+final case object RootEntity extends NonEntity {
+ override val name = "root"
+}
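The model is recursive through `parent`; a small stand-in sketch (not the dottydoc types) of how `parents` walks up the chain and stops at the root:

  object EntityParentsSketch {
    sealed trait Ent {
      def name: String
      def parent: Ent
      def parents: List[Ent] = parent match {
        case Root => Nil
        case e    => e :: e.parents
      }
    }
    case object Root extends Ent { val name = "<root>"; def parent: Ent = Root }
    final case class Member(name: String, parent: Ent) extends Ent

    def main(args: Array[String]): Unit = {
      val pkg = Member("scala", Root)
      val cls = Member("Option", pkg)
      println(cls.parents.map(_.name)) // prints: List(scala)
    }
  }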
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/factories.scala b/dottydoc/src/dotty/tools/dottydoc/model/factories.scala
new file mode 100644
index 000000000..b19b836ee
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/factories.scala
@@ -0,0 +1,183 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+import dotty.tools.dotc
+import dotc.core.Types._
+import dotc.core.TypeApplications._
+import dotc.core.Contexts.Context
+import dotc.core.Symbols.{ Symbol, ClassSymbol }
+import dotty.tools.dotc.core.SymDenotations._
+import dotty.tools.dotc.core.Names.TypeName
+import dotc.ast.Trees._
+
+
+object factories {
+ import dotty.tools.dotc.ast.tpd._
+ import dotty.tools.dottydoc.model.internal.ParamListImpl
+ import dotc.core.Flags._
+
+ type TypeTree = dotty.tools.dotc.ast.Trees.Tree[Type]
+
+ def flags(t: Tree)(implicit ctx: Context): List[String] =
+ (t.symbol.flags & SourceModifierFlags)
+ .flagStrings.toList
+ .filter(_ != "<trait>")
+ .filter(_ != "interface")
+
+ def path(sym: Symbol)(implicit ctx: Context): List[String] = sym match {
+ case sym if sym.name.decode.toString == "<root>" => Nil
+ case sym => path(sym.owner) :+ sym.name.show
+ }
+
+
+ private val product = """Product[1-9][0-9]*""".r
+
+ def returnType(t: Type)(implicit ctx: Context): Reference = {
+ val defn = ctx.definitions
+
+ def typeRef(name: String, query: String = "", params: List[Reference] = Nil) = {
+ val realQuery = if (query != "") query else name
+ TypeReference(name, UnsetLink(name, realQuery), params)
+ }
+
+ def expandTpe(t: Type, params: List[Reference] = Nil): Reference = t match {
+ case tl: TypeLambda =>
+ //FIXME: should be handled correctly
+ // example, in `Option`:
+ //
+ // {{{
+ // def companion: GenericCompanion[collection.Iterable]
+ // }}}
+ //
+ // Becomes: def companion: [+X0] -> collection.Iterable[X0]
+ typeRef(tl.show + " (not handled)")
+ case AppliedType(tycon, args) =>
+ val cls = tycon.typeSymbol
+ if (tycon.isRepeatedParam)
+ expandTpe(args.head)
+ else if (defn.isFunctionClass(cls))
+ FunctionReference(args.init.map(expandTpe(_, Nil)), expandTpe(args.last))
+ else if (defn.isTupleClass(cls))
+ TupleReference(args.map(expandTpe(_, Nil)))
+ else {
+ val query = tycon.show
+ val name = query.split("\\.").last
+ typeRef(name, query, params = args.map(expandTpe(_, Nil)))
+ }
+
+ case ref @ RefinedType(parent, rn, info) =>
+ expandTpe(parent) //FIXME: will be a refined HK, aka class Foo[X] { def bar: List[X] } or similar
+ case ref @ HKApply(tycon, args) =>
+ expandTpe(tycon, args.map(expandTpe(_, params)))
+ case TypeRef(_, n) =>
+ val name = n.decode.toString.split("\\$").last
+ typeRef(name, params = params)
+ case ta: TypeAlias =>
+ expandTpe(ta.alias.widenDealias)
+ case OrType(left, right) =>
+ OrTypeReference(expandTpe(left), expandTpe(right))
+ case AndType(left, right) =>
+ AndTypeReference(expandTpe(left), expandTpe(right))
+ case tb @ TypeBounds(lo, hi) =>
+ BoundsReference(expandTpe(lo), expandTpe(hi))
+ case AnnotatedType(tpe, _) =>
+ expandTpe(tpe)
+ case ExprType(tpe) =>
+ expandTpe(tpe)
+ case c: ConstantType =>
+ ConstantReference(c.show)
+ case tt: ThisType =>
+ expandTpe(tt.underlying)
+ case ci: ClassInfo =>
+ val query = path(ci.typeSymbol).mkString(".")
+ typeRef(ci.cls.name.show, query = query)
+ case mt: MethodType =>
+ expandTpe(mt.resultType)
+ case pt: PolyType =>
+ expandTpe(pt.resultType)
+ case pp: PolyParam =>
+ val paramName = pp.paramName.show
+ val name =
+ if (paramName.contains('$'))
+ paramName.split("\\$\\$").last
+ else paramName
+
+ typeRef(name)
+ }
+
+ expandTpe(t)
+ }
+
+ def typeParams(sym: Symbol)(implicit ctx: Context): List[String] =
+ sym.info match {
+ case pt: PolyType => // TODO: not sure if this case is needed anymore
+ pt.paramNames.map(_.show.split("\\$").last)
+ case ClassInfo(_, _, _, decls, _) =>
+ decls.iterator
+ .filter(_.flags is TypeParam)
+ .map { tp =>
+ val prefix =
+ if (tp.flags is Covariant) "+"
+ else if (tp.flags is Contravariant) "-"
+ else ""
+ prefix + tp.name.show.split("\\$").last
+ }
+ .toList
+ case _ =>
+ Nil
+ }
+
+ def constructors(sym: Symbol)(implicit ctx: Context): List[List[ParamList]] = sym match {
+ case sym: ClassSymbol =>
+ paramLists(sym.primaryConstructor.info) :: Nil
+ case _ => Nil
+ }
+
+ def traitParameters(sym: Symbol)(implicit ctx: Context): List[ParamList] =
+ constructors(sym).head
+
+ def paramLists(tpe: Type)(implicit ctx: Context): List[ParamList] = tpe match {
+ case pt: PolyType =>
+ paramLists(pt.resultType)
+
+ case mt: MethodType =>
+ ParamListImpl(mt.paramNames.zip(mt.paramTypes).map { case (name, tpe) =>
+ NamedReference(
+ name.decode.toString,
+ returnType(tpe),
+ isByName = tpe.isInstanceOf[ExprType],
+ isRepeated = tpe.isRepeatedParam
+ )
+ }, mt.isImplicit) :: paramLists(mt.resultType)
+
+ case annot: AnnotatedType => paramLists(annot.tpe)
+ case (_: PolyParam | _: RefinedType | _: TypeRef | _: ThisType |
+ _: ExprType | _: OrType | _: AndType | _: HKApply) => Nil // return types should not be in the paramlist
+ }
+
+ def superTypes(t: Tree)(implicit ctx: Context): List[MaterializableLink] = t.symbol.denot match {
+ case cd: ClassDenotation =>
+ def isJavaLangObject(prefix: Type): Boolean =
+ prefix match {
+ case TypeRef(ThisType(TypeRef(NoPrefix, outerName)), innerName) =>
+ outerName.toString == "lang" && innerName.toString == "Object"
+ case _ => false
+ }
+
+ def isProductWithArity(prefix: Type): Boolean = prefix match {
+ case TypeRef(TermRef(TermRef(NoPrefix, root), scala), prod) =>
+ root.toString == "_root_" &&
+ scala.toString == "scala" &&
+ product.findFirstIn(prod.toString).isDefined
+ case _ => false
+ }
+
+ cd.classParents.collect {
+ case t: TypeRef if !isJavaLangObject(t) && !isProductWithArity(t) =>
+ UnsetLink(t.name.toString, path(t.symbol).mkString("."))
+ }
+ case _ => Nil
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/internal.scala b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala
new file mode 100644
index 000000000..6afb1ec9b
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala
@@ -0,0 +1,89 @@
+package dotty.tools.dottydoc
+package model
+
+import comment.Comment
+import references._
+
+object internal {
+
+ trait Impl {
+ var parent: Entity = NonEntity
+ }
+
+ final case class PackageImpl(
+ name: String,
+ var members: List[Entity],
+ path: List[String],
+ var comment: Option[Comment] = None
+ ) extends Package with Impl {
+ def children: List[Entity with Members] =
+ members.collect { case x: Entity with Members => x }
+ }
+
+ final case class ClassImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ constructors: List[List[ParamList]] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Class with Impl
+
+ final case class CaseClassImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ constructors: List[List[ParamList]] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends CaseClass with Impl
+
+ final case class TraitImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ traitParams: List[ParamList] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Trait with Impl
+
+ final case class ObjectImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Object with Impl
+
+ final case class DefImpl(
+ name: String,
+ modifiers: List[String],
+ path: List[String],
+ returnValue: Reference,
+ typeParams: List[String] = Nil,
+ paramLists: List[ParamList] = Nil,
+ var comment: Option[Comment] = None,
+ implicitlyAddedFrom: Option[Reference] = None
+ ) extends Def with Impl
+
+ final case class ValImpl(
+ name: String,
+ modifiers: List[String],
+ path: List[String],
+ returnValue: Reference,
+ var comment: Option[Comment] = None,
+ implicitlyAddedFrom: Option[Reference] = None
+ ) extends Val with Impl
+
+ final case class ParamListImpl(
+ list: List[NamedReference],
+ isImplicit: Boolean
+ ) extends ParamList
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/java.scala b/dottydoc/src/dotty/tools/dottydoc/model/java.scala
new file mode 100644
index 000000000..410085061
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/java.scala
@@ -0,0 +1,223 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+object java {
+ import scala.collection.JavaConverters._
+ import _root_.java.util.{ Optional => JOptional, Map => JMap }
+
+ implicit class JavaOption[A](val opt: Option[A]) extends AnyVal {
+ def asJava: JOptional[A] =
+ opt.map(a => JOptional.of(a)).getOrElse(JOptional.empty[A])
+ }
+
+ implicit class JavaComment(val cmt: Comment) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "body" -> cmt.body,
+ "short" -> cmt.short,
+ "authors" -> cmt.authors.asJava,
+ "see" -> cmt.see.asJava,
+ "result" -> cmt.result.asJava,
+ "throws" -> cmt.throws.asJava,
+ "valueParams" -> cmt.valueParams.asJava,
+ "typeParams" -> cmt.typeParams.asJava,
+ "version" -> cmt.version.asJava,
+ "since" -> cmt.since.asJava,
+ "todo" -> cmt.todo.asJava,
+ "deprecated" -> cmt.deprecated.asJava,
+ "note" -> cmt.note.asJava,
+ "example" -> cmt.example.asJava,
+ "constructor" -> cmt.constructor.asJava,
+ "group" -> cmt.group.asJava,
+ "groupDesc" -> cmt.groupDesc.asJava,
+ "groupNames" -> cmt.groupNames.asJava,
+ "groupPrio" -> cmt.groupPrio.asJava,
+ "hideImplicitConversions" -> cmt.hideImplicitConversions.asJava
+ ).asJava
+ }
+
+ implicit class JavaPackage(val ent: Package) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "path" -> ent.path.asJava,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaCaseClass(val ent: CaseClass) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaClass(val ent: Class) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaTrait(val ent: Trait) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaObject(val ent: Object) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaDef(val ent: Def) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "returnValue" -> ent.returnValue.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "paramLists" -> ent.paramLists.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava,
+ "implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava
+ ).asJava
+ }
+
+ implicit class JavaVal(val ent: Val) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "returnValue" -> ent.returnValue.asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava,
+ "implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava
+ ).asJava
+ }
+
+ implicit class JavaParamList(val pl: ParamList) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "list" -> pl.list.map(_.asJava).asJava,
+ "isImplicit" -> pl.isImplicit
+ ).asJava
+ }
+
+ implicit class JavaReference(val ref: Reference) extends AnyVal {
+ def asJava: JMap[String, _] = ref match {
+ case TypeReference(title, tpeLink, paramLinks) => Map(
+ "kind" -> "TypeReference",
+ "title" -> title,
+ "tpeLink" -> tpeLink.asJava,
+ "paramLinks" -> paramLinks.map(_.asJava).asJava
+ ).asJava
+
+ case OrTypeReference(left, right) => Map(
+ "kind" -> "OrTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case AndTypeReference(left, right) => Map(
+ "kind" -> "AndTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case FunctionReference(args, returnValue) => Map(
+ "kind" -> "FunctionReference",
+ "args" -> args.map(_.asJava).asJava,
+ "returnValue" -> returnValue
+ ).asJava
+
+ case TupleReference(args) => Map(
+ "kind" -> "TupleReference",
+ "args" -> args.map(_.asJava).asJava
+ ).asJava
+
+ case BoundsReference(low, high) => Map(
+ "kind" -> "BoundsReference",
+ "low" -> low.asJava,
+ "hight" -> high.asJava
+ ).asJava
+
+ case NamedReference(title, ref, isByName, isRepeated) => Map(
+ "kind" -> "NamedReference",
+ "title" -> title,
+ "ref" -> ref.asJava,
+ "isByName" -> isByName,
+ "isRepeated" -> isRepeated
+ ).asJava
+
+ case ConstantReference(title) => Map(
+ "kind" -> "ConstantReference",
+ "title" -> title
+ ).asJava
+ }
+ }
+
+ implicit class JavaMaterializableLink(val link: MaterializableLink) extends AnyVal {
+ def asJava: JMap[String, _] = link match {
+ case UnsetLink(title, query) => Map(
+ "kind" -> "UnsetLink",
+ "title" -> title,
+ "query" -> query
+ ).asJava
+
+ case MaterializedLink(title, target) => Map(
+ "kind" -> "MaterializedLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+
+ case NoLink(title, target) => Map(
+ "kind" -> "NoLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+ }
+ }
+
+ implicit class JavaEntity(val ent: Entity) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = parseEntity(ent, extras)
+ }
+
+ private def parseEntity(ent: Entity, extras: Map[String, _]): JMap[String, _] = ent match {
+ case ent: Package => ent.asJava(extras)
+ case ent: CaseClass => ent.asJava(extras)
+ case ent: Class => ent.asJava(extras)
+ case ent: Trait => ent.asJava(extras)
+ case ent: Object => ent.asJava(extras)
+ case ent: Def => ent.asJava
+ case ent: Val => ent.asJava
+ }
+}
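The wrappers above rely on scala.collection.JavaConverters plus a small Option bridge; a minimal, self-contained sketch of that bridge (the demo object is hypothetical, JDK 8+ only):

  import java.util.{ Optional => JOptional }

  object JavaOptionSketch {
    // bridge a Scala Option to java.util.Optional, as the JavaOption wrapper does
    def asJava[A](opt: Option[A]): JOptional[A] =
      opt.map(a => JOptional.of(a)).getOrElse(JOptional.empty[A]())

    def main(args: Array[String]): Unit = {
      println(asJava(Some("doc")))          // prints: Optional[doc]
      println(asJava(None: Option[String])) // prints: Optional.empty
    }
  }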
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/json.scala b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
new file mode 100644
index 000000000..145728f8a
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
@@ -0,0 +1,93 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+/** This object provides a protocol for serializing the package AST to JSON
+ *
+ * TODO: It might be a good idea to represent the JSON better than just
+ * serializing a big string-blob in the future.
+ */
+object json {
+ implicit class JsonString(val str: String) extends AnyVal {
+ def json: String = {
+ val cleanedString = str
+ .replaceAll("\\\\","\\\\\\\\")
+ .replaceAll("\\\"", "\\\\\"")
+ .replaceAll("\n", "\\\\n")
+
+ s""""$cleanedString""""
+ }
+ }
+
+ implicit class JsonBoolean(val boo: Boolean) extends AnyVal {
+ def json: String = if (boo) "true" else "false"
+ }
+
+ implicit class JsonComment(val cmt: Comment) extends AnyVal {
+ def json: String =
+ s"""{"body":${cmt.body.json},"short":${cmt.short.json},"authors":${cmt.authors.map(_.json).mkString("[",",","]")},"see":${cmt.see.map(_.json).mkString("[",",","]")},${cmt.result.map(res => s""""result":${res.json},""").getOrElse("")}"throws":${cmt.throws.map { case (k, v) => s"${k.json}:${v.json}" }.mkString("{",",","}")},"valueParams":${cmt.valueParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"typeParams":${cmt.typeParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},${cmt.version.map(x => s""""version":${x.json},""").getOrElse("")}${cmt.since.map(x => s""""since":${x.json},""").getOrElse("")}"todo":${cmt.todo.map(_.json).mkString("[",",","]")},${cmt.deprecated.map(x => s""""deprecated":${x.json},""").getOrElse("")}"note":${cmt.note.map(_.json).mkString("[",",","]")},"example":${cmt.example.map(_.json).mkString("[",",","]")},${cmt.constructor.map(x => s""""constructor":${x.json},""").getOrElse("")}${cmt.group.map(x => s""""group":${x.json},""").getOrElse("")}"groupDesc":${cmt.groupDesc.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupNames":${cmt.groupNames.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupPrio":${cmt.groupPrio.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"hideImplicitConversions":${cmt.hideImplicitConversions.map(_.json).mkString("[",",","]")}}"""
+ }
+
+ implicit class LinkJson(val link: MaterializableLink) extends AnyVal {
+ def json: String = {
+ val (secondTitle, secondValue, kind) = link match {
+ case ul: UnsetLink => ("query".json, ul.query.json, "UnsetLink".json)
+ case ml: MaterializedLink => ("target".json, ml.target.json, "MaterializedLink".json)
+ case nl: NoLink => ("target".json, nl.target.json, "NoLink".json)
+ }
+ s"""{"title":${link.title.json},$secondTitle:${secondValue},"kind":$kind}"""
+ }
+ }
+
+ implicit class ParamListJson(val plist: ParamList) extends AnyVal {
+ def json: String =
+ s"""{"list":${plist.list.map(_.json).mkString("[",",","]")},"isImplicit":${plist.isImplicit.json}}"""
+ }
+
+ private def refToJson(ref: Reference): String = ref match {
+ case ref: TypeReference =>
+ s"""{"title":${ref.title.json},"tpeLink":${ref.tpeLink.json},"paramLinks":${ref.paramLinks.map(_.json).mkString("[",",","]")},"kind":"TypeReference"}"""
+ case ref: AndTypeReference =>
+ s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"AndTypeReference"}"""
+ case ref: OrTypeReference =>
+ s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"OrTypeReference"}"""
+ case ref: BoundsReference =>
+ s"""{"low":${refToJson(ref.low)},"high":${refToJson(ref.high)},"kind":"BoundsReference"}"""
+ case ref: NamedReference =>
+ s"""{"title":${ref.title.json},"ref":${refToJson(ref.ref)},"isByName":${ref.isByName.json},"isRepeated":${ref.isRepeated.json},"kind":"NamedReference"}"""
+ case ref: ConstantReference =>
+ s"""{"title":${ref.title.json},"kind": "ConstantReference"}"""
+ case ref: FunctionReference =>
+ s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"returnValue":${refToJson(ref.returnValue)},"kind": "FunctionReference"}"""
+ case ref: TupleReference =>
+ s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"kind": "TupleReference"}"""
+ }
+ implicit class ReferenceJson(val ref: Reference) extends AnyVal { def json: String = refToJson(ref) }
+
+ private def entToJson(ent: Entity): String = ent match {
+ case ent: Package =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"package"}"""
+ case ent: Class =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"class"}"""
+ case ent: CaseClass =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"case class"}"""
+ case ent: Trait =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"traitParams":${ent.traitParams.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"trait"}"""
+ case ent: Object =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"object"}"""
+ case ent: Def =>
+ s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"paramLists":${ent.paramLists.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"def"}"""
+ case ent: Val =>
+ s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"val"}"""
+ }
+ implicit class EntityJson(val ent: Entity) extends AnyVal { def json: String = entToJson(ent) }
+ implicit class PackageJson(val pack: Package) extends AnyVal { def json: String = (pack: Entity).json }
+
+ implicit class PackMapJson(val packs: collection.Map[String, Package]) extends AnyVal {
+ def json: String = packs
+ .map { case (k, v) => s"${k.json}: ${v.json}" }
+ .mkString("{",",","}")
+ }
+}
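A note on the protocol above: every model type gets a `.json` extension method through a value class, and strings are escaped (backslashes, quotes, newlines) before being quoted. A minimal usage sketch, assuming only the `json` and `references` objects added in this patch (the sample values are made up for illustration):

    import dotty.tools.dottydoc.model.json._
    import dotty.tools.dottydoc.model.references._

    object JsonSketch {
      def main(args: Array[String]): Unit = {
        // JsonString escapes backslashes, quotes and newlines before quoting
        println("line1\nsays \"hi\"".json)

        // Links carry a discriminating "kind" field so a client can tell
        // unresolved queries from resolved targets
        println(UnsetLink("List", "scala.collection.immutable.List").json)
        println(MaterializedLink("List", "scala/collection/immutable/List.html").json)
      }
    }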
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala
new file mode 100644
index 000000000..fa54163e5
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala
@@ -0,0 +1,98 @@
+package dotty.tools
+package dottydoc
+package model
+
+import dotc.core.Symbols.Symbol
+import dotc.core.Contexts.Context
+import dotc.util.Positions.NoPosition
+
+object parsers {
+ import comment._
+ import BodyParsers._
+ import model.internal._
+ import util.MemberLookup
+ import util.traversing._
+ import util.internal.setters._
+
+ class WikiParser extends CommentCleaner with CommentParser with CommentExpander {
+ private[this] var commentCache: Map[String, (Entity, Map[String, Package]) => Option[Comment]] = Map.empty
+
+ /** Parses the comment and returns the entity's path together with an optional comment
+ *
+ * The idea is to use the returned pair to create a `Future[Seq[(String, Option[Comment])]]`
+ * which can then be awaited near the end of the run, before pickling.
+ */
+ def parseHtml(sym: Symbol, parent: Symbol, entity: Entity, packages: Map[String, Package])(implicit ctx: Context): (String, Option[Comment]) = {
+ val cmt = ctx.docbase.docstring(sym).map { d =>
+ val expanded = expand(sym, parent)
+ parse(entity, packages, clean(expanded), expanded, d.pos).toComment(_.toHtml(entity))
+ }
+
+ (entity.path.mkString("."), cmt)
+ }
+
+
+ def add(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context): Unit = {
+ val commentParser = { (entity: Entity, packs: Map[String, Package]) =>
+ parseHtml(symbol, parent, entity, packs)(ctx)._2
+ }
+
+ /** TODO: this if statement searches for doc comments in parent
+ * definitions if one is not defined for the current symbol.
+ *
+ * It might be a good idea to factor this out of the WikiParser, since
+ * it silently mutates the state of the docbase.
+ */
+ implicit val implCtx = ctx
+ if (!ctx.docbase.docstring(symbol).isDefined) {
+ val parentCmt =
+ symbol.extendedOverriddenSymbols
+ .find(ctx.docbase.docstring(_).isDefined)
+ .flatMap(p => ctx.docbase.docstring(p))
+
+ ctx.docbase.addDocstring(symbol, parentCmt)
+ }
+
+
+ val path = entity.path.mkString(".")
+ if (!commentCache.contains(path) || ctx.docbase.docstring(symbol).isDefined)
+ commentCache = commentCache + (path -> commentParser)
+ }
+
+ def +=(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context) = add(entity, symbol, parent, ctx)
+
+ def size: Int = commentCache.size
+
+ private def parse(entity: Entity, packs: Map[String, Package]): Option[Comment] =
+ commentCache(entity.path.mkString("."))(entity, packs)
+
+ def parse(packs: Map[String, Package]): Unit = {
+ def rootPackages: List[String] = {
+ var currentDepth = Int.MaxValue
+ var packages: List[String] = Nil
+
+ for (key <- packs.keys) {
+ val keyDepth = key.split("\\.").length
+ packages =
+ if (keyDepth < currentDepth) {
+ currentDepth = keyDepth
+ key :: Nil
+ } else if (keyDepth == currentDepth) {
+ key :: packages
+ } else packages
+ }
+
+ packages
+ }
+
+ for (pack <- rootPackages) {
+ mutateEntities(packs(pack)) { e =>
+ val comment = parse(e, packs)
+ setComment(e, to = comment)
+ }
+ }
+ }
+
+ def clear(): Unit = commentCache = Map.empty
+ }
+}
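The `WikiParser` defers the actual parsing: `add` only records a closure keyed by the entity's path, and `parse(packs)` later walks only the root packages (the keys with the smallest nesting depth), relying on `mutateEntities` to recurse into members. A standalone sketch of that root-package selection on plain strings (hypothetical helper, not part of the patch):

    object RootPackagesSketch {
      // Keep only the keys with the smallest nesting depth, e.g.
      // List("scala", "scala.collection") => List("scala")
      def rootPackages(keys: Iterable[String]): List[String] = {
        var currentDepth = Int.MaxValue
        var packages: List[String] = Nil

        for (key <- keys) {
          val keyDepth = key.split("\\.").length
          packages =
            if (keyDepth < currentDepth) { currentDepth = keyDepth; key :: Nil }
            else if (keyDepth == currentDepth) key :: packages
            else packages
        }
        packages
      }

      def main(args: Array[String]): Unit =
        // prints List(scala): only the shallowest package survives
        println(rootPackages(List("scala", "scala.collection", "scala.collection.immutable")))
    }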
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/references.scala b/dottydoc/src/dotty/tools/dottydoc/model/references.scala
new file mode 100644
index 000000000..a28148fa7
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/references.scala
@@ -0,0 +1,20 @@
+package dotty.tools.dottydoc
+package model
+
+object references {
+ sealed trait Reference
+ final case class TypeReference(title: String, tpeLink: MaterializableLink, paramLinks: List[Reference]) extends Reference
+ final case class OrTypeReference(left: Reference, right: Reference) extends Reference
+ final case class AndTypeReference(left: Reference, right: Reference) extends Reference
+ final case class FunctionReference(args: List[Reference], returnValue: Reference) extends Reference
+ final case class TupleReference(args: List[Reference]) extends Reference
+ final case class BoundsReference(low: Reference, high: Reference) extends Reference
+ final case class NamedReference(title: String, ref: Reference, isByName: Boolean = false, isRepeated: Boolean = false) extends Reference
+ final case class ConstantReference(title: String) extends Reference
+
+ /** Use MaterializableLink for entities that need to be picklable */
+ sealed trait MaterializableLink { def title: String }
+ final case class UnsetLink(title: String, query: String) extends MaterializableLink
+ final case class MaterializedLink(title: String, target: String) extends MaterializableLink
+ final case class NoLink(title: String, target: String) extends MaterializableLink
+}
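These case classes form the picklable middle layer between compiler types and rendered links. For illustration, a repeated parameter `xs: Int*` with a union result type `String | Int` would be represented roughly as below (a hand-written sketch; in the real pipeline these values are built by the model factories, not by hand):

    import dotty.tools.dottydoc.model.references._

    object ReferenceSketch {
      // xs: Int*
      val param: Reference =
        NamedReference(
          "xs",
          TypeReference("Int", UnsetLink("Int", "scala.Int"), Nil),
          isRepeated = true)

      // String | Int
      val result: Reference =
        OrTypeReference(
          TypeReference("String", UnsetLink("String", "scala.Predef.String"), Nil),
          TypeReference("Int", UnsetLink("Int", "scala.Int"), Nil))
    }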
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
new file mode 100644
index 000000000..40c775428
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
@@ -0,0 +1,92 @@
+package dotty.tools
+package dottydoc
+package util
+
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Flags
+import dotc.core.Names._
+import dotc.core.Symbols._
+import dotc.core.Types._
+import dotc.core.Names._
+import dotc.util.Positions._
+import model.internal._
+import model.comment._
+import model._
+
+trait MemberLookup {
+ /** Performs a lookup based on the provided (pruned) query string
+ *
+ * Returns a `Tooltip` if unsuccessful, otherwise a `LinkToEntity` or `LinkToExternal`
+ */
+ def lookup(
+ entity: Entity,
+ packages: Map[String, Package],
+ query: String,
+ pos: Position
+ ): LinkTo = {
+ val notFound: LinkTo = Tooltip(query)
+ val querys = query.split("\\.").toList
+
+ /** Looks for the specified entity among `ent`'s members */
+ def localLookup(ent: Entity with Members, searchStr: String): LinkTo =
+ ent
+ .members
+ .collect { case x if x.name == searchStr => x }
+ .sortBy(_.path.last)
+ .headOption
+ .fold(notFound)(e => LinkToEntity(e))
+
+ /** Looks for an entity further down in the structure; if the search list
+ * is Nil, the search stops
+ */
+ def downwardLookup(ent: Entity with Members, search: List[String]): LinkTo =
+ search match {
+ case Nil => notFound
+ case x :: Nil =>
+ localLookup(ent, x)
+ case x :: xs =>
+ ent
+ .members
+ .collect { case e: Entity with Members if e.name == x => e }
+ .headOption
+ .fold(notFound)(e => downwardLookup(e, xs))
+ }
+
+ /** Finds the package with the longest matching name, then does a
+ * downwardLookup in that package
+ */
+ def globalLookup: LinkTo = {
+ def longestMatch(list: List[String]): List[String] =
+ if (list == Nil) Nil
+ else
+ packages
+ .get(list.mkString("."))
+ .map(_ => list)
+ .getOrElse(longestMatch(list.dropRight(1)))
+
+ longestMatch(querys) match {
+ case Nil => notFound
+ case xs => downwardLookup(packages(xs.mkString(".")), querys diff xs)
+ }
+ }
+
+ (querys, entity) match {
+ case (x :: Nil, e: Entity with Members) =>
+ localLookup(e, x)
+ case (x :: _, e: Entity with Members) if x == entity.name =>
+ downwardLookup(e, querys)
+ case (x :: xs, _) =>
+ if (xs.nonEmpty) globalLookup
+ else lookup(entity, packages, "scala." + query, pos)
+ }
+ }
+
+ def makeEntityLink(
+ entity: Entity,
+ packages: Map[String, Package],
+ title: Inline,
+ pos: Position,
+ query: String
+ ): EntityLink = EntityLink(title, lookup(entity, packages, query, pos))
+}
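`lookup` tries three strategies in order: a member lookup on the current entity, a downward walk when the query is rooted at the entity itself, and finally a global search that picks the longest package prefix that actually exists and resolves the remainder inside it. A standalone sketch of that prefix selection on plain strings (hypothetical helper, not part of the patch):

    object LongestPrefixSketch {
      def longestMatch(packages: Set[String], query: List[String]): List[String] =
        if (query.isEmpty) Nil
        else if (packages.contains(query.mkString("."))) query
        else longestMatch(packages, query.dropRight(1))

      def main(args: Array[String]): Unit = {
        val packs = Set("scala", "scala.collection")
        // Resolves the prefix "scala.collection", leaving "List" for the
        // downward lookup inside that package; prints List(scala, collection)
        println(longestMatch(packs, "scala.collection.List".split("\\.").toList))
      }
    }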
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala
new file mode 100644
index 000000000..2084e0a97
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala
@@ -0,0 +1,125 @@
+package dotty.tools.dottydoc
+package util
+
+import dotty.tools.dotc.config.Printers.dottydoc
+
+import _root_.java.io.{
+ File => JFile,
+ PrintWriter => JPrintWriter,
+ FileReader => JFileReader,
+ BufferedInputStream,
+ InputStream,
+ InputStreamReader,
+ FileOutputStream,
+ BufferedOutputStream,
+ FileNotFoundException
+}
+import _root_.java.net.URL
+import _root_.java.util.{ Map => JMap, List => JList }
+import model.{ Entity, Package }
+import model.json._
+import com.github.mustachejava.DefaultMustacheFactory
+import scala.collection.JavaConverters._
+
+class OutputWriter {
+
+ def writeJava(packs: JMap[String, Package], outPath: String, template: URL, resources: JList[URL]): Unit = {
+ write(packs.asScala, outPath, template, resources.asScala)
+ }
+
+ def write(packs: collection.Map[String, Package], outPath: String, template: URL, resources: Traversable[URL]): Unit = {
+ // Write all packages to `outPath`
+ for (pack <- packs.values) {
+ println(s"""Writing '${pack.path.mkString(".")}'""")
+ writeFile(
+ expandTemplate(template, pack, outPath),
+ outPath + pack.path.mkString("/", "/", "/"),
+ "index.html")
+
+ // Write all package children to outPath
+ for {
+ child <- pack.children
+ if child.kind != "package"
+ } {
+ println(s"""Writing '${child.path.mkString(".")}'""")
+ writeFile(
+ expandTemplate(template, child, outPath),
+ outPath + child.path.dropRight(1).mkString("/", "/", "/"),
+ child.path.last + ".html")
+ }
+ }
+
+ // Write full index to outPath
+ val js = "Index = {}; Index.packages = " + packs.json + ";"
+ println("Writing index.js...")
+ writeFile(js, outPath + "/docassets/", "index.js")
+
+ // Write resources to outPath
+ println("Copying CSS/JS resources to destination...")
+ assert(resources.nonEmpty)
+
+ // TODO: splitting the URL by '/' and taking the last segment means that
+ // we don't allow folders among the resources
+ resources.foreach(url => copy(url.openStream, outPath, url.getFile.split("/").last))
+
+ println("Done writing static material, building js-app")
+ }
+
+ def writeJsonJava(index: JMap[String, Package], outputDir: String): Unit =
+ writeJson(index.asScala, outputDir)
+
+ def writeJson(index: collection.Map[String, Package], outputDir: String): Unit =
+ writeFile(index.json, outputDir + "/", "index.json")
+
+ def expandTemplate(template: URL, entity: Entity, outPath: String): String = try {
+ import model.json._
+ import model.java._
+
+ val inputStream = template.openStream
+ val writer = new _root_.java.io.StringWriter()
+ val mf = new DefaultMustacheFactory()
+
+ def toRoot = "../" * (entity.path.length - { if (entity.isInstanceOf[Package]) 0 else 1 })
+
+ val entityWithExtras = entity.asJava(Map(
+ "assets" -> s"${toRoot}docassets",
+ "index" -> s"${toRoot}docassets/index.js",
+ "currentEntity" -> entity.json
+ ))
+
+ mf.compile(new InputStreamReader(inputStream), "template")
+ .execute(writer, entityWithExtras)
+
+ inputStream.close()
+ writer.flush()
+ writer.toString
+ } catch {
+ case fnf: FileNotFoundException =>
+ dottydoc.println(s"""Couldn't find the template: "${template.getFile}"...exiting""")
+ System.exit(1); ""
+ }
+
+ def writeFile(str: String, path: String, file: String): Unit =
+ writeFile(str.map(_.toByte).toArray, path, file)
+
+ def writeFile(bytes: Array[Byte], path: String, file: String): Unit = {
+ def printToFile(f: JFile)(op: JPrintWriter => Unit) = {
+ val bos = new BufferedOutputStream(new FileOutputStream(f))
+ try {
+ Stream.continually(bos.write(bytes))
+ } finally bos.close()
+ }
+
+ new JFile(path).mkdirs()
+ printToFile(new JFile(path + file))(printer => bytes.foreach(printer.print))
+ }
+
+ def copy(src: InputStream, path: String, name: String): Unit = {
+ val reader = new BufferedInputStream(src)
+ try {
+ val bytes = Stream.continually(reader.read).takeWhile(-1 != _).map(_.toByte)
+ writeFile(bytes.toArray, path + "/docassets/", name)
+ src.close()
+ } finally reader.close()
+ }
+}
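Putting the writer together: `write` renders an index.html per package and one page per non-package child through the mustache template, copies the flat resource list into `docassets/`, and emits a JS index, while `writeJson` dumps the same model as `index.json`. A hedged usage sketch (the paths and template location below are placeholders, not part of the patch):

    import java.io.File
    import dotty.tools.dottydoc.model.Package
    import dotty.tools.dottydoc.util.OutputWriter

    object OutputWriterSketch {
      def writeDocs(packs: Map[String, Package]): Unit = {
        val writer   = new OutputWriter
        val template = new File("resources/template.html").toURI.toURL
        val assets   = List(new File("resources/index.css").toURI.toURL)

        writer.write(packs, "build/api/", template, assets) // one page per entity
        writer.writeJson(packs, "build/api")                // machine-readable index.json
      }
    }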
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala
new file mode 100644
index 000000000..a3b60fa44
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala
@@ -0,0 +1,25 @@
+package dotty.tools.dottydoc
+package util
+
+object traversing {
+ import model._
+
+ def mutateEntities(e: Entity)(trans: Entity => Unit): Unit = e match {
+ case e: Entity with Members =>
+ trans(e)
+ e.members.map(mutateEntities(_)(trans))
+ case e: Entity => trans(e)
+ }
+
+ def relativePath(from: Entity, to: Entity) = {
+ val offset = from match {
+ case v: Val if v.implicitlyAddedFrom.isDefined => 3
+ case d: Def if d.implicitlyAddedFrom.isDefined => 3
+ case _: Val | _: Def => 2
+ case _ => 1
+ }
+
+ "../" * (from.path.length - offset) +
+ to.path.mkString("", "/", ".html")
+ }
+}
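`relativePath` works purely on path lengths: vals and defs are rendered on their owner's page (offset 2, or 3 when they were added by an implicit conversion), while classes and packages get a page of their own (offset 1). A worked, standalone version of the arithmetic (hypothetical helper, not part of the patch):

    object RelativePathSketch {
      def relativePath(fromPath: List[String], offset: Int, toPath: List[String]): String =
        "../" * (fromPath.length - offset) + toPath.mkString("", "/", ".html")

      def main(args: Array[String]): Unit =
        // a def at scala.collection.List.map linking to scala.Int:
        // prints "../../scala/Int.html"
        println(relativePath(List("scala", "collection", "List", "map"), 2, List("scala", "Int")))
    }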
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala
new file mode 100644
index 000000000..a5a4dfec6
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala
@@ -0,0 +1,65 @@
+package dotty.tools.dottydoc
+package util
+package internal
+
+object setters {
+ import model._
+ import comment.Comment
+ import model.references._
+ import internal._
+
+ def setComment(ent: Entity, to: Option[Comment]) = ent match {
+ case x: PackageImpl => x.comment = to
+ case x: ClassImpl => x.comment = to
+ case x: CaseClassImpl => x.comment = to
+ case x: TraitImpl => x.comment = to
+ case x: ObjectImpl => x.comment = to
+ case x: DefImpl => x.comment = to
+ case x: ValImpl => x.comment = to
+ }
+
+ def setParent(ent: Entity, to: Entity): Unit = ent match {
+ case e: ClassImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: CaseClassImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: ObjectImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: TraitImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: ValImpl =>
+ e.parent = to
+ case e: DefImpl =>
+ e.parent = to
+ case _ => ()
+ }
+
+ implicit class FlattenedEntity(val ent: Entity) extends AnyVal {
+ /** Returns a flattened copy for entities that have members, otherwise
+ * the identity
+ */
+ def flat: Entity = {
+ def flattenMember: Entity => Entity = {
+ case e: PackageImpl => e.copy(members = Nil)
+ case e: ObjectImpl => e.copy(members = Nil)
+ case e: CaseClassImpl => e.copy(members = Nil)
+ case e: ClassImpl => e.copy(members = Nil)
+ case e: TraitImpl => e.copy(members = Nil)
+ case other => other
+ }
+
+ ent match {
+ case e: PackageImpl => e.copy(members = e.members.map(flattenMember))
+ case e: ObjectImpl => e.copy(members = e.members.map(flattenMember))
+ case e: CaseClassImpl => e.copy(members = e.members.map(flattenMember))
+ case e: ClassImpl => e.copy(members = e.members.map(flattenMember))
+ case e: TraitImpl => e.copy(members = e.members.map(flattenMember))
+ case other => other
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/BaseTest.scala b/dottydoc/test/BaseTest.scala
new file mode 100644
index 000000000..2233d03c8
--- /dev/null
+++ b/dottydoc/test/BaseTest.scala
@@ -0,0 +1,57 @@
+package dotty.tools
+package dottydoc
+
+import dotc.core.Contexts
+import Contexts.{ Context, ContextBase, FreshContext }
+import dotc.util.SourceFile
+import dotc.core.Phases.Phase
+import dotc.typer.FrontEnd
+import dottydoc.core.DocASTPhase
+import model.Package
+
+trait DottyTest {
+ dotty.tools.dotc.parsing.Scanners // initialize keywords
+
+ implicit var ctx: FreshContext = {
+ val base = new ContextBase
+ import base.settings._
+ val ctx = base.initialCtx.fresh
+ ctx.setSetting(ctx.settings.language, List("Scala2"))
+ ctx.setSetting(ctx.settings.YkeepComments, true)
+ base.initialize()(ctx)
+ ctx
+ }
+
+ private def compilerWithChecker(assertion: Map[String, Package] => Unit) = new DocCompiler {
+ private[this] val assertionPhase: List[List[Phase]] =
+ List(new Phase {
+ def phaseName = "assertionPhase"
+ override def run(implicit ctx: Context): Unit =
+ assertion(ctx.docbase.packages[Package].toMap)
+ }) :: Nil
+
+ override def phases =
+ super.phases ++ assertionPhase
+ }
+
+ def checkSource(source: String)(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(source)
+ }
+
+ def checkFiles(sources: List[String])(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(sources)
+ }
+
+ def checkSources(sourceFiles: List[SourceFile])(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compileSources(sourceFiles)
+ }
+}
diff --git a/dottydoc/test/ConstructorTest.scala b/dottydoc/test/ConstructorTest.scala
new file mode 100644
index 000000000..8aa883022
--- /dev/null
+++ b/dottydoc/test/ConstructorTest.scala
@@ -0,0 +1,211 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+import dotc.util.SourceFile
+import model._
+import model.internal._
+import model.references._
+
+class Constructors extends DottyTest {
+ @Test def singleClassConstructor = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors.headOption match {
+ case Some(ParamListImpl(NamedReference("str", _, false, false) :: Nil, false) :: Nil) =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def constructorPlusImplicitArgList = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str1: String)(implicit str2: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str2", _, false, false) :: Nil, true) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def multipleArgumentListsForConstructor = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str1: String)(val str2: String)(implicit str3: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str2", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str3", _, false, false) :: Nil, true) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def multipleConstructors = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val main: String) {
+ | def this(alt1: Int) =
+ | this("String")
+ |
+ | def this(alt2: List[String]) =
+ | this(alt2.head)
+ |}
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ =>
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def multipleConstructorsCC = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |case class Class(val main: String) {
+ | def this(alt1: Int) =
+ | this("String")
+ |
+ | def this(alt2: List[String]) =
+ | this(alt2.head)
+ |}
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: CaseClass, obj: Object), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ =>
+ println(obj.members.map(x => x.kind + " " + x.name))
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def traitParameters = {
+ val source = new SourceFile (
+ "Trait.scala",
+ """
+ |package scala
+ |
+ |trait Trait(val main: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(trt: Trait), _, _) =>
+ trt.traitParams match {
+ case ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil =>
+ case _ =>
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${trt.traitParams.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def testJson = {
+ val actualSource =
+ """
+ |package scala
+ |
+ |trait Trait(val main: String)
+ |class Class(val main: String)
+ |case class CaseClass(main: String)
+ """.stripMargin
+
+ val source = new SourceFile ("JsonTest.scala", actualSource)
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cc: CaseClass, _, cls: Class, trt: Trait), _, _) =>
+ import model.json._
+ lazy val incorrectJson = s"The json generated for:\n$actualSource\n\nIs not correct"
+ assert(cc.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson)
+ assert(cls.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson)
+ assert(trt.json.contains(s""""traitParams":[{"list":[{"title":"main""""), incorrectJson)
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/PackageStructure.scala b/dottydoc/test/PackageStructure.scala
new file mode 100644
index 000000000..00caaa2c0
--- /dev/null
+++ b/dottydoc/test/PackageStructure.scala
@@ -0,0 +1,89 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+import dotc.util.SourceFile
+import model.internal._
+
+class PackageStructure extends DottyTest {
+ @Test def multipleCompilationUnits = {
+ val source1 = new SourceFile(
+ "TraitA.scala",
+ """
+ |package scala
+ |
+ |trait A
+ """.stripMargin
+ )
+
+ val source2 = new SourceFile(
+ "TraitB.scala",
+ """
+ |package scala
+ |
+ |trait B
+ """.stripMargin
+ )
+
+ checkSources(source1 :: source2 :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(tA, tB), _, _) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+ case _ => fail("Incorrect package structure after run")
+ }
+ }
+ }
+
+
+ @Test def multiplePackages = {
+ val source1 = new SourceFile(
+ "TraitA.scala",
+ """
+ |package scala
+ |package collection
+ |
+ |trait A
+ """.stripMargin)
+
+ val source2 = new SourceFile(
+ "TraitB.scala",
+ """
+ |package scala
+ |package collection
+ |
+ |trait B
+ """.stripMargin)
+
+ checkSources(source1 :: source2 :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(
+ "scala",
+ List(PackageImpl("scala.collection", List(tA, tB), _, _)),
+ _, _
+ ) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+
+ case _ =>
+ fail(s"""Incorrect package structure for 'scala' package: ${packages("scala")}""")
+ }
+
+ packages("scala.collection") match {
+ case PackageImpl("scala.collection", List(tA, tB), _, _) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+
+ case _ => fail("Incorrect package structure for 'scala.collection' package")
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/SimpleComments.scala b/dottydoc/test/SimpleComments.scala
new file mode 100644
index 000000000..959eb1745
--- /dev/null
+++ b/dottydoc/test/SimpleComments.scala
@@ -0,0 +1,29 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+class TestSimpleComments extends DottyTest {
+
+ @Test def simpleComment = {
+ val source =
+ """
+ |package scala
+ |
+ |/** Hello, world! */
+ |trait HelloWorld
+ """.stripMargin
+
+ checkSource(source) { packages =>
+ val traitCmt =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.HelloWorld")
+ .flatMap(_.comment.map(_.body))
+ .get
+
+ assertEquals(traitCmt, "<p>Hello, world!</p>")
+ }
+ }
+
+}
diff --git a/dottydoc/test/WhitelistedStdLib.scala b/dottydoc/test/WhitelistedStdLib.scala
new file mode 100644
index 000000000..48697ea7f
--- /dev/null
+++ b/dottydoc/test/WhitelistedStdLib.scala
@@ -0,0 +1,45 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+class TestWhitelistedCollections extends DottyTest {
+ val files: List[String] = {
+ val whitelist = "./test/dotc/scala-collections.whitelist"
+
+ scala.io.Source.fromFile(whitelist, "UTF8")
+ .getLines()
+ .map(_.trim) // allow indentation
+ .filter(!_.startsWith("#")) // allow comment lines prefixed by #
+ .map(_.takeWhile(_ != '#').trim) // allow comments at the end of a line
+ .filter(_.nonEmpty)
+ .filterNot(_.endsWith("package.scala"))
+ .toList
+ }
+
+ @Test def arrayHasDocumentation =
+ checkFiles(files) { packages =>
+ val array =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.Array")
+ .get
+
+ assert(array.comment.get.body.length > 0)
+ }
+
+ @Test def traitImmutableHasDocumentation =
+ checkFiles(files) { packages =>
+ val imm =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.Immutable")
+ .get
+
+ assert(
+ imm.kind == "trait" && imm.name == "Immutable",
+ "Found wrong `Immutable`")
+ assert(
+ imm.comment.map(_.body).get.length > 0,
+ "Imm did not have a comment with length > 0")
+ }
+}
diff --git a/project/Build.scala b/project/Build.scala
index b7822907d..8157147d2 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -73,7 +73,9 @@ object DottyBuild extends Build {
javaSource in Test := baseDirectory.value / "test",
resourceDirectory in Compile := baseDirectory.value / "resources",
unmanagedSourceDirectories in Compile := Seq((scalaSource in Compile).value),
+ unmanagedSourceDirectories in Compile += baseDirectory.value / "dottydoc" / "src",
unmanagedSourceDirectories in Test := Seq((scalaSource in Test).value),
+ unmanagedSourceDirectories in Test += baseDirectory.value / "dottydoc" / "test",
// set system in/out for repl
connectInput in run := true,
@@ -91,6 +93,8 @@ object DottyBuild extends Build {
//http://stackoverflow.com/questions/10472840/how-to-attach-sources-to-sbt-managed-dependencies-in-scala-ide#answer-11683728
com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys.withSource := true,
+ resolvers += Resolver.sonatypeRepo("snapshots"),
+
// get libraries onboard
partestDeps := Seq(scalaCompiler,
"org.scala-lang" % "scala-reflect" % scalaVersion.value,
@@ -98,8 +102,10 @@ object DottyBuild extends Build {
libraryDependencies ++= partestDeps.value,
libraryDependencies ++= Seq("org.scala-lang.modules" %% "scala-xml" % "1.0.1",
"org.scala-lang.modules" %% "scala-partest" % "1.0.11" % "test",
+ "ch.epfl.lamp" % "dottydoc-client" % "0.1-SNAPSHOT",
"com.novocode" % "junit-interface" % "0.11" % "test",
"com.googlecode.java-diff-utils" % "diffutils" % "1.3.0",
+ "com.github.spullara.mustache.java" % "compiler" % "0.9.3",
"com.typesafe.sbt" % "sbt-interface" % sbtVersion.value),
// enable improved incremental compilation algorithm
incOptions := incOptions.value.withNameHashing(true),
@@ -199,7 +205,8 @@ object DottyBuild extends Build {
settings(
addCommandAlias("partest", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test;runPartestRunner") ++
addCommandAlias("partest-only", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++
- addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner")
+ addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++
+ addCommandAlias("dottydoc", ";dottydoc/run")
).
settings(publishing)
@@ -263,7 +270,6 @@ object DottyInjectedPlugin extends AutoPlugin {
).
settings(publishing)
-
/** A sandbox to play with the Scala.js back-end of dotty.
*
* This sandbox is compiled with dotty with support for Scala.js. It can be
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 57bd46581..8ac4d69bf 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -8,3 +8,5 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.8")
+
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.3.5")
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index 20ae02994..cf11c27fa 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -15,6 +15,7 @@ import printing.Printer
import util.{Stats, Attachment, DotClass}
import annotation.unchecked.uncheckedVariance
import language.implicitConversions
+import parsing.Scanners.Comment
object Trees {
@@ -30,7 +31,7 @@ object Trees {
@sharable var ntrees = 0
/** Attachment key for trees with documentation strings attached */
- val DocComment = new Attachment.Key[String]
+ val DocComment = new Attachment.Key[Comment]
/** Modifiers and annotations for definitions
* @param flags The set flags
@@ -324,7 +325,7 @@ object Trees {
private[ast] def rawMods: Modifiers[T] =
if (myMods == null) genericEmptyModifiers else myMods
- def rawComment: Option[String] = getAttachment(DocComment)
+ def rawComment: Option[Comment] = getAttachment(DocComment)
def withMods(mods: Modifiers[Untyped]): ThisTree[Untyped] = {
val tree = if (myMods == null || (myMods == mods)) this else clone.asInstanceOf[MemberDef[Untyped]]
@@ -334,7 +335,7 @@ object Trees {
def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(Modifiers(flags))
- def setComment(comment: Option[String]): ThisTree[Untyped] = {
+ def setComment(comment: Option[Comment]): ThisTree[Untyped] = {
comment.map(putAttachment(DocComment, _))
asInstanceOf[ThisTree[Untyped]]
}
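With this change the attachment carries the whole `Comment` (text plus position) instead of a bare `String`; callers that only want the raw text now go through `chrs`, as the updated tests further down do. A small sketch of that access (hypothetical helper, assuming an untyped `MemberDef`):

    import dotty.tools.dotc.ast.untpd

    object RawCommentSketch {
      // Same accessor pattern the updated DottyDocParsingTests use below
      def docText(tree: untpd.MemberDef): Option[String] =
        tree.rawComment.map(_.chrs)
    }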
diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala
index d0c4cc02c..5d5903584 100644
--- a/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -196,4 +196,68 @@ class ScalaSettings extends Settings.SettingGroup {
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
+
+ /** Doc specific settings */
+ val template = OptionSetting[String](
+ "-template",
+ "A mustache template for rendering each top-level entity in the API"
+ )
+
+ val resources = OptionSetting[String](
+ "-resources",
+ "A directory containing static resources needed for the API documentation"
+ )
+
+ val DocTitle = StringSetting (
+ "-Ydoc-title",
+ "title",
+ "The overall name of the Scaladoc site",
+ ""
+ )
+
+ val DocVersion = StringSetting (
+ "-Ydoc-version",
+ "version",
+ "An optional version number, to be appended to the title",
+ ""
+ )
+
+ val DocOutput = StringSetting (
+ "-Ydoc-output",
+ "outdir",
+ "The output directory in which to place the documentation",
+ "."
+ )
+
+ val DocFooter = StringSetting (
+ "-Ydoc-footer",
+ "footer",
+ "A footer on every Scaladoc page, by default the EPFL/Lightbend copyright notice. Can be overridden with a custom footer.",
+ ""
+ )
+
+ val DocUncompilable = StringSetting (
+ "-Ydoc-no-compile",
+ "path",
+ "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
+ ""
+ )
+
+ //def DocUncompilableFiles(implicit ctx: Context) = DocUncompilable.value match {
+ // case "" => Nil
+ // case path => io.Directory(path).deepFiles.filter(_ hasExtension "scala").toList
+ //}
+
+ val DocExternalDoc = MultiStringSetting (
+ "-Ydoc-external-doc",
+ "external-doc",
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+ )
+
+ val DocAuthor = BooleanSetting("-Ydoc-author", "Include authors.", true)
+
+ val DocGroups = BooleanSetting (
+ "-Ydoc:groups",
+ "Group similar functions together (based on the @group annotation)"
+ )
}
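These settings follow the existing `ScalaSettings` conventions, so they can be read and set like any other flag. A hedged sketch of configuring them programmatically on a fresh context, mirroring how the dottydoc test base sets `YkeepComments` (the values below are placeholders):

    import dotty.tools.dotc.core.Contexts.ContextBase

    object DocSettingsSketch {
      def main(args: Array[String]): Unit = {
        val base = new ContextBase
        val ctx  = base.initialCtx.fresh
        ctx.setSetting(ctx.settings.DocTitle, "Dotty API")
        ctx.setSetting(ctx.settings.DocOutput, "build/api")
        println(ctx.settings.DocOutput.value(ctx)) // prints "build/api"
      }
    }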
diff --git a/src/dotty/tools/dotc/config/Settings.scala b/src/dotty/tools/dotc/config/Settings.scala
index f30cedaa0..73df4e1ec 100644
--- a/src/dotty/tools/dotc/config/Settings.scala
+++ b/src/dotty/tools/dotc/config/Settings.scala
@@ -235,8 +235,8 @@ object Settings {
setting
}
- def BooleanSetting(name: String, descr: String): Setting[Boolean] =
- publish(Setting(name, descr, false))
+ def BooleanSetting(name: String, descr: String, initialValue: Boolean = false): Setting[Boolean] =
+ publish(Setting(name, descr, initialValue))
def StringSetting(name: String, helpArg: String, descr: String, default: String): Setting[String] =
publish(Setting(name, descr, default, helpArg))
diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala
index 262443314..cd76fe88b 100644
--- a/src/dotty/tools/dotc/core/Contexts.scala
+++ b/src/dotty/tools/dotc/core/Contexts.scala
@@ -29,6 +29,7 @@ import printing._
import config.{Settings, ScalaSettings, Platform, JavaPlatform, SJSPlatform}
import language.implicitConversions
import DenotTransformers.DenotTransformer
+import parsing.Scanners.Comment
import xsbti.AnalysisCallback
object Contexts {
@@ -531,6 +532,9 @@ object Contexts {
/** The symbol loaders */
val loaders = new SymbolLoaders
+ /** Documentation base */
+ val docbase = new DocBase
+
/** The platform, initialized by `initPlatform()`. */
private var _platform: Platform = _
@@ -567,14 +571,32 @@ object Contexts {
def squashed(p: Phase): Phase = {
allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase)
}
+ }
- val _docstrings: mutable.Map[Symbol, String] =
+ class DocBase {
+ private[this] val _docstrings: mutable.Map[Symbol, Comment] =
mutable.Map.empty
- def docstring(sym: Symbol): Option[String] = _docstrings.get(sym)
+ def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym)
- def addDocstring(sym: Symbol, doc: Option[String]): Unit =
+ def addDocstring(sym: Symbol, doc: Option[Comment]): Unit =
doc.map(d => _docstrings += (sym -> d))
+
+ /*
+ * Dottydoc places instances of `Package` in this map, but we do not want
+ * the compiler to depend on `dottydoc`; as such, this is defined as a
+ * map of `String -> AnyRef`
+ */
+ private[this] val _packages: mutable.Map[String, AnyRef] = mutable.Map.empty
+ def packages[A]: mutable.Map[String, A] = _packages.asInstanceOf[mutable.Map[String, A]]
+
+ /** Should perhaps factor this out into caches that get flushed */
+ private var _defs: Map[Symbol, Set[Symbol]] = Map.empty
+ def defs(sym: Symbol): Set[Symbol] = _defs.get(sym).getOrElse(Set.empty)
+
+ def addDef(s: Symbol, d: Symbol): Unit = _defs = (_defs + {
+ s -> _defs.get(s).map(xs => xs + d).getOrElse(Set(d))
+ })
}
/** The essential mutable state of a context base, collected into a common class */
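On the dottydoc side the untyped `packages` map is read back with the type the tool expects, via the cast hidden inside `packages[A]`. A sketch of that access, following the same pattern the new `DottyTest` base uses below:

    import dotty.tools.dotc.core.Contexts.Context
    import dotty.tools.dottydoc.model.Package

    object DocBaseSketch {
      // Recovers the doc tool's own Package type from the AnyRef-typed map
      def packagesOf(implicit ctx: Context): Map[String, Package] =
        ctx.docbase.packages[Package].toMap
    }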
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 600707cbf..378aa6ed7 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -22,6 +22,7 @@ import ScriptParsers._
import scala.annotation.{tailrec, switch}
import util.DotClass
import rewrite.Rewrites.patch
+import Scanners.Comment
object Parsers {
@@ -1778,13 +1779,13 @@ object Parsers {
*/
def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
case VAL =>
- patDefOrDcl(posMods(start, mods), in.getDocString(start))
+ patDefOrDcl(posMods(start, mods), in.getDocComment(start))
case VAR =>
- patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocString(start))
+ patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocComment(start))
case DEF =>
- defDefOrDcl(posMods(start, mods), in.getDocString(start))
+ defDefOrDcl(posMods(start, mods), in.getDocComment(start))
case TYPE =>
- typeDefOrDcl(posMods(start, mods), in.getDocString(start))
+ typeDefOrDcl(posMods(start, mods), in.getDocComment(start))
case _ =>
tmplDef(start, mods)
}
@@ -1794,7 +1795,7 @@ object Parsers {
* ValDcl ::= Id {`,' Id} `:' Type
* VarDcl ::= Id {`,' Id} `:' Type
*/
- def patDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = {
+ def patDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = {
val lhs = commaSeparated(pattern2)
val tpt = typedOpt()
val rhs =
@@ -1820,7 +1821,7 @@ object Parsers {
* DefDcl ::= DefSig `:' Type
* DefSig ::= id [DefTypeParamClause] ParamClauses
*/
- def defDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = atPos(tokenRange) {
+ def defDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(tokenRange) {
def scala2ProcedureSyntax(resultTypeStr: String) = {
val toInsert =
if (in.token == LBRACE) s"$resultTypeStr ="
@@ -1895,7 +1896,7 @@ object Parsers {
/** TypeDef ::= type Id [TypeParamClause] `=' Type
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
*/
- def typeDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = {
+ def typeDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = {
newLinesOpt()
atPos(tokenRange) {
val name = ident().toTypeName
@@ -1917,7 +1918,7 @@ object Parsers {
* | [`case'] `object' ObjectDef
*/
def tmplDef(start: Int, mods: Modifiers): Tree = {
- val docstring = in.getDocString(start)
+ val docstring = in.getDocComment(start)
in.token match {
case TRAIT =>
classDef(posMods(start, addFlag(mods, Trait)), docstring)
@@ -1938,7 +1939,7 @@ object Parsers {
/** ClassDef ::= Id [ClsTypeParamClause]
* [ConstrMods] ClsParamClauses TemplateOpt
*/
- def classDef(mods: Modifiers, docstring: Option[String]): TypeDef = atPos(tokenRange) {
+ def classDef(mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(tokenRange) {
val name = ident().toTypeName
val constr = atPos(in.offset) {
val tparams = typeParamClauseOpt(ParamOwner.Class)
@@ -1965,7 +1966,7 @@ object Parsers {
/** ObjectDef ::= Id TemplateOpt
*/
- def objectDef(mods: Modifiers, docstring: Option[String] = None): ModuleDef = {
+ def objectDef(mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = {
val name = ident()
val template = templateOpt(emptyConstructor())
@@ -2190,7 +2191,7 @@ object Parsers {
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
- val docstring = in.getDocString(start)
+ val docstring = in.getDocComment(start)
ts += objectDef(atPos(start, in.skipToken()) { Modifiers(Package) }, docstring)
if (in.token != EOF) {
acceptStatSep()
diff --git a/src/dotty/tools/dotc/parsing/Scanners.scala b/src/dotty/tools/dotc/parsing/Scanners.scala
index 1355ea386..b46ab6348 100644
--- a/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -193,7 +193,7 @@ object Scanners {
}
/** Returns the closest docstring preceding the position supplied */
- def getDocString(pos: Int): Option[String] = {
+ def getDocComment(pos: Int): Option[Comment] = {
def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match {
case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs)
case Nil => c
@@ -203,7 +203,7 @@ object Scanners {
case (list @ (x :: xs)) :: _ => {
val c = closest(x, xs)
docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail
- Some(c.chrs)
+ Some(c)
}
case _ => None
}
diff --git a/src/dotty/tools/dotc/typer/FrontEnd.scala b/src/dotty/tools/dotc/typer/FrontEnd.scala
index c5c6aec3c..e193b126a 100644
--- a/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ b/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -57,7 +57,7 @@ class FrontEnd extends Phase {
case _ => NoSymbol
}
- private def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
unit.isJava || firstTopLevelDef(unit.tpdTree :: Nil).isPrimitiveValueClass
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index 698f7e9a9..b8e75664c 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -426,7 +426,7 @@ class Namer { typer: Typer =>
}
def setDocstring(sym: Symbol, tree: Tree)(implicit ctx: Context) = tree match {
- case t: MemberDef => ctx.base.addDocstring(sym, t.rawComment)
+ case t: MemberDef => ctx.docbase.addDocstring(sym, t.rawComment)
case _ => ()
}
diff --git a/test/test/DottyDocParsingTests.scala b/test/test/DottyDocParsingTests.scala
index b09d048da..ed89c6114 100644
--- a/test/test/DottyDocParsingTests.scala
+++ b/test/test/DottyDocParsingTests.scala
@@ -14,7 +14,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- assert(c.rawComment == None, "Should not have a comment, mainly used for exhaustive tests")
+ assert(c.rawComment.map(_.chrs) == None, "Should not have a comment, mainly used for exhaustive tests")
}
}
@@ -29,7 +29,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
- checkDocString(t.rawComment, "/** Hello world! */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Hello world! */")
}
}
@@ -44,7 +44,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
- checkDocString(t.rawComment, "/** Hello /* multiple open */ world! */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Hello /* multiple open */ world! */")
}
}
@Test def multipleClassesInPackage = {
@@ -62,8 +62,8 @@ class DottyDocParsingTests extends DottyDocTest {
checkCompile("frontend", source) { (_, ctx) =>
ctx.compilationUnit.untpdTree match {
case PackageDef(_, Seq(c1 @ TypeDef(_,_), c2 @ TypeDef(_,_))) => {
- checkDocString(c1.rawComment, "/** Class1 docstring */")
- checkDocString(c2.rawComment, "/** Class2 docstring */")
+ checkDocString(c1.rawComment.map(_.chrs), "/** Class1 docstring */")
+ checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */")
}
}
}
@@ -77,7 +77,7 @@ class DottyDocParsingTests extends DottyDocTest {
""".stripMargin
checkFrontend(source) {
- case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Class without package */")
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Class without package */")
}
}
@@ -85,7 +85,7 @@ class DottyDocParsingTests extends DottyDocTest {
val source = "/** Trait docstring */\ntrait Trait"
checkFrontend(source) {
- case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Trait docstring */")
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */")
}
}
@@ -101,8 +101,8 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t1 @ TypeDef(_,_), t2 @ TypeDef(_,_))) => {
- checkDocString(t1.rawComment, "/** Trait1 docstring */")
- checkDocString(t2.rawComment, "/** Trait2 docstring */")
+ checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */")
+ checkDocString(t2.rawComment.map(_.chrs), "/** Trait2 docstring */")
}
}
}
@@ -127,10 +127,10 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t1 @ TypeDef(_,_), c2 @ TypeDef(_,_), cc3 @ TypeDef(_,_), _, ac4 @ TypeDef(_,_))) => {
- checkDocString(t1.rawComment, "/** Trait1 docstring */")
- checkDocString(c2.rawComment, "/** Class2 docstring */")
- checkDocString(cc3.rawComment, "/** CaseClass3 docstring */")
- checkDocString(ac4.rawComment, "/** AbstractClass4 docstring */")
+ checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */")
+ checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */")
+ checkDocString(cc3.rawComment.map(_.chrs), "/** CaseClass3 docstring */")
+ checkDocString(ac4.rawComment.map(_.chrs), "/** AbstractClass4 docstring */")
}
}
}
@@ -147,9 +147,9 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(outer @ TypeDef(_, tpl @ Template(_,_,_,_)))) => {
- checkDocString(outer.rawComment, "/** Outer docstring */")
+ checkDocString(outer.rawComment.map(_.chrs), "/** Outer docstring */")
tpl.body match {
- case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -171,10 +171,10 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(o1 @ TypeDef(_, tpl @ Template(_,_,_,_)), o2 @ TypeDef(_,_))) => {
- checkDocString(o1.rawComment, "/** Outer1 docstring */")
- checkDocString(o2.rawComment, "/** Outer2 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Outer1 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Outer2 docstring */")
tpl.body match {
- case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -196,9 +196,9 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case p @ PackageDef(_, Seq(o1: MemberDef[Untyped], o2: MemberDef[Untyped])) => {
assertEquals(o1.name.toString, "Object1")
- checkDocString(o1.rawComment, "/** Object1 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */")
assertEquals(o2.name.toString, "Object2")
- checkDocString(o2.rawComment, "/** Object2 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */")
}
}
}
@@ -223,12 +223,12 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case p @ PackageDef(_, Seq(o1: ModuleDef, o2: ModuleDef)) => {
assert(o1.name.toString == "Object1")
- checkDocString(o1.rawComment, "/** Object1 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */")
assert(o2.name.toString == "Object2")
- checkDocString(o2.rawComment, "/** Object2 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */")
o2.impl.body match {
- case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -257,14 +257,14 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(p: ModuleDef)) => {
- checkDocString(p.rawComment, "/** Package object docstring */")
+ checkDocString(p.rawComment.map(_.chrs), "/** Package object docstring */")
p.impl.body match {
case (b: TypeDef) :: (t: TypeDef) :: (o: ModuleDef) :: Nil => {
- checkDocString(b.rawComment, "/** Boo docstring */")
- checkDocString(t.rawComment, "/** Trait docstring */")
- checkDocString(o.rawComment, "/** InnerObject docstring */")
- checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment, "/** InnerClass docstring */")
+ checkDocString(b.rawComment.map(_.chrs), "/** Boo docstring */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */")
+ checkDocString(o.rawComment.map(_.chrs), "/** InnerObject docstring */")
+ checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment.map(_.chrs), "/** InnerClass docstring */")
}
case _ => assert(false, "Incorrect structure inside package object")
}
@@ -284,7 +284,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Real comment */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Real comment */")
}
}
@@ -303,7 +303,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Real comment */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Real comment */")
}
}
@@ -329,9 +329,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** val1 */")
- checkDocString(v2.rawComment, "/** val2 */")
- checkDocString(v3.rawComment, "/** val3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** val1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** val2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** val3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -361,9 +361,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** var1 */")
- checkDocString(v2.rawComment, "/** var2 */")
- checkDocString(v3.rawComment, "/** var3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** var1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** var2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** var3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -393,9 +393,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** def1 */")
- checkDocString(v2.rawComment, "/** def2 */")
- checkDocString(v3.rawComment, "/** def3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** def1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** def2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** def3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -425,9 +425,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** type1 */")
- checkDocString(v2.rawComment, "/** type2 */")
- checkDocString(v3.rawComment, "/** type3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** type1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** type2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** type3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -451,7 +451,7 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) =>
o.impl.body match {
case (foo: MemberDef) :: Nil =>
- expectNoDocString(foo.rawComment)
+ expectNoDocString(foo.rawComment.map(_.chrs))
case _ => assert(false, "Incorrect structure inside object")
}
}
@@ -468,7 +468,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case p @ PackageDef(_, Seq(_, c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Class1 */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Class1 */")
}
}
@@ -483,7 +483,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case p @ PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Class1 */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Class1 */")
}
}
} /* End class */