diff options
205 files changed, 8357 insertions, 102 deletions
diff --git a/.gitignore b/.gitignore index c9f12e986..17eba0468 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.DS_Store *.class *.log +*.swp *~ *.swp @@ -37,6 +38,7 @@ scala-scala # Ignore output files but keep the directory out/ +build/ !out/.keep # Ignore build-file diff --git a/bridge/src/main/scala/xsbt/ScaladocInterface.scala b/bridge/src/main/scala/xsbt/ScaladocInterface.scala new file mode 100644 index 000000000..3ad9c7941 --- /dev/null +++ b/bridge/src/main/scala/xsbt/ScaladocInterface.scala @@ -0,0 +1,72 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package xsbt + +import xsbti.Logger +import dotty.tools.dottydoc.api.scala.Dottydoc +import java.net.URL + +class ScaladocInterface { + def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = + (new DottydocRunner(args, log, delegate)).run() +} + +class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) extends Dottydoc { + def run(): Unit = getOutputFolder(args).map { outputFolder => + val index = createIndex(args) + val resources = getResources(args) + val template = getTemplate(resources) + + template.fold(writeJson(index, outputFolder)) { tpl => + buildDocs(outputFolder, tpl, resources, index) + } + } getOrElse { + delegate.log( + NoPosition, + "No output folder set for API documentation (\"-d\" parameter should be passed to the documentation tool)", + xsbti.Severity.Error + ) + } + + private[this] val NoPosition = new xsbti.Position { + val line = xsbti.Maybe.nothing[Integer] + val lineContent = "" + val offset = xsbti.Maybe.nothing[Integer] + val sourcePath = xsbti.Maybe.nothing[String] + val sourceFile = xsbti.Maybe.nothing[java.io.File] + val pointer = xsbti.Maybe.nothing[Integer] + val pointerSpace = xsbti.Maybe.nothing[String] + } + + private def getStringSetting(name: String): Option[String] = + args find (_.startsWith(name)) map (_.drop(name.length)) + + private def getOutputFolder(args: Array[String]): Option[String] = + 
args sliding(2) find { case Array(x, _) => x == "-d" } map (_.tail.head.trim) + + private def getTemplate(resources: List[URL]): Option[URL] = + resources.find(_.getFile.endsWith("template.html")) + + private def getResources(args: Array[String]): List[URL] = { + val cp = args sliding (2) find { case Array(x, _) => x == "-classpath" } map (_.tail.head.trim) getOrElse "" + + cp.split(":").find(_.endsWith("dottydoc-client.jar")).map { resourceJar => + import java.util.jar.JarFile + val jarEntries = (new JarFile(resourceJar)).entries + var entries: List[URL] = Nil + + while (jarEntries.hasMoreElements) { + val entry = jarEntries.nextElement() + + if (!entry.isDirectory()) { + val path = s"jar:file:$resourceJar!/${entry.getName}" + val url = new URL(path) + entries = url :: entries + } + } + + entries + } getOrElse (Nil) + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala new file mode 100644 index 000000000..2d4c7abcf --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala @@ -0,0 +1,79 @@ +package dotty.tools +package dottydoc + +import core._ +import core.transform._ +import dotc.config.CompilerCommand +import dotc.config.Printers.dottydoc +import dotc.core.Contexts._ +import dotc.core.Phases.Phase +import dotc.typer.FrontEnd +import dotc.{ CompilationUnit, Compiler, Driver, Run } +import io.PlainFile +import model.Package +import model.json._ + +import _root_.java.util.{ Map => JMap } + +/** Custom Compiler with phases for the documentation tool + * + * The idea here is to structure `dottydoc` around the new infrastructure. As + * such, dottydoc will itself be a compiler. It will, however, produce a format + * that can be used by other tools or web-browsers. + * + * Example: + * 1. Use the existing FrontEnd to typecheck the code being fed to dottydoc + * 2. Create an AST that is serializable + * 3. 
Serialize to JS object + */ +class DocCompiler extends Compiler { + override def phases: List[List[Phase]] = List( + List(new DocFrontEnd), + List(new DocImplicitsPhase), + List(new DocASTPhase), + List(DocMiniTransformations(new LinkReturnTypes, + new LinkParamListTypes, + new LinkImplicitlyAddedTypes, + new LinkSuperTypes, + new AlternateConstructors, + new SortMembers)) + ) +} + +class DocFrontEnd extends FrontEnd { + override protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) = + unit.isJava +} + +abstract class DocDriver extends Driver { + import scala.collection.JavaConverters._ + + override def setup(args: Array[String], rootCtx: Context): (List[String], Context) = { + val ctx = rootCtx.fresh + val summary = CompilerCommand.distill(args)(ctx) + + ctx.setSettings(summary.sstate) + ctx.setSetting(ctx.settings.YkeepComments, true) + + val fileNames = CompilerCommand.checkUsage(summary, sourcesRequired)(ctx) + (fileNames, ctx) + } + + override def newCompiler(implicit ctx: Context): Compiler = new DocCompiler + + def compiledDocs(args: Array[String]): collection.Map[String, Package] = { + val (fileNames, ctx) = setup(args, initCtx.fresh) + doCompile(newCompiler(ctx), fileNames)(ctx) + + ctx.docbase.packages[Package] + } + + def compiledDocsJava(args: Array[String]): JMap[String, Package] = + compiledDocs(args).asJava + + def indexToJson(index: collection.Map[String, Package]): String = + index.json + + def indexToJsonJava(index: JMap[String, Package]): String = + indexToJson(index.asScala) +} diff --git a/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java new file mode 100644 index 000000000..1bdfe0488 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java @@ -0,0 +1,63 @@ +package dotty.tools.dottydoc.api.java; + +import dotty.tools.dottydoc.DocDriver; +import dotty.tools.dottydoc.model.Package; +import dotty.tools.dottydoc.util.OutputWriter; 
+import java.util.Map; +import java.util.List; +import java.net.URL; + +/** + * The Dottydoc API is fairly simple. The tool creates an index by calling: + * "createIndex" with the same argument list as you would the compiler - e.g: + * + * {{{ + * String[] array = { + * "-language:Scala2" + * }; + * + * Map<String, Package> index = createIndex(array); + * }}} + * + * Once the index has been generated, the tool can also build a documentation + * API given a Mustache template and a flat resources structure (i.e. absolute + * paths to each resource, which will be put in the same directory). + * + * {{{ + * buildDocs("path/to/output/dir", templateURL, resources, index); + * }}} + * + * The tool can also generate JSON from the created index using "toJson(index)" + * or directly using "createJsonIndex" + */ +public class Dottydoc extends DocDriver { + + /** Creates index from compiler arguments */ + public Map<String, Package> createIndex(String[] args) { + return compiledDocsJava(args); + } + + /** Creates JSON from compiler arguments */ + public String createJsonIndex(String[] args) { + return indexToJsonJava(createIndex(args)); + } + + public String toJson(Map<String, Package> index) { + return indexToJsonJava(index); + } + + /** Creates a documentation from the given parameters */ + public void buildDocs( + String outputDir, + URL template, + List<URL> resources, + Map<String, Package> index + ) { + new OutputWriter().writeJava(index, outputDir, template, resources); + } + + /** Writes JSON to an output directory as "index.json" */ + public void writeJson(Map<String, Package> index, String outputDir) { + new OutputWriter().writeJsonJava(index, outputDir); + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala new file mode 100644 index 000000000..15db81a95 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala @@ -0,0 +1,49 @@ +package 
dotty.tools.dottydoc.api.scala + +import dotty.tools.dottydoc.DocDriver +import dotty.tools.dottydoc.model.Package +import dotty.tools.dottydoc.util.OutputWriter + +import scala.collection.Map +import java.net.URL + +/** + * The Dottydoc API is fairly simple. The tool creates an index by calling: + * "createIndex" with the same argument list as you would the compiler - e.g: + * + * {{{ + * val array: Array[String] = Array( + * "-language:Scala2" + * ) + * + * val index: Map[String, Package] = createIndex(array) + * }}} + * + * Once the index has been generated, the tool can also build a documentation + * API given a Mustache template and a flat resources structure (i.e. absolute + * paths to each resource, which will be put in the same directory). + * + * {{{ + * buildDocs("path/to/output/dir", templateURL, resources, index) + * }}} + * + * The tool can also generate JSON from the created index using "indexToJson" + * or directly using "createJsonIndex" + */ +trait Dottydoc extends DocDriver { + /** Creates index from compiler arguments */ + def createIndex(args: Array[String]): Map[String, Package] = + compiledDocs(args) + + /** Creates JSON from compiler arguments */ + def createJsonIndex(args: Array[String]): String = + indexToJson(compiledDocs(args)) + + /** Creates a documentation from the given parameters */ + def buildDocs(outDir: String, template: URL, resources: List[URL], index: Map[String, Package]) = + new OutputWriter().write(index, outDir, template, resources) + + /** Writes JSON to an output directory as "index.json" */ + def writeJson(index: Map[String, Package], outputDir: String) = + new OutputWriter().writeJson(index, outputDir) +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala new file mode 100644 index 000000000..53c96fc87 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala @@ -0,0 +1,34 @@ 
+package dotty.tools +package dottydoc +package core + +import dotc.core.Contexts.Context + +import transform.DocMiniPhase +import model._ +import model.internal._ + +/** This DocMiniPhase adds the alternate constructors, currently defined as + * methods with the name `<init>`, to the Entity#constructors list + */ +class AlternateConstructors extends DocMiniPhase { + def partitionMembers(ent: Entity with Constructors with Members): (List[List[ParamList]], List[Entity]) = { + val (constructors, members) = ent.members.partition(x => x.name == "<init>") + + val paramLists: List[List[ParamList]] = constructors.collect { + case df: Def => df.paramLists + } + + (ent.constructors ++ paramLists, members) + } + + override def transformClass(implicit ctx: Context) = { case cls: ClassImpl => + val (constructors, members) = partitionMembers(cls) + cls.copy(members = members, constructors = constructors) + } + + override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl => + val (constructors, members) = partitionMembers(cc) + cc.copy(members = members, constructors = constructors) + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala new file mode 100644 index 000000000..7744752ce --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala @@ -0,0 +1,191 @@ +package dotty.tools +package dottydoc +package core + +/** Dotty and Dottydoc imports */ +import dotc.ast.Trees._ +import dotc.CompilationUnit +import dotc.config.Printers.dottydoc +import dotc.core.Contexts.Context +import dotc.core.Phases.Phase +import dotc.core.Symbols.{ Symbol, NoSymbol } + +class DocASTPhase extends Phase { + import model._ + import model.factories._ + import model.internal._ + import model.parsers.WikiParser + import model.comment.Comment + import dotty.tools.dotc.core.Flags + import dotty.tools.dotc.ast.tpd._ + import util.traversing._ + import util.internal.setters._ + + def 
phaseName = "docphase" + + private[this] val commentParser = new WikiParser + + /** Saves the commentParser function for later evaluation, for when the AST has been filled */ + def track(symbol: Symbol, ctx: Context, parent: Symbol = NoSymbol)(op: => Entity) = { + val entity = op + + if (entity != NonEntity) + commentParser += (entity, symbol, parent, ctx) + + entity + } + + /** Build documentation hierarchy from existing tree */ + def collect(tree: Tree, prev: List[String] = Nil)(implicit ctx: Context): Entity = track(tree.symbol, ctx) { + val implicitConversions = ctx.docbase.defs(tree.symbol) + + def collectList(xs: List[Tree], ps: List[String]): List[Entity] = + xs.map(collect(_, ps)).filter(_ != NonEntity) + + def collectEntityMembers(xs: List[Tree], ps: List[String]) = + collectList(xs, ps).asInstanceOf[List[Entity with Members]] + + def collectMembers(tree: Tree, ps: List[String] = prev)(implicit ctx: Context): List[Entity] = { + val defs = (tree match { + case t: Template => collectList(t.body, ps) + case _ => Nil + }) + + defs ++ implicitConversions.flatMap(membersFromSymbol) + } + + def membersFromSymbol(sym: Symbol): List[Entity] = { + val defs = sym.info.bounds.hi.membersBasedOnFlags(Flags.Method, Flags.Synthetic | Flags.Private) + .filterNot(_.symbol.owner.name.show == "Any") + .map { meth => + track(meth.symbol, ctx, tree.symbol) { + DefImpl( + meth.symbol.name.show, + Nil, + path(meth.symbol), + returnType(meth.info), + typeParams(meth.symbol), + paramLists(meth.info), + implicitlyAddedFrom = Some(returnType(meth.symbol.owner.info)) + ) + } + }.toList + + val vals = sym.info.fields.filterNot(_.symbol.is(Flags.Private | Flags.Synthetic)).map { value => + track(value.symbol, ctx, tree.symbol) { + ValImpl( + value.symbol.name.show, + Nil, path(value.symbol), + returnType(value.info), + implicitlyAddedFrom = Some(returnType(value.symbol.owner.info)) + ) + } + } + + defs ++ vals + } + + + tree match { + /** package */ + case pd @ PackageDef(pid, st) => + 
val newPath = prev :+ pid.name.toString + addEntity(PackageImpl(newPath.mkString("."), collectEntityMembers(st, newPath), newPath)) + + /** trait */ + case t @ TypeDef(n, rhs) if t.symbol.is(Flags.Trait) => + val name = n.decode.toString + val newPath = prev :+ name + //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well + TraitImpl(name, collectMembers(rhs), flags(t), newPath, typeParams(t.symbol), traitParameters(t.symbol), superTypes(t)) + + /** objects, on the format "Object$" so drop the last letter */ + case o @ TypeDef(n, rhs) if o.symbol.is(Flags.Module) => + val name = n.decode.toString.dropRight(1) + //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well + ObjectImpl(name, collectMembers(rhs, prev :+ name), flags(o), prev :+ (name + "$"), superTypes(o)) + + /** class / case class */ + case c @ TypeDef(n, rhs) if c.symbol.isClass => + val name = n.decode.toString + val newPath = prev :+ name + //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well + (name, collectMembers(rhs), flags(c), newPath, typeParams(c.symbol), constructors(c.symbol), superTypes(c), None) match { + case x if c.symbol.is(Flags.CaseClass) => CaseClassImpl.tupled(x) + case x => ClassImpl.tupled(x) + } + + /** def */ + case d: DefDef => + DefImpl(d.name.decode.toString, flags(d), path(d.symbol), returnType(d.tpt.tpe), typeParams(d.symbol), paramLists(d.symbol.info)) + + /** val */ + case v: ValDef if !v.symbol.is(Flags.ModuleVal) => + ValImpl(v.name.decode.toString, flags(v), path(v.symbol), returnType(v.tpt.tpe)) + + case x => { + //dottydoc.println(s"Found unwanted entity: $x (${x.pos},\n${x.show}") + NonEntity + } + } + } + + var packages: Map[String, Package] = Map.empty + + def addEntity(p: Package): Package = { + def mergedChildren(x1s: List[Entity], x2s: List[Entity]): List[Entity] = { + val (packs1, others1) = 
x1s.partition(_.kind == "package") + val (packs2, others2) = x2s.partition(_.kind == "package") + + val others = others1 ::: others2 + val packs = (packs1 ::: packs2).groupBy(_.path).map(_._2.head) + + (others ++ packs).sortBy(_.name) + } + + val path = p.path.mkString(".") + val newPack = packages.get(path).map { + case ex: PackageImpl => + if (!ex.comment.isDefined) ex.comment = p.comment + ex.members = mergedChildren(ex.members, p.members) + ex + }.getOrElse(p) + + packages = packages + (path -> newPack) + newPack + } + + private[this] var totalRuns = 0 + private[this] var currentRun = 0 + + override def run(implicit ctx: Context): Unit = { + currentRun += 1 + println(s"Compiling ($currentRun/$totalRuns): ${ctx.compilationUnit.source.file.name}") + collect(ctx.compilationUnit.tpdTree) // Will put packages in `packages` var + } + + override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = { + // (1) Create package structure for all `units`, this will give us a complete structure + totalRuns = units.length + val compUnits = super.runOn(units) + + // (2) Set parents of entities, needed for linking + for { + parent <- packages.values + child <- parent.children + } setParent(child, to = parent) + + // (3) Create documentation template from docstrings, with internal links + println("Generating documentation, this might take a while...") + commentParser.parse(packages) + + // (4) Clear caches + commentParser.clear() + + // (5) Update Doc AST in ctx.base + for (kv <- packages) ctx.docbase.packages += kv + + // Return super's result + compUnits + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala new file mode 100644 index 000000000..f322d7a5a --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala @@ -0,0 +1,27 @@ +package dotty.tools +package dottydoc +package core + +import 
dotty.tools.dotc.transform.TreeTransforms.{ MiniPhaseTransform, TransformerInfo } +import dotty.tools.dotc.core.Flags +import dotc.core.Contexts.Context + +class DocImplicitsPhase extends MiniPhaseTransform { thisTransformer => + import dotty.tools.dotc.ast.tpd._ + + def phaseName = "addImplicitsPhase" + + override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = { + if ( + tree.symbol.is(Flags.Implicit) && // has to have an implicit flag + tree.symbol.owner.isStaticOwner && // owner has to be static (e.g. top-level `object`) + tree.vparamss.length > 0 && + tree.vparamss(0).length == 1 // should only take one arg, since it has to be a transformation + ) { + val convertee = tree.vparamss(0)(0).symbol.info.widenDealias.finalResultType.typeSymbol // the pimped type (i.e. `class`) + ctx.docbase.addDef(convertee, tree.symbol.info.widenDealias.finalResultType.typeSymbol) + } + + tree + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala new file mode 100644 index 000000000..2690ac7b7 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala @@ -0,0 +1,199 @@ +package dotty.tools +package dottydoc +package core + +import dotc.CompilationUnit +import dotc.core.Contexts.Context +import dotc.core.Phases.Phase +import model._ +import model.internal._ + +object transform { + /** + * The idea behind DocMiniTransformations is to fuse transformations to the + * doc AST, much like `MiniPhaseTransform` in dotty core - but in a much more + * simple implementation + * + * Usage + * ----- + * + * Create a `DocMiniPhase` which overrides the relevant method: + * + * {{{ + * override def transformDef(implicit ctx: Context) = { + * case x if shouldTransform(x) => x.copy(newValue = ...) 
+ * } + * }}} + * + * On each node in the AST, the appropriate method in `DocMiniPhase` will be + * called in the order that they are supplied in + * `DocMiniphaseTransformations`. + * + * There won't be a match-error as `transformX` is composed with an + * `identity` function. + * + * The transformations in `DocMiniTransformations` will apply transformations + * to all nodes - this means that you do _not_ need to transform children in + * `transformPackage`, because `transformX` will be called for the relevant + * children. If you want to add children to `Package` you need to do that in + * `transformPackage`, these additions will be persisted. + */ + abstract class DocMiniTransformations(transformations: List[DocMiniPhase]) extends Phase { + + override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = { + for { + rootName <- rootPackages + pack = ctx.docbase.packages[Package](rootName) + transformed = performPackageTransform(pack) + } yield ctx.docbase.packages(rootName) = transformed + super.runOn(units) + } + + private def rootPackages(implicit ctx: Context): List[String] = { + var currentDepth = Int.MaxValue + var packs = List.empty[String] + + for (key <- ctx.docbase.packages.keys) { + val keyDepth = key.split("\\.").length + packs = + if (keyDepth < currentDepth) { + currentDepth = keyDepth + key :: Nil + } else if (keyDepth == currentDepth) { + key :: packs + } else packs + } + packs + } + + private def performPackageTransform(pack: Package)(implicit ctx: Context): Package = { + def transformEntity[E <: Entity](e: E, f: DocMiniPhase => E => E)(createNew: E => E): E = { + val transformedEntity = transformations.foldLeft(e) { case (oldE, transf) => + f(transf)(oldE) + } + createNew(transformedEntity) + } + + def traverse(ent: Entity): Entity = ent match { + case p: Package => transformEntity(p, _.packageTransformation) { p => + val newPackage = PackageImpl( + p.name, + p.members.map(traverse), + p.path, + p.comment + ) + 
+ // Update reference in context to newPackage + ctx.docbase.packages[Package] += (newPackage.path.mkString(".") -> newPackage) + + newPackage + } + case c: Class => transformEntity(c, _.classTransformation) { cls => + ClassImpl( + cls.name, + cls.members.map(traverse), + cls.modifiers, + cls.path, + cls.typeParams, + cls.constructors, + cls.superTypes, + cls.comment + ) + } + case cc: CaseClass => transformEntity(cc, _.caseClassTransformation) { cc => + CaseClassImpl( + cc.name, + cc.members.map(traverse), + cc.modifiers, + cc.path, + cc.typeParams, + cc.constructors, + cc.superTypes, + cc.comment + ) + } + case trt: Trait => transformEntity(trt, _.traitTransformation) { trt => + TraitImpl( + trt.name, + trt.members.map(traverse), + trt.modifiers, + trt.path, + trt.typeParams, + trt.traitParams, + trt.superTypes, + trt.comment + ) + } + case obj: Object => transformEntity(obj, _.objectTransformation) { obj => + ObjectImpl( + obj.name, + obj.members.map(traverse), + obj.modifiers, + obj.path, + obj.superTypes, + obj.comment + ) + } + case df: Def => transformEntity(df, _.defTransformation) { df => + DefImpl( + df.name, + df.modifiers, + df.path, + df.returnValue, + df.typeParams, + df.paramLists, + df.comment, + df.implicitlyAddedFrom + ) + } + case vl: Val => transformEntity(vl, _.valTransformation) { vl => + ValImpl( + vl.name, + vl.modifiers, + vl.path, + vl.returnValue, + vl.comment, + vl.implicitlyAddedFrom + ) + } + } + + traverse(pack).asInstanceOf[Package] + } + + override def run(implicit ctx: Context): Unit = () + } + + object DocMiniTransformations { + private var previousPhase = 0 + def apply(transformations: DocMiniPhase*) = + new DocMiniTransformations(transformations.toList) { + val packages = Map.empty[String, Package] + + def phaseName = s"MiniTransformation${ previousPhase += 1 }" + } + } + + trait DocMiniPhase { phase => + private def identity[E]: PartialFunction[E, E] = { + case id => id + } + + // Partial functions instead???? 
+ def transformPackage(implicit ctx: Context): PartialFunction[Package, Package] = identity + def transformClass(implicit ctx: Context): PartialFunction[Class, Class] = identity + def transformCaseClass(implicit ctx: Context): PartialFunction[CaseClass, CaseClass] = identity + def transformTrait(implicit ctx: Context): PartialFunction[Trait, Trait] = identity + def transformObject(implicit ctx: Context): PartialFunction[Object, Object] = identity + def transformDef(implicit ctx: Context): PartialFunction[Def, Def] = identity + def transformVal(implicit ctx: Context): PartialFunction[Val, Val] = identity + + private[transform] def packageTransformation(p: Package)(implicit ctx: Context) = (transformPackage orElse identity)(p) + private[transform] def classTransformation(cls: Class)(implicit ctx: Context) = (transformClass orElse identity)(cls) + private[transform] def caseClassTransformation(cc: CaseClass)(implicit ctx: Context) = (transformCaseClass orElse identity)(cc) + private[transform] def traitTransformation(trt: Trait)(implicit ctx: Context) = (transformTrait orElse identity)(trt) + private[transform] def objectTransformation(obj: Object)(implicit ctx: Context) = (transformObject orElse identity)(obj) + private[transform] def defTransformation(df: Def)(implicit ctx: Context) = (transformDef orElse identity)(df) + private[transform] def valTransformation(vl: Val)(implicit ctx: Context) = (transformVal orElse identity)(vl) + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala new file mode 100644 index 000000000..c8de532bb --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala @@ -0,0 +1,32 @@ +package dotty.tools +package dottydoc +package core + +import dotc.core.Contexts.Context + +import transform.DocMiniPhase +import model._ +import model.internal._ + +/** This DocMiniPhase sorts the members of all classes, traits, objects and packages */ 
+class SortMembers extends DocMiniPhase { + override def transformPackage(implicit ctx: Context) = { case p: PackageImpl => + p.copy(members = p.members.sortBy(_.name)) + } + + override def transformClass(implicit ctx: Context) = { case c: ClassImpl => + c.copy(members = c.members.sortBy(_.name)) + } + + override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl => + cc.copy(members = cc.members.sortBy(_.name)) + } + + override def transformTrait(implicit ctx: Context) = { case t: TraitImpl => + t.copy(members = t.members.sortBy(_.name)) + } + + override def transformObject(implicit ctx: Context) = { case o: ObjectImpl => + o.copy(members = o.members.sortBy(_.name)) + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala new file mode 100644 index 000000000..ae07effa9 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala @@ -0,0 +1,115 @@ +package dotty.tools +package dottydoc +package core + +import dotc.core.Contexts.Context +import dotc.util.Positions.NoPosition + +import transform.DocMiniPhase +import model._ +import model.internal._ +import model.comment._ +import model.references._ +import BodyParsers._ +import util.MemberLookup +import util.traversing._ +import util.internal.setters._ + +class LinkReturnTypes extends DocMiniPhase with TypeLinker { + override def transformDef(implicit ctx: Context) = { case df: DefImpl => + val returnValue = linkReference(df, df.returnValue, ctx.docbase.packages[Package].toMap) + df.copy(returnValue = returnValue) + } + + override def transformVal(implicit ctx: Context) = { case vl: ValImpl => + val returnValue = linkReference(vl, vl.returnValue, ctx.docbase.packages[Package].toMap) + vl.copy(returnValue = returnValue) + } +} + +class LinkParamListTypes extends DocMiniPhase with TypeLinker { + override def transformDef(implicit ctx: Context) = { case df: DefImpl => + val newParamLists 
= for { + ParamListImpl(list, isImplicit) <- df.paramLists + newList = list.map(linkReference(df, _, ctx.docbase.packages[Package].toMap)) + } yield ParamListImpl(newList.asInstanceOf[List[NamedReference]], isImplicit) + + df.copy(paramLists = newParamLists) + } +} + +class LinkSuperTypes extends DocMiniPhase with TypeLinker { + def linkSuperTypes(ent: Entity with SuperTypes)(implicit ctx: Context): List[MaterializableLink] = + ent.superTypes.collect { + case UnsetLink(title, query) => + val packages = ctx.docbase.packages[Package].toMap + val entityLink = makeEntityLink(ent, packages, Text(title), NoPosition, query).link + handleEntityLink(title, entityLink, ent) + } + + override def transformClass(implicit ctx: Context) = { case cls: ClassImpl => + cls.copy(superTypes = linkSuperTypes(cls)) + } + + override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl => + cc.copy(superTypes = linkSuperTypes(cc)) + } + + override def transformTrait(implicit ctx: Context) = { case trt: TraitImpl => + trt.copy(superTypes = linkSuperTypes(trt)) + } + + override def transformObject(implicit ctx: Context) = { case obj: ObjectImpl => + obj.copy(superTypes = linkSuperTypes(obj)) + } +} + +class LinkImplicitlyAddedTypes extends DocMiniPhase with TypeLinker { + override def transformDef(implicit ctx: Context) = { + case df: DefImpl if df.implicitlyAddedFrom.isDefined => + val implicitlyAddedFrom = linkReference(df, df.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap) + df.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom)) + } + + override def transformVal(implicit ctx: Context) = { + case vl: ValImpl if vl.implicitlyAddedFrom.isDefined => + val implicitlyAddedFrom = linkReference(vl, vl.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap) + vl.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom)) + } +} + +trait TypeLinker extends MemberLookup { + def handleEntityLink(title: String, lt: LinkTo, ent: Entity): MaterializableLink = lt match 
{ + case Tooltip(str) => NoLink(title, str) + case LinkToExternal(_, url) => MaterializedLink(title, url) + case LinkToEntity(target) => MaterializedLink(title, util.traversing.relativePath(ent, target)) + } + + def linkReference(ent: Entity, ref: Reference, packs: Map[String, Package]): Reference = { + def linkRef(ref: Reference) = linkReference(ent, ref, packs) + + ref match { + case ref @ TypeReference(_, UnsetLink(t, query), tps) => + val inlineToHtml = InlineToHtml(ent) + val title = t + + val target = handleEntityLink(title, makeEntityLink(ent, packs, Text(t), NoPosition, query).link, ent) + val tpTargets = tps.map(linkReference(ent, _, packs)) + ref.copy(tpeLink = target, paramLinks = tpTargets) + case ref @ OrTypeReference(left, right) => + ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs)) + case ref @ AndTypeReference(left, right) => + ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs)) + case ref @ NamedReference(_, rf, _, _) => + ref.copy(ref = linkRef(rf)) + case ref @ FunctionReference(args, rv) => + ref.copy(args = args.map(linkReference(ent, _, packs)), returnValue = linkReference(ent, rv, packs)) + case ref @ TupleReference(args) => + ref.copy(args = args.map(linkRef)) + case ref @ BoundsReference(low, high) => + ref.copy(low = linkRef(low), high = linkRef(high)) + case _ => + ref + } + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala new file mode 100644 index 000000000..29fe48de3 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala @@ -0,0 +1,94 @@ +package dotty.tools.dottydoc +package model +package comment + +import scala.collection._ + +/** A body of text. A comment has a single body, which is composed of + * at least one block. Inside every body is exactly one summary (see + * [[scala.tools.nsc.doc.model.comment.Summary]]). 
*/ +final case class Body(blocks: Seq[Block]) { + + /** The summary text of the comment body. */ + lazy val summary: Option[Body] = { + def summaryInBlock(block: Block): Seq[Inline] = block match { + case Title(text, _) => summaryInInline(text) + case Paragraph(text) => summaryInInline(text) + case UnorderedList(items) => items flatMap summaryInBlock + case OrderedList(items, _) => items flatMap summaryInBlock + case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock + case _ => Nil + } + def summaryInInline(text: Inline): Seq[Inline] = text match { + case Summary(text) => List(text) + case Chain(items) => items flatMap summaryInInline + case Italic(text) => summaryInInline(text) + case Bold(text) => summaryInInline(text) + case Underline(text) => summaryInInline(text) + case Superscript(text) => summaryInInline(text) + case Subscript(text) => summaryInInline(text) + case Link(_, title) => summaryInInline(title) + case _ => Nil + } + (blocks flatMap summaryInBlock).toList match { + case Nil => None + case inline :: Nil => Some(Body(Seq(Paragraph(inline)))) + case inlines => Some(Body(Seq(Paragraph(Chain(inlines))))) + } + } +} + +/** A block-level element of text, such as a paragraph or code block. */ +sealed abstract class Block + +final case class Title(text: Inline, level: Int) extends Block +final case class Paragraph(text: Inline) extends Block +final case class Code(data: String) extends Block +final case class UnorderedList(items: Seq[Block]) extends Block +final case class OrderedList(items: Seq[Block], style: String) extends Block +final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block +final case class HorizontalRule() extends Block + +/** An section of text inside a block, possibly with formatting. 
*/ +sealed abstract class Inline + +final case class Chain(items: Seq[Inline]) extends Inline +final case class Italic(text: Inline) extends Inline +final case class Bold(text: Inline) extends Inline +final case class Underline(text: Inline) extends Inline +final case class Superscript(text: Inline) extends Inline +final case class Subscript(text: Inline) extends Inline +final case class Link(target: String, title: Inline) extends Inline +final case class Monospace(text: Inline) extends Inline +final case class Text(text: String) extends Inline +abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo } +object EntityLink { + def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo } + def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link)) +} +final case class HtmlTag(data: String) extends Inline { + private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r + private val (isEnd, tagName) = data match { + case Pattern(s1, s2) => + (! s1.isEmpty, Some(s2.toLowerCase)) + case _ => + (false, None) + } + + def canClose(open: HtmlTag) = { + isEnd && tagName == open.tagName + } + + private val TagsNotToClose = Set("br", "img") + def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") } +} + +/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. 
*/ +final case class Summary(text: Inline) extends Inline + +sealed trait LinkTo +final case class LinkToExternal(name: String, url: String) extends LinkTo +final case class Tooltip(name: String) extends LinkTo + +/** Linking directly to entities is not picklable because of cyclic references */ +final case class LinkToEntity(entity: Entity) extends LinkTo diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala new file mode 100644 index 000000000..8c1fa8d49 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala @@ -0,0 +1,82 @@ +package dotty.tools.dottydoc +package model +package comment + +object BodyParsers { + + implicit class BodyToHtml(val body: Body) extends AnyVal { + def toHtml(origin: Entity): String = { + val inlineToHtml = InlineToHtml(origin) + + def bodyToHtml(body: Body): String = + (body.blocks map blockToHtml).mkString + + def blockToHtml(block: Block): String = block match { + case Title(in, 1) => s"<h1>${inlineToHtml(in)}</h1>" + case Title(in, 2) => s"<h2>${inlineToHtml(in)}</h2>" + case Title(in, 3) => s"<h3>${inlineToHtml(in)}</h3>" + case Title(in, _) => s"<h4>${inlineToHtml(in)}</h4>" + case Paragraph(in) => s"<p>${inlineToHtml(in)}</p>" + case Code(data) => s"""<pre><code class="scala">$data</code></pre>""" + case UnorderedList(items) => + s"<ul>${listItemsToHtml(items)}</ul>" + case OrderedList(items, listStyle) => + s"<ol class=${listStyle}>${listItemsToHtml(items)}</ol>" + case DefinitionList(items) => + s"<dl>${items map { case (t, d) => s"<dt>${inlineToHtml(t)}</dt><dd>${blockToHtml(d)}</dd>" } }</dl>" + case HorizontalRule() => + "<hr/>" + } + + def listItemsToHtml(items: Seq[Block]) = + items.foldLeft(""){ (list, item) => + item match { + case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI + list + s"<li>${blockToHtml(item)}</li>" + case Paragraph(inline) => + list + 
s"<li>${inlineToHtml(inline)}</li>" // LIs are blocks, no need to use Ps + case block => + list + s"<li>${blockToHtml(block)}</li>" + } + } + + bodyToHtml(body) + } + } + + case class InlineToHtml(origin: Entity) { + def apply(inline: Inline) = toHtml(inline) + + def relativePath(target: Entity) = + util.traversing.relativePath(origin, target) + + def toHtml(inline: Inline): String = inline match { + case Chain(items) => (items map toHtml).mkString + case Italic(in) => s"<i>${toHtml(in)}</i>" + case Bold(in) => s"<b>${toHtml(in)}</b>" + case Underline(in) => s"<u>${toHtml(in)}</u>" + case Superscript(in) => s"<sup>${toHtml(in)}</sup>" + case Subscript(in) => s"<sub>${toHtml(in) }</sub>" + case Link(raw, title) => s"""<a href=$raw target="_blank">${toHtml(title)}</a>""" + case Monospace(in) => s"<code>${toHtml(in)}</code>" + case Text(text) => text + case Summary(in) => toHtml(in) + case HtmlTag(tag) => tag + case EntityLink(target, link) => enityLinkToHtml(target, link) + } + + def enityLinkToHtml(target: Inline, link: LinkTo) = link match { + case Tooltip(_) => toHtml(target) + case LinkToExternal(n, url) => s"""<a href="$url">$n</a>""" + case LinkToEntity(t: Entity) => t match { + // Entity is a package member + case e: Entity with Members => + s"""<a href="${relativePath(t)}">${toHtml(target)}</a>""" + // Entity is a Val / Def + case x => x.parent.fold(toHtml(target)) { xpar => + s"""<a href="${relativePath(xpar)}#${x.name}">${toHtml(target)}</a>""" + } + } + } + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala new file mode 100644 index 000000000..c4f6ccf5d --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala @@ -0,0 +1,28 @@ +package dotty.tools +package dottydoc +package model +package comment + +case class Comment ( + body: String, + short: String, + authors: List[String], + see: List[String], + result: Option[String], + throws: 
Map[String, String], + valueParams: Map[String, String], + typeParams: Map[String, String], + version: Option[String], + since: Option[String], + todo: List[String], + deprecated: Option[String], + note: List[String], + example: List[String], + constructor: Option[String], + group: Option[String], + groupDesc: Map[String, String], + groupNames: Map[String, String], + groupPrio: Map[String, String], + /** List of conversions to hide - containing e.g: `scala.Predef.FloatArrayOps` */ + hideImplicitConversions: List[String] +) diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala new file mode 100644 index 000000000..27b0ff977 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala @@ -0,0 +1,25 @@ +package dotty.tools.dottydoc +package model +package comment + +trait CommentCleaner { + import Regexes._ + + def clean(comment: String): List[String] = { + def cleanLine(line: String): String = { + // Remove trailing whitespaces + TrailingWhitespace.replaceAllIn(line, "") match { + case CleanCommentLine(ctl) => ctl + case tl => tl + } + } + val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/") + val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) }) + val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) }) + val markedTagComment = + SafeTags.replaceAllIn(javadoclessComment, { mtch => + _root_.java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker) + }) + markedTagComment.lines.toList map (cleanLine(_)) + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala new file mode 100644 index 000000000..32a0d8128 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala @@ -0,0 +1,344 @@ +/* + * Port of 
DocComment.scala from nsc
 * @author Martin Odersky
 * @author Felix Mulder
 */

package dotty.tools
package dottydoc
package model
package comment

import dotc.config.Printers.dottydoc
import dotc.core.Contexts.Context
import dotc.core.Symbols._
import dotc.core.Flags
import dotc.util.Positions._

import scala.collection.mutable

/** Expands `$variable` definitions and `@inheritdoc` sections in raw doc
 *  comment strings, mirroring nsc's `DocComment` behavior.
 */
trait CommentExpander {
  import CommentUtils._

  /** Expands the doc comment of `sym`, using `site` for variable lookup
   *  (falling back to `sym` itself when `site` is `NoSymbol`).
   */
  def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
    val parent = if (site != NoSymbol) site else sym
    defineVariables(parent)
    expandedDocComment(sym, parent)
  }

  /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
   *
   *  @param sym The symbol for which doc comment is returned
   *  @param site The class for which doc comments are generated
   *  @throws ExpansionLimitExceeded when more than 10 successive expansions
   *          of the same string are done, which is
   *          interpreted as a recursive variable definition.
   */
  def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
    // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
    val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
                 else site
    expandVariables(cookedDocComment(sym, docStr), sym, parent)
  }

  /** Strips `@define` and `@usecase` sections off the end of a raw comment,
   *  re-closing it with a fresh `*&#47;` when anything was cut.
   */
  private def template(raw: String): String = {
    val sections = tagIndex(raw)

    val defines = sections filter { startsWithTag(raw, _, "@define") }
    val usecases = sections filter { startsWithTag(raw, _, "@usecase") }

    val end = startTag(raw, (defines ::: usecases).sortBy(_._1))

    if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
  }

  /** Extracts the raw text of every `@define` section in `raw`. */
  def defines(raw: String): List[String] = {
    val sections = tagIndex(raw)
    val defines = sections filter { startsWithTag(raw, _, "@define") }
    val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
    // NOTE(review): `end` below is computed but shadowed by the pattern
    // variable in the map — presumably kept for parity with the nsc source.
    val end = startTag(raw, (defines ::: usecases).sortBy(_._1))

    defines map { case (start, end) => raw.substring(start, end) }
  }

  // Normalize the Javadoc spelling `{@inheritDoc}` to the Scaladoc `@inheritdoc`
  private def replaceInheritDocToInheritdoc(docStr: String): String =
    docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")

  /** The cooked doc comment of an overridden symbol */
  protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
    allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")

  // Memoization cache for cookedDocComment, keyed by symbol
  private val cookedDocComments = mutable.HashMap[Symbol, String]()

  /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
   *  missing sections of an inherited doc comment.
   *  If a symbol does not have a doc comment but some overridden version of it does,
   *  the doc comment of the overridden version is copied instead.
   */
  def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
    var ownComment =
      if (docStr.length == 0) ctx.docbase.docstring(sym).map(c => template(c.chrs)).getOrElse("")
      else template(docStr)
    ownComment = replaceInheritDocToInheritdoc(ownComment)

    superComment(sym) match {
      case None =>
        // SI-8210 - The warning would be false negative when this symbol is a setter
        if (ownComment.indexOf("@inheritdoc") != -1 && !sym.isSetter)
          dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
        ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
      case Some(sc) =>
        if (ownComment == "") sc
        else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
    }
  })

  // A section is "movable" if it may be merged from the parent comment
  private def isMovable(str: String, sec: (Int, Int)): Boolean =
    startsWithTag(str, sec, "@param") ||
    startsWithTag(str, sec, "@tparam") ||
    startsWithTag(str, sec, "@return")

  /** Merges the sections of the parent comment `src` into the child comment
   *  `dst`, filling in sections missing from the child.
   */
  def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
    val srcSections = tagIndex(src)
    val dstSections = tagIndex(dst)
    val srcParams = paramDocs(src, "@param", srcSections)
    val dstParams = paramDocs(dst, "@param", dstSections)
    val srcTParams = paramDocs(src, "@tparam", srcSections)
    val dstTParams = paramDocs(dst, "@tparam", dstSections)
    val out = new StringBuilder
    var copied = 0
    var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))

    if (copyFirstPara) {
      val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
        (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
      out append src.substring(0, eop).trim
      copied = 3
      tocopy = 3
    }

    def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
      case Some((start, end)) =>
        if (end > tocopy) tocopy = end
      case None =>
        srcSec match {
          case Some((start1, end1)) => {
            out append dst.substring(copied, tocopy).trim
            out append "\n"
            copied = tocopy
            out append src.substring(start1, end1).trim
          }
          case None =>
        }
    }

    //TODO: enable this once you know how to get `sym.paramss`
    /*
    for (params <- sym.paramss; param <- params)
      mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
    for (tparam <- sym.typeParams)
      mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)

    mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
    mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
    */

    if (out.length == 0) dst
    else {
      out append dst.substring(copied)
      out.toString
    }
  }

  /**
   * Expand inheritdoc tags
   *  - for the main comment we transform the inheritdoc into the super variable,
   *  and the variable expansion can expand it further
   *  - for the param, tparam and throws sections we must replace comments on the spot
   *
   * This is done separately, for two reasons:
   * 1. It takes longer to run compared to merge
   * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
   * impacts performance
   *
   * @param parent The source (or parent) comment
   * @param child The child (overriding member or usecase) comment
   * @param sym The child symbol
   * @return The child comment with the inheritdoc sections expanded
   */
  def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
    if (child.indexOf("@inheritdoc") == -1)
      child
    else {
      val parentSections = tagIndex(parent)
      val childSections = tagIndex(child)
      val parentTagMap = sectionTagMap(parent, parentSections)
      val parentNamedParams = Map() +
        ("@param" -> paramDocs(parent, "@param", parentSections)) +
        ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
        ("@throws" -> paramDocs(parent, "@throws", parentSections))

      val out = new StringBuilder

      // Substitute the parent section text wherever the child says @inheritdoc
      def replaceInheritdoc(childSection: String, parentSection: => String) =
        if (childSection.indexOf("@inheritdoc") == -1)
          childSection
        else
          childSection.replaceAllLiterally("@inheritdoc", parentSection)

      def getParentSection(section: (Int, Int)): String = {

        def getSectionHeader = extractSectionTag(child, section) match {
          case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
          case other => other
        }

        def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
          paramMap.get(param) match {
            case Some(section) =>
              // Cleanup the section tag and parameter
              val sectionTextBounds = extractSectionText(parent, section)
              cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
            case None =>
              dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
                " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
              "<invalid inheritdoc annotation>"
          }

        // The first 7 characters are enough to disambiguate the named tags
        child.substring(section._1, section._1 + 7) match {
          case param@("@param "|"@tparam"|"@throws") =>
            sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
          case _ =>
            sectionString(extractSectionTag(child, section), parentTagMap)
        }
      }

      def mainComment(str: String, sections: List[(Int, Int)]): String =
        if (str.trim.length > 3)
          str.trim.substring(3, startTag(str, sections))
        else
          ""

      // Append main comment
      out.append("/**")
      out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))

      // Append sections
      for (section <- childSections)
        out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))

      out.append("*/")
      out.toString
    }

  /** Repeatedly expands `$variable` occurrences in `initialStr` (up to
   *  `expandLimit` passes) using definitions visible from `site`.
   */
  protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
    val expandLimit = 10

    def expandInternal(str: String, depth: Int): String = {
      if (depth >= expandLimit)
        throw new ExpansionLimitExceeded(str)

      val out = new StringBuilder
      var copied, idx = 0
      // excluding variables written as \$foo so we can use them when
      // necessary to document things like Symbol#decode
      def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
      while (idx < str.length) {
        if ((str charAt idx) != '$' || isEscaped)
          idx += 1
        else {
          val vstart = idx
          idx = skipVariable(str, idx + 1)
          def replaceWith(repl: String) {
            out append str.substring(copied, vstart)
            out append repl
            copied = idx
          }
          variableName(str.substring(vstart + 1, idx)) match {
            case "super" =>
              // $super expands to the parent's main comment plus its non-movable sections
              superComment(sym) foreach { sc =>
                val superSections = tagIndex(sc)
                replaceWith(sc.substring(3, startTag(sc, superSections)))
                for (sec @ (start, end) <- superSections)
                  if (!isMovable(sc, sec)) out append sc.substring(start, end)
              }
            case "" => idx += 1
            case vname =>
              lookupVariable(vname, site) match {
                case Some(replacement) => replaceWith(replacement)
                case None =>
                  dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
              }
          }
        }
      }
      if (out.length == 0) str
      else {
        out append str.substring(copied)
        expandInternal(out.toString, depth + 1)
      }
    }

    // We suppressed expanding \$ throughout the recursion, and now we
    // need to replace \$ with $ so it looks as intended.
    expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
  }

  /** Records the `@define` variable definitions found in the doc comment of `sym`. */
  def defineVariables(sym: Symbol)(implicit ctx: Context) = {
    // Trims horizontal whitespace only (newlines are preserved by the char class)
    val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r

    val raw = ctx.docbase.docstring(sym).map(_.chrs).getOrElse("")
    defs(sym) ++= defines(raw).map {
      str => {
        val start = skipWhitespace(str, "@define".length)
        val (key, value) = str.splitAt(skipVariable(str, start))
        key.drop(start) -> value
      }
    } map {
      case (key, Trim(value)) =>
        variableName(key) -> value.replaceAll("\\s+\\*+$", "")
    }
  }

  /** Maps symbols to the variable -> replacement maps that are defined
   *  in their doc comments
   */
  private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()

  /** Lookup definition of variable.
   *
   *  @param vble The variable for which a definition is searched
   *  @param site The class for which doc comments are generated
   */
  def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
    case NoSymbol => None
    case _ =>
      val searchList =
        if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
        else site.info.baseClasses

      searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
        // A definition that is itself a `$ref` aliases another variable
        case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
        case res => res orElse lookupVariable(vble, site.owner)
      }
  }

  /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
   *  If a symbol does not have a doc comment but some overridden version of it does,
   *  the position of the doc comment of the overridden version is returned instead.
   */
  def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
    ctx.docbase.docstring(sym).map(_.pos).getOrElse(NoPosition)

  /** A version which doesn't consider self types, as a temporary measure:
   *  an infinite loop has broken out between superComment and cookedDocComment
   *  since r23926.
 */
  private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
    if (!sym.owner.isClass) Nil
    else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
    //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
  }

  /** Thrown when variable expansion exceeds the recursion limit. */
  class ExpansionLimitExceeded(str: String) extends Exception
}

package dotty.tools.dottydoc
package model
package comment

import dotty.tools.dotc.util.Positions._
import dotty.tools.dotc.core.Symbols._
import dotty.tools.dotc.core.Contexts.Context
import scala.collection.mutable
import dotty.tools.dotc.config.Printers.dottydoc
import scala.util.matching.Regex

/** Parses cleaned doc-comment text into structured comments. */
trait CommentParser extends util.MemberLookup {
  import Regexes._
  import model.internal._

  /** A fully parsed comment in which each tag body is still in wiki
   *  [[Body]] form (not yet rendered to a string).
   */
  case class FullComment (
    body: Body,
    authors: List[Body],
    see: List[Body],
    result: Option[Body],
    throws: Map[String, Body],
    valueParams: Map[String, Body],
    typeParams: Map[String, Body],
    version: Option[Body],
    since: Option[Body],
    todo: List[Body],
    deprecated: Option[Body],
    note: List[Body],
    example: List[Body],
    constructor: Option[Body],
    group: Option[Body],
    groupDesc: Map[String, Body],
    groupNames: Map[String, Body],
    groupPrio: Map[String, Body],
    hideImplicitConversions: List[Body],
    shortDescription: List[Body]
  ) {

    /**
     * Transform this CommentParser.FullComment to a Comment using the supplied
     * Body transformer
     */
    def toComment(transform: Body => String) = Comment(
      transform(body),
      short =
        if (shortDescription.nonEmpty) shortDescription.map(transform).mkString
        else body.summary.map(transform).getOrElse(""),
authors.map(transform), + see.map(transform), + result.map(transform), + throws.map { case (k, v) => (k, transform(v)) }, + valueParams.map { case (k, v) => (k, transform(v)) }, + typeParams.map { case (k, v) => (k, transform(v)) }, + version.map(transform), + since.map(transform), + todo.map(transform), + deprecated.map(transform), + note.map(transform), + example.map(transform), + constructor.map(transform), + group.map(transform), + groupDesc.map { case (k, v) => (k, transform(v)) }, + groupNames.map { case (k, v) => (k, transform(v)) }, + groupPrio.map { case (k, v) => (k, transform(v)) }, + hideImplicitConversions.map(transform) + ) + } + + /** Parses a raw comment string into a `Comment` object. + * @param packages all packages parsed by Scaladoc tool, used for lookup + * @param cleanComment a cleaned comment to be parsed + * @param src the raw comment source string. + * @param pos the position of the comment in source. + */ + def parse( + entity: Entity, + packages: Map[String, Package], + comment: List[String], + src: String, + pos: Position, + site: Symbol = NoSymbol + )(implicit ctx: Context): FullComment = { + + /** Parses a comment (in the form of a list of lines) to a `Comment` + * instance, recursively on lines. To do so, it splits the whole comment + * into main body and tag bodies, then runs the `WikiParser` on each body + * before creating the comment instance. + * + * @param docBody The body of the comment parsed until now. + * @param tags All tags parsed until now. + * @param lastTagKey The last parsed tag, or `None` if the tag section + * hasn't started. Lines that are not tagged are part + * of the previous tag or, if none exists, of the body. + * @param remaining The lines that must still recursively be parsed. + * @param inCodeBlock Whether the next line is part of a code block (in + * which no tags must be read). 
+ */ + def parseComment ( + docBody: StringBuilder, + tags: Map[TagKey, List[String]], + lastTagKey: Option[TagKey], + remaining: List[String], + inCodeBlock: Boolean + ): FullComment = remaining match { + + case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) => + if (!before.trim.isEmpty && !after.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) + else if (!before.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) + else if (!after.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) + case None => + parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) + } + + case CodeBlockEndRegex(before, marker, after) :: ls => { + if (!before.trim.isEmpty && !after.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) + if (!before.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) + else if (!after.trim.isEmpty) + parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false) + case None => + parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false) + } + } + + case SymbolTagRegex(name, sym, body) :: 
ls if (!inCodeBlock) => { + val key = SymbolTagKey(name, sym) + val value = body :: tags.getOrElse(key, Nil) + parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + } + + case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => { + val key = SimpleTagKey(name) + val value = body :: tags.getOrElse(key, Nil) + parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + } + + case SingleTagRegex(name) :: ls if (!inCodeBlock) => { + val key = SimpleTagKey(name) + val value = "" :: tags.getOrElse(key, Nil) + parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + } + + case line :: ls if (lastTagKey.isDefined) => { + val newtags = if (!line.isEmpty) { + val key = lastTagKey.get + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + line) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + tags + (key -> value) + } else tags + parseComment(docBody, newtags, lastTagKey, ls, inCodeBlock) + } + + case line :: ls => { + if (docBody.length > 0) docBody append endOfLine + docBody append line + parseComment(docBody, tags, lastTagKey, ls, inCodeBlock) + } + + case Nil => { + // Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing + val inheritDiagramTag = SimpleTagKey("inheritanceDiagram") + val contentDiagramTag = SimpleTagKey("contentDiagram") + + val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val contentDiagramText: List[String] = tags.get(contentDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val stripTags=List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable")) + val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1)) + + val bodyTags: mutable.Map[TagKey, List[Body]] = + mutable.Map((tagsWithoutDiagram mapValues {tag => tag map 
(parseWikiAtSymbol(entity, packages, _, pos, site))}).toSeq: _*) + + def oneTag(key: SimpleTagKey, filterEmpty: Boolean = true): Option[Body] = + ((bodyTags remove key): @unchecked) match { + case Some(r :: rs) if !(filterEmpty && r.blocks.isEmpty) => + if (!rs.isEmpty) dottydoc.println(s"$pos: only one '@${key.name}' tag is allowed") + Some(r) + case _ => None + } + + def allTags[B](key: SimpleTagKey): List[Body] = + (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty).reverse + + def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): Map[String, Body] = { + val keys: Seq[SymbolTagKey] = + bodyTags.keys.toSeq flatMap { + case stk: SymbolTagKey if (stk.name == key.name) => Some(stk) + case stk: SimpleTagKey if (stk.name == key.name) => + dottydoc.println(s"$pos: tag '@${stk.name}' must be followed by a symbol name") + None + case _ => None + } + val pairs: Seq[(String, Body)] = + for (key <- keys) yield { + val bs = (bodyTags remove key).get + if (bs.length > 1) + dottydoc.println(s"$pos: only one '@${key.name}' tag for symbol ${key.symbol} is allowed") + (key.symbol, bs.head) + } + Map.empty[String, Body] ++ (if (filterEmpty) pairs.filterNot(_._2.blocks.isEmpty) else pairs) + } + + def linkedExceptions: Map[String, Body] = { + val m = allSymsOneTag(SimpleTagKey("throws"), filterEmpty = false) + + m.map { case (targetStr,body) => + val link = lookup(entity, packages, targetStr, pos) + val newBody = body match { + case Body(List(Paragraph(Chain(content)))) => + val descr = Text(" ") +: content + val entityLink = EntityLink(Monospace(Text(targetStr)), link) + Body(List(Paragraph(Chain(entityLink +: descr)))) + case _ => body + } + (targetStr, newBody) + } + } + + val cmt = FullComment( + body = parseWikiAtSymbol(entity, packages, docBody.toString, pos, site), + authors = allTags(SimpleTagKey("author")), + see = allTags(SimpleTagKey("see")), + result = oneTag(SimpleTagKey("return")), + throws = linkedExceptions, + valueParams = 
allSymsOneTag(SimpleTagKey("param")), + typeParams = allSymsOneTag(SimpleTagKey("tparam")), + version = oneTag(SimpleTagKey("version")), + since = oneTag(SimpleTagKey("since")), + todo = allTags(SimpleTagKey("todo")), + deprecated = oneTag(SimpleTagKey("deprecated"), filterEmpty = false), + note = allTags(SimpleTagKey("note")), + example = allTags(SimpleTagKey("example")), + constructor = oneTag(SimpleTagKey("constructor")), + group = oneTag(SimpleTagKey("group")), + groupDesc = allSymsOneTag(SimpleTagKey("groupdesc")), + groupNames = allSymsOneTag(SimpleTagKey("groupname")), + groupPrio = allSymsOneTag(SimpleTagKey("groupprio")), + hideImplicitConversions = allTags(SimpleTagKey("hideImplicitConversion")), + shortDescription = allTags(SimpleTagKey("shortDescription")) + ) + + for ((key, _) <- bodyTags) + dottydoc.println(s"$pos: Tag '@${key.name}' is not recognised") + + cmt + } + } + + parseComment(new StringBuilder(comment.size), Map.empty, None, comment, inCodeBlock = false) + } + + /** A key used for a tag map. The key is built from the name of the tag and + * from the linked symbol if the tag has one. + * Equality on tag keys is structural. */ + private sealed abstract class TagKey { + def name: String + } + + private final case class SimpleTagKey(name: String) extends TagKey + private final case class SymbolTagKey(name: String, symbol: String) extends TagKey + + /** Something that should not have happened, happened, and Scaladoc should exit. */ + private def oops(msg: String): Nothing = + throw new IllegalArgumentException("program logic: " + msg) + + /** Parses a string containing wiki syntax into a `Comment` object. + * Note that the string is assumed to be clean: + * - Removed Scaladoc start and end markers. + * - Removed start-of-line star and one whitespace afterwards (if present). + * - Removed all end-of-line whitespace. + * - Only `endOfLine` is used to mark line endings. 
*/ + def parseWikiAtSymbol( + entity: Entity, + packages: Map[String, Package], + string: String, + pos: Position, + site: Symbol + )(implicit ctx: Context): Body = new WikiParser(entity, packages, string, pos, site).document() + + /** Original wikiparser from NSC + * @author Ingo Maier + * @author Manohar Jonnalagedda + * @author Gilles Dubochet + */ + protected final class WikiParser( + entity: Entity, + packages: Map[String, Package], + val buffer: String, + pos: Position, + site: Symbol + )(implicit ctx: Context) extends CharReader(buffer) { wiki => + var summaryParsed = false + + def document(): Body = { + val blocks = new mutable.ListBuffer[Block] + while (char != endOfText) + blocks += block() + Body(blocks.toList) + } + + /* BLOCKS */ + + /** {{{ block ::= code | title | hrule | listBlock | para }}} */ + def block(): Block = { + if (checkSkipInitWhitespace("{{{")) + code() + else if (checkSkipInitWhitespace('=')) + title() + else if (checkSkipInitWhitespace("----")) + hrule() + else if (checkList) + listBlock + else { + para() + } + } + + /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc + * Characters used to build lists and their constructors */ + protected val listStyles = Map[String, (Seq[Block] => Block)]( + "- " -> ( UnorderedList(_) ), + "1. " -> ( OrderedList(_,"decimal") ), + "I. " -> ( OrderedList(_,"upperRoman") ), + "i. " -> ( OrderedList(_,"lowerRoman") ), + "A. " -> ( OrderedList(_,"upperAlpha") ), + "a. " -> ( OrderedList(_,"lowerAlpha") ) + ) + + /** Checks if the current line is formed with more than one space and one the listStyles */ + def checkList = + (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) }) + + /** {{{ + * nListBlock ::= nLine { mListBlock } + * nLine ::= nSpc listStyle para '\n' + * }}} + * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. 
*/ + def listBlock(): Block = { + + /** Consumes one list item block and returns it, or None if the block is + * not a list or a different list. */ + def listLine(indent: Int, style: String): Option[Block] = + if (countWhitespace > indent && checkList) + Some(listBlock) + else if (countWhitespace != indent || !checkSkipInitWhitespace(style)) + None + else { + jumpWhitespace() + jump(style) + val p = Paragraph(inline(isInlineEnd = false)) + blockEnded("end of list line ") + Some(p) + } + + /** Consumes all list item blocks (possibly with nested lists) of the + * same list and returns the list block. */ + def listLevel(indent: Int, style: String): Block = { + val lines = mutable.ListBuffer.empty[Block] + var line: Option[Block] = listLine(indent, style) + while (line.isDefined) { + lines += line.get + line = listLine(indent, style) + } + val constructor = listStyles(style) + constructor(lines) + } + + val indent = countWhitespace + val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head) + listLevel(indent, style) + } + + def code(): Block = { + jumpWhitespace() + jump("{{{") + val str = readUntil("}}}") + if (char == endOfText) + reportError(pos, "unclosed code block") + else + jump("}}}") + blockEnded("code block") + Code(normalizeIndentation(str)) + } + + /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) 
'\n' }}} */ + def title(): Block = { + jumpWhitespace() + val inLevel = repeatJump('=') + val text = inline(check("=" * inLevel)) + val outLevel = repeatJump('=', inLevel) + if (inLevel != outLevel) + reportError(pos, "unbalanced or unclosed heading") + blockEnded("heading") + Title(text, inLevel) + } + + /** {{{ hrule ::= "----" { '-' } '\n' }}} */ + def hrule(): Block = { + jumpWhitespace() + repeatJump('-') + blockEnded("horizontal rule") + HorizontalRule() + } + + /** {{{ para ::= inline '\n' }}} */ + def para(): Block = { + val p = + if (summaryParsed) + Paragraph(inline(isInlineEnd = false)) + else { + val s = summary() + val r = + if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false)) + summaryParsed = true + Paragraph(Chain(r)) + } + while (char == endOfLine && char != endOfText) + nextChar() + p + } + + /* INLINES */ + + val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r + val CLOSE_TAG = "^</([A-Za-z]+)>$".r + private def readHTMLFrom(begin: HtmlTag): String = { + val list = mutable.ListBuffer.empty[String] + val stack = mutable.ListBuffer.empty[String] + + begin.close match { + case Some(HtmlTag(CLOSE_TAG(s))) => + stack += s + case _ => + return "" + } + + do { + val str = readUntil { char == safeTagMarker || char == endOfText } + nextChar() + + list += str + + str match { + case OPEN_TAG(s, _, standalone) => { + if (standalone != "/") { + stack += s + } + } + case CLOSE_TAG(s) => { + if (s == stack.last) { + stack.remove(stack.length-1) + } + } + case _ => ; + } + } while (stack.length > 0 && char != endOfText) + + list mkString "" + } + + def inline(isInlineEnd: => Boolean): Inline = { + + def inline0(): Inline = { + if (char == safeTagMarker) { + val tag = htmlTag() + HtmlTag(tag.data + readHTMLFrom(tag)) + } + else if (check("'''")) bold() + else if (check("''")) italic() + else if (check("`")) monospace() + else if (check("__")) underline() + else if (check("^")) superscript() + else if (check(",,")) subscript() + else if 
(check("[[")) link() + else { + val str = readUntil { + char == safeTagMarker || + check("''") || + char == '`' || + check("__") || + char == '^' || + check(",,") || + check("[[") || + isInlineEnd || + checkParaEnded || + char == endOfLine + } + Text(str) + } + } + + val inlines: List[Inline] = { + val iss = mutable.ListBuffer.empty[Inline] + iss += inline0() + while (!isInlineEnd && !checkParaEnded) { + val skipEndOfLine = if (char == endOfLine) { + nextChar() + true + } else { + false + } + + val current = inline0() + (iss.last, current) match { + case (Text(t1), Text(t2)) if skipEndOfLine => + iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) + case (i1, i2) if skipEndOfLine => + iss ++= List(Text(endOfLine.toString), i2) + case _ => iss += current + } + } + iss.toList + } + + inlines match { + case Nil => Text("") + case i :: Nil => i + case is => Chain(is) + } + + } + + def htmlTag(): HtmlTag = { + jump(safeTagMarker) + val read = readUntil(safeTagMarker) + if (char != endOfText) jump(safeTagMarker) + HtmlTag(read) + } + + def bold(): Inline = { + jump("'''") + val i = inline(check("'''")) + jump("'''") + Bold(i) + } + + def italic(): Inline = { + jump("''") + val i = inline(check("''")) + jump("''") + Italic(i) + } + + def monospace(): Inline = { + jump("`") + val i = inline(check("`")) + jump("`") + Monospace(i) + } + + def underline(): Inline = { + jump("__") + val i = inline(check("__")) + jump("__") + Underline(i) + } + + def superscript(): Inline = { + jump("^") + val i = inline(check("^")) + if (jump("^")) { + Superscript(i) + } else { + Chain(Seq(Text("^"), i)) + } + } + + def subscript(): Inline = { + jump(",,") + val i = inline(check(",,")) + jump(",,") + Subscript(i) + } + + def summary(): Inline = { + val i = inline(checkSentenceEnded()) + Summary( + if (jump(".")) + Chain(List(i, Text("."))) + else + i + ) + } + + def link(): Inline = { + val SchemeUri = """([a-z]+:.*)""".r + jump("[[") + val parens = 2 + repeatJump('[') + val stop = "]" * 
parens + val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) } + val title = + if (!check(stop)) Some({ + jumpWhitespaceOrNewLine() + inline(check(stop)) + }) + else None + jump(stop) + + (target, title) match { + case (SchemeUri(uri), optTitle) => + Link(uri, optTitle getOrElse Text(uri)) + case (qualName, optTitle) => + makeEntityLink(entity, packages, optTitle getOrElse Text(target), pos, target) + } + } + + /* UTILITY */ + + /** {{{ eol ::= { whitespace } '\n' }}} */ + def blockEnded(blockType: String): Unit = { + if (char != endOfLine && char != endOfText) { + reportError(pos, "no additional content on same line after " + blockType) + jumpUntil(endOfLine) + } + while (char == endOfLine) + nextChar() + } + + /** + * Eliminates the (common) leading spaces in all lines, based on the first line + * For indented pieces of code, it reduces the indent to the least whitespace prefix: + * {{{ + * indented example + * another indented line + * if (condition) + * then do something; + * ^ this is the least whitespace prefix + * }}} + */ + def normalizeIndentation(_code: String): String = { + + val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n' + val lines = code.split("\n") + + // maxSkip - size of the longest common whitespace prefix of non-empty lines + val nonEmptyLines = lines.filter(_.trim.nonEmpty) + val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min + + // remove common whitespace prefix + lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n") + } + + def checkParaEnded(): Boolean = { + (char == endOfText) || + ((char == endOfLine) && { + val poff = offset + nextChar() // read EOL + val ok = { + checkSkipInitWhitespace(endOfLine) || + checkSkipInitWhitespace('=') || + checkSkipInitWhitespace("{{{") || + checkList || + checkSkipInitWhitespace('\u003D') + } + offset = poff + ok + }) + } + + def 
checkSentenceEnded(): Boolean = { + (char == '.') && { + val poff = offset + nextChar() // read '.' + val ok = char == endOfText || char == endOfLine || isWhitespace(char) + offset = poff + ok + } + } + + def reportError(pos: Position, message: String) = + dottydoc.println(s"$pos: $message") + } + + protected sealed class CharReader(buffer: String) { reader => + + var offset: Int = 0 + def char: Char = + if (offset >= buffer.length) endOfText else buffer charAt offset + + final def nextChar() = + offset += 1 + + final def check(chars: String): Boolean = { + val poff = offset + val ok = jump(chars) + offset = poff + ok + } + + def checkSkipInitWhitespace(c: Char): Boolean = { + val poff = offset + jumpWhitespace() + val ok = jump(c) + offset = poff + ok + } + + def checkSkipInitWhitespace(chars: String): Boolean = { + val poff = offset + jumpWhitespace() + val (ok0, chars0) = + if (chars.charAt(0) == ' ') + (offset > poff, chars substring 1) + else + (true, chars) + val ok = ok0 && jump(chars0) + offset = poff + ok + } + + def countWhitespace: Int = { + var count = 0 + val poff = offset + while (isWhitespace(char) && char != endOfText) { + nextChar() + count += 1 + } + offset = poff + count + } + + /* Jumpers */ + + /** Jumps a character and consumes it + * @return true only if the correct character has been jumped */ + final def jump(ch: Char): Boolean = { + if (char == ch) { + nextChar() + true + } + else false + } + + /** Jumps all the characters in chars, consuming them in the process. 
+ * @return true only if the correct characters have been jumped + */ + final def jump(chars: String): Boolean = { + var index = 0 + while (index < chars.length && char == chars.charAt(index) && char != endOfText) { + nextChar() + index += 1 + } + index == chars.length + } + + final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = { + var count = 0 + while (jump(c) && count < max) + count += 1 + count + } + + final def jumpUntil(ch: Char): Int = { + var count = 0 + while (char != ch && char != endOfText) { + nextChar() + count += 1 + } + count + } + + final def jumpUntil(pred: => Boolean): Int = { + var count = 0 + while (!pred && char != endOfText) { + nextChar() + count += 1 + } + count + } + + def jumpWhitespace() = jumpUntil(!isWhitespace(char)) + + def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char)) + + + /* Readers */ + final def readUntil(c: Char): String = { + withRead { + while (char != c && char != endOfText) { + nextChar() + } + } + } + + final def readUntil(chars: String): String = { + assert(chars.length > 0) + withRead { + val c = chars.charAt(0) + while (!check(chars) && char != endOfText) { + nextChar() + while (char != c && char != endOfText) + nextChar() + } + } + } + + final def readUntil(pred: => Boolean): String = { + withRead { + while (char != endOfText && !pred) { + nextChar() + } + } + } + + private def withRead(read: => Unit): String = { + val start = offset + read + buffer.substring(start, offset) + } + + /* Chars classes */ + def isWhitespace(c: Char) = c == ' ' || c == '\t' + + def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n' + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala new file mode 100644 index 000000000..2d75b0c66 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala @@ -0,0 +1,84 @@ +package dotty.tools.dottydoc +package model +package comment + 
import scala.util.matching.Regex

/** Regular expressions and sentinel characters shared by the comment
 *  cleaning and wiki-parsing passes.
 */
object Regexes {

  /* Sentinel characters used while massaging comment text. */
  val safeTagMarker = '\u000E'
  val endOfLine = '\u000A'
  val endOfText = '\u0003'

  val TrailingWhitespace = new Regex("""\s+$""")

  /** The body of a line, dropping the (optional) start star-marker,
   *  one leading whitespace and all trailing whitespace.
   */
  val CleanCommentLine = """(?:\s*\*\s?)?(.*)""".r

  /** Dangerous HTML tags that should be replaced by something safer,
   *  such as wiki syntax, or that should be dropped.
   */
  val DangerousTags = """<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""".r

  /** Javadoc tags that should be replaced by something useful, such as wiki
   *  syntax, or that should be dropped.
   */
  val JavadocTags = """\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""".r

  /** Safe HTML tags that can be kept. */
  val SafeTags = """((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""".r

  /** A Scaladoc tag not linked to a symbol and not followed by text. */
  val SingleTagRegex = """\s*@(\S+)\s*""".r

  /** A Scaladoc tag not linked to a symbol; yields the tag name and the rest
   *  of the line.
   */
  val SimpleTagRegex = """\s*@(\S+)\s+(.*)""".r

  /** A Scaladoc tag linked to a symbol; yields the tag name, the symbol name
   *  and the rest of the line.
   */
  val SymbolTagRegex = """\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""".r

  /** The start of a Scaladoc code block. */
  val CodeBlockStartRegex = """(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""".r

  /** The end of a Scaladoc code block. */
  val CodeBlockEndRegex = """(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""".r

  /** Wiki replacement for a matched javadoc inline tag, or "" when the tag
   *  cannot be salvaged.
   */
  def javadocReplacement(mtch: Regex.Match): String = {
    // group(2) is always present in JavadocTags (possibly empty), so it is
    // safe to read eagerly.
    val payload = mtch.group(2)
    mtch.group(1) match {
      case "code"      => "<code>" + payload + "</code>"
      case "docRoot"   => ""
      case "link"      => "`[[" + payload + "]]`"
      case "linkplain" => "[[" + payload + "]]"
      case "literal"   => "`" + payload + "`"
      case "value"     => "`" + payload + "`"
      case _           => ""
    }
  }

  /** Wiki replacement for a matched dangerous HTML tag, or "" when it should
   *  simply be dropped (e.g. comments, for which group(1) is null).
   */
  def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
    case "p" | "div"           => "\n\n"
    case "h1"                  => "\n= "
    case "/h1"                 => " =\n"
    case "h2"                  => "\n== "
    case "/h2"                 => " ==\n"
    case "h3"                  => "\n=== "
    case "/h3"                 => " ===\n"
    case "h4" | "h5" | "h6"    => "\n==== "
    case "/h4" | "/h5" | "/h6" => " ====\n"
    case "li"                  => "\n * - "
    case _                     => ""
  }
}
/** String-level helpers for navigating raw doc-comment text.
 *  Port of DocStrings.scala from nsc.
 *
 *  @author Martin Odersky
 *  @author Felix Mulder
 */
object CommentUtils {

  /** Returns index of string `str` following `start` skipping longest
   *  sequence of whitespace characters (but no newlines).
   */
  def skipWhitespace(str: String, start: Int): Int =
    if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
    else start

  /** Returns index of string `str` following `start` skipping a
   *  sequence of identifier characters.
   */
  def skipIdent(str: String, start: Int): Int =
    if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
    else start

  /** Returns index of string `str` following a `@tag` at `start`
   *  (no-op when `start` is not at a `@`).
   */
  def skipTag(str: String, start: Int): Int =
    if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
    else start

  /** Returns index of string `str` after `start` skipping the longest
   *  sequence of space and tab characters, possibly also containing
   *  a single `*` character or the `/``**` sequence.
   *  @pre start == str.length || str(start) == `\n`
   */
  def skipLineLead(str: String, start: Int): Int =
    if (start == str.length) start
    else {
      val idx = skipWhitespace(str, start + 1)
      if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
      else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
        skipWhitespace(str, idx + 3)
      else idx
    }

  /** Skips to next occurrence of `\n` or to the position after the `/``**`
   *  sequence following index `start`.
   */
  def skipToEol(str: String, start: Int): Int =
    if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
    else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
    else start

  /** Returns first index following `start` and starting a line (i.e. after
   *  skipLineLead) or starting the comment which satisfies predicate `p`.
   */
  def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
    val idx = skipLineLead(str, skipToEol(str, start))
    if (idx < str.length && !p(idx)) findNext(str, idx)(p)
    else idx
  }

  /** Return all indices following `start` and starting a line (i.e. after
   *  skipLineLead) which satisfy predicate `p`.
   */
  def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
    val idx = findNext(str, start)(p)
    if (idx == str.length) List()
    else idx :: findAll(str, idx)(p)
  }

  /** Produces a string index: a list of start/end positions of all tagged
   *  sections in the string. Every section starts with an at sign and
   *  extends to the next at sign, or to the end of the comment string
   *  (excluding the final two characters which terminate the comment).
   *
   *  Also takes usecases into account — they need to expand until the next
   *  usecase or the end of the string, as they might include other sections
   *  of their own.
   */
  def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
    var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
    indices = mergeUsecaseSections(str, indices)
    indices = mergeInheritdocSections(str, indices)

    indices match {
      case List() => List()
      case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
    }
  }

  /** Merge sections following a usecase into the usecase comment, so they
   *  can override the parent symbol's sections.
   */
  def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
    idxs.indexWhere(str.startsWith("@usecase", _)) match {
      case firstUCIndex if firstUCIndex != -1 =>
        val commentSections = idxs.take(firstUCIndex)
        val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
        commentSections ::: usecaseSections
      case _ =>
        idxs
    }
  }

  /** Merge the inheritdoc sections, as they never make sense on their own. */
  def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
    idxs.filterNot(str.startsWith("@inheritdoc", _))

  /** Does interval `section` start with given `tag`? */
  def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
    startsWithTag(str, section._1, tag)

  /** Does `str` contain `tag` at `start`, not followed by a further
   *  identifier character?
   *  Fix: the lookup of the character after the tag is now guarded, so a tag
   *  ending exactly at the end of `str` no longer throws
   *  StringIndexOutOfBoundsException (it is a valid tag occurrence).
   */
  def startsWithTag(str: String, start: Int, tag: String): Boolean =
    str.startsWith(tag, start) && {
      val next = start + tag.length
      next == str.length || !isIdentifierPart(str charAt next)
    }

  /** The first start tag of a list of tag intervals,
   *  or the end of the whole comment string - 2 if list is empty.
   */
  def startTag(str: String, sections: List[(Int, Int)]) = sections match {
    case Nil => str.length - 2
    case (start, _) :: _ => start
  }

  /** A map from parameter names to start/end indices describing all parameter
   *  sections in `str` tagged with `tag`, where `sections` is the index of `str`.
   */
  def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
    Map() ++ {
      for (section <- sections if startsWithTag(str, section, tag)) yield {
        val start = skipWhitespace(str, section._1 + tag.length)
        str.substring(start, skipIdent(str, start)) -> section
      }
    }

  /** Optionally start and end index of the group section in `str`, or `None`
   *  if `str` does not have a @group.
   */
  def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
    sections find (startsWithTag(str, _, "@group"))

  /** Optionally start and end index of return section in `str`, or `None`
   *  if `str` does not have a @return.
   */
  def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
    sections find (startsWithTag(str, _, "@return"))

  /** Extracts variable name from a string, stripping any pair of surrounding braces. */
  def variableName(str: String): String =
    if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
      str.substring(1, str.length - 1)
    else
      str

  /** Returns index following variable, or start index if no variable was recognized. */
  def skipVariable(str: String, start: Int): Int = {
    var idx = start
    if (idx < str.length && (str charAt idx) == '{') {
      do idx += 1
      while (idx < str.length && (str charAt idx) != '}')
      if (idx < str.length) idx + 1 else start
    } else {
      while (idx < str.length && isVarPart(str charAt idx))
        idx += 1
      idx
    }
  }

  /** A map from the section tag to section parameters. */
  def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
    Map() ++ {
      for (section <- sections) yield
        extractSectionTag(str, section) -> section
    }

  /** Extract the section tag, treating the section tag as an identifier. */
  def extractSectionTag(str: String, section: (Int, Int)): String =
    str.substring(section._1, skipTag(str, section._1))

  /** Extract the section parameter (only valid for @param/@tparam/@throws). */
  def extractSectionParam(str: String, section: (Int, Int)): String = {
    val (beg, _) = section
    assert(str.startsWith("@param", beg) ||
           str.startsWith("@tparam", beg) ||
           str.startsWith("@throws", beg))

    val start = skipWhitespace(str, skipTag(str, beg))
    val finish = skipIdent(str, start)

    str.substring(start, finish)
  }

  /** Extract the section text, except for the tag and comment newlines. */
  def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
    val (beg, end) = section
    if (str.startsWith("@param", beg) ||
        str.startsWith("@tparam", beg) ||
        str.startsWith("@throws", beg))
      (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
    else
      (skipWhitespace(str, skipTag(str, beg)), end)
  }

  /** Cleanup section text: merge continuation lines and strip trailing newlines.
   *  Fix: the trailing-newline loop previously truncated with
   *  `str.length - 1` instead of `result.length - 1`; since `result` can be
   *  shorter than `str` (after `trim`/`replaceAll`) that could truncate the
   *  wrong amount or throw StringIndexOutOfBoundsException.
   */
  def cleanupSectionText(str: String) = {
    var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
    while (result.endsWith("\n"))
      result = result.substring(0, result.length - 1)
    result
  }

}
/** A case class in the documented API; identical shape to a class but
 *  rendered with its own kind string.
 */
trait CaseClass extends Entity with Modifiers with TypeParams with Constructors with SuperTypes with Members {
  override val kind = "case class"
}

/** A trait in the documented API. */
trait Trait extends Entity with Modifiers with TypeParams with SuperTypes with Members {
  /** Parameters of the trait's constructor, when it has any. */
  def traitParams: List[ParamList]
  override val kind = "trait"
}

/** An object in the documented API. */
trait Object extends Entity with Modifiers with SuperTypes with Members {
  override val kind = "object"
}

/** A method in the documented API. */
trait Def extends Entity with Modifiers with TypeParams with ReturnValue with ImplicitlyAddedEntity {
  val kind = "def"
  /** The method's (possibly multiple, possibly implicit) parameter lists. */
  def paramLists: List[ParamList]
}

/** An immutable value member in the documented API. */
trait Val extends Entity with Modifiers with ReturnValue with ImplicitlyAddedEntity {
  val kind = "val"
}

/** A mutable value member in the documented API. */
trait Var extends Entity with Modifiers with ReturnValue {
  val kind = "var"
}

/** The absent entity: empty name, path and comment, and its own parent
 *  (which terminates `Entity.parents`).
 */
trait NonEntity extends Entity {
  val name = ""
  val comment = None
  val path = Nil
  val kind = ""
  val parent = NonEntity
}

final case object NonEntity extends NonEntity
final case object RootEntity extends NonEntity {
  override val name = "root"
}
List[String] = + (t.symbol.flags & SourceModifierFlags) + .flagStrings.toList + .filter(_ != "<trait>") + .filter(_ != "interface") + + def path(sym: Symbol)(implicit ctx: Context): List[String] = sym match { + case sym if sym.name.decode.toString == "<root>" => Nil + case sym => path(sym.owner) :+ sym.name.show + } + + + private val product = """Product[1-9][0-9]*""".r + + def returnType(t: Type)(implicit ctx: Context): Reference = { + val defn = ctx.definitions + + def typeRef(name: String, query: String = "", params: List[Reference] = Nil) = { + val realQuery = if (query != "") query else name + TypeReference(name, UnsetLink(name, realQuery), params) + } + + def expandTpe(t: Type, params: List[Reference] = Nil): Reference = t match { + case tl: TypeLambda => + //FIXME: should be handled correctly + // example, in `Option`: + // + // {{{ + // def companion: GenericCompanion[collection.Iterable] + // }}} + // + // Becomes: def companion: [+X0] -> collection.Iterable[X0] + typeRef(tl.show + " (not handled)") + case AppliedType(tycon, args) => + val cls = tycon.typeSymbol + if (tycon.isRepeatedParam) + expandTpe(args.head) + else if (defn.isFunctionClass(cls)) + FunctionReference(args.init.map(expandTpe(_, Nil)), expandTpe(args.last)) + else if (defn.isTupleClass(cls)) + TupleReference(args.map(expandTpe(_, Nil))) + else { + val query = tycon.show + val name = query.split("\\.").last + typeRef(name, query, params = args.map(expandTpe(_, Nil))) + } + + case ref @ RefinedType(parent, rn, info) => + expandTpe(parent) //FIXME: will be a refined HK, aka class Foo[X] { def bar: List[X] } or similar + case ref @ HKApply(tycon, args) => + expandTpe(tycon, args.map(expandTpe(_, params))) + case TypeRef(_, n) => + val name = n.decode.toString.split("\\$").last + typeRef(name, params = params) + case ta: TypeAlias => + expandTpe(ta.alias.widenDealias) + case OrType(left, right) => + OrTypeReference(expandTpe(left), expandTpe(right)) + case AndType(left, right) => + 
AndTypeReference(expandTpe(left), expandTpe(right)) + case tb @ TypeBounds(lo, hi) => + BoundsReference(expandTpe(lo), expandTpe(hi)) + case AnnotatedType(tpe, _) => + expandTpe(tpe) + case ExprType(tpe) => + expandTpe(tpe) + case c: ConstantType => + ConstantReference(c.show) + case tt: ThisType => + expandTpe(tt.underlying) + case ci: ClassInfo => + val query = path(ci.typeSymbol).mkString(".") + typeRef(ci.cls.name.show, query = query) + case mt: MethodType => + expandTpe(mt.resultType) + case pt: PolyType => + expandTpe(pt.resultType) + case pp: PolyParam => + val paramName = pp.paramName.show + val name = + if (paramName.contains('$')) + paramName.split("\\$\\$").last + else paramName + + typeRef(name) + } + + expandTpe(t) + } + + def typeParams(sym: Symbol)(implicit ctx: Context): List[String] = + sym.info match { + case pt: PolyType => // TODO: not sure if this case is needed anymore + pt.paramNames.map(_.show.split("\\$").last) + case ClassInfo(_, _, _, decls, _) => + decls.iterator + .filter(_.flags is TypeParam) + .map { tp => + val prefix = + if (tp.flags is Covariant) "+" + else if (tp.flags is Contravariant) "-" + else "" + prefix + tp.name.show.split("\\$").last + } + .toList + case _ => + Nil + } + + def constructors(sym: Symbol)(implicit ctx: Context): List[List[ParamList]] = sym match { + case sym: ClassSymbol => + paramLists(sym.primaryConstructor.info) :: Nil + case _ => Nil + } + + def traitParameters(sym: Symbol)(implicit ctx: Context): List[ParamList] = + constructors(sym).head + + def paramLists(tpe: Type)(implicit ctx: Context): List[ParamList] = tpe match { + case pt: PolyType => + paramLists(pt.resultType) + + case mt: MethodType => + ParamListImpl(mt.paramNames.zip(mt.paramTypes).map { case (name, tpe) => + NamedReference( + name.decode.toString, + returnType(tpe), + isByName = tpe.isInstanceOf[ExprType], + isRepeated = tpe.isRepeatedParam + ) + }, mt.isImplicit) :: paramLists(mt.resultType) + + case annot: AnnotatedType => 
paramLists(annot.tpe) + case (_: PolyParam | _: RefinedType | _: TypeRef | _: ThisType | + _: ExprType | _: OrType | _: AndType | _: HKApply) => Nil // return types should not be in the paramlist + } + + def superTypes(t: Tree)(implicit ctx: Context): List[MaterializableLink] = t.symbol.denot match { + case cd: ClassDenotation => + def isJavaLangObject(prefix: Type): Boolean = + prefix match { + case TypeRef(ThisType(TypeRef(NoPrefix, outerName)), innerName) => + outerName.toString == "lang" && innerName.toString == "Object" + case _ => false + } + + def isProductWithArity(prefix: Type): Boolean = prefix match { + case TypeRef(TermRef(TermRef(NoPrefix, root), scala), prod) => + root.toString == "_root_" && + scala.toString == "scala" && + product.findFirstIn(prod.toString).isDefined + case _ => false + } + + cd.classParents.collect { + case t: TypeRef if !isJavaLangObject(t) && !isProductWithArity(t) => + UnsetLink(t.name.toString, path(t.symbol).mkString(".")) + } + case _ => Nil + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/internal.scala b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala new file mode 100644 index 000000000..6afb1ec9b --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala @@ -0,0 +1,89 @@ +package dotty.tools.dottydoc +package model + +import comment.Comment +import references._ + +object internal { + + trait Impl { + var parent: Entity = NonEntity + } + + final case class PackageImpl( + name: String, + var members: List[Entity], + path: List[String], + var comment: Option[Comment] = None + ) extends Package with Impl { + def children: List[Entity with Members] = + members.collect { case x: Entity with Members => x } + } + + final case class ClassImpl( + name: String, + members: List[Entity], + modifiers: List[String], + path: List[String], + typeParams: List[String] = Nil, + constructors: List[List[ParamList]] = Nil, + superTypes: List[MaterializableLink] = Nil, + var comment: Option[Comment] = None + ) 
extends Class with Impl + + final case class CaseClassImpl( + name: String, + members: List[Entity], + modifiers: List[String], + path: List[String], + typeParams: List[String] = Nil, + constructors: List[List[ParamList]] = Nil, + superTypes: List[MaterializableLink] = Nil, + var comment: Option[Comment] = None + ) extends CaseClass with Impl + + final case class TraitImpl( + name: String, + members: List[Entity], + modifiers: List[String], + path: List[String], + typeParams: List[String] = Nil, + traitParams: List[ParamList] = Nil, + superTypes: List[MaterializableLink] = Nil, + var comment: Option[Comment] = None + ) extends Trait with Impl + + final case class ObjectImpl( + name: String, + members: List[Entity], + modifiers: List[String], + path: List[String], + superTypes: List[MaterializableLink] = Nil, + var comment: Option[Comment] = None + ) extends Object with Impl + + final case class DefImpl( + name: String, + modifiers: List[String], + path: List[String], + returnValue: Reference, + typeParams: List[String] = Nil, + paramLists: List[ParamList] = Nil, + var comment: Option[Comment] = None, + implicitlyAddedFrom: Option[Reference] = None + ) extends Def with Impl + + final case class ValImpl( + name: String, + modifiers: List[String], + path: List[String], + returnValue: Reference, + var comment: Option[Comment] = None, + implicitlyAddedFrom: Option[Reference] = None + ) extends Val with Impl + + final case class ParamListImpl( + list: List[NamedReference], + isImplicit: Boolean + ) extends ParamList +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/java.scala b/dottydoc/src/dotty/tools/dottydoc/model/java.scala new file mode 100644 index 000000000..410085061 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/java.scala @@ -0,0 +1,223 @@ +package dotty.tools.dottydoc +package model + +import comment._ +import references._ + +object java { + import scala.collection.JavaConverters._ + import _root_.java.util.{ Optional => JOptional, Map => 
JMap } + + implicit class JavaOption[A](val opt: Option[A]) extends AnyVal { + def asJava: JOptional[A] = + opt.map(a => JOptional.of(a)).getOrElse(JOptional.empty[A]) + } + + implicit class JavaComment(val cmt: Comment) extends AnyVal { + def asJava: JMap[String, _] = Map( + "body" -> cmt.body, + "short" -> cmt.short, + "authors" -> cmt.authors.asJava, + "see" -> cmt.see.asJava, + "result" -> cmt.result.asJava, + "throws" -> cmt.throws.asJava, + "valueParams" -> cmt.valueParams.asJava, + "typeParams" -> cmt.typeParams.asJava, + "version" -> cmt.version.asJava, + "since" -> cmt.since.asJava, + "todo" -> cmt.todo.asJava, + "deprecated" -> cmt.deprecated.asJava, + "note" -> cmt.note.asJava, + "example" -> cmt.example.asJava, + "constructor" -> cmt.constructor.asJava, + "group" -> cmt.group.asJava, + "groupDesc" -> cmt.groupDesc.asJava, + "groupNames" -> cmt.groupNames.asJava, + "groupPrio" -> cmt.groupPrio.asJava, + "hideImplicitConversions" -> cmt.hideImplicitConversions.asJava + ).asJava + } + + implicit class JavaPackage(val ent: Package) extends AnyVal { + def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map( + "kind" -> ent.kind, + "name" -> ent.name, + "path" -> ent.path.asJava, + "members" -> ent.members.map(_.asJava()).asJava, + "comment" -> ent.comment.map(_.asJava).asJava + ) ++ extras).asJava + } + + implicit class JavaCaseClass(val ent: CaseClass) extends AnyVal { + def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map( + "kind" -> ent.kind, + "name" -> ent.name, + "members" -> ent.members.map(_.asJava()).asJava, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "typeParams" -> ent.typeParams.asJava, + "superTypes" -> ent.superTypes.map(_.asJava).asJava, + "comment" -> ent.comment.map(_.asJava).asJava + ) ++ extras).asJava + } + + implicit class JavaClass(val ent: Class) extends AnyVal { + def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map( + "kind" -> ent.kind, + "name" -> 
ent.name, + "members" -> ent.members.map(_.asJava()).asJava, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "typeParams" -> ent.typeParams.asJava, + "superTypes" -> ent.superTypes.map(_.asJava).asJava, + "comment" -> ent.comment.map(_.asJava).asJava + ) ++ extras).asJava + } + + implicit class JavaTrait(val ent: Trait) extends AnyVal { + def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map( + "kind" -> ent.kind, + "name" -> ent.name, + "members" -> ent.members.map(_.asJava()).asJava, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "typeParams" -> ent.typeParams.asJava, + "superTypes" -> ent.superTypes.map(_.asJava).asJava, + "comment" -> ent.comment.map(_.asJava).asJava + ) ++ extras).asJava + } + + implicit class JavaObject(val ent: Object) extends AnyVal { + def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map( + "kind" -> ent.kind, + "name" -> ent.name, + "members" -> ent.members.map(_.asJava()).asJava, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "superTypes" -> ent.superTypes.map(_.asJava).asJava, + "comment" -> ent.comment.map(_.asJava).asJava + ) ++ extras).asJava + } + + implicit class JavaDef(val ent: Def) extends AnyVal { + def asJava: JMap[String, _] = Map( + "kind" -> ent.kind, + "name" -> ent.name, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "returnValue" -> ent.returnValue.asJava, + "typeParams" -> ent.typeParams.asJava, + "paramLists" -> ent.paramLists.map(_.asJava).asJava, + "comment" -> ent.comment.map(_.asJava).asJava, + "implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava + ).asJava + } + + implicit class JavaVal(val ent: Val) extends AnyVal { + def asJava: JMap[String, _] = Map( + "kind" -> ent.kind, + "name" -> ent.name, + "modifiers" -> ent.modifiers.asJava, + "path" -> ent.path.asJava, + "returnValue" -> ent.returnValue.asJava, + "comment" -> ent.comment.map(_.asJava).asJava, + 
"implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava
+ ).asJava
+ }
+
+ implicit class JavaParamList(val pl: ParamList) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "list" -> pl.list.map(_.asJava).asJava,
+ "isImplicit" -> pl.isImplicit
+ ).asJava
+ }
+
+ implicit class JavaReference(val ref: Reference) extends AnyVal {
+ def asJava: JMap[String, _] = ref match {
+ case TypeReference(title, tpeLink, paramLinks) => Map(
+ "kind" -> "TypeReference",
+ "title" -> title,
+ "tpeLink" -> tpeLink.asJava,
+ "paramLinks" -> paramLinks.map(_.asJava).asJava
+ ).asJava
+
+ case OrTypeReference(left, right) => Map(
+ "kind" -> "OrTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case AndTypeReference(left, right) => Map(
+ "kind" -> "AndTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case FunctionReference(args, returnValue) => Map(
+ "kind" -> "FunctionReference",
+ "args" -> args.map(_.asJava).asJava,
+ "returnValue" -> returnValue.asJava
+ ).asJava
+
+ case TupleReference(args) => Map(
+ "kind" -> "TupleReference",
+ "args" -> args.map(_.asJava).asJava
+ ).asJava
+
+ case BoundsReference(low, high) => Map(
+ "kind" -> "BoundsReference",
+ "low" -> low.asJava,
+ "high" -> high.asJava
+ ).asJava
+
+ case NamedReference(title, ref, isByName, isRepeated) => Map(
+ "kind" -> "NamedReference",
+ "title" -> title,
+ "ref" -> ref.asJava,
+ "isByName" -> isByName,
+ "isRepeated" -> isRepeated
+ ).asJava
+
+ case ConstantReference(title) => Map(
+ "kind" -> "ConstantReference",
+ "title" -> title
+ ).asJava
+ }
+ }
+
+ implicit class JavaMaterializableLink(val link: MaterializableLink) extends AnyVal {
+ def asJava: JMap[String, _] = link match {
+ case UnsetLink(title, query) => Map(
+ "kind" -> "UnsetLink",
+ "title" -> title,
+ "query" -> query
+ ).asJava
+
+ case MaterializedLink(title, target) => Map(
+ "kind" -> "MaterializedLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+
+ case
NoLink(title, target) => Map(
+ "kind" -> "NoLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+ }
+ }
+
+ implicit class JavaEntity(val ent: Entity) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = parseEntity(ent, extras)
+ }
+
+ private def parseEntity(ent: Entity, extras: Map[String, _]): JMap[String, _] = ent match {
+ case ent: Package => ent.asJava(extras)
+ case ent: CaseClass => ent.asJava(extras)
+ case ent: Class => ent.asJava(extras)
+ case ent: Trait => ent.asJava(extras)
+ case ent: Object => ent.asJava(extras)
+ case ent: Def => ent.asJava
+ case ent: Val => ent.asJava
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/json.scala b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
new file mode 100644
index 000000000..145728f8a
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
@@ -0,0 +1,93 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+/** This object provides a protocol for serializing the package AST to JSON
+ *
+ * TODO: It might be a good idea to represent the JSON better than just
+ * serializing a big string-blob in the future.
+ */ +object json { + implicit class JsonString(val str: String) extends AnyVal { + def json: String = { + val cleanedString = str + .replaceAll("\\\\","\\\\\\\\") + .replaceAll("\\\"", "\\\\\"") + .replaceAll("\n", "\\\\n") + + s""""$cleanedString"""" + } + } + + implicit class JsonBoolean(val boo: Boolean) extends AnyVal { + def json: String = if (boo) "true" else "false" + } + + implicit class JsonComment(val cmt: Comment) extends AnyVal { + def json: String = + s"""{"body":${cmt.body.json},"short":${cmt.short.json},"authors":${cmt.authors.map(_.json).mkString("[",",","]")},"see":${cmt.see.map(_.json).mkString("[",",","]")},${cmt.result.map(res => s""""result":${res.json},""").getOrElse("")}"throws":${cmt.throws.map { case (k, v) => s"${k.json}:${v.json}" }.mkString("{",",","}")},"valueParams":${cmt.valueParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"typeParams":${cmt.typeParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},${cmt.version.map(x => s""""version":${x.json},""").getOrElse("")}${cmt.since.map(x => s""""since":${x.json},""").getOrElse("")}"todo":${cmt.todo.map(_.json).mkString("[",",","]")},${cmt.deprecated.map(x => s""""deprecated":${x.json},""").getOrElse("")}"note":${cmt.note.map(_.json).mkString("[",",","]")},"example":${cmt.example.map(_.json).mkString("[",",","]")},${cmt.constructor.map(x => s""""constructor":${x.json},""").getOrElse("")}${cmt.group.map(x => s""""group":${x.json},""").getOrElse("")}"groupDesc":${cmt.groupDesc.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupNames":${cmt.groupNames.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupPrio":${cmt.groupPrio.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"hideImplicitConversions":${cmt.hideImplicitConversions.map(_.json).mkString("[",",","]")}}""" + } + + implicit class LinkJson(val link: MaterializableLink) extends AnyVal { + def json: String = { + val (secondTitle, 
secondValue, kind) = link match { + case ul: UnsetLink => ("query".json, ul.query.json, "UnsetLink".json) + case ml: MaterializedLink => ("target".json, ml.target.json, "MaterializedLink".json) + case nl: NoLink => ("target".json, nl.target.json, "NoLink".json) + } + s"""{"title":${link.title.json},$secondTitle:${secondValue},"kind":$kind}""" + } + } + + implicit class ParamListJson(val plist: ParamList) extends AnyVal { + def json: String = + s"""{"list":${plist.list.map(_.json).mkString("[",",","]")},"isImplicit":${plist.isImplicit.json}}""" + } + + private def refToJson(ref: Reference): String = ref match { + case ref: TypeReference => + s"""{"title":${ref.title.json},"tpeLink":${ref.tpeLink.json},"paramLinks":${ref.paramLinks.map(_.json).mkString("[",",","]")},"kind":"TypeReference"}""" + case ref: AndTypeReference => + s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"AndTypeReference"}""" + case ref: OrTypeReference => + s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"OrTypeReference"}""" + case ref: BoundsReference => + s"""{"low":${refToJson(ref.low)},"high":${refToJson(ref.high)},"kind":"BoundsReference"}""" + case ref: NamedReference => + s"""{"title":${ref.title.json},"ref":${refToJson(ref.ref)},"isByName":${ref.isByName.json},"isRepeated":${ref.isRepeated.json},"kind":"NamedReference"}""" + case ref: ConstantReference => + s"""{"title":${ref.title.json},"kind": "ConstantReference"}""" + case ref: FunctionReference => + s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"returnValue":${refToJson(ref.returnValue)},"kind": "FunctionReference"}""" + case ref: TupleReference => + s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"kind": "TupleReference"}""" + } + implicit class ReferenceJson(val ref: Reference) extends AnyVal { def json: String = refToJson(ref) } + + private def entToJson(ent: Entity): String = ent match { + case ent: Package => + 
s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"package"}""" + case ent: Class => + s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"class"}""" + case ent: CaseClass => + s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"case class"}""" + case ent: Trait => + s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"traitParams":${ent.traitParams.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"trait"}""" + case ent: Object => + 
s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"object"}""" + case ent: Def => + s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"paramLists":${ent.paramLists.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"def"}""" + case ent: Val => + s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"val"}""" + } + implicit class EntityJson(val ent: Entity) extends AnyVal { def json: String = entToJson(ent) } + implicit class PackageJson(val pack: Package) extends AnyVal { def json: String = (pack: Entity).json } + + implicit class PackMapJson(val packs: collection.Map[String, Package]) extends AnyVal { + def json: String = packs + .map { case (k, v) => s"${k.json}: ${v.json}" } + .mkString("{",",","}") + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala new file mode 100644 index 000000000..fa54163e5 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala @@ -0,0 +1,98 @@ +package dotty.tools +package dottydoc +package model + +import dotc.core.Symbols.Symbol +import 
dotc.core.Contexts.Context +import dotc.util.Positions.NoPosition + +object parsers { + import comment._ + import BodyParsers._ + import model.internal._ + import util.MemberLookup + import util.traversing._ + import util.internal.setters._ + + class WikiParser extends CommentCleaner with CommentParser with CommentExpander { + private[this] var commentCache: Map[String, (Entity, Map[String, Package]) => Option[Comment]] = Map.empty + + /** Parses comment and returns the path to the entity with an optional comment + * + * The idea here is to use this fact to create `Future[Seq[(String, Option[Comment]]]` + * which can then be awaited near the end of the run - before the pickling. + */ + def parseHtml(sym: Symbol, parent: Symbol, entity: Entity, packages: Map[String, Package])(implicit ctx: Context): (String, Option[Comment]) = { + val cmt = ctx.docbase.docstring(sym).map { d => + val expanded = expand(sym, parent) + parse(entity, packages, clean(expanded), expanded, d.pos).toComment(_.toHtml(entity)) + } + + (entity.path.mkString("."), cmt) + } + + + def add(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context): Unit = { + val commentParser = { (entity: Entity, packs: Map[String, Package]) => + parseHtml(symbol, parent, entity, packs)(ctx)._2 + } + + /** TODO: this if statement searches for doc comments in parent + * definitions if one is not defined for the current symbol. + * + * It might be a good idea to factor this out of the WikiParser - since + * it mutates the state of docbase sort of silently. 
+ */ + implicit val implCtx = ctx + if (!ctx.docbase.docstring(symbol).isDefined) { + val parentCmt = + symbol.extendedOverriddenSymbols + .find(ctx.docbase.docstring(_).isDefined) + .flatMap(p => ctx.docbase.docstring(p)) + + ctx.docbase.addDocstring(symbol, parentCmt) + } + + + val path = entity.path.mkString(".") + if (!commentCache.contains(path) || ctx.docbase.docstring(symbol).isDefined) + commentCache = commentCache + (path -> commentParser) + } + + def +=(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context) = add(entity, symbol, parent, ctx) + + def size: Int = commentCache.size + + private def parse(entity: Entity, packs: Map[String, Package]): Option[Comment] = + commentCache(entity.path.mkString("."))(entity, packs) + + def parse(packs: Map[String, Package]): Unit = { + def rootPackages: List[String] = { + var currentDepth = Int.MaxValue + var packages: List[String] = Nil + + for (key <- packs.keys) { + val keyDepth = key.split("\\.").length + packages = + if (keyDepth < currentDepth) { + currentDepth = keyDepth + key :: Nil + } else if (keyDepth == currentDepth) { + key :: packages + } else packages + } + + packages + } + + for (pack <- rootPackages) { + mutateEntities(packs(pack)) { e => + val comment = parse(e, packs) + setComment(e, to = comment) + } + } + } + + def clear(): Unit = commentCache = Map.empty + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/model/references.scala b/dottydoc/src/dotty/tools/dottydoc/model/references.scala new file mode 100644 index 000000000..a28148fa7 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/model/references.scala @@ -0,0 +1,20 @@ +package dotty.tools.dottydoc +package model + +object references { + sealed trait Reference + final case class TypeReference(title: String, tpeLink: MaterializableLink, paramLinks: List[Reference]) extends Reference + final case class OrTypeReference(left: Reference, right: Reference) extends Reference + final case class AndTypeReference(left: Reference, right: 
Reference) extends Reference
+ final case class FunctionReference(args: List[Reference], returnValue: Reference) extends Reference
+ final case class TupleReference(args: List[Reference]) extends Reference
+ final case class BoundsReference(low: Reference, high: Reference) extends Reference
+ final case class NamedReference(title: String, ref: Reference, isByName: Boolean = false, isRepeated: Boolean = false) extends Reference
+ final case class ConstantReference(title: String) extends Reference
+
+ /** Use MaterializableLink for entities that need to be picklable */
+ sealed trait MaterializableLink { def title: String }
+ final case class UnsetLink(title: String, query: String) extends MaterializableLink
+ final case class MaterializedLink(title: String, target: String) extends MaterializableLink
+ final case class NoLink(title: String, target: String) extends MaterializableLink
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
new file mode 100644
index 000000000..40c775428
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
@@ -0,0 +1,92 @@
+package dotty.tools
+package dottydoc
+package util
+
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Flags
+import dotc.core.Names._
+import dotc.core.Symbols._
+import dotc.core.Types._
+import dotc.core.Names._
+import dotc.util.Positions._
+import model.internal._
+import model.comment._
+import model._
+
+trait MemberLookup {
+ /** Performs a lookup based on the provided (pruned) query string
+ *
+ * Will return a `Tooltip` if unsuccessful, otherwise a LinkToEntity or LinkToExternal
+ */
+ def lookup(
+ entity: Entity,
+ packages: Map[String, Package],
+ query: String,
+ pos: Position
+ ): LinkTo = {
+ val notFound: LinkTo = Tooltip(query)
+ val querys = query.split("\\.").toList
+
+ /** Looks for the specified entity among `ent`'s members */
+ def localLookup(ent: Entity
with Members, searchStr: String): LinkTo = + ent + .members + .collect { case x if x.name == searchStr => x } + .sortBy(_.path.last) + .headOption + .fold(notFound)(e => LinkToEntity(e)) + + /** Looks for an entity down in the structure, if the search list is Nil, + * the search stops + */ + def downwardLookup(ent: Entity with Members, search: List[String]): LinkTo = + search match { + case Nil => notFound + case x :: Nil => + localLookup(ent, x) + case x :: xs => + ent + .members + .collect { case e: Entity with Members if e.name == x => e } + .headOption + .fold(notFound)(e => downwardLookup(e, xs)) + } + + /** Finds package with longest matching name, then does downwardLookup in + * the package + */ + def globalLookup: LinkTo = { + def longestMatch(list: List[String]): List[String] = + if (list == Nil) Nil + else + packages + .get(list.mkString(".")) + .map(_ => list) + .getOrElse(longestMatch(list.dropRight(1))) + + longestMatch(querys) match { + case Nil => notFound + case xs => downwardLookup(packages(xs.mkString(".")), querys diff xs) + } + } + + (querys, entity) match { + case (x :: Nil, e: Entity with Members) => + localLookup(e, x) + case (x :: _, e: Entity with Members) if x == entity.name => + downwardLookup(e, querys) + case (x :: xs, _) => + if (xs.nonEmpty) globalLookup + else lookup(entity, packages, "scala." 
+ query, pos) + } + } + + def makeEntityLink( + entity: Entity, + packages: Map[String, Package], + title: Inline, + pos: Position, + query: String + ): EntityLink = EntityLink(title, lookup(entity, packages, query, pos)) +} diff --git a/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala new file mode 100644 index 000000000..2084e0a97 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala @@ -0,0 +1,125 @@ +package dotty.tools.dottydoc +package util + +import dotty.tools.dotc.config.Printers.dottydoc + +import _root_.java.io.{ + File => JFile, + PrintWriter => JPrintWriter, + FileReader => JFileReader, + BufferedInputStream, + InputStream, + InputStreamReader, + FileOutputStream, + BufferedOutputStream, + FileNotFoundException +} +import _root_.java.net.URL +import _root_.java.util.{ Map => JMap, List => JList } +import model.{ Entity, Package } +import model.json._ +import com.github.mustachejava.DefaultMustacheFactory +import scala.collection.JavaConverters._ + +class OutputWriter { + + def writeJava(packs: JMap[String, Package], outPath: String, template: URL, resources: JList[URL]): Unit = { + write(packs.asScala, outPath, template, resources.asScala) + } + + def write(packs: collection.Map[String, Package], outPath: String, template: URL, resources: Traversable[URL]): Unit = { + // Write all packages to `outPath` + for (pack <- packs.values) { + println(s"""Writing '${pack.path.mkString(".")}'""") + writeFile( + expandTemplate(template, pack, outPath), + outPath + pack.path.mkString("/", "/", "/"), + "index.html") + + // Write all package children to outPath + for { + child <- pack.children + if child.kind != "package" + } { + println(s"""Writing '${child.path.mkString(".")}'""") + writeFile( + expandTemplate(template, child, outPath), + outPath + child.path.dropRight(1).mkString("/", "/", "/"), + child.path.last + ".html") + } + } + + // Write full index to outPath + val 
js = "Index = {}; Index.packages = " + packs.json + ";" + println("Writing index.js...") + writeFile(js, outPath + "/docassets/", "index.js") + + // Write resources to outPath + println("Copying CSS/JS resources to destination...") + assert(resources.nonEmpty) + + // TODO: splitting the URL by '/' and taking the last means that we don't + // allow folders among the resources + resources.foreach(url => copy(url.openStream, outPath, url.getFile.split("/").last)) + + println("Done writing static material, building js-app") + } + + def writeJsonJava(index: JMap[String, Package], outputDir: String): Unit = + writeJson(index.asScala, outputDir) + + def writeJson(index: collection.Map[String, Package], outputDir: String): Unit = + writeFile(index.json, outputDir + "/", "index.json") + + def expandTemplate(template: URL, entity: Entity, outPath: String): String = try { + import model.json._ + import model.java._ + + val inputStream = template.openStream + val writer = new _root_.java.io.StringWriter() + val mf = new DefaultMustacheFactory() + + def toRoot = "../" * (entity.path.length - { if (entity.isInstanceOf[Package]) 0 else 1 }) + + val entityWithExtras = entity.asJava(Map( + "assets" -> s"${toRoot}docassets", + "index" -> s"${toRoot}docassets/index.js", + "currentEntity" -> entity.json + )) + + mf.compile(new InputStreamReader(inputStream), "template") + .execute(writer, entityWithExtras) + + inputStream.close() + writer.flush() + writer.toString + } catch { + case fnf: FileNotFoundException => + dottydoc.println(s"""Couldn't find the template: "${template.getFile}"...exiting""") + System.exit(1); "" + } + + def writeFile(str: String, path: String, file: String): Unit = + writeFile(str.map(_.toByte).toArray, path, file) + + def writeFile(bytes: Array[Byte], path: String, file: String): Unit = { + def printToFile(f: JFile)(op: JPrintWriter => Unit) = { + val bos = new BufferedOutputStream(new FileOutputStream(f)) + try { + Stream.continually(bos.write(bytes)) + } 
finally bos.close() + } + + new JFile(path).mkdirs() + printToFile(new JFile(path + file))(printer => bytes.foreach(printer.print)) + } + + def copy(src: InputStream, path: String, name: String): Unit = { + val reader = new BufferedInputStream(src) + try { + val bytes = Stream.continually(reader.read).takeWhile(-1 != _).map(_.toByte) + writeFile(bytes.toArray, path + "/docassets/", name) + src.close() + } finally reader.close() + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala new file mode 100644 index 000000000..a3b60fa44 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala @@ -0,0 +1,25 @@ +package dotty.tools.dottydoc +package util + +object traversing { + import model._ + + def mutateEntities(e: Entity)(trans: Entity => Unit): Unit = e match { + case e: Entity with Members => + trans(e) + e.members.map(mutateEntities(_)(trans)) + case e: Entity => trans(e) + } + + def relativePath(from: Entity, to: Entity) = { + val offset = from match { + case v: Val if v.implicitlyAddedFrom.isDefined => 3 + case d: Def if d.implicitlyAddedFrom.isDefined => 3 + case _: Val | _: Def => 2 + case _ => 1 + } + + "../" * (from.path.length - offset) + + to.path.mkString("", "/", ".html") + } +} diff --git a/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala new file mode 100644 index 000000000..a5a4dfec6 --- /dev/null +++ b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala @@ -0,0 +1,65 @@ +package dotty.tools.dottydoc +package util +package internal + +object setters { + import model._ + import comment.Comment + import model.references._ + import internal._ + + def setComment(ent: Entity, to: Option[Comment]) = ent match { + case x: PackageImpl => x.comment = to + case x: ClassImpl => x.comment = to + case x: CaseClassImpl => x.comment = to + case x: TraitImpl => x.comment = to + case x: ObjectImpl => x.comment = to + 
case x: DefImpl => x.comment = to + case x: ValImpl => x.comment = to + } + + def setParent(ent: Entity, to: Entity): Unit = ent match { + case e: ClassImpl => + e.parent = to + e.members.foreach(setParent(_, e)) + case e: CaseClassImpl => + e.parent = to + e.members.foreach(setParent(_, e)) + case e: ObjectImpl => + e.parent = to + e.members.foreach(setParent(_, e)) + case e: TraitImpl => + e.parent = to + e.members.foreach(setParent(_, e)) + case e: ValImpl => + e.parent = to + case e: DefImpl => + e.parent = to + case _ => () + } + + implicit class FlattenedEntity(val ent: Entity) extends AnyVal { + /** Returns a flat copy if anything was changed (Entity with Members) else + * the identity + */ + def flat: Entity = { + def flattenMember: Entity => Entity = { + case e: PackageImpl => e.copy(members = Nil) + case e: ObjectImpl => e.copy(members = Nil) + case e: CaseClassImpl => e.copy(members = Nil) + case e: ClassImpl => e.copy(members = Nil) + case e: TraitImpl => e.copy(members = Nil) + case other => other + } + + ent match { + case e: PackageImpl => e.copy(members = e.members.map(flattenMember)) + case e: ObjectImpl => e.copy(members = e.members.map(flattenMember)) + case e: CaseClassImpl => e.copy(members = e.members.map(flattenMember)) + case e: ClassImpl => e.copy(members = e.members.map(flattenMember)) + case e: TraitImpl => e.copy(members = e.members.map(flattenMember)) + case other => other + } + } + } +} diff --git a/dottydoc/test/BaseTest.scala b/dottydoc/test/BaseTest.scala new file mode 100644 index 000000000..2233d03c8 --- /dev/null +++ b/dottydoc/test/BaseTest.scala @@ -0,0 +1,57 @@ +package dotty.tools +package dottydoc + +import dotc.core.Contexts +import Contexts.{ Context, ContextBase, FreshContext } +import dotc.util.SourceFile +import dotc.core.Phases.Phase +import dotc.typer.FrontEnd +import dottydoc.core.DocASTPhase +import model.Package + +trait DottyTest { + dotty.tools.dotc.parsing.Scanners // initialize keywords + + implicit var ctx: 
FreshContext = { + val base = new ContextBase + import base.settings._ + val ctx = base.initialCtx.fresh + ctx.setSetting(ctx.settings.language, List("Scala2")) + ctx.setSetting(ctx.settings.YkeepComments, true) + base.initialize()(ctx) + ctx + } + + private def compilerWithChecker(assertion: Map[String, Package] => Unit) = new DocCompiler { + private[this] val assertionPhase: List[List[Phase]] = + List(new Phase { + def phaseName = "assertionPhase" + override def run(implicit ctx: Context): Unit = + assertion(ctx.docbase.packages[Package].toMap) + }) :: Nil + + override def phases = + super.phases ++ assertionPhase + } + + def checkSource(source: String)(assertion: Map[String, Package] => Unit): Unit = { + val c = compilerWithChecker(assertion) + c.rootContext(ctx) + val run = c.newRun + run.compile(source) + } + + def checkFiles(sources: List[String])(assertion: Map[String, Package] => Unit): Unit = { + val c = compilerWithChecker(assertion) + c.rootContext(ctx) + val run = c.newRun + run.compile(sources) + } + + def checkSources(sourceFiles: List[SourceFile])(assertion: Map[String, Package] => Unit): Unit = { + val c = compilerWithChecker(assertion) + c.rootContext(ctx) + val run = c.newRun + run.compileSources(sourceFiles) + } +} diff --git a/dottydoc/test/ConstructorTest.scala b/dottydoc/test/ConstructorTest.scala new file mode 100644 index 000000000..8aa883022 --- /dev/null +++ b/dottydoc/test/ConstructorTest.scala @@ -0,0 +1,211 @@ +package dotty.tools +package dottydoc + +import org.junit.Test +import org.junit.Assert._ + +import dotc.util.SourceFile +import model._ +import model.internal._ +import model.references._ + +class Constructors extends DottyTest { + @Test def singleClassConstructor = { + val source = new SourceFile ( + "Class.scala", + """ + |package scala + | + |class Class(val str: String) + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cls: Class), _, _) => + 
cls.constructors.headOption match { + case Some(ParamListImpl(NamedReference("str", _, false, false) :: Nil, false) :: Nil) => + // success! + case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}") + } + } + } + } + + @Test def constructorPlusImplicitArgList = { + val source = new SourceFile ( + "Class.scala", + """ + |package scala + | + |class Class(val str1: String)(implicit str2: String) + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cls: Class), _, _) => + cls.constructors match { + case ( + ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) :: + ParamListImpl(NamedReference("str2", _, false, false) :: Nil, true) :: Nil + ) :: Nil => + // success! + case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}") + } + } + } + } + + @Test def multipleArgumentListsForConstructor = { + val source = new SourceFile ( + "Class.scala", + """ + |package scala + | + |class Class(val str1: String)(val str2: String)(implicit str3: String) + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cls: Class), _, _) => + cls.constructors match { + case ( + ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) :: + ParamListImpl(NamedReference("str2", _, false, false) :: Nil, false) :: + ParamListImpl(NamedReference("str3", _, false, false) :: Nil, true) :: Nil + ) :: Nil => + // success! 
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}") + } + } + } + } + + @Test def multipleConstructors = { + val source = new SourceFile ( + "Class.scala", + """ + |package scala + | + |class Class(val main: String) { + | def this(alt1: Int) = + | this("String") + | + | def this(alt2: List[String]) = + | this(alt2.head) + |} + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cls: Class), _, _) => + cls.constructors match { + case ( + ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil + ) :: ( + ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil + ) :: ( + ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil + ) :: Nil => + // success! + case _ => + assert( + false, + s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}""" + ) + } + } + } + } + + @Test def multipleConstructorsCC = { + val source = new SourceFile ( + "Class.scala", + """ + |package scala + | + |case class Class(val main: String) { + | def this(alt1: Int) = + | this("String") + | + | def this(alt2: List[String]) = + | this(alt2.head) + |} + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cls: CaseClass, obj: Object), _, _) => + cls.constructors match { + case ( + ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil + ) :: ( + ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil + ) :: ( + ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil + ) :: Nil => + // success! 
+ case _ => + println(obj.members.map(x => x.kind + " " + x.name)) + assert( + false, + s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}""" + ) + } + } + } + } + + @Test def traitParameters = { + val source = new SourceFile ( + "Trait.scala", + """ + |package scala + | + |trait Trait(val main: String) + """.stripMargin + ) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(trt: Trait), _, _) => + trt.traitParams match { + case ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil => + case _ => + assert( + false, + s"""Incorrect constructor found:\n${trt.traitParams.mkString("\n")}""" + ) + } + } + } + } + + @Test def testJson = { + val actualSource = + """ + |package scala + | + |trait Trait(val main: String) + |class Class(val main: String) + |case class CaseClass(main: String) + """.stripMargin + + val source = new SourceFile ("JsonTest.scala", actualSource) + + checkSources(source :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(cc: CaseClass, _, cls: Class, trt: Trait), _, _) => + import model.json._ + lazy val incorrectJson = s"The json generated for:\n$actualSource\n\nIs not correct" + assert(cc.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson) + assert(cls.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson) + assert(trt.json.contains(s""""traitParams":[{"list":[{"title":"main""""), incorrectJson) + } + } + } +} diff --git a/dottydoc/test/PackageStructure.scala b/dottydoc/test/PackageStructure.scala new file mode 100644 index 000000000..00caaa2c0 --- /dev/null +++ b/dottydoc/test/PackageStructure.scala @@ -0,0 +1,89 @@ +package dotty.tools +package dottydoc + +import org.junit.Test +import org.junit.Assert._ + +import dotc.util.SourceFile +import model.internal._ + +class PackageStructure extends DottyTest { + @Test def multipleCompilationUnits = { + val source1 = new SourceFile( + 
"TraitA.scala", + """ + |package scala + | + |trait A + """.stripMargin + ) + + val source2 = new SourceFile( + "TraitB.scala", + """ + |package scala + | + |trait B + """.stripMargin + ) + + checkSources(source1 :: source2 :: Nil) { packages => + packages("scala") match { + case PackageImpl(_, List(tA, tB), _, _) => + assert( + tA.name == "A" && tB.name == "B", + s"trait A had name '${tA.name}' and trait B had name '${tB.name}'" + ) + case _ => fail("Incorrect package structure after run") + } + } + } + + + @Test def multiplePackages = { + val source1 = new SourceFile( + "TraitA.scala", + """ + |package scala + |package collection + | + |trait A + """.stripMargin) + + val source2 = new SourceFile( + "TraitB.scala", + """ + |package scala + |package collection + | + |trait B + """.stripMargin) + + checkSources(source1 :: source2 :: Nil) { packages => + packages("scala") match { + case PackageImpl( + "scala", + List(PackageImpl("scala.collection", List(tA, tB), _, _)), + _, _ + ) => + assert( + tA.name == "A" && tB.name == "B", + s"trait A had name '${tA.name}' and trait B had name '${tB.name}'" + ) + + case _ => + fail(s"""Incorrect package structure for 'scala' package: ${packages("scala")}""") + } + + packages("scala.collection") match { + case PackageImpl("scala.collection", List(tA, tB), _, _) => + assert( + tA.name == "A" && tB.name == "B", + s"trait A had name '${tA.name}' and trait B had name '${tB.name}'" + ) + + case _ => fail("Incorrect package structure for 'scala.collection' package") + } + } + } +} diff --git a/dottydoc/test/SimpleComments.scala b/dottydoc/test/SimpleComments.scala new file mode 100644 index 000000000..959eb1745 --- /dev/null +++ b/dottydoc/test/SimpleComments.scala @@ -0,0 +1,29 @@ +package dotty.tools +package dottydoc + +import org.junit.Test +import org.junit.Assert._ + +class TestSimpleComments extends DottyTest { + + @Test def simpleComment = { + val source = + """ + |package scala + | + |/** Hello, world! 
*/ + |trait HelloWorld + """.stripMargin + + checkSource(source) { packages => + val traitCmt = + packages("scala") + .children.find(_.path.mkString(".") == "scala.HelloWorld") + .flatMap(_.comment.map(_.body)) + .get + + assertEquals(traitCmt, "<p>Hello, world!</p>") + } + } + +} diff --git a/dottydoc/test/WhitelistedStdLib.scala b/dottydoc/test/WhitelistedStdLib.scala new file mode 100644 index 000000000..48697ea7f --- /dev/null +++ b/dottydoc/test/WhitelistedStdLib.scala @@ -0,0 +1,45 @@ +package dotty.tools +package dottydoc + +import org.junit.Test +import org.junit.Assert._ + +class TestWhitelistedCollections extends DottyTest { + val files: List[String] = { + val whitelist = "./test/dotc/scala-collections.whitelist" + + scala.io.Source.fromFile(whitelist, "UTF8") + .getLines() + .map(_.trim) // allow indentation + .filter(!_.startsWith("#")) // allow comment lines prefixed by # + .map(_.takeWhile(_ != '#').trim) // allow comments at the end of a line + .filter(_.nonEmpty) + .filterNot(_.endsWith("package.scala")) + .toList + } + + @Test def arrayHasDocumentation = + checkFiles(files) { packages => + val array = + packages("scala") + .children.find(_.path.mkString(".") == "scala.Array") + .get + + assert(array.comment.get.body.length > 0) + } + + @Test def traitImmutableHasDocumentation = + checkFiles(files) { packages => + val imm = + packages("scala") + .children.find(_.path.mkString(".") == "scala.Immutable") + .get + + assert( + imm.kind == "trait" && imm.name == "Immutable", + "Found wrong `Immutable`") + assert( + imm.comment.map(_.body).get.length > 0, + "Imm did not have a comment with length > 0") + } +} diff --git a/project/Build.scala b/project/Build.scala index b7822907d..8157147d2 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -73,7 +73,9 @@ object DottyBuild extends Build { javaSource in Test := baseDirectory.value / "test", resourceDirectory in Compile := baseDirectory.value / "resources", unmanagedSourceDirectories in Compile := 
Seq((scalaSource in Compile).value), + unmanagedSourceDirectories in Compile += baseDirectory.value / "dottydoc" / "src", unmanagedSourceDirectories in Test := Seq((scalaSource in Test).value), + unmanagedSourceDirectories in Test += baseDirectory.value / "dottydoc" / "test", // set system in/out for repl connectInput in run := true, @@ -91,6 +93,8 @@ object DottyBuild extends Build { //http://stackoverflow.com/questions/10472840/how-to-attach-sources-to-sbt-managed-dependencies-in-scala-ide#answer-11683728 com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys.withSource := true, + resolvers += Resolver.sonatypeRepo("snapshots"), + // get libraries onboard partestDeps := Seq(scalaCompiler, "org.scala-lang" % "scala-reflect" % scalaVersion.value, @@ -98,8 +102,10 @@ object DottyBuild extends Build { libraryDependencies ++= partestDeps.value, libraryDependencies ++= Seq("org.scala-lang.modules" %% "scala-xml" % "1.0.1", "org.scala-lang.modules" %% "scala-partest" % "1.0.11" % "test", + "ch.epfl.lamp" % "dottydoc-client" % "0.1-SNAPSHOT", "com.novocode" % "junit-interface" % "0.11" % "test", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0", + "com.github.spullara.mustache.java" % "compiler" % "0.9.3", "com.typesafe.sbt" % "sbt-interface" % sbtVersion.value), // enable improved incremental compilation algorithm incOptions := incOptions.value.withNameHashing(true), @@ -199,7 +205,8 @@ object DottyBuild extends Build { settings( addCommandAlias("partest", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test;runPartestRunner") ++ addCommandAlias("partest-only", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++ - addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner") + addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++ + 
addCommandAlias("dottydoc", ";dottydoc/run") ). settings(publishing) @@ -263,7 +270,6 @@ object DottyInjectedPlugin extends AutoPlugin { ). settings(publishing) - /** A sandbox to play with the Scala.js back-end of dotty. * * This sandbox is compiled with dotty with support for Scala.js. It can be diff --git a/project/plugins.sbt b/project/plugins.sbt index 57bd46581..8ac4d69bf 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,3 +8,5 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0") addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0") addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.8") + +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.3.5") diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala index 8a4b9cfe8..346af42b8 100644 --- a/src/dotty/tools/dotc/ast/Desugar.scala +++ b/src/dotty/tools/dotc/ast/Desugar.scala @@ -616,16 +616,20 @@ object desugar { * * { cases } * ==> - * x$1 => x$1 match { cases } + * x$1 => (x$1 @unchecked) match { cases } * * If `nparams` != 1, expand instead to * - * (x$1, ..., x$n) => (x$0, ..., x${n-1}) match { cases } + * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked) match { cases } */ - def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1)(implicit ctx: Context) = { + def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1, unchecked: Boolean = true)(implicit ctx: Context) = { val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) val selector = makeTuple(params.map(p => Ident(p.name))) - Function(params, Match(selector, cases)) + + if (unchecked) + Function(params, Match(Annotated(New(ref(defn.UncheckedAnnotType)), selector), cases)) + else + Function(params, Match(selector, cases)) } /** Map n-ary function `(p1, ..., pn) => body` where n != 1 to unary function as follows: @@ -753,7 +757,7 @@ object desugar { case VarPattern(named, tpt) => Function(derivedValDef(named, tpt, EmptyTree, Modifiers(Param)) :: Nil, 
body) case _ => - makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil) + makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil, unchecked = false) } /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap @@ -799,7 +803,7 @@ object desugar { val cases = List( CaseDef(pat, EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) - Apply(Select(rhs, nme.withFilter), Match(EmptyTree, cases)) + Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases)) } /** Is pattern `pat` irrefutable when matched against `rhs`? diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala index 20ae02994..cf11c27fa 100644 --- a/src/dotty/tools/dotc/ast/Trees.scala +++ b/src/dotty/tools/dotc/ast/Trees.scala @@ -15,6 +15,7 @@ import printing.Printer import util.{Stats, Attachment, DotClass} import annotation.unchecked.uncheckedVariance import language.implicitConversions +import parsing.Scanners.Comment object Trees { @@ -30,7 +31,7 @@ object Trees { @sharable var ntrees = 0 /** Attachment key for trees with documentation strings attached */ - val DocComment = new Attachment.Key[String] + val DocComment = new Attachment.Key[Comment] /** Modifiers and annotations for definitions * @param flags The set flags @@ -324,7 +325,7 @@ object Trees { private[ast] def rawMods: Modifiers[T] = if (myMods == null) genericEmptyModifiers else myMods - def rawComment: Option[String] = getAttachment(DocComment) + def rawComment: Option[Comment] = getAttachment(DocComment) def withMods(mods: Modifiers[Untyped]): ThisTree[Untyped] = { val tree = if (myMods == null || (myMods == mods)) this else clone.asInstanceOf[MemberDef[Untyped]] @@ -334,7 +335,7 @@ object Trees { def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(Modifiers(flags)) - def setComment(comment: Option[String]): ThisTree[Untyped] = { + def setComment(comment: Option[Comment]): ThisTree[Untyped] = { comment.map(putAttachment(DocComment, _)) 
asInstanceOf[ThisTree[Untyped]] } diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala index d0c4cc02c..c090a5515 100644 --- a/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -163,6 +163,7 @@ class ScalaSettings extends Settings.SettingGroup { val YkeepComments = BooleanSetting("-Ykeep-comments", "Keep comments when scanning source files.") val YforceSbtPhases = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") + val YcheckAllPatmat = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm)") def stop = YstopAfter /** Area-specific debug output. 
@@ -196,4 +197,68 @@ class ScalaSettings extends Settings.SettingGroup { val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "") val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "") val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999) + + /** Doc specific settings */ + val template = OptionSetting[String]( + "-template", + "A mustache template for rendering each top-level entity in the API" + ) + + val resources = OptionSetting[String]( + "-resources", + "A directory containing static resources needed for the API documentation" + ) + + val DocTitle = StringSetting ( + "-Ydoc-title", + "title", + "The overall name of the Scaladoc site", + "" + ) + + val DocVersion = StringSetting ( + "-Ydoc-version", + "version", + "An optional version number, to be appended to the title", + "" + ) + + val DocOutput = StringSetting ( + "-Ydoc-output", + "outdir", + "The output directory in which to place the documentation", + "." + ) + + val DocFooter = StringSetting ( + "-Ydoc-footer", + "footer", + "A footer on every Scaladoc page, by default the EPFL/Lightbend copyright notice. Can be overridden with a custom footer.", + "" + ) + + val DocUncompilable = StringSetting ( + "-Ydoc-no-compile", + "path", + "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)", + "" + ) + + //def DocUncompilableFiles(implicit ctx: Context) = DocUncompilable.value match { + // case "" => Nil + // case path => io.Directory(path).deepFiles.filter(_ hasExtension "scala").toList + //} + + val DocExternalDoc = MultiStringSetting ( + "-Ydoc-external-doc", + "external-doc", + "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies." 
+ ) + + val DocAuthor = BooleanSetting("-Ydoc-author", "Include authors.", true) + + val DocGroups = BooleanSetting ( + "-Ydoc:groups", + "Group similar functions together (based on the @group annotation)" + ) } diff --git a/src/dotty/tools/dotc/config/Settings.scala b/src/dotty/tools/dotc/config/Settings.scala index f30cedaa0..73df4e1ec 100644 --- a/src/dotty/tools/dotc/config/Settings.scala +++ b/src/dotty/tools/dotc/config/Settings.scala @@ -235,8 +235,8 @@ object Settings { setting } - def BooleanSetting(name: String, descr: String): Setting[Boolean] = - publish(Setting(name, descr, false)) + def BooleanSetting(name: String, descr: String, initialValue: Boolean = false): Setting[Boolean] = + publish(Setting(name, descr, initialValue)) def StringSetting(name: String, helpArg: String, descr: String, default: String): Setting[String] = publish(Setting(name, descr, default, helpArg)) diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala index 262443314..cd76fe88b 100644 --- a/src/dotty/tools/dotc/core/Contexts.scala +++ b/src/dotty/tools/dotc/core/Contexts.scala @@ -29,6 +29,7 @@ import printing._ import config.{Settings, ScalaSettings, Platform, JavaPlatform, SJSPlatform} import language.implicitConversions import DenotTransformers.DenotTransformer +import parsing.Scanners.Comment import xsbti.AnalysisCallback object Contexts { @@ -531,6 +532,9 @@ object Contexts { /** The symbol loaders */ val loaders = new SymbolLoaders + /** Documentation base */ + val docbase = new DocBase + /** The platform, initialized by `initPlatform()`. 
*/ private var _platform: Platform = _ @@ -567,14 +571,32 @@ object Contexts { def squashed(p: Phase): Phase = { allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) } + } - val _docstrings: mutable.Map[Symbol, String] = + class DocBase { + private[this] val _docstrings: mutable.Map[Symbol, Comment] = mutable.Map.empty - def docstring(sym: Symbol): Option[String] = _docstrings.get(sym) + def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) - def addDocstring(sym: Symbol, doc: Option[String]): Unit = + def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = doc.map(d => _docstrings += (sym -> d)) + + /* + * Dottydoc places instances of `Package` in this map - but we do not want + * to depend on `dottydoc` for the compiler, as such this is defined as a + * map of `String -> AnyRef` + */ + private[this] val _packages: mutable.Map[String, AnyRef] = mutable.Map.empty + def packages[A]: mutable.Map[String, A] = _packages.asInstanceOf[mutable.Map[String, A]] + + /** Should perhaps factorize this into caches that get flushed */ + private var _defs: Map[Symbol, Set[Symbol]] = Map.empty + def defs(sym: Symbol): Set[Symbol] = _defs.get(sym).getOrElse(Set.empty) + + def addDef(s: Symbol, d: Symbol): Unit = _defs = (_defs + { + s -> _defs.get(s).map(xs => xs + d).getOrElse(Set(d)) + }) } /** The essential mutable state of a context base, collected into a common class */ diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala index 115995ddc..87d94dcbe 100644 --- a/src/dotty/tools/dotc/core/Types.scala +++ b/src/dotty/tools/dotc/core/Types.scala @@ -841,6 +841,13 @@ object Types { case _ => this } + /** Eliminate anonymous classes */ + final def deAnonymize(implicit ctx: Context): Type = this match { + case tp:TypeRef if tp.symbol.isAnonymousClass => + tp.symbol.asClass.typeRef.asSeenFrom(tp.prefix, tp.symbol.owner) + case tp => tp + } + /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until 
type * is no longer alias type, LazyRef, or instantiated type variable. */ diff --git a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 4ea98f7c3..1570dbca0 100644 --- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -85,6 +85,7 @@ class ClassfileParser( val jflags = in.nextChar val isAnnotation = hasAnnotation(jflags) val sflags = classTranslation.flags(jflags) + val isEnum = (jflags & JAVA_ACC_ENUM) != 0 val nameIdx = in.nextChar currentClassName = pool.getClassName(nameIdx) @@ -140,6 +141,15 @@ class ClassfileParser( setClassInfo(classRoot, classInfo) setClassInfo(moduleRoot, staticInfo) } + + // eager load java enum definitions for exhaustivity check of pattern match + if (isEnum) { + instanceScope.toList.map(_.ensureCompleted()) + staticScope.toList.map(_.ensureCompleted()) + classRoot.setFlag(Flags.Enum) + moduleRoot.setFlag(Flags.Enum) + } + result } diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala index 600707cbf..378aa6ed7 100644 --- a/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/src/dotty/tools/dotc/parsing/Parsers.scala @@ -22,6 +22,7 @@ import ScriptParsers._ import scala.annotation.{tailrec, switch} import util.DotClass import rewrite.Rewrites.patch +import Scanners.Comment object Parsers { @@ -1778,13 +1779,13 @@ object Parsers { */ def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match { case VAL => - patDefOrDcl(posMods(start, mods), in.getDocString(start)) + patDefOrDcl(posMods(start, mods), in.getDocComment(start)) case VAR => - patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocString(start)) + patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocComment(start)) case DEF => - defDefOrDcl(posMods(start, mods), in.getDocString(start)) + defDefOrDcl(posMods(start, mods), in.getDocComment(start)) case TYPE => - 
typeDefOrDcl(posMods(start, mods), in.getDocString(start)) + typeDefOrDcl(posMods(start, mods), in.getDocComment(start)) case _ => tmplDef(start, mods) } @@ -1794,7 +1795,7 @@ object Parsers { * ValDcl ::= Id {`,' Id} `:' Type * VarDcl ::= Id {`,' Id} `:' Type */ - def patDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = { + def patDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = { val lhs = commaSeparated(pattern2) val tpt = typedOpt() val rhs = @@ -1820,7 +1821,7 @@ object Parsers { * DefDcl ::= DefSig `:' Type * DefSig ::= id [DefTypeParamClause] ParamClauses */ - def defDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = atPos(tokenRange) { + def defDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(tokenRange) { def scala2ProcedureSyntax(resultTypeStr: String) = { val toInsert = if (in.token == LBRACE) s"$resultTypeStr =" @@ -1895,7 +1896,7 @@ object Parsers { /** TypeDef ::= type Id [TypeParamClause] `=' Type * TypeDcl ::= type Id [TypeParamClause] TypeBounds */ - def typeDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = { + def typeDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = { newLinesOpt() atPos(tokenRange) { val name = ident().toTypeName @@ -1917,7 +1918,7 @@ object Parsers { * | [`case'] `object' ObjectDef */ def tmplDef(start: Int, mods: Modifiers): Tree = { - val docstring = in.getDocString(start) + val docstring = in.getDocComment(start) in.token match { case TRAIT => classDef(posMods(start, addFlag(mods, Trait)), docstring) @@ -1938,7 +1939,7 @@ object Parsers { /** ClassDef ::= Id [ClsTypeParamClause] * [ConstrMods] ClsParamClauses TemplateOpt */ - def classDef(mods: Modifiers, docstring: Option[String]): TypeDef = atPos(tokenRange) { + def classDef(mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(tokenRange) { val name = ident().toTypeName val constr = atPos(in.offset) { val tparams = 
typeParamClauseOpt(ParamOwner.Class) @@ -1965,7 +1966,7 @@ object Parsers { /** ObjectDef ::= Id TemplateOpt */ - def objectDef(mods: Modifiers, docstring: Option[String] = None): ModuleDef = { + def objectDef(mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = { val name = ident() val template = templateOpt(emptyConstructor()) @@ -2190,7 +2191,7 @@ object Parsers { if (in.token == PACKAGE) { in.nextToken() if (in.token == OBJECT) { - val docstring = in.getDocString(start) + val docstring = in.getDocComment(start) ts += objectDef(atPos(start, in.skipToken()) { Modifiers(Package) }, docstring) if (in.token != EOF) { acceptStatSep() diff --git a/src/dotty/tools/dotc/parsing/Scanners.scala b/src/dotty/tools/dotc/parsing/Scanners.scala index 1355ea386..b46ab6348 100644 --- a/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/src/dotty/tools/dotc/parsing/Scanners.scala @@ -193,7 +193,7 @@ object Scanners { } /** Returns the closest docstring preceding the position supplied */ - def getDocString(pos: Int): Option[String] = { + def getDocComment(pos: Int): Option[Comment] = { def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match { case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs) case Nil => c @@ -203,7 +203,7 @@ object Scanners { case (list @ (x :: xs)) :: _ => { val c = closest(x, xs) docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail - Some(c.chrs) + Some(c) } case _ => None } diff --git a/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/src/dotty/tools/dotc/transform/ExpandSAMs.scala index d9445d046..04c6864b1 100644 --- a/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -74,7 +74,8 @@ class ExpandSAMs extends MiniPhaseTransform { thisTransformer => Bind(defaultSym, Underscore(selector.tpe.widen)), EmptyTree, Literal(Constant(false))) - cpy.Match(applyRhs)(paramRef, cases.map(translateCase) :+ defaultCase) + val annotated = 
Annotated(New(ref(defn.UncheckedAnnotType)), paramRef) + cpy.Match(applyRhs)(annotated, cases.map(translateCase) :+ defaultCase) case _ => tru } diff --git a/src/dotty/tools/dotc/transform/Mixin.scala b/src/dotty/tools/dotc/transform/Mixin.scala index 8cdc82f7a..27cfc835a 100644 --- a/src/dotty/tools/dotc/transform/Mixin.scala +++ b/src/dotty/tools/dotc/transform/Mixin.scala @@ -189,16 +189,17 @@ class Mixin extends MiniPhaseTransform with SymTransformer { thisTransform => var argNum = 0 def nextArgument() = initArgs.get(mixin) match { case Some(arguments) => - try arguments(argNum) finally argNum += 1 + val result = arguments(argNum) + argNum += 1 + result case None => - val (msg, pos) = impl.parents.find(_.tpe.typeSymbol == mixin) match { - case Some(parent) => ("lacks argument list", parent.pos) - case None => - ("""is indirectly implemented, - |needs to be implemented directly so that arguments can be passed""".stripMargin, - cls.pos) - } - ctx.error(i"parameterized $mixin $msg", pos) + assert( + impl.parents.forall(_.tpe.typeSymbol != mixin), + i"missing parameters for $mixin from $impl should have been caught in typer") + ctx.error( + em"""parameterized $mixin is indirectly implemented, + |needs to be implemented directly so that arguments can be passed""", + cls.pos) EmptyTree } diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala index 839189948..21b56959b 100644 --- a/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -24,6 +24,7 @@ import Applications._ import TypeApplications._ import SymUtils._, core.NameOps._ import core.Mode +import patmat._ import dotty.tools.dotc.util.Positions.Position import dotty.tools.dotc.core.Decorators._ @@ -52,6 +53,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans override def transformMatch(tree: Match)(implicit ctx: Context, info: TransformerInfo): Tree = { 
val translated = new Translator()(ctx).translator.translateMatch(tree) + // check exhaustivity and unreachability + val engine = new SpaceEngine + if (engine.checkable(tree)) { + engine.checkExhaustivity(tree) + engine.checkRedundancy(tree) + } + translated.ensureConforms(tree.tpe) } @@ -1244,13 +1252,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans case _ => false } - def elimAnonymousClass(t: Type) = t match { - case t:TypeRef if t.symbol.isAnonymousClass => - t.symbol.asClass.typeRef.asSeenFrom(t.prefix, t.symbol.owner) - case _ => - t - } - /** Implement a pattern match by turning its cases (including the implicit failure case) * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. * @@ -1264,7 +1265,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans def translateMatch(match_ : Match): Tree = { val Match(sel, cases) = match_ - val selectorTp = elimAnonymousClass(sel.tpe.widen/*withoutAnnotations*/) + val selectorTp = sel.tpe.widen.deAnonymize/*withoutAnnotations*/ val selectorSym = freshSym(sel.pos, selectorTp, "selector") @@ -1273,6 +1274,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans case _ => (cases, None) } + // checkMatchVariablePatterns(nonSyntheticCases) // only used for warnings // we don't transform after uncurry diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala index b71284049..fd22a0ad9 100644 --- a/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/src/dotty/tools/dotc/transform/PostTyper.scala @@ -39,6 +39,8 @@ import Symbols._, TypeUtils._ * * (9) Adds SourceFile annotations to all top-level classes and objects * + * (10) Adds Child annotations to all sealed classes + * * The reason for making this a macro transform is that some functions (in particular * super and protected accessors and instantiation checks) are naturally top-down and * 
don't lend themselves to the bottom-up approach of a mini phase. The other two functions @@ -243,6 +245,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran ctx.compilationUnit.source.exists && sym != defn.SourceFileAnnot) sym.addAnnotation(Annotation.makeSourceFile(ctx.compilationUnit.source.file.path)) + + if (!sym.isAnonymousClass) // ignore anonymous class + for (parent <- sym.asClass.classInfo.classParents) { + val pclazz = parent.classSymbol + if (pclazz.is(Sealed)) pclazz.addAnnotation(Annotation.makeChild(sym)) + } + tree } else { diff --git a/src/dotty/tools/dotc/transform/patmat/Space.scala b/src/dotty/tools/dotc/transform/patmat/Space.scala new file mode 100644 index 000000000..d942c6853 --- /dev/null +++ b/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -0,0 +1,619 @@ +package dotty.tools.dotc +package transform +package patmat + +import core.Types._ +import core.Contexts._ +import core.Flags._ +import ast.Trees._ +import ast.tpd +import core.Decorators._ +import core.Symbols._ +import core.StdNames._ +import core.NameOps._ +import core.Constants._ + +/** Space logic for checking exhaustivity and unreachability of pattern matching + * + * Space can be thought of as a set of possible values. A type or a pattern + * both refer to spaces. The space of a type is the values that inhabit the + * type. The space of a pattern is the values that can be covered by the + * pattern. + * + * Space is recursively defined as follows: + * + * 1. `Empty` is a space + * 2. For a type T, `Typ(T)` is a space + * 3. A union of spaces `S1 | S2 | ...` is a space + * 4. For a case class Kon(x1: T1, x2: T2, .., xn: Tn), if S1, S2, ..., Sn + * are spaces, then `Kon(S1, S2, ..., Sn)` is a space. + * 5. A constant `Const(value, T)` is a point in space + * 6. 
A stable identifier `Var(sym, T)` is a space + * + * For the problem of exhaustivity check, its formulation in terms of space is as follows: + * + * Is the space Typ(T) a subspace of the union of space covered by all the patterns? + * + * The problem of unreachable patterns can be formulated as follows: + * + * Is the space covered by a pattern a subspace of the space covered by previous patterns? + * + * Assumption: + * (1) One case class cannot be inherited directly or indirectly by another + * case class. + * (2) Inheritance of a case class cannot be well handled by the algorithm. + * + */ + + +/** space definition */ +sealed trait Space + +/** Empty space */ +case object Empty extends Space + +/** Space representing the set of all values of a type + * + * @param tp: the type this space represents + * @param decomposed: does the space result from decomposition? Used for pretty print + * + */ +case class Typ(tp: Type, decomposed: Boolean) extends Space + +/** Space representing a constructor pattern */ +case class Kon(tp: Type, params: List[Space]) extends Space + +/** Union of spaces */ +case class Or(spaces: List[Space]) extends Space + +/** Point in space */ +sealed trait Point extends Space + +/** Point representing variables(stable identifier) in patterns */ +case class Var(sym: Symbol, tp: Type) extends Point + +/** Point representing literal constants in patterns */ +case class Const(value: Constant, tp: Type) extends Point + +/** abstract space logic */ +trait SpaceLogic { + /** Is `tp1` a subtype of `tp2`? */ + def isSubType(tp1: Type, tp2: Type): Boolean + + /** Is `tp1` the same type as `tp2`? */ + def isEqualType(tp1: Type, tp2: Type): Boolean + + /** Is the type `tp` decomposable? i.e. all values of the type can be covered + * by its decomposed types. + * + * Abstract sealed class, OrType, Boolean and Java enums can be decomposed. 
+ */ + def canDecompose(tp: Type): Boolean + + /** Return term parameter types of the case class `tp` */ + def signature(tp: Type): List[Type] + + /** Get components of decomposable types */ + def decompose(tp: Type): List[Space] + + /** Simplify space using the laws, there's no nested union after simplify */ + def simplify(space: Space): Space = space match { + case Kon(tp, spaces) => + val sp = Kon(tp, spaces.map(simplify _)) + if (sp.params.contains(Empty)) Empty + else sp + case Or(spaces) => + val set = spaces.map(simplify _).flatMap { + case Or(ss) => ss + case s => Seq(s) + } filter (_ != Empty) + + if (set.isEmpty) Empty + else if (set.size == 1) set.toList(0) + else Or(set) + case Typ(tp, _) => + if (canDecompose(tp) && decompose(tp).isEmpty) Empty + else space + case _ => space + } + + /** Flatten space to get rid of `Or` for pretty print */ + def flatten(space: Space): List[Space] = space match { + case Kon(tp, spaces) => + val flats = spaces.map(flatten _) + + flats.foldLeft(List[Kon]()) { (acc, flat) => + if (acc.isEmpty) flat.map(s => Kon(tp, Nil :+ s)) + else for (Kon(tp, ss) <- acc; s <- flat) yield Kon(tp, ss :+ s) + } + case Or(spaces) => + spaces.flatMap(flatten _) + case _ => List(space) + } + + /** Is `a` a subspace of `b`? 
Equivalent to `a - b == Empty`, but faster */ + def isSubspace(a: Space, b: Space): Boolean = { + def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b) + def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp))) + + (a, b) match { + case (Empty, _) => true + case (_, Empty) => false + case (Or(ss), _) => ss.forall(isSubspace(_, b)) + case (Typ(tp1, _), Typ(tp2, _)) => + isSubType(tp1, tp2) || tryDecompose1(tp1) || tryDecompose2(tp2) + case (Typ(tp1, _), Or(ss)) => + ss.exists(isSubspace(a, _)) || tryDecompose1(tp1) + case (Typ(tp1, _), Kon(tp2, ss)) => + isSubType(tp1, tp2) && isSubspace(Kon(tp2, signature(tp2).map(Typ(_, false))), b) || + tryDecompose1(tp1) + case (Kon(tp1, ss), Typ(tp2, _)) => + isSubType(tp1, tp2) || + simplify(a) == Empty || + (isSubType(tp2, tp1) && tryDecompose1(tp1)) || + tryDecompose2(tp2) + case (Kon(_, _), Or(_)) => + simplify(minus(a, b)) == Empty + case (Kon(tp1, ss1), Kon(tp2, ss2)) => + isEqualType(tp1, tp2) && ss1.zip(ss2).forall((isSubspace _).tupled) + case (Const(v1, _), Const(v2, _)) => v1 == v2 + case (Const(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2) + case (Const(_, _), Or(ss)) => ss.exists(isSubspace(a, _)) + case (Const(_, _), _) => false + case (_, Const(_, _)) => false + case (Var(x, _), Var(y, _)) => x == y + case (Var(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2) + case (Var(_, _), Or(ss)) => ss.exists(isSubspace(a, _)) + case (Var(_, _), _) => false + case (_, Var(_, _)) => false + } + } + + /** Intersection of two spaces */ + def intersect(a: Space, b: Space): Space = { + def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b) + def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp))) + + (a, b) match { + case (Empty, _) | (_, Empty) => Empty + case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty)) + case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty)) + case (Typ(tp1, 
_), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) a + else if (isSubType(tp2, tp1)) b + else if (canDecompose(tp1)) tryDecompose1(tp1) + else if (canDecompose(tp2)) tryDecompose2(tp2) + else Empty + case (Typ(tp1, _), Kon(tp2, ss)) => + if (isSubType(tp2, tp1)) b + else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class + else if (canDecompose(tp1)) tryDecompose1(tp1) + else Empty + case (Kon(tp1, ss), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) a + else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class + else if (canDecompose(tp2)) tryDecompose2(tp2) + else Empty + case (Kon(tp1, ss1), Kon(tp2, ss2)) => + if (!isEqualType(tp1, tp2)) Empty + else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty + else Kon(tp1, ss1.zip(ss2).map((intersect _).tupled)) + case (Const(v1, _), Const(v2, _)) => + if (v1 == v2) a else Empty + case (Const(_, tp1), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) a + else if (canDecompose(tp2)) tryDecompose2(tp2) + else Empty + case (Const(_, _), _) => Empty + case (Typ(tp1, _), Const(_, tp2)) => + if (isSubType(tp2, tp1)) b + else if (canDecompose(tp1)) tryDecompose1(tp1) + else Empty + case (_, Const(_, _)) => Empty + case (Var(x, _), Var(y, _)) => + if (x == y) a else Empty + case (Var(_, tp1), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) a + else if (canDecompose(tp2)) tryDecompose2(tp2) + else Empty + case (Var(_, _), _) => Empty + case (Typ(tp1, _), Var(_, tp2)) => + if (isSubType(tp2, tp1)) b + else if (canDecompose(tp1)) tryDecompose1(tp1) + else Empty + case (_, Var(_, _)) => Empty + } + } + + /** The space of a not covered by b */ + def minus(a: Space, b: Space): Space = { + def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b) + def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp))) + + (a, b) match { + case (Empty, _) => Empty + case (_, Empty) => a + case (Typ(tp1, _), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) Empty + else if (canDecompose(tp1)) 
tryDecompose1(tp1) + else if (canDecompose(tp2)) tryDecompose2(tp2) + else a + case (Typ(tp1, _), Kon(tp2, ss)) => + // corner case: inheriting a case class + // rationale: every instance of `tp1` is covered by `tp2(_)` + if (isSubType(tp1, tp2)) minus(Kon(tp2, signature(tp2).map(Typ(_, false))), b) + else if (canDecompose(tp1)) tryDecompose1(tp1) + else a + case (_, Or(ss)) => + ss.foldLeft(a)(minus) + case (Or(ss), _) => + Or(ss.map(minus(_, b))) + case (Kon(tp1, ss), Typ(tp2, _)) => + // uncovered corner case: tp2 :< tp1 + if (isSubType(tp1, tp2)) Empty + else if (simplify(a) == Empty) Empty + else if (canDecompose(tp2)) tryDecompose2(tp2) + else a + case (Kon(tp1, ss1), Kon(tp2, ss2)) => + if (!isEqualType(tp1, tp2)) a + else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a + else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty + else + // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)` + Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map { + case (ri, i) => Kon(tp1, ss1.updated(i, ri)) + }) + case (Const(v1, _), Const(v2, _)) => + if (v1 == v2) Empty else a + case (Const(_, tp1), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) Empty + else if (canDecompose(tp2)) tryDecompose2(tp2) + else a + case (Const(_, _), _) => a + case (Typ(tp1, _), Const(_, tp2)) => // Boolean & Java enum + if (canDecompose(tp1)) tryDecompose1(tp1) + else a + case (_, Const(_, _)) => a + case (Var(x, _), Var(y, _)) => + if (x == y) Empty else a + case (Var(_, tp1), Typ(tp2, _)) => + if (isSubType(tp1, tp2)) Empty + else if (canDecompose(tp2)) tryDecompose2(tp2) + else a + case (Var(_, _), _) => a + case (_, Var(_, _)) => a + } + } +} + +/** Scala implementation of space logic */ +class SpaceEngine(implicit ctx: Context) extends SpaceLogic { + import tpd._ + + /** Return the space that represents the pattern `pat` + * + * If roundUp is true, approximate extractors to its type, + * otherwise approximate 
extractors to Empty + */ + def project(pat: Tree, roundUp: Boolean = true)(implicit ctx: Context): Space = pat match { + case Literal(c) => Const(c, c.tpe) + case _: BackquotedIdent => Var(pat.symbol, pat.tpe) + case Ident(_) | Select(_, _) => + pat.tpe.stripAnnots match { + case tp: TermRef => + if (pat.symbol.is(Enum)) + Const(Constant(pat.symbol), tp) + else if (tp.underlyingIterator.exists(_.classSymbol.is(Module))) + Typ(tp.widenTermRefExpr.stripAnnots, false) + else + Var(pat.symbol, tp) + case tp => Typ(tp, false) + } + case Alternative(trees) => Or(trees.map(project(_, roundUp))) + case Bind(_, pat) => project(pat) + case UnApply(_, _, pats) => + if (pat.tpe.classSymbol.is(CaseClass)) + Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat, roundUp))) + else if (roundUp) Typ(pat.tpe.stripAnnots, false) + else Empty + case Typed(pat @ UnApply(_, _, _), _) => project(pat) + case Typed(expr, _) => Typ(expr.tpe.stripAnnots, true) + case _ => + Empty + } + + /* Erase a type binding according to erasure semantics in pattern matching */ + def erase(tp: Type): Type = { + def doErase(tp: Type): Type = tp match { + case tp: HKApply => erase(tp.superType) + case tp: RefinedType => erase(tp.parent) + case _ => tp + } + + tp match { + case OrType(tp1, tp2) => + OrType(erase(tp1), erase(tp2)) + case AndType(tp1, tp2) => + AndType(erase(tp1), erase(tp2)) + case _ => + val origin = doErase(tp) + if (origin =:= defn.ArrayType) tp else origin + } + } + + /** Is `tp1` a subtype of `tp2`? 
*/ + def isSubType(tp1: Type, tp2: Type): Boolean = { + // check SI-9657 and tests/patmat/gadt.scala + erase(tp1) <:< erase(tp2) + } + + def isEqualType(tp1: Type, tp2: Type): Boolean = tp1 =:= tp2 + + /** Parameter types of the case class type `tp` */ + def signature(tp: Type): List[Type] = { + val ktor = tp.classSymbol.primaryConstructor.info + + val meth = ktor match { + case ktor: PolyType => + ktor.instantiate(tp.classSymbol.typeParams.map(_.typeRef)).asSeenFrom(tp, tp.classSymbol) + case _ => ktor + } + + // refine path-dependent type in params. refer to t9672 + meth.firstParamTypes.map(_.asSeenFrom(tp, tp.classSymbol)) + } + + /** Decompose a type into subspaces -- assume the type can be decomposed */ + def decompose(tp: Type): List[Space] = { + val children = tp.classSymbol.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot => + // refer to definition of Annotation.makeChild + annot.tree match { + case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol + } + } + + tp match { + case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true)) + case _ if tp =:= ctx.definitions.BooleanType => + List( + Const(Constant(true), ctx.definitions.BooleanType), + Const(Constant(false), ctx.definitions.BooleanType) + ) + case _ if tp.classSymbol.is(Enum) => + children.map(sym => Const(Constant(sym), tp)) + case _ => + val parts = children.map { sym => + if (sym.is(ModuleClass)) + sym.asClass.classInfo.selfType + else if (sym.info.typeParams.length > 0 || tp.isInstanceOf[TypeRef]) + refine(tp, sym.typeRef) + else + sym.typeRef + } filter { tpe => + // Child class may not always be subtype of parent: + // GADT & path-dependent types + tpe <:< expose(tp) + } + + parts.map(Typ(_, true)) + } + } + + /** Refine tp2 based on tp1 + * + * E.g. if `tp1` is `Option[Int]`, `tp2` is `Some`, then return + * `Some[Int]`. + * + * If `tp1` is `path1.A`, `tp2` is `path2.B`, and `path1` is subtype of + * `path2`, then return `path1.B`. 
+ */ + def refine(tp1: Type, tp2: Type): Type = (tp1, tp2) match { + case (tp1: RefinedType, _) => tp1.wrapIfMember(refine(tp1.parent, tp2)) + case (tp1: HKApply, _) => refine(tp1.superType, tp2) + case (TypeRef(ref1: TypeProxy, _), tp2 @ TypeRef(ref2: TypeProxy, name)) => + if (ref1.underlying <:< ref2.underlying) TypeRef(ref1, name) else tp2 + case _ => tp2 + } + + /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */ + def canDecompose(tp: Type): Boolean = { + tp.classSymbol.is(allOf(Abstract, Sealed)) || + tp.classSymbol.is(allOf(Trait, Sealed)) || + tp.isInstanceOf[OrType] || + tp =:= ctx.definitions.BooleanType || + tp.classSymbol.is(Enum) + } + + /** Show friendly type name with current scope in mind + * + * E.g. C.this.B --> B if current owner is C + * C.this.x.T --> x.T if current owner is C + * X[T] --> X + * C --> C if current owner is C !!! + * + */ + def showType(tp: Type): String = { + val enclosingCls = ctx.owner.enclosingClass.asClass.classInfo.symbolicTypeRef + + def isOmittable(sym: Symbol) = + sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName || + ctx.definitions.UnqualifiedOwnerTypes.exists(_.symbol == sym) || + sym.showFullName.startsWith("scala.") || + sym == enclosingCls.typeSymbol + + def refinePrefix(tp: Type): String = tp match { + case NoPrefix => "" + case tp: NamedType if isOmittable(tp.symbol) => "" + case tp: ThisType => refinePrefix(tp.tref) + case tp: RefinedType => refinePrefix(tp.parent) + case tp: NamedType => tp.name.show.stripSuffix("$") + } + + def refine(tp: Type): String = tp match { + case tp: RefinedType => refine(tp.parent) + case tp: ThisType => refine(tp.tref) + case tp: NamedType => + val pre = refinePrefix(tp.prefix) + if (tp.name == tpnme.higherKinds) pre + else if (pre.isEmpty) tp.name.show.stripSuffix("$") + else pre + "." 
+ tp.name.show.stripSuffix("$") + case _ => tp.show.stripSuffix("$") + } + + val text = tp.stripAnnots match { + case tp: OrType => showType(tp.tp1) + " | " + showType(tp.tp2) + case tp => refine(tp) + } + + if (text.isEmpty) enclosingCls.show.stripSuffix("$") + else text + } + + /** Display spaces */ + def show(s: Space): String = { + def doShow(s: Space, mergeList: Boolean = false): String = s match { + case Empty => "" + case Const(v, _) => v.show + case Var(x, _) => x.show + case Typ(tp, decomposed) => + val sym = tp.widen.classSymbol + + if (sym.is(ModuleClass)) + showType(tp) + else if (ctx.definitions.isTupleType(tp)) + signature(tp).map(_ => "_").mkString("(", ", ", ")") + else if (sym.showFullName == "scala.collection.immutable.::") + if (mergeList) "_" else "List(_)" + else if (tp.classSymbol.is(CaseClass)) + // use constructor syntax for case class + showType(tp) + signature(tp).map(_ => "_").mkString("(", ", ", ")") + else if (signature(tp).nonEmpty) + tp.classSymbol.name + signature(tp).map(_ => "_").mkString("(", ", ", ")") + else if (decomposed) "_: " + showType(tp) + else "_" + case Kon(tp, params) => + if (ctx.definitions.isTupleType(tp)) + "(" + params.map(doShow(_)).mkString(", ") + ")" + else if (tp.widen.classSymbol.showFullName == "scala.collection.immutable.::") + if (mergeList) params.map(doShow(_, mergeList)).mkString(", ") + else params.map(doShow(_, true)).filter(_ != "Nil").mkString("List(", ", ", ")") + else + showType(tp) + params.map(doShow(_)).mkString("(", ", ", ")") + case Or(_) => + throw new Exception("incorrect flatten result " + s) + } + + flatten(s).map(doShow(_, false)).distinct.mkString(", ") + } + + def checkable(tree: Match): Boolean = { + def isCheckable(tp: Type): Boolean = tp match { + case AnnotatedType(tp, annot) => + (ctx.definitions.UncheckedAnnot != annot.symbol) && isCheckable(tp) + case _ => + // Possible to check everything, but be compatible with scalac by default + ctx.settings.YcheckAllPatmat.value || + 
tp.typeSymbol.is(Sealed) || + tp.isInstanceOf[OrType] || + tp.typeSymbol == ctx.definitions.BooleanType.typeSymbol || + tp.typeSymbol.is(Enum) || + canDecompose(tp) || + (defn.isTupleType(tp) && tp.dealias.argInfos.exists(isCheckable(_))) + } + + val Match(sel, cases) = tree + isCheckable(sel.tpe.widen.deAnonymize.dealias) + } + + + /** Expose refined type to eliminate reference to type variables + * + * A = B M { type T = A } ~~> M { type T = B } + * + * A <: X :> Y M { type T = A } ~~> M { type T <: X :> Y } + * + * A <: X :> Y B <: U :> V M { type T <: A :> B } ~~> M { type T <: X :> V } + * + * A = X B = Y M { type T <: A :> B } ~~> M { type T <: X :> Y } + */ + def expose(tp: Type): Type = { + def follow(tp: Type, up: Boolean): Type = tp match { + case tp: TypeProxy => + tp.underlying match { + case TypeBounds(lo, hi) => + follow(if (up) hi else lo, up) + case _ => + tp + } + case OrType(tp1, tp2) => + OrType(follow(tp1, up), follow(tp2, up)) + case AndType(tp1, tp2) => + AndType(follow(tp1, up), follow(tp2, up)) + } + + tp match { + case tp: RefinedType => + tp.refinedInfo match { + case tpa : TypeAlias => + val hi = follow(tpa.alias, true) + val lo = follow(tpa.alias, false) + val refined = if (hi =:= lo) + tpa.derivedTypeAlias(hi) + else + tpa.derivedTypeBounds(lo, hi) + + tp.derivedRefinedType( + expose(tp.parent), + tp.refinedName, + refined + ) + case tpb @ TypeBounds(lo, hi) => + tp.derivedRefinedType( + expose(tp.parent), + tp.refinedName, + tpb.derivedTypeBounds(follow(lo, false), follow(hi, true)) + ) + } + case _ => tp + } + } + + def checkExhaustivity(_match: Match): Unit = { + val Match(sel, cases) = _match + val selTyp = sel.tpe.widen.deAnonymize.dealias + + + val patternSpace = cases.map(x => project(x.pat)).reduce((a, b) => Or(List(a, b))) + val uncovered = simplify(minus(Typ(selTyp, true), patternSpace)) + + if (uncovered != Empty) { + ctx.warning( + "match may not be exhaustive.\n" + + s"It would fail on the following input: " + + 
show(uncovered), _match.pos + ) + } + } + + def checkRedundancy(_match: Match): Unit = { + val Match(sel, cases) = _match + // ignore selector type for now + // val selTyp = sel.tpe.widen.deAnonymize.dealias + + // starts from the second, the first can't be redundant + (1 until cases.length).foreach { i => + // in redundancy check, take guard as false, take extractor as match + // nothing in order to soundly approximate + val prevs = cases.take(i).map { x => + if (x.guard.isEmpty) project(x.pat, false) + else Empty + }.reduce((a, b) => Or(List(a, b))) + + val curr = project(cases(i).pat) + + if (isSubspace(curr, prevs)) { + ctx.warning("unreachable code", cases(i).body.pos) + } + } + } +} diff --git a/src/dotty/tools/dotc/typer/FrontEnd.scala b/src/dotty/tools/dotc/typer/FrontEnd.scala index c5c6aec3c..e193b126a 100644 --- a/src/dotty/tools/dotc/typer/FrontEnd.scala +++ b/src/dotty/tools/dotc/typer/FrontEnd.scala @@ -57,7 +57,7 @@ class FrontEnd extends Phase { case _ => NoSymbol } - private def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) = + protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) = unit.isJava || firstTopLevelDef(unit.tpdTree :: Nil).isPrimitiveValueClass override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = { diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala index 698f7e9a9..3c0a45e94 100644 --- a/src/dotty/tools/dotc/typer/Namer.scala +++ b/src/dotty/tools/dotc/typer/Namer.scala @@ -414,6 +414,16 @@ class Namer { typer: Typer => case mdef: DefTree => val sym = enterSymbol(createSymbol(mdef)) setDocstring(sym, stat) + + // add java enum constants + mdef match { + case vdef: ValDef if (isEnumConstant(vdef)) => + val enumClass = sym.owner.linkedClass + if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed) + enumClass.addAnnotation(Annotation.makeChild(sym)) + case _ => + } + ctx case stats: Thicket => 
for (tree <- stats.toList) { @@ -425,8 +435,26 @@ class Namer { typer: Typer => ctx } + /** Determines whether this field holds an enum constant. + * To qualify, the following conditions must be met: + * - The field's class has the ENUM flag set + * - The field's class extends java.lang.Enum + * - The field has the ENUM flag set + * - The field is static + * - The field is stable + */ + def isEnumConstant(vd: ValDef)(implicit ctx: Context) = { + // val ownerHasEnumFlag = + // Necessary to check because scalac puts Java's static members into the companion object + // while Scala's enum constants live directly in the class. + // We don't check for clazz.superClass == JavaEnumClass, because this causes a illegal + // cyclic reference error. See the commit message for details. + // if (ctx.compilationUnit.isJava) ctx.owner.companionClass.is(Enum) else ctx.owner.is(Enum) + vd.mods.is(allOf(Enum, Stable, JavaStatic, JavaDefined)) // && ownerHasEnumFlag + } + def setDocstring(sym: Symbol, tree: Tree)(implicit ctx: Context) = tree match { - case t: MemberDef => ctx.base.addDocstring(sym, t.rawComment) + case t: MemberDef => ctx.docbase.addDocstring(sym, t.rawComment) case _ => () } diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala index a6e2deb23..36404a68f 100644 --- a/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -488,7 +488,7 @@ trait TypeAssigner { tree.withType(sym.nonMemberTermRef) def assignType(tree: untpd.Annotated, annot: Tree, arg: Tree)(implicit ctx: Context) = - tree.withType(AnnotatedType(arg.tpe, Annotation(annot))) + tree.withType(AnnotatedType(arg.tpe.widen, Annotation(annot))) def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context) = tree.withType(pid.symbol.valRef) diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala index 678e408e4..7eb022b51 100644 --- 
a/src/dotty/tools/dotc/typer/Typer.scala +++ b/src/dotty/tools/dotc/typer/Typer.scala @@ -741,7 +741,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit tree.selector match { case EmptyTree => val (protoFormals, _) = decomposeProtoFunction(pt, 1) - typed(desugar.makeCaseLambda(tree.cases, protoFormals.length) withPos tree.pos, pt) + val unchecked = pt <:< defn.PartialFunctionType + typed(desugar.makeCaseLambda(tree.cases, protoFormals.length, unchecked) withPos tree.pos, pt) case _ => val sel1 = typedExpr(tree.selector) val selType = widenForMatchSelector( @@ -1132,8 +1133,43 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = track("typedClassDef") { val TypeDef(name, impl @ Template(constr, parents, self, _)) = cdef val superCtx = ctx.superCallContext + + /** If `ref` is an implicitly parameterized trait, pass an implicit argument list. + * Otherwise, if `ref` is a parameterized trait, error. + * Note: Traits and classes currently always have at least an empty parameter list () + * before the implicit parameters (this is inserted if not given in source). + * We skip this parameter list when deciding whether a trait is parameterless or not. + * @param ref The tree referring to the (parent) trait + * @param psym Its type symbol + * @param cinfo The info of its constructor + */ + def maybeCall(ref: Tree, psym: Symbol, cinfo: Type): Tree = cinfo match { + case cinfo: PolyType => + maybeCall(ref, psym, cinfo.resultType) + case cinfo @ MethodType(Nil, _) if cinfo.resultType.isInstanceOf[ImplicitMethodType] => + val icall = New(ref).select(nme.CONSTRUCTOR).appliedToNone + typedExpr(untpd.TypedSplice(icall))(superCtx) + case cinfo @ MethodType(Nil, _) if !cinfo.resultType.isInstanceOf[MethodType] => + ref + case cinfo: MethodType => + if (!ctx.erasedTypes) { // after constructors arguments are passed in super call. 
+ typr.println(i"constr type: $cinfo") + ctx.error(em"parameterized $psym lacks argument list", ref.pos) + } + ref + case _ => + ref + } + def typedParent(tree: untpd.Tree): Tree = - if (tree.isType) typedType(tree)(superCtx) + if (tree.isType) { + val result = typedType(tree)(superCtx) + val psym = result.tpe.typeSymbol + if (psym.is(Trait) && !cls.is(Trait) && !cls.superClass.isSubClass(psym)) + maybeCall(result, psym, psym.primaryConstructor.info) + else + result + } else { val result = typedExpr(tree)(superCtx) checkParentCall(result, cls) diff --git a/test/test/DottyDocParsingTests.scala b/test/test/DottyDocParsingTests.scala index b09d048da..ed89c6114 100644 --- a/test/test/DottyDocParsingTests.scala +++ b/test/test/DottyDocParsingTests.scala @@ -14,7 +14,7 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(c: TypeDef)) => - assert(c.rawComment == None, "Should not have a comment, mainly used for exhaustive tests") + assert(c.rawComment.map(_.chrs) == None, "Should not have a comment, mainly used for exhaustive tests") } } @@ -29,7 +29,7 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" => - checkDocString(t.rawComment, "/** Hello world! */") + checkDocString(t.rawComment.map(_.chrs), "/** Hello world! */") } } @@ -44,7 +44,7 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" => - checkDocString(t.rawComment, "/** Hello /* multiple open */ world! */") + checkDocString(t.rawComment.map(_.chrs), "/** Hello /* multiple open */ world! 
*/") } } @Test def multipleClassesInPackage = { @@ -62,8 +62,8 @@ class DottyDocParsingTests extends DottyDocTest { checkCompile("frontend", source) { (_, ctx) => ctx.compilationUnit.untpdTree match { case PackageDef(_, Seq(c1 @ TypeDef(_,_), c2 @ TypeDef(_,_))) => { - checkDocString(c1.rawComment, "/** Class1 docstring */") - checkDocString(c2.rawComment, "/** Class2 docstring */") + checkDocString(c1.rawComment.map(_.chrs), "/** Class1 docstring */") + checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */") } } } @@ -77,7 +77,7 @@ class DottyDocParsingTests extends DottyDocTest { """.stripMargin checkFrontend(source) { - case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Class without package */") + case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Class without package */") } } @@ -85,7 +85,7 @@ class DottyDocParsingTests extends DottyDocTest { val source = "/** Trait docstring */\ntrait Trait" checkFrontend(source) { - case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Trait docstring */") + case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */") } } @@ -101,8 +101,8 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(t1 @ TypeDef(_,_), t2 @ TypeDef(_,_))) => { - checkDocString(t1.rawComment, "/** Trait1 docstring */") - checkDocString(t2.rawComment, "/** Trait2 docstring */") + checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */") + checkDocString(t2.rawComment.map(_.chrs), "/** Trait2 docstring */") } } } @@ -127,10 +127,10 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(t1 @ TypeDef(_,_), c2 @ TypeDef(_,_), cc3 @ TypeDef(_,_), _, ac4 @ TypeDef(_,_))) => { - checkDocString(t1.rawComment, "/** Trait1 docstring */") - checkDocString(c2.rawComment, "/** Class2 docstring */") - 
checkDocString(cc3.rawComment, "/** CaseClass3 docstring */") - checkDocString(ac4.rawComment, "/** AbstractClass4 docstring */") + checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */") + checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */") + checkDocString(cc3.rawComment.map(_.chrs), "/** CaseClass3 docstring */") + checkDocString(ac4.rawComment.map(_.chrs), "/** AbstractClass4 docstring */") } } } @@ -147,9 +147,9 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(outer @ TypeDef(_, tpl @ Template(_,_,_,_)))) => { - checkDocString(outer.rawComment, "/** Outer docstring */") + checkDocString(outer.rawComment.map(_.chrs), "/** Outer docstring */") tpl.body match { - case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */") + case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */") case _ => assert(false, "Couldn't find inner class") } } @@ -171,10 +171,10 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case PackageDef(_, Seq(o1 @ TypeDef(_, tpl @ Template(_,_,_,_)), o2 @ TypeDef(_,_))) => { - checkDocString(o1.rawComment, "/** Outer1 docstring */") - checkDocString(o2.rawComment, "/** Outer2 docstring */") + checkDocString(o1.rawComment.map(_.chrs), "/** Outer1 docstring */") + checkDocString(o2.rawComment.map(_.chrs), "/** Outer2 docstring */") tpl.body match { - case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */") + case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */") case _ => assert(false, "Couldn't find inner class") } } @@ -196,9 +196,9 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case p @ PackageDef(_, Seq(o1: MemberDef[Untyped], o2: MemberDef[Untyped])) => { assertEquals(o1.name.toString, "Object1") - checkDocString(o1.rawComment, "/** 
Object1 docstring */") + checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */") assertEquals(o2.name.toString, "Object2") - checkDocString(o2.rawComment, "/** Object2 docstring */") + checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */") } } } @@ -223,12 +223,12 @@ class DottyDocParsingTests extends DottyDocTest { checkFrontend(source) { case p @ PackageDef(_, Seq(o1: ModuleDef, o2: ModuleDef)) => { assert(o1.name.toString == "Object1") - checkDocString(o1.rawComment, "/** Object1 docstring */") + checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */") assert(o2.name.toString == "Object2") - checkDocString(o2.rawComment, "/** Object2 docstring */") + checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */") o2.impl.body match { - case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */") + case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */") case _ => assert(false, "Couldn't find inner class") } } @@ -257,14 +257,14 @@ class DottyDocParsingTests extends DottyDocTest { import dotty.tools.dotc.ast.untpd._ checkFrontend(source) { case PackageDef(_, Seq(p: ModuleDef)) => { - checkDocString(p.rawComment, "/** Package object docstring */") + checkDocString(p.rawComment.map(_.chrs), "/** Package object docstring */") p.impl.body match { case (b: TypeDef) :: (t: TypeDef) :: (o: ModuleDef) :: Nil => { - checkDocString(b.rawComment, "/** Boo docstring */") - checkDocString(t.rawComment, "/** Trait docstring */") - checkDocString(o.rawComment, "/** InnerObject docstring */") - checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment, "/** InnerClass docstring */") + checkDocString(b.rawComment.map(_.chrs), "/** Boo docstring */") + checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */") + checkDocString(o.rawComment.map(_.chrs), "/** InnerObject docstring */") + 
checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment.map(_.chrs), "/** InnerClass docstring */") } case _ => assert(false, "Incorrect structure inside package object") } @@ -284,7 +284,7 @@ class DottyDocParsingTests extends DottyDocTest { import dotty.tools.dotc.ast.untpd._ checkFrontend(source) { case PackageDef(_, Seq(c: TypeDef)) => - checkDocString(c.rawComment, "/** Real comment */") + checkDocString(c.rawComment.map(_.chrs), "/** Real comment */") } } @@ -303,7 +303,7 @@ class DottyDocParsingTests extends DottyDocTest { import dotty.tools.dotc.ast.untpd._ checkFrontend(source) { case PackageDef(_, Seq(c: TypeDef)) => - checkDocString(c.rawComment, "/** Real comment */") + checkDocString(c.rawComment.map(_.chrs), "/** Real comment */") } } @@ -329,9 +329,9 @@ class DottyDocParsingTests extends DottyDocTest { case PackageDef(_, Seq(o: ModuleDef)) => { o.impl.body match { case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => { - checkDocString(v1.rawComment, "/** val1 */") - checkDocString(v2.rawComment, "/** val2 */") - checkDocString(v3.rawComment, "/** val3 */") + checkDocString(v1.rawComment.map(_.chrs), "/** val1 */") + checkDocString(v2.rawComment.map(_.chrs), "/** val2 */") + checkDocString(v3.rawComment.map(_.chrs), "/** val3 */") } case _ => assert(false, "Incorrect structure inside object") } @@ -361,9 +361,9 @@ class DottyDocParsingTests extends DottyDocTest { case PackageDef(_, Seq(o: ModuleDef)) => { o.impl.body match { case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => { - checkDocString(v1.rawComment, "/** var1 */") - checkDocString(v2.rawComment, "/** var2 */") - checkDocString(v3.rawComment, "/** var3 */") + checkDocString(v1.rawComment.map(_.chrs), "/** var1 */") + checkDocString(v2.rawComment.map(_.chrs), "/** var2 */") + checkDocString(v3.rawComment.map(_.chrs), "/** var3 */") } case _ => assert(false, "Incorrect structure inside object") } @@ -393,9 +393,9 @@ class DottyDocParsingTests extends 
DottyDocTest { case PackageDef(_, Seq(o: ModuleDef)) => { o.impl.body match { case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => { - checkDocString(v1.rawComment, "/** def1 */") - checkDocString(v2.rawComment, "/** def2 */") - checkDocString(v3.rawComment, "/** def3 */") + checkDocString(v1.rawComment.map(_.chrs), "/** def1 */") + checkDocString(v2.rawComment.map(_.chrs), "/** def2 */") + checkDocString(v3.rawComment.map(_.chrs), "/** def3 */") } case _ => assert(false, "Incorrect structure inside object") } @@ -425,9 +425,9 @@ class DottyDocParsingTests extends DottyDocTest { case PackageDef(_, Seq(o: ModuleDef)) => { o.impl.body match { case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => { - checkDocString(v1.rawComment, "/** type1 */") - checkDocString(v2.rawComment, "/** type2 */") - checkDocString(v3.rawComment, "/** type3 */") + checkDocString(v1.rawComment.map(_.chrs), "/** type1 */") + checkDocString(v2.rawComment.map(_.chrs), "/** type2 */") + checkDocString(v3.rawComment.map(_.chrs), "/** type3 */") } case _ => assert(false, "Incorrect structure inside object") } @@ -451,7 +451,7 @@ class DottyDocParsingTests extends DottyDocTest { case PackageDef(_, Seq(o: ModuleDef)) => o.impl.body match { case (foo: MemberDef) :: Nil => - expectNoDocString(foo.rawComment) + expectNoDocString(foo.rawComment.map(_.chrs)) case _ => assert(false, "Incorrect structure inside object") } } @@ -468,7 +468,7 @@ class DottyDocParsingTests extends DottyDocTest { import dotty.tools.dotc.ast.untpd._ checkFrontend(source) { case p @ PackageDef(_, Seq(_, c: TypeDef)) => - checkDocString(c.rawComment, "/** Class1 */") + checkDocString(c.rawComment.map(_.chrs), "/** Class1 */") } } @@ -483,7 +483,7 @@ class DottyDocParsingTests extends DottyDocTest { import dotty.tools.dotc.ast.untpd._ checkFrontend(source) { case p @ PackageDef(_, Seq(c: TypeDef)) => - checkDocString(c.rawComment, "/** Class1 */") + checkDocString(c.rawComment.map(_.chrs), "/** 
Class1 */") } } } /* End class */ diff --git a/tests/neg/i1263.scala b/tests/neg/i1263.scala new file mode 100644 index 000000000..e6d8c37b5 --- /dev/null +++ b/tests/neg/i1263.scala @@ -0,0 +1,33 @@ +object Test { + trait Foo(val s: String) + + val foo1 = new Foo("bar") {} + val foo2 = new Foo { override val s = "bar" } // error: parameterized trait lacks argument list + def main(args: Array[String]): Unit = { + assert(foo1.s == "bar") + assert(foo2.s == "bar") + } +} +object Test1 { + trait Foo(private val s0: String) { + def s = s0 + } + + val foo1 = new Foo("bar") {} + def main(args: Array[String]): Unit = { + assert(foo1.s == "bar") + } +} +object Test2 { + trait Foo(protected val s: String) + + val foo1 = new Foo("bar") {} +} +object Test3 { + trait Foo(final val s: String) + + val foo1 = new Foo("bar") {} + def main(args: Array[String]): Unit = { + assert(foo1.s == "bar") + } +} diff --git a/tests/neg/traitParamsMixin.scala b/tests/neg/traitParamsMixin.scala index dfb9fbe2f..aa91012d5 100644 --- a/tests/neg/traitParamsMixin.scala +++ b/tests/neg/traitParamsMixin.scala @@ -2,8 +2,6 @@ trait T(x: Int) { def f = x } -class C extends T // error - trait U extends T class D extends U { // error diff --git a/tests/neg/traitParamsTyper.scala b/tests/neg/traitParamsTyper.scala index e97906b50..caed2727d 100644 --- a/tests/neg/traitParamsTyper.scala +++ b/tests/neg/traitParamsTyper.scala @@ -2,6 +2,8 @@ trait T(x: Int) { def f = x } +class C0 extends T // error + class C(x: Int) extends T() // error trait U extends C with T diff --git a/tests/patmat/NonAbstractSealed.check b/tests/patmat/NonAbstractSealed.check new file mode 100644 index 000000000..9224ee370 --- /dev/null +++ b/tests/patmat/NonAbstractSealed.check @@ -0,0 +1,5 @@ +./tests/patmat/NonAbstractSealed.scala:6: warning: match may not be exhaustive. +It would fail on the following input: _: A + (null: A) match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/NonAbstractSealed.scala b/tests/patmat/NonAbstractSealed.scala new file mode 100644 index 000000000..ff2e90aee --- /dev/null +++ b/tests/patmat/NonAbstractSealed.scala @@ -0,0 +1,10 @@ +sealed class A +class B extends A +class C extends A + +object Test { + (null: A) match { + case t: B => + case t: C => + } +} diff --git a/tests/patmat/TwoTrait.scala b/tests/patmat/TwoTrait.scala new file mode 100644 index 000000000..b8e3402c5 --- /dev/null +++ b/tests/patmat/TwoTrait.scala @@ -0,0 +1,12 @@ +object Test { + sealed trait A + sealed trait B + + abstract sealed class Parent + class Foo extends Parent with A with B + class Bar extends Parent with B with A + + (null: A) match { + case _: B => + } +} diff --git a/tests/patmat/aladdin1055/A.scala b/tests/patmat/aladdin1055/A.scala new file mode 100644 index 000000000..862336e30 --- /dev/null +++ b/tests/patmat/aladdin1055/A.scala @@ -0,0 +1,6 @@ +object A { + sealed trait T { def f: Int } + class TT extends T { def f = 0 } + + def foo = new T { def f = 1 } // local subclass of sealed trait T +} diff --git a/tests/patmat/aladdin1055/Test_1.scala.ignore b/tests/patmat/aladdin1055/Test_1.scala.ignore new file mode 100644 index 000000000..39d9b1dc9 --- /dev/null +++ b/tests/patmat/aladdin1055/Test_1.scala.ignore @@ -0,0 +1,5 @@ +object Test { + def foo(t: A.T) = t match { + case a: A.TT => 0 + } +} diff --git a/tests/patmat/aladdin1055/expected.check.ignore b/tests/patmat/aladdin1055/expected.check.ignore new file mode 100644 index 000000000..a8024ad02 --- /dev/null +++ b/tests/patmat/aladdin1055/expected.check.ignore @@ -0,0 +1,5 @@ +./tests/patmat/aladdin1055/Test_1.scala:2: warning: match may not be exhaustive. 
+It would fail on the following input: (_ : this.<local child>) + def foo(t: A.T) = t match { + ^ +one warning found diff --git a/tests/patmat/enum/Day.java b/tests/patmat/enum/Day.java new file mode 100644 index 000000000..eedb9a72b --- /dev/null +++ b/tests/patmat/enum/Day.java @@ -0,0 +1,4 @@ +public enum Day { + SUNDAY, MONDAY, TUESDAY, WEDNESDAY, + THURSDAY, FRIDAY, SATURDAY +}
\ No newline at end of file diff --git a/tests/patmat/enum/expected.check b/tests/patmat/enum/expected.check new file mode 100644 index 000000000..b3dafa8bd --- /dev/null +++ b/tests/patmat/enum/expected.check @@ -0,0 +1,9 @@ +./tests/patmat/enum/patmat-enum.scala:4: warning: match may not be exhaustive. +It would fail on the following input: SATURDAY, FRIDAY, THURSDAY, SUNDAY + day match { + ^ +./tests/patmat/enum/patmat-enum.scala:15: warning: match may not be exhaustive. +It would fail on the following input: SATURDAY, FRIDAY, THURSDAY + day match { + ^ +two warnings found
\ No newline at end of file diff --git a/tests/patmat/enum/patmat-enum.scala b/tests/patmat/enum/patmat-enum.scala new file mode 100644 index 000000000..ec5c90255 --- /dev/null +++ b/tests/patmat/enum/patmat-enum.scala @@ -0,0 +1,21 @@ +object Test1 { + val day: Day = ??? + + day match { + case Day.MONDAY => true + case Day.TUESDAY => true + case Day.WEDNESDAY => true + } +} + +object Test2 { + import Day._ + val day: Day = ??? + + day match { + case MONDAY => true + case TUESDAY => true + case WEDNESDAY => true + case SUNDAY => true + } +}
\ No newline at end of file diff --git a/tests/patmat/exhausting.check b/tests/patmat/exhausting.check new file mode 100644 index 000000000..790b12334 --- /dev/null +++ b/tests/patmat/exhausting.check @@ -0,0 +1,25 @@ +./tests/patmat/exhausting.scala:21: warning: match may not be exhaustive. +It would fail on the following input: List(_), List(_, _, _) + def fail1[T](xs: List[T]) = xs match { + ^ +./tests/patmat/exhausting.scala:27: warning: match may not be exhaustive. +It would fail on the following input: Nil + def fail2[T](xs: List[T]) = xs match { + ^ +./tests/patmat/exhausting.scala:32: warning: match may not be exhaustive. +It would fail on the following input: List(_, _) + def fail3a(xs: List[Int]) = xs match { + ^ +./tests/patmat/exhausting.scala:39: warning: match may not be exhaustive. +It would fail on the following input: Bar3 + def fail3[T](x: Foo[T]) = x match { + ^ +./tests/patmat/exhausting.scala:44: warning: match may not be exhaustive. +It would fail on the following input: (Bar2, Bar2) + def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { + ^ +./tests/patmat/exhausting.scala:53: warning: match may not be exhaustive. 
+It would fail on the following input: (Bar2, Bar2), (Bar2, Bar1), (Bar1, Bar3), (Bar1, Bar2) + def fail5[T](xx: (Foo[T], Foo[T])) = xx match { + ^ +6 warnings found diff --git a/tests/patmat/exhausting.scala b/tests/patmat/exhausting.scala new file mode 100644 index 000000000..03e8198dd --- /dev/null +++ b/tests/patmat/exhausting.scala @@ -0,0 +1,58 @@ +object Test { + sealed abstract class Foo[T] + case object Bar1 extends Foo[Int] + case object Bar2 extends Foo[String] + case object Bar3 extends Foo[Any] + + def ex1[T](xs: List[T]) = xs match { + case ys: List[_] => "ok" + } + def ex2[T](xx: (Foo[T], Foo[T])) = xx match { + case (Bar1, Bar1) => () + case (_, Bar1) => () + case (_, Bar3) => () + case (_, Bar2) => () + } + def ex3[T](xx: (Foo[T], Foo[T])) = xx match { + case (_: Foo[_], _: Foo[_]) => () + } + + // fails for: ::(_, Nil), ::(_, ::(_, ::(_, _))), ... + def fail1[T](xs: List[T]) = xs match { + case Nil => "ok" + case x :: y :: Nil => "ok" + } + + // fails for: Nil + def fail2[T](xs: List[T]) = xs match { + case _ :: _ => "ok" + } + + // fails for: ::(<not in (2, 1)>, _) + def fail3a(xs: List[Int]) = xs match { + case 1 :: _ => + case 2 :: _ => + case Nil => + } + + // fails for: Bar3 + def fail3[T](x: Foo[T]) = x match { + case Bar1 => "ok" + case Bar2 => "ok" + } + // fails for: (Bar2, Bar2) + def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { + case (Bar1, Bar1) => () + case (Bar2, Bar3) => () + case (Bar3, _) => () + } + // fails for: (Bar1, Bar2) + // fails for: (Bar1, Bar3) + // fails for: (Bar2, Bar1) + // fails for: (Bar2, Bar2) + def fail5[T](xx: (Foo[T], Foo[T])) = xx match { + case (Bar1, Bar1) => () + case (Bar2, Bar3) => () + case (Bar3, _) => () + } +} diff --git a/tests/patmat/exhaustive_heuristics.scala b/tests/patmat/exhaustive_heuristics.scala new file mode 100644 index 000000000..7d682f6aa --- /dev/null +++ b/tests/patmat/exhaustive_heuristics.scala @@ -0,0 +1,26 @@ +// tests exhaustivity doesn't give warnings (due to its 
heuristic rewrites kicking in or it backing off) +object Test { + // List() => Nil + List(1) match { + case List() => + case x :: xs => + } + + // we don't look into guards + val turnOffChecks = true + List(1) match { + case _ if turnOffChecks => + } + + // we back off when there are any user-defined extractors + // in fact this is exhaustive, but we pretend we don't know since List's unapplySeq is not special to the compiler + // to compensate our ignorance, we back off + // well, in truth, we do rewrite List() to Nil, but otherwise we do nothing + // the full rewrite List(a, b) to a :: b :: Nil, for example is planned (but not sure it's a good idea) + List(true, false) match { + case List(_, _, _:_*) => + case List(node, _:_*) => + case Nil => + } + +}
\ No newline at end of file diff --git a/tests/patmat/for.scala b/tests/patmat/for.scala new file mode 100644 index 000000000..ae9dcf65e --- /dev/null +++ b/tests/patmat/for.scala @@ -0,0 +1,5 @@ +object Test { + def foo[A, B](l: List[(A, B)]): List[A] = { + for ((a, b) <- l) yield a + } +}
\ No newline at end of file diff --git a/tests/patmat/gadt.check b/tests/patmat/gadt.check new file mode 100644 index 000000000..f2154fa60 --- /dev/null +++ b/tests/patmat/gadt.check @@ -0,0 +1,17 @@ +./tests/patmat/gadt.scala:13: warning: match may not be exhaustive. +It would fail on the following input: IntLit(_) + def foo1b(x: Expr[Int]) = x match { + ^ +./tests/patmat/gadt.scala:22: warning: match may not be exhaustive. +It would fail on the following input: Or(_, _) + def foo2b(x: Expr[Boolean]) = x match { + ^ +./tests/patmat/gadt.scala:45: warning: match may not be exhaustive. +It would fail on the following input: BooleanLit(_), IntLit(_) + def foo4b(x: Expr[_]) = x match { + ^ +./tests/patmat/gadt.scala:55: warning: match may not be exhaustive. +It would fail on the following input: Sum(_, _) + def foo5b[T <: Int](x: Expr[T]) = x match { + ^ +four warnings found
\ No newline at end of file diff --git a/tests/patmat/gadt.scala b/tests/patmat/gadt.scala new file mode 100644 index 000000000..0541ed61f --- /dev/null +++ b/tests/patmat/gadt.scala @@ -0,0 +1,58 @@ +object Test { + sealed trait Expr[T] + case class IntLit(i: Int) extends Expr[Int] + case class BooleanLit(b: Boolean) extends Expr[Boolean] + case class Sum(l: Expr[Int], r: Expr[Int]) extends Expr[Int] + case class Or(l: Expr[Boolean], r: Expr[Boolean]) extends Expr[Boolean] + + def foo1a(x: Expr[Int]) = x match { + case _: IntLit => true + case _: Sum => true + } + + def foo1b(x: Expr[Int]) = x match { + case _: Sum => true + } + + def foo2a(x: Expr[Boolean]) = x match { + case _: BooleanLit => true + case _: Or => true + } + + def foo2b(x: Expr[Boolean]) = x match { + case _: BooleanLit => true + } + + def foo3a(x: Expr[Boolean]) = x match { + case _: BooleanLit => true + case _: Or => true + // case _: Sum => true + } + + def foo3b(x: Expr[Int]) = x match { + case _: IntLit => true + case _: Sum => true + // case _: Or => true + } + + def foo4a(x: Expr[_]) = x match { + case _: IntLit => true + case _: Sum => true + case _: BooleanLit => true + case _: Or => true + } + + def foo4b(x: Expr[_]) = x match { + case _: Sum => true + case _: Or => true + } + + def foo5a[T <: Int](x: Expr[T]) = x match { + case _: IntLit => true + case _: Sum => true + } + + def foo5b[T <: Int](x: Expr[T]) = x match { + case _: IntLit => true + } +} diff --git a/tests/patmat/gadt2.scala.ignore b/tests/patmat/gadt2.scala.ignore new file mode 100644 index 000000000..80ba72c70 --- /dev/null +++ b/tests/patmat/gadt2.scala.ignore @@ -0,0 +1,14 @@ +sealed trait Nat[+T] +case class Zero() extends Nat[Nothing] +case class Succ[T]() extends Nat[T] + +sealed trait Vect[+N <: Nat[_], +T] +case class VN[T]() extends Vect[Zero, T] +case class VC[T, N <: Nat[_]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T] + +object Test { + def foo[N <: Nat[_], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) 
match { + case (VN(), VN()) => 1 + case (VC(x, xs), VC(y, ys)) => 2 + } +} diff --git a/tests/patmat/gadt3.scala.ignore b/tests/patmat/gadt3.scala.ignore new file mode 100644 index 000000000..c39416414 --- /dev/null +++ b/tests/patmat/gadt3.scala.ignore @@ -0,0 +1,10 @@ +sealed trait Expr[T] +case class IntExpr(x: Int) extends Expr[Int] +case class BooleanExpr(b: Boolean) extends Expr[Boolean] + +object Test { + def foo[T](x: Expr[T], y: Expr[T]) = (x, y) match { + case (IntExpr(_), IntExpr(_)) => + case (BooleanExpr(_), BooleanExpr(_)) => + } +}
\ No newline at end of file diff --git a/tests/patmat/i947.check b/tests/patmat/i947.check new file mode 100644 index 000000000..5cce559c4 --- /dev/null +++ b/tests/patmat/i947.check @@ -0,0 +1,4 @@ +./tests/patmat/i947.scala:10: warning: unreachable code + case ys: List[d18383] => false + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/i947.scala b/tests/patmat/i947.scala new file mode 100644 index 000000000..0f2d9e775 --- /dev/null +++ b/tests/patmat/i947.scala @@ -0,0 +1,16 @@ +object Test { + + class c { + + private var x: Int = 0 + + override def equals(other: Any) = other match { + case o: c => x == o.x + case xs: List[c] => false + case ys: List[d18383] => false + case _ => false + } + + + } +} diff --git a/tests/patmat/outer-ref-checks.scala b/tests/patmat/outer-ref-checks.scala new file mode 100644 index 000000000..35983fe92 --- /dev/null +++ b/tests/patmat/outer-ref-checks.scala @@ -0,0 +1,106 @@ +import scala.annotation.unchecked.uncheckedVariance + +class Outer { + // A final class gets no outer ref, so we expect to see warnings where an outer ref check should be performed + final case class Inner(val s: String) // unchecked warning + + def belongs(a: Any): Unit = a match { + case Inner(s) => // unchecked warning + case _ => + } + + def belongsStaticSameOuter(a: Inner): Unit = a match { + case Inner(s) => // no need for outer check + // match is exhaustive, no default case needed + } + + def belongsOtherOuter(a: Outer#Inner): Unit = a match { + case Inner(s) => // unchecked warning + case O.Inner(s) => // unchecked warning + case _ => + } +} + +object O extends Outer { + def belongsStaticSameOuter2(a: Inner): Unit = a match { + case Inner(s) => // no need for outer check + // match is exhaustive, no default case needed + } + + def belongsStaticSameOuter3(a: Inner): Unit = a match { + case _: Inner => // no need for outer check + // match is exhaustive, no default case needed + } + + def belongsStaticSameOuter4(a: Inner): Unit = a match { + case _: (Inner @uncheckedVariance) => // no need for outer check + // match is exhaustive, no default case needed + } + + def belongsOtherOuter2(a: Outer#Inner): Unit = a match { + case Inner(s) => // unchecked warning + case _ => + } + + def belongsOtherOuter3(a: Outer#Inner): Unit = a match { + 
case _: Inner => // unchecked warning + case _ => + } + + def belongsOtherOuter4(a: Outer#Inner): Unit = a match { + case _: (Inner @unchecked) => // warning supressed + case _ => + } + + def belongsOtherOuter5(a: Outer#Inner): Unit = a match { + case _: (Inner @uncheckedVariance) => // unchecked warning + case _ => + } + + def nested: Unit = { + final case class I(s: String) + + def check1(a: Any): Unit = a match { + case I(s) => // no need for outer check + case _ => + } + + def check2(a: I): Unit = a match { + case I(s) => // no need for outer check + // match is exhaustive, no default case needed + } + } +} + +class O2 { + def nested: Unit = { + final case class I(s: String) + + def check1(a: Any): Unit = a match { + case I(s) => // no need for outer check (is this correct?) + case _ => + } + + def check2(a: I): Unit = a match { + case I(s) => // no need for outer check (is this correct?) + // match is exhaustive, no default case needed + } + } +} + +package p { + object T { + case class C(x: Int) + } +} + +object U { + val T = p.T +} + +class Test { + def m(a: Any) = a match { + case U.T.C(1) => 1 // used to warn + case _ => 1 + } +} diff --git a/tests/patmat/partial-function.scala b/tests/patmat/partial-function.scala new file mode 100644 index 000000000..f168489da --- /dev/null +++ b/tests/patmat/partial-function.scala @@ -0,0 +1,12 @@ +sealed abstract class TA +sealed abstract class TB extends TA +case object B extends TB +case object B2 extends TB + +case class CC(i: Int, tb: TB) + +object Test { + def foo: PartialFunction[CC, Unit] = { + case CC(_, B) => () + } +}
\ No newline at end of file diff --git a/tests/patmat/patmat-adt.check b/tests/patmat/patmat-adt.check new file mode 100644 index 000000000..f4e1ce369 --- /dev/null +++ b/tests/patmat/patmat-adt.check @@ -0,0 +1,21 @@ +./tests/patmat/patmat-adt.scala:7: warning: match may not be exhaustive. +It would fail on the following input: Bad(Good(_)), Good(Bad(_)) + def foo1a(x: Odd) = x match { // warning: Good(_: Bad), Bad(_: Good) + ^ +./tests/patmat/patmat-adt.scala:19: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def foo2(x: Option[Int]) = x match { // warning: Some(_: Int) + ^ +./tests/patmat/patmat-adt.scala:24: warning: match may not be exhaustive. +It would fail on the following input: (None, Some(_)), (_, Some(_)) + def foo3a[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, Some(_)) + ^ +./tests/patmat/patmat-adt.scala:29: warning: match may not be exhaustive. +It would fail on the following input: (None, None), (Some(_), Some(_)) + def foo3b[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, None) + ^ +./tests/patmat/patmat-adt.scala:50: warning: match may not be exhaustive. +It would fail on the following input: LetL(BooleanLit), LetL(IntLit) + def foo5(tree: Tree) : Any = tree match { + ^ +5 warnings found
\ No newline at end of file diff --git a/tests/patmat/patmat-adt.scala b/tests/patmat/patmat-adt.scala new file mode 100644 index 000000000..e7eac4e4a --- /dev/null +++ b/tests/patmat/patmat-adt.scala @@ -0,0 +1,58 @@ +object PatmatADT { + abstract sealed class Odd(x: Odd) + + case class Good(x: Odd) extends Odd(x) + case class Bad(x: Odd) extends Odd(x) + + def foo1a(x: Odd) = x match { // warning: Good(_: Bad), Bad(_: Good) + case Good(_: Good) => false + case Bad(_: Bad) => false + } + + def foo1b(x: Odd) = x match { + case Good(_: Good) => false + case Bad(_: Bad) => false + case Good(_: Bad) => false + case Bad(_: Good) => false + } + + def foo2(x: Option[Int]) = x match { // warning: Some(_: Int) + case Some(_: Double) => true + case None => true + } + + def foo3a[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, Some(_)) + case (Some(_), None) => true + case (None, None) => true + } + + def foo3b[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, None) + case (Some(_), None) => true + case (None, Some(_)) => true + } + + sealed trait Base + case class Foo() extends Base + + def foo4(x: Base) = x match { + case Foo() => + } + + sealed abstract class CL3Literal + case object IntLit extends CL3Literal + case object CharLit extends CL3Literal + case object BooleanLit extends CL3Literal + + + sealed abstract class Tree + case class LetL(value: CL3Literal) extends Tree + + def foo5(tree: Tree) : Any = tree match { + case LetL(CharLit) => + } + + def foo6[T](l: List[T]): Boolean = l match { + case x::xs => true + case Nil => false + } +}
\ No newline at end of file diff --git a/tests/patmat/patmat-extractor.scala b/tests/patmat/patmat-extractor.scala new file mode 100644 index 000000000..02fde96dc --- /dev/null +++ b/tests/patmat/patmat-extractor.scala @@ -0,0 +1,17 @@ +sealed trait Node +case class NodeA(i: Int) extends Node +case class NodeB(b: Boolean) extends Node +case class NodeC(s: String) extends Node + +object Node { + def unapply(node: Node): Option[(Node, Node)] = ??? +} + +// currently scalac can't do anything with following +// it's possible to do better in our case +object Test { + def foo(x: Node): Boolean = x match { // unexhaustive + case Node(NodeA(_), NodeB(_)) => true + case Node(NodeA(4), NodeB(false)) => true // unreachable code + } +}
\ No newline at end of file diff --git a/tests/patmat/patmat-indent.check b/tests/patmat/patmat-indent.check new file mode 100644 index 000000000..3a76e0a95 --- /dev/null +++ b/tests/patmat/patmat-indent.check @@ -0,0 +1,13 @@ +./tests/patmat/patmat-indent.scala:9: warning: match may not be exhaustive. +It would fail on the following input: Nil + def foo1a[T](l: List[T]) = l match { + ^ +./tests/patmat/patmat-indent.scala:23: warning: match may not be exhaustive. +It would fail on the following input: _: Boolean + def foo2(b: Boolean) = b match { + ^ +./tests/patmat/patmat-indent.scala:27: warning: match may not be exhaustive. +It would fail on the following input: _: Int + def foo3(x: Int) = x match { + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/patmat-indent.scala b/tests/patmat/patmat-indent.scala new file mode 100644 index 000000000..ef25bb2c7 --- /dev/null +++ b/tests/patmat/patmat-indent.scala @@ -0,0 +1,30 @@ +object Test { + val Nil = scala.Nil + val X = 5 + + object Inner { + val Y = false + } + + def foo1a[T](l: List[T]) = l match { + case x::xs => false + } + + def foo1b[T](l: List[T]) = l match { + case Nil => true + case x::xs => false + } + + def foo1c[T](l: List[T]) = l match { + case Test.Nil => true + case x::xs => false + } + + def foo2(b: Boolean) = b match { + case Inner.Y => false + } + + def foo3(x: Int) = x match { + case X => 0 + } +}
\ No newline at end of file diff --git a/tests/patmat/patmat-ortype.check b/tests/patmat/patmat-ortype.check new file mode 100644 index 000000000..2291da251 --- /dev/null +++ b/tests/patmat/patmat-ortype.check @@ -0,0 +1,13 @@ +./tests/patmat/patmat-ortype.scala:8: warning: match may not be exhaustive. +It would fail on the following input: _: String + def foo2a(x: Int | Double | String) = x match { // _: String not matched + ^ +./tests/patmat/patmat-ortype.scala:18: warning: match may not be exhaustive. +It would fail on the following input: Some(_: String), None + def foo3(x: Option[Int | Double | String]) = x match { // warning: None, Some(_: String) not matched + ^ +./tests/patmat/patmat-ortype.scala:36: warning: match may not be exhaustive. +It would fail on the following input: Some(_: String) + def foo5b(x: Option[Int | Double | String]) = x match { // warning: Some(_: String) not matched + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/patmat-ortype.scala b/tests/patmat/patmat-ortype.scala new file mode 100644 index 000000000..c7419acd3 --- /dev/null +++ b/tests/patmat/patmat-ortype.scala @@ -0,0 +1,40 @@ +object PatmatOrType { + + def foo1(x: Int | Double) = x match { + case _: Int => true + case _: Double => true + } + + def foo2a(x: Int | Double | String) = x match { // _: String not matched + case _: Int => true + case _: Double => true + } + + def foo2b(x: Int | Double | String) = x match { + case _: Int => true + case _: (Double | String) => true + } + + def foo3(x: Option[Int | Double | String]) = x match { // warning: None, Some(_: String) not matched + case Some(_: Int) => true + case Some(_: Double) => true + } + + def foo4(x: Option[Int | Double | String]) = x match { + case Some(_: Int) => true + case Some(_: Double) => true + case Some(_: String) => true + case None => false + } + + def foo5a(x: Option[Int | Double | String]) = x match { + case Some(_: (Int | Double)) => true + case Some(_: String) => true + case None => false + } + + def foo5b(x: Option[Int | Double | String]) = x match { // warning: Some(_: String) not matched + case Some(_: (Int | Double)) => true + case None => false + } +}
\ No newline at end of file diff --git a/tests/patmat/patmatexhaust-huge.check b/tests/patmat/patmatexhaust-huge.check new file mode 100644 index 000000000..06cac90bd --- /dev/null +++ b/tests/patmat/patmatexhaust-huge.check @@ -0,0 +1,5 @@ +./tests/patmat/patmatexhaust-huge.scala:404: warning: match may not be exhaustive. +It would fail on the following input: C397, C392 + def f(c: C): Int = c match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/patmatexhaust-huge.scala b/tests/patmat/patmatexhaust-huge.scala new file mode 100644 index 000000000..c4008b995 --- /dev/null +++ b/tests/patmat/patmatexhaust-huge.scala @@ -0,0 +1,806 @@ +abstract sealed trait C +case object C1 extends C +case object C2 extends C +case object C3 extends C +case object C4 extends C +case object C5 extends C +case object C6 extends C +case object C7 extends C +case object C8 extends C +case object C9 extends C +case object C10 extends C +case object C11 extends C +case object C12 extends C +case object C13 extends C +case object C14 extends C +case object C15 extends C +case object C16 extends C +case object C17 extends C +case object C18 extends C +case object C19 extends C +case object C20 extends C +case object C21 extends C +case object C22 extends C +case object C23 extends C +case object C24 extends C +case object C25 extends C +case object C26 extends C +case object C27 extends C +case object C28 extends C +case object C29 extends C +case object C30 extends C +case object C31 extends C +case object C32 extends C +case object C33 extends C +case object C34 extends C +case object C35 extends C +case object C36 extends C +case object C37 extends C +case object C38 extends C +case object C39 extends C +case object C40 extends C +case object C41 extends C +case object C42 extends C +case object C43 extends C +case object C44 extends C +case object C45 extends C +case object C46 extends C +case object C47 extends C +case object C48 extends C +case object C49 extends C +case object C50 extends C +case object C51 extends C +case object C52 extends C +case object C53 extends C +case object C54 extends C +case object C55 extends C +case object C56 extends C +case object C57 extends C +case object C58 extends C +case object C59 extends C +case object C60 extends C +case object C61 extends C +case object C62 extends C +case object C63 extends C +case object C64 extends C +case object 
C65 extends C +case object C66 extends C +case object C67 extends C +case object C68 extends C +case object C69 extends C +case object C70 extends C +case object C71 extends C +case object C72 extends C +case object C73 extends C +case object C74 extends C +case object C75 extends C +case object C76 extends C +case object C77 extends C +case object C78 extends C +case object C79 extends C +case object C80 extends C +case object C81 extends C +case object C82 extends C +case object C83 extends C +case object C84 extends C +case object C85 extends C +case object C86 extends C +case object C87 extends C +case object C88 extends C +case object C89 extends C +case object C90 extends C +case object C91 extends C +case object C92 extends C +case object C93 extends C +case object C94 extends C +case object C95 extends C +case object C96 extends C +case object C97 extends C +case object C98 extends C +case object C99 extends C +case object C100 extends C +case object C101 extends C +case object C102 extends C +case object C103 extends C +case object C104 extends C +case object C105 extends C +case object C106 extends C +case object C107 extends C +case object C108 extends C +case object C109 extends C +case object C110 extends C +case object C111 extends C +case object C112 extends C +case object C113 extends C +case object C114 extends C +case object C115 extends C +case object C116 extends C +case object C117 extends C +case object C118 extends C +case object C119 extends C +case object C120 extends C +case object C121 extends C +case object C122 extends C +case object C123 extends C +case object C124 extends C +case object C125 extends C +case object C126 extends C +case object C127 extends C +case object C128 extends C +case object C129 extends C +case object C130 extends C +case object C131 extends C +case object C132 extends C +case object C133 extends C +case object C134 extends C +case object C135 extends C +case object C136 extends C +case object C137 extends C 
+case object C138 extends C +case object C139 extends C +case object C140 extends C +case object C141 extends C +case object C142 extends C +case object C143 extends C +case object C144 extends C +case object C145 extends C +case object C146 extends C +case object C147 extends C +case object C148 extends C +case object C149 extends C +case object C150 extends C +case object C151 extends C +case object C152 extends C +case object C153 extends C +case object C154 extends C +case object C155 extends C +case object C156 extends C +case object C157 extends C +case object C158 extends C +case object C159 extends C +case object C160 extends C +case object C161 extends C +case object C162 extends C +case object C163 extends C +case object C164 extends C +case object C165 extends C +case object C166 extends C +case object C167 extends C +case object C168 extends C +case object C169 extends C +case object C170 extends C +case object C171 extends C +case object C172 extends C +case object C173 extends C +case object C174 extends C +case object C175 extends C +case object C176 extends C +case object C177 extends C +case object C178 extends C +case object C179 extends C +case object C180 extends C +case object C181 extends C +case object C182 extends C +case object C183 extends C +case object C184 extends C +case object C185 extends C +case object C186 extends C +case object C187 extends C +case object C188 extends C +case object C189 extends C +case object C190 extends C +case object C191 extends C +case object C192 extends C +case object C193 extends C +case object C194 extends C +case object C195 extends C +case object C196 extends C +case object C197 extends C +case object C198 extends C +case object C199 extends C +case object C200 extends C +case object C201 extends C +case object C202 extends C +case object C203 extends C +case object C204 extends C +case object C205 extends C +case object C206 extends C +case object C207 extends C +case object C208 extends C +case 
object C209 extends C +case object C210 extends C +case object C211 extends C +case object C212 extends C +case object C213 extends C +case object C214 extends C +case object C215 extends C +case object C216 extends C +case object C217 extends C +case object C218 extends C +case object C219 extends C +case object C220 extends C +case object C221 extends C +case object C222 extends C +case object C223 extends C +case object C224 extends C +case object C225 extends C +case object C226 extends C +case object C227 extends C +case object C228 extends C +case object C229 extends C +case object C230 extends C +case object C231 extends C +case object C232 extends C +case object C233 extends C +case object C234 extends C +case object C235 extends C +case object C236 extends C +case object C237 extends C +case object C238 extends C +case object C239 extends C +case object C240 extends C +case object C241 extends C +case object C242 extends C +case object C243 extends C +case object C244 extends C +case object C245 extends C +case object C246 extends C +case object C247 extends C +case object C248 extends C +case object C249 extends C +case object C250 extends C +case object C251 extends C +case object C252 extends C +case object C253 extends C +case object C254 extends C +case object C255 extends C +case object C256 extends C +case object C257 extends C +case object C258 extends C +case object C259 extends C +case object C260 extends C +case object C261 extends C +case object C262 extends C +case object C263 extends C +case object C264 extends C +case object C265 extends C +case object C266 extends C +case object C267 extends C +case object C268 extends C +case object C269 extends C +case object C270 extends C +case object C271 extends C +case object C272 extends C +case object C273 extends C +case object C274 extends C +case object C275 extends C +case object C276 extends C +case object C277 extends C +case object C278 extends C +case object C279 extends C +case object C280 
extends C +case object C281 extends C +case object C282 extends C +case object C283 extends C +case object C284 extends C +case object C285 extends C +case object C286 extends C +case object C287 extends C +case object C288 extends C +case object C289 extends C +case object C290 extends C +case object C291 extends C +case object C292 extends C +case object C293 extends C +case object C294 extends C +case object C295 extends C +case object C296 extends C +case object C297 extends C +case object C298 extends C +case object C299 extends C +case object C300 extends C +case object C301 extends C +case object C302 extends C +case object C303 extends C +case object C304 extends C +case object C305 extends C +case object C306 extends C +case object C307 extends C +case object C308 extends C +case object C309 extends C +case object C310 extends C +case object C311 extends C +case object C312 extends C +case object C313 extends C +case object C314 extends C +case object C315 extends C +case object C316 extends C +case object C317 extends C +case object C318 extends C +case object C319 extends C +case object C320 extends C +case object C321 extends C +case object C322 extends C +case object C323 extends C +case object C324 extends C +case object C325 extends C +case object C326 extends C +case object C327 extends C +case object C328 extends C +case object C329 extends C +case object C330 extends C +case object C331 extends C +case object C332 extends C +case object C333 extends C +case object C334 extends C +case object C335 extends C +case object C336 extends C +case object C337 extends C +case object C338 extends C +case object C339 extends C +case object C340 extends C +case object C341 extends C +case object C342 extends C +case object C343 extends C +case object C344 extends C +case object C345 extends C +case object C346 extends C +case object C347 extends C +case object C348 extends C +case object C349 extends C +case object C350 extends C +case object C351 extends C 
+case object C352 extends C +case object C353 extends C +case object C354 extends C +case object C355 extends C +case object C356 extends C +case object C357 extends C +case object C358 extends C +case object C359 extends C +case object C360 extends C +case object C361 extends C +case object C362 extends C +case object C363 extends C +case object C364 extends C +case object C365 extends C +case object C366 extends C +case object C367 extends C +case object C368 extends C +case object C369 extends C +case object C370 extends C +case object C371 extends C +case object C372 extends C +case object C373 extends C +case object C374 extends C +case object C375 extends C +case object C376 extends C +case object C377 extends C +case object C378 extends C +case object C379 extends C +case object C380 extends C +case object C381 extends C +case object C382 extends C +case object C383 extends C +case object C384 extends C +case object C385 extends C +case object C386 extends C +case object C387 extends C +case object C388 extends C +case object C389 extends C +case object C390 extends C +case object C391 extends C +case object C392 extends C +case object C393 extends C +case object C394 extends C +case object C395 extends C +case object C396 extends C +case object C397 extends C +case object C398 extends C +case object C399 extends C +case object C400 extends C + +object M { + def f(c: C): Int = c match { + case C1 => 1 + case C2 => 2 + case C3 => 3 + case C4 => 4 + case C5 => 5 + case C6 => 6 + case C7 => 7 + case C8 => 8 + case C9 => 9 + case C10 => 10 + case C11 => 11 + case C12 => 12 + case C13 => 13 + case C14 => 14 + case C15 => 15 + case C16 => 16 + case C17 => 17 + case C18 => 18 + case C19 => 19 + case C20 => 20 + case C21 => 21 + case C22 => 22 + case C23 => 23 + case C24 => 24 + case C25 => 25 + case C26 => 26 + case C27 => 27 + case C28 => 28 + case C29 => 29 + case C30 => 30 + case C31 => 31 + case C32 => 32 + case C33 => 33 + case C34 => 34 + case C35 => 35 + 
case C36 => 36 + case C37 => 37 + case C38 => 38 + case C39 => 39 + case C40 => 40 + case C41 => 41 + case C42 => 42 + case C43 => 43 + case C44 => 44 + case C45 => 45 + case C46 => 46 + case C47 => 47 + case C48 => 48 + case C49 => 49 + case C50 => 50 + case C51 => 51 + case C52 => 52 + case C53 => 53 + case C54 => 54 + case C55 => 55 + case C56 => 56 + case C57 => 57 + case C58 => 58 + case C59 => 59 + case C60 => 60 + case C61 => 61 + case C62 => 62 + case C63 => 63 + case C64 => 64 + case C65 => 65 + case C66 => 66 + case C67 => 67 + case C68 => 68 + case C69 => 69 + case C70 => 70 + case C71 => 71 + case C72 => 72 + case C73 => 73 + case C74 => 74 + case C75 => 75 + case C76 => 76 + case C77 => 77 + case C78 => 78 + case C79 => 79 + case C80 => 80 + case C81 => 81 + case C82 => 82 + case C83 => 83 + case C84 => 84 + case C85 => 85 + case C86 => 86 + case C87 => 87 + case C88 => 88 + case C89 => 89 + case C90 => 90 + case C91 => 91 + case C92 => 92 + case C93 => 93 + case C94 => 94 + case C95 => 95 + case C96 => 96 + case C97 => 97 + case C98 => 98 + case C99 => 99 + case C100 => 100 + case C101 => 101 + case C102 => 102 + case C103 => 103 + case C104 => 104 + case C105 => 105 + case C106 => 106 + case C107 => 107 + case C108 => 108 + case C109 => 109 + case C110 => 110 + case C111 => 111 + case C112 => 112 + case C113 => 113 + case C114 => 114 + case C115 => 115 + case C116 => 116 + case C117 => 117 + case C118 => 118 + case C119 => 119 + case C120 => 120 + case C121 => 121 + case C122 => 122 + case C123 => 123 + case C124 => 124 + case C125 => 125 + case C126 => 126 + case C127 => 127 + case C128 => 128 + case C129 => 129 + case C130 => 130 + case C131 => 131 + case C132 => 132 + case C133 => 133 + case C134 => 134 + case C135 => 135 + case C136 => 136 + case C137 => 137 + case C138 => 138 + case C139 => 139 + case C140 => 140 + case C141 => 141 + case C142 => 142 + case C143 => 143 + case C144 => 144 + case C145 => 145 + case C146 => 146 + case C147 => 147 + 
case C148 => 148 + case C149 => 149 + case C150 => 150 + case C151 => 151 + case C152 => 152 + case C153 => 153 + case C154 => 154 + case C155 => 155 + case C156 => 156 + case C157 => 157 + case C158 => 158 + case C159 => 159 + case C160 => 160 + case C161 => 161 + case C162 => 162 + case C163 => 163 + case C164 => 164 + case C165 => 165 + case C166 => 166 + case C167 => 167 + case C168 => 168 + case C169 => 169 + case C170 => 170 + case C171 => 171 + case C172 => 172 + case C173 => 173 + case C174 => 174 + case C175 => 175 + case C176 => 176 + case C177 => 177 + case C178 => 178 + case C179 => 179 + case C180 => 180 + case C181 => 181 + case C182 => 182 + case C183 => 183 + case C184 => 184 + case C185 => 185 + case C186 => 186 + case C187 => 187 + case C188 => 188 + case C189 => 189 + case C190 => 190 + case C191 => 191 + case C192 => 192 + case C193 => 193 + case C194 => 194 + case C195 => 195 + case C196 => 196 + case C197 => 197 + case C198 => 198 + case C199 => 199 + case C200 => 200 + case C201 => 201 + case C202 => 202 + case C203 => 203 + case C204 => 204 + case C205 => 205 + case C206 => 206 + case C207 => 207 + case C208 => 208 + case C209 => 209 + case C210 => 210 + case C211 => 211 + case C212 => 212 + case C213 => 213 + case C214 => 214 + case C215 => 215 + case C216 => 216 + case C217 => 217 + case C218 => 218 + case C219 => 219 + case C220 => 220 + case C221 => 221 + case C222 => 222 + case C223 => 223 + case C224 => 224 + case C225 => 225 + case C226 => 226 + case C227 => 227 + case C228 => 228 + case C229 => 229 + case C230 => 230 + case C231 => 231 + case C232 => 232 + case C233 => 233 + case C234 => 234 + case C235 => 235 + case C236 => 236 + case C237 => 237 + case C238 => 238 + case C239 => 239 + case C240 => 240 + case C241 => 241 + case C242 => 242 + case C243 => 243 + case C244 => 244 + case C245 => 245 + case C246 => 246 + case C247 => 247 + case C248 => 248 + case C249 => 249 + case C250 => 250 + case C251 => 251 + case C252 => 252 + case 
C253 => 253 + case C254 => 254 + case C255 => 255 + case C256 => 256 + case C257 => 257 + case C258 => 258 + case C259 => 259 + case C260 => 260 + case C261 => 261 + case C262 => 262 + case C263 => 263 + case C264 => 264 + case C265 => 265 + case C266 => 266 + case C267 => 267 + case C268 => 268 + case C269 => 269 + case C270 => 270 + case C271 => 271 + case C272 => 272 + case C273 => 273 + case C274 => 274 + case C275 => 275 + case C276 => 276 + case C277 => 277 + case C278 => 278 + case C279 => 279 + case C280 => 280 + case C281 => 281 + case C282 => 282 + case C283 => 283 + case C284 => 284 + case C285 => 285 + case C286 => 286 + case C287 => 287 + case C288 => 288 + case C289 => 289 + case C290 => 290 + case C291 => 291 + case C292 => 292 + case C293 => 293 + case C294 => 294 + case C295 => 295 + case C296 => 296 + case C297 => 297 + case C298 => 298 + case C299 => 299 + case C300 => 300 + case C301 => 301 + case C302 => 302 + case C303 => 303 + case C304 => 304 + case C305 => 305 + case C306 => 306 + case C307 => 307 + case C308 => 308 + case C309 => 309 + case C310 => 310 + case C311 => 311 + case C312 => 312 + case C313 => 313 + case C314 => 314 + case C315 => 315 + case C316 => 316 + case C317 => 317 + case C318 => 318 + case C319 => 319 + case C320 => 320 + case C321 => 321 + case C322 => 322 + case C323 => 323 + case C324 => 324 + case C325 => 325 + case C326 => 326 + case C327 => 327 + case C328 => 328 + case C329 => 329 + case C330 => 330 + case C331 => 331 + case C332 => 332 + case C333 => 333 + case C334 => 334 + case C335 => 335 + case C336 => 336 + case C337 => 337 + case C338 => 338 + case C339 => 339 + case C340 => 340 + case C341 => 341 + case C342 => 342 + case C343 => 343 + case C344 => 344 + case C345 => 345 + case C346 => 346 + case C347 => 347 + case C348 => 348 + case C349 => 349 + case C350 => 350 + case C351 => 351 + case C352 => 352 + case C353 => 353 + case C354 => 354 + case C355 => 355 + case C356 => 356 + case C357 => 357 + case C358 
=> 358 + case C359 => 359 + case C360 => 360 + case C361 => 361 + case C362 => 362 + case C363 => 363 + case C364 => 364 + case C365 => 365 + case C366 => 366 + case C367 => 367 + case C368 => 368 + case C369 => 369 + case C370 => 370 + case C371 => 371 + case C372 => 372 + case C373 => 373 + case C374 => 374 + case C375 => 375 + case C376 => 376 + case C377 => 377 + case C378 => 378 + case C379 => 379 + case C380 => 380 + case C381 => 381 + case C382 => 382 + case C383 => 383 + case C384 => 384 + case C385 => 385 + case C386 => 386 + case C387 => 387 + case C388 => 388 + case C389 => 389 + case C390 => 390 + case C391 => 391 +// case C392 => 392 + case C393 => 393 + case C394 => 394 + case C395 => 395 + case C396 => 396 +// case C397 => 397 + case C398 => 398 + case C399 => 399 + case C400 => 400 + } +} diff --git a/tests/patmat/patmatexhaust.check b/tests/patmat/patmatexhaust.check new file mode 100644 index 000000000..ef2b578d6 --- /dev/null +++ b/tests/patmat/patmatexhaust.check @@ -0,0 +1,33 @@ +./tests/patmat/patmatexhaust.scala:7: warning: match may not be exhaustive. +It would fail on the following input: Baz + def ma1(x:Foo) = x match { + ^ +./tests/patmat/patmatexhaust.scala:11: warning: match may not be exhaustive. +It would fail on the following input: Bar(_) + def ma2(x:Foo) = x match { + ^ +./tests/patmat/patmatexhaust.scala:23: warning: match may not be exhaustive. +It would fail on the following input: (Qult(), Qult()), (Kult(_), Kult(_)) + def ma3(x:Mult) = (x,x) match { // not exhaustive + ^ +./tests/patmat/patmatexhaust.scala:49: warning: match may not be exhaustive. +It would fail on the following input: _: Gp + def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included + ^ +./tests/patmat/patmatexhaust.scala:75: warning: match may not be exhaustive. +It would fail on the following input: _: B + def ma9(x: B) = x match { + ^ +./tests/patmat/patmatexhaust.scala:100: warning: match may not be exhaustive. 
+It would fail on the following input: _: C1 + def ma10(x: C) = x match { // not exhaustive: C1 is not sealed. + ^ +./tests/patmat/patmatexhaust.scala:114: warning: match may not be exhaustive. +It would fail on the following input: D2(), D1 + def ma10(x: C) = x match { // not exhaustive: C1 has subclasses. + ^ +./tests/patmat/patmatexhaust.scala:126: warning: match may not be exhaustive. +It would fail on the following input: _: C1 + def ma10(x: C) = x match { // not exhaustive: C1 is not abstract. + ^ +8 warnings found
\ No newline at end of file diff --git a/tests/patmat/patmatexhaust.scala b/tests/patmat/patmatexhaust.scala new file mode 100644 index 000000000..26f0c12a9 --- /dev/null +++ b/tests/patmat/patmatexhaust.scala @@ -0,0 +1,131 @@ +class TestSealedExhaustive { // compile only + sealed abstract class Foo + + case class Bar(x:Int) extends Foo + case object Baz extends Foo + + def ma1(x:Foo) = x match { + case Bar(_) => // not exhaustive + } + + def ma2(x:Foo) = x match { + case Baz => // not exhaustive + } + + sealed abstract class Mult + case class Kult(s:Mult) extends Mult + case class Qult() extends Mult + + def ma33(x:Kult) = x match { // exhaustive + case Kult(_) => // exhaustive + } + + def ma3(x:Mult) = (x,x) match { // not exhaustive + case (Kult(_), Qult()) => // Kult missing + //case (Kult(_), Kult(_)) => + case (Qult(), Kult(_)) => // Qult missing + //case (Qult(), Qult()) => + } + + def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked! + case (Kult(_), Qult()) => + case (Qult(), Kult(_)) => + } + + sealed abstract class Deep + + case object Ga extends Deep + sealed class Gp extends Deep + case object Gu extends Gp + + def zma3(x:Deep) = x match { // exhaustive! + case _ => + } + def zma4(x:Deep) = x match { // exhaustive! 
+ case Ga => + case _ => + } + + def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included + case Ga => + } + + def ma5(x:Deep) = x match { + case Gu => + case _ if 1 == 0 => + case Ga => + } + + def ma6() = List(1,2) match { // give up + case List(1,2) => + case x :: xs => + } + + def ma7() = List(1,2) match { //exhaustive + case 1::2::Nil => + case _ => + } + + sealed class B + case class B1() extends B + case object B2 extends B + def ma8(x: B) = x match { + case _: B => true + } + def ma9(x: B) = x match { + case B1() => true // missing B, which is not abstract so must be included + case B2 => true + } + + object ob1 { + sealed abstract class C + sealed abstract class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // exhaustive: abstract sealed C1 is dead end. + case C3() => true + case C2 | C4 => true + } + } + + object ob2 { + sealed abstract class C + abstract class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 is not sealed. + case C3() => true + case C2 | C4 => true + } + } + object ob3 { + sealed abstract class C + sealed abstract class C1 extends C + object D1 extends C1 + case class D2() extends C1 + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 has subclasses. + case C3() => true + case C2 | C4 => true + } + } + object ob4 { + sealed abstract class C + sealed class C1 extends C + object C2 extends C + case class C3() extends C + case object C4 extends C + + def ma10(x: C) = x match { // not exhaustive: C1 is not abstract. 
+ case C3() => true + case C2 | C4 => true + } + } +} diff --git a/tests/patmat/sealed-java-enums.check b/tests/patmat/sealed-java-enums.check new file mode 100644 index 000000000..ed93d3d40 --- /dev/null +++ b/tests/patmat/sealed-java-enums.check @@ -0,0 +1,5 @@ +./tests/patmat/sealed-java-enums.scala:5: warning: match may not be exhaustive. +It would fail on the following input: TERMINATED, TIMED_WAITING, BLOCKED + def f(state: State) = state match { + ^ +one warning found diff --git a/tests/patmat/sealed-java-enums.scala b/tests/patmat/sealed-java-enums.scala new file mode 100644 index 000000000..2daf93f30 --- /dev/null +++ b/tests/patmat/sealed-java-enums.scala @@ -0,0 +1,10 @@ +import java.lang.Thread.State +import java.lang.Thread.State._ + +object Test { + def f(state: State) = state match { + case NEW | WAITING => true + case RUNNABLE => false + // and I forget the rest + } +} diff --git a/tests/patmat/t1056.scala b/tests/patmat/t1056.scala new file mode 100644 index 000000000..68f1ff273 --- /dev/null +++ b/tests/patmat/t1056.scala @@ -0,0 +1,5 @@ +object Test { + type T = PartialFunction[String,String] + def g(h: T) = () + g({case s: String => s}) +} diff --git a/tests/patmat/t2425.scala b/tests/patmat/t2425.scala new file mode 100644 index 000000000..477d5467a --- /dev/null +++ b/tests/patmat/t2425.scala @@ -0,0 +1,15 @@ +trait B +class D extends B +object Test extends App { + def foo[T](bar: T) = { + bar match { + case _: Array[Array[_]] => println("array 2d") + case _: Array[_] => println("array 1d") + case _ => println("something else") + } + } + foo(Array.fill(10)(2)) + foo(Array.fill(10, 10)(2)) + foo(Array.fill(10, 10, 10)(2)) + foo(List(1, 2, 3)) +} diff --git a/tests/patmat/t2442/MyEnum.java b/tests/patmat/t2442/MyEnum.java new file mode 100644 index 000000000..3ffbbb31b --- /dev/null +++ b/tests/patmat/t2442/MyEnum.java @@ -0,0 +1,3 @@ +public enum MyEnum { + ONE, TWO, THREE; +}
\ No newline at end of file diff --git a/tests/patmat/t2442/MySecondEnum.java b/tests/patmat/t2442/MySecondEnum.java new file mode 100644 index 000000000..0f841286d --- /dev/null +++ b/tests/patmat/t2442/MySecondEnum.java @@ -0,0 +1,6 @@ +public enum MySecondEnum { + RED(1), BLUE(2) { public void foo() {} }; + MySecondEnum(int i) {} + + public void foo() {} +}
\ No newline at end of file diff --git a/tests/patmat/t2442/expected.check b/tests/patmat/t2442/expected.check new file mode 100644 index 000000000..33110ce43 --- /dev/null +++ b/tests/patmat/t2442/expected.check @@ -0,0 +1,9 @@ +./tests/patmat/t2442/t2442.scala:4: warning: match may not be exhaustive. +It would fail on the following input: THREE + def f(e: MyEnum) = e match { + ^ +./tests/patmat/t2442/t2442.scala:11: warning: match may not be exhaustive. +It would fail on the following input: BLUE + def g(e: MySecondEnum) = e match { + ^ +two warnings found diff --git a/tests/patmat/t2442/t2442.scala b/tests/patmat/t2442/t2442.scala new file mode 100644 index 000000000..b0a0f3cd4 --- /dev/null +++ b/tests/patmat/t2442/t2442.scala @@ -0,0 +1,15 @@ +class Test { + import MyEnum._ + + def f(e: MyEnum) = e match { + case ONE => println("one") + case TWO => println("two") + // missing case --> exhaustivity warning! + } + + import MySecondEnum._ + def g(e: MySecondEnum) = e match { + case RED => println("red") + // missing case --> exhaustivity warning! + } +}
\ No newline at end of file diff --git a/tests/patmat/t3097.scala b/tests/patmat/t3097.scala new file mode 100644 index 000000000..3ff61b3c7 --- /dev/null +++ b/tests/patmat/t3097.scala @@ -0,0 +1,35 @@ +sealed trait ISimpleValue + +sealed trait IListValue extends ISimpleValue { + def items: List[IAtomicValue[_]] +} + +sealed trait IAtomicValue[O] extends ISimpleValue { + def data: O +} + +sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { +} + +sealed trait IDoubleValue extends IAbstractDoubleValue[Double] + +case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue + +class DoubleValue(val data: Double) extends IDoubleValue { + def asDouble = data +} + +object Test { + + /** + * @param args the command line arguments + */ + def main(args: Array[String]): Unit = { + val v: ISimpleValue = new DoubleValue(1) + v match { + case m: IListValue => println("list") + case a: IAtomicValue[_] => println("atomic") + } + + } +}
\ No newline at end of file diff --git a/tests/patmat/t3098/a.scala b/tests/patmat/t3098/a.scala new file mode 100644 index 000000000..57a103c7a --- /dev/null +++ b/tests/patmat/t3098/a.scala @@ -0,0 +1,6 @@ +// Traits.scala +sealed trait T + +trait A extends T +trait B extends T +trait C extends T diff --git a/tests/patmat/t3098/b.scala b/tests/patmat/t3098/b.scala new file mode 100644 index 000000000..84a1f9f6f --- /dev/null +++ b/tests/patmat/t3098/b.scala @@ -0,0 +1,8 @@ +// Test.scala +object Test { + def f = (null: T) match { + case _: A => println("A") + case _: B => println("B") + // no C + } +} diff --git a/tests/patmat/t3098/expected.check b/tests/patmat/t3098/expected.check new file mode 100644 index 000000000..331904111 --- /dev/null +++ b/tests/patmat/t3098/expected.check @@ -0,0 +1,5 @@ +./tests/patmat/t3098/b.scala:3: warning: match may not be exhaustive. +It would fail on the following input: _: C + def f = (null: T) match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t3111.check b/tests/patmat/t3111.check new file mode 100644 index 000000000..46ff0a6a9 --- /dev/null +++ b/tests/patmat/t3111.check @@ -0,0 +1,8 @@ +./tests/patmat/t3111.scala:4: warning: match may not be exhaustive. +It would fail on the following input: false + bool match { + ^ +./tests/patmat/t3111.scala:11: warning: unreachable code + case _ => "cats and dogs living together... mass hysteria!" + ^ +two warnings found
\ No newline at end of file diff --git a/tests/patmat/t3111.scala b/tests/patmat/t3111.scala new file mode 100644 index 000000000..8f2bc5a27 --- /dev/null +++ b/tests/patmat/t3111.scala @@ -0,0 +1,13 @@ +object Test { + val bool: Boolean = false + + bool match { + case true => "true!" + } + + bool match { + case true => "true!" + case false => "false!" + case _ => "cats and dogs living together... mass hysteria!" + } +}
\ No newline at end of file diff --git a/tests/patmat/t3163.check b/tests/patmat/t3163.check new file mode 100644 index 000000000..3da94e2c2 --- /dev/null +++ b/tests/patmat/t3163.check @@ -0,0 +1,5 @@ +./tests/patmat/t3163.scala:2: warning: match may not be exhaustive. +It would fail on the following input: _: AnyVal + def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"} + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t3163.scala b/tests/patmat/t3163.scala new file mode 100644 index 000000000..2e0f2c1d9 --- /dev/null +++ b/tests/patmat/t3163.scala @@ -0,0 +1,3 @@ +object Test { + def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"} +}
\ No newline at end of file diff --git a/tests/patmat/t3683.scala b/tests/patmat/t3683.scala new file mode 100644 index 000000000..44be9d6c6 --- /dev/null +++ b/tests/patmat/t3683.scala @@ -0,0 +1,19 @@ +sealed trait Foo +sealed trait Bar extends Foo +sealed trait W[T >: Bar <: Foo] +sealed case class X() extends W[Foo] +sealed case class Y() extends W[Bar] +sealed case class Z[T >: Bar <: Foo]( + z1: W[T] +) extends W[T] + +object Main { + def func(w: W[Bar]): Int = { + w match { + // Error if I include it, warning if I do not! + // case X() => 2 + case Y() => 1 + case Z(z) => func(z) + } + } +} diff --git a/tests/patmat/t3683a.check b/tests/patmat/t3683a.check new file mode 100644 index 000000000..df5e691c6 --- /dev/null +++ b/tests/patmat/t3683a.check @@ -0,0 +1,5 @@ +./tests/patmat/t3683a.scala:14: warning: match may not be exhaustive. +It would fail on the following input: XX() + w match { + ^ +one warning found diff --git a/tests/patmat/t3683a.scala b/tests/patmat/t3683a.scala new file mode 100644 index 000000000..6d1915213 --- /dev/null +++ b/tests/patmat/t3683a.scala @@ -0,0 +1,20 @@ +sealed trait Foo +sealed trait Bar extends Foo +sealed trait W[T >: Bar <: Foo] +case class X() extends W[Foo] +case class XX() extends W[Bar] +case class Y() extends W[Bar] +case class Z[T >: Bar <: Foo]( + z1: W[T] +) extends W[T] + +object Main { + // should warn for not including XX() + def f1(w: W[Bar]): Int = { + w match { + // case XX() => 2 + case Y() => 1 + case Z(z) => f1(z) + } + } +}
\ No newline at end of file diff --git a/tests/patmat/t4020.scala b/tests/patmat/t4020.scala new file mode 100644 index 000000000..f97646019 --- /dev/null +++ b/tests/patmat/t4020.scala @@ -0,0 +1,25 @@ +class A { + sealed trait Foo +} + +object a1 extends A { + case class Foo1(i: Int) extends Foo +} + +object a2 extends A { + case class Foo2(i: Int) extends Foo +} + +class B { + def mthd(foo: a2.Foo) = { + foo match { + case a2.Foo2(i) => i + + // Note: This case is impossible. In fact, scalac + // will (correctly) report an error if it is uncommented, + // but a warning if it is commented. + + // case a1.Foo1(i) => i + } + } +}
\ No newline at end of file diff --git a/tests/patmat/t4333.scala.ignore b/tests/patmat/t4333.scala.ignore new file mode 100644 index 000000000..07d105c74 --- /dev/null +++ b/tests/patmat/t4333.scala.ignore @@ -0,0 +1,7 @@ +object Enum extends Enumeration { val A, B, C = Value } + +object Test { + def foo(v : Enum.Value) = v match { + case Enum.B => println("B") + } +} diff --git a/tests/patmat/t4408.check b/tests/patmat/t4408.check new file mode 100644 index 000000000..53bfe1c2c --- /dev/null +++ b/tests/patmat/t4408.check @@ -0,0 +1,5 @@ +./tests/patmat/t4408.scala:2: warning: match may not be exhaustive. +It would fail on the following input: List(_, _, _) + def printList(in: List[String]): Unit = in match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t4408.scala b/tests/patmat/t4408.scala new file mode 100644 index 000000000..419b66369 --- /dev/null +++ b/tests/patmat/t4408.scala @@ -0,0 +1,16 @@ +object Test { + def printList(in: List[String]): Unit = in match { + case Nil => Unit + + case (s: String) :: Nil => + println(s) + + case head :: (s: String) :: Nil => + printList(head :: Nil) + for(i <- head){ + print(i) + } + println + println(s) + } +} diff --git a/tests/patmat/t4526.check b/tests/patmat/t4526.check new file mode 100644 index 000000000..b577cbc0c --- /dev/null +++ b/tests/patmat/t4526.check @@ -0,0 +1,13 @@ +./tests/patmat/t4526.scala:2: warning: match may not be exhaustive. +It would fail on the following input: _: Int + def foo(a: Int) = a match { + ^ +./tests/patmat/t4526.scala:7: warning: match may not be exhaustive. +It would fail on the following input: (_, _) + def bar(a: (Int, Int)) = a match { + ^ +./tests/patmat/t4526.scala:12: warning: match may not be exhaustive. +It would fail on the following input: (false, false), (true, true) + def baz(a: (Boolean, Boolean)) = a match { + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/t4526.scala b/tests/patmat/t4526.scala new file mode 100644 index 000000000..d531c6b34 --- /dev/null +++ b/tests/patmat/t4526.scala @@ -0,0 +1,16 @@ +object Test{ + def foo(a: Int) = a match { + case 5 => "Five!" + case 42 => "The answer." + } + + def bar(a: (Int, Int)) = a match { + case (5, 5) => "Two fives!" + case (42, 21) => "The answer and a half." + } + + def baz(a: (Boolean, Boolean)) = a match { + case (true, false) => "tf" + case (false, true) => "ft" + } +}
\ No newline at end of file diff --git a/tests/patmat/t4691.check b/tests/patmat/t4691.check new file mode 100644 index 000000000..4d2c24506 --- /dev/null +++ b/tests/patmat/t4691.check @@ -0,0 +1,5 @@ +./tests/patmat/t4691.scala:15: warning: match may not be exhaustive. +It would fail on the following input: NodeType2(_) + def test (x: Node) = x match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t4691.scala b/tests/patmat/t4691.scala new file mode 100644 index 000000000..bfaa61670 --- /dev/null +++ b/tests/patmat/t4691.scala @@ -0,0 +1,18 @@ +sealed trait Node + +class NodeType1 (val a:Int) extends Node +class NodeType2 (val b:Int) extends Node + +object NodeType1 { + def unapply (x : NodeType1) : Some[Int] = Some(x.a) +} + +object NodeType2 { + def unapply (x : NodeType2) : Some[Int] = Some(x.b) +} + +object Test { + def test (x: Node) = x match { + case NodeType1(a) => "got node type 1 " + a + } +}
\ No newline at end of file diff --git a/tests/patmat/t4691_exhaust_extractor.check b/tests/patmat/t4691_exhaust_extractor.check new file mode 100644 index 000000000..e7d1e17f9 --- /dev/null +++ b/tests/patmat/t4691_exhaust_extractor.check @@ -0,0 +1,13 @@ +./tests/patmat/t4691_exhaust_extractor.scala:17: warning: match may not be exhaustive. +It would fail on the following input: _: Bar3 + def f1(x: Foo) = x match { + ^ +./tests/patmat/t4691_exhaust_extractor.scala:23: warning: match may not be exhaustive. +It would fail on the following input: _: Bar3 + def f2(x: Foo) = x match { + ^ +./tests/patmat/t4691_exhaust_extractor.scala:29: warning: match may not be exhaustive. +It would fail on the following input: _: Bar3 + def f3(x: Foo) = x match { + ^ +three warnings found diff --git a/tests/patmat/t4691_exhaust_extractor.scala b/tests/patmat/t4691_exhaust_extractor.scala new file mode 100644 index 000000000..c68c33d65 --- /dev/null +++ b/tests/patmat/t4691_exhaust_extractor.scala @@ -0,0 +1,33 @@ +sealed trait Foo +class Bar1 extends Foo +class Bar2 extends Foo +class Bar3 extends Foo + +// these extractors are known to always succeed as they return a Some +object Baz1 { + def unapply(x: Bar1): Some[Int] = Some(1) +} +object Baz2 { + def unapply(x: Bar2): Some[Int] = Some(2) +} + + +object Test { + // warning: missing Bar3 + def f1(x: Foo) = x match { + case _: Bar1 => 1 + case _: Bar2 => 2 + } + + // warning: missing Bar3 + def f2(x: Foo) = x match { + case _: Bar1 => 1 + case Baz2(x) => x + } + + // warning: missing Bar3 + def f3(x: Foo) = x match { + case Baz1(x) => x + case Baz2(x) => x + } +}
\ No newline at end of file diff --git a/tests/patmat/t5440.check b/tests/patmat/t5440.check new file mode 100644 index 000000000..0780d6529 --- /dev/null +++ b/tests/patmat/t5440.check @@ -0,0 +1,5 @@ +./tests/patmat/t5440.scala:2: warning: match may not be exhaustive. +It would fail on the following input: (Nil, List(_)), (List(_), Nil) + def merge(list1: List[Long], list2: List[Long]): Boolean = (list1, list2) match { + ^ +one warning found diff --git a/tests/patmat/t5440.scala b/tests/patmat/t5440.scala new file mode 100644 index 000000000..6721b0562 --- /dev/null +++ b/tests/patmat/t5440.scala @@ -0,0 +1,6 @@ +object Test { + def merge(list1: List[Long], list2: List[Long]): Boolean = (list1, list2) match { + case (hd1::_, hd2::_) => true + case (Nil, Nil) => true + } +}
\ No newline at end of file diff --git a/tests/patmat/t5968.scala b/tests/patmat/t5968.scala new file mode 100644 index 000000000..14cc903c8 --- /dev/null +++ b/tests/patmat/t5968.scala @@ -0,0 +1,7 @@ +object Test { + object X + def f(e: Either[Int, X.type]) = e match { + case Left(i) => i + case Right(X) => 0 + } +}
\ No newline at end of file diff --git a/tests/patmat/t6008.scala b/tests/patmat/t6008.scala new file mode 100644 index 000000000..c42e9c5a5 --- /dev/null +++ b/tests/patmat/t6008.scala @@ -0,0 +1,5 @@ +object Test { + def x(in: (Int, Boolean)) = in match { + case (i: Int, b: Boolean) => 3 + } +}
\ No newline at end of file diff --git a/tests/patmat/t6146.scala b/tests/patmat/t6146.scala new file mode 100644 index 000000000..b5bde826b --- /dev/null +++ b/tests/patmat/t6146.scala @@ -0,0 +1,60 @@ +// No unreachable or exhaustiveness warnings, please. + +// +// The reported bug +// + +trait AxisCompanion { + sealed trait Format + object Format { + case object Decimal extends Format + case object Integer extends Format + // Gives an unrelated warning: The outer reference in this type test cannot be checked at run time. + //final case class Time( hours: Boolean = false, millis: Boolean = true ) extends Format + } +} +object Axis extends AxisCompanion +class Axis { + import Axis._ + def test( f: Format ) = f match { + case Format.Integer => "Int" + // case Format.Time( hours, millis ) => "Time" + case Format.Decimal => "Dec" + } +} + + +// +// Some tricksier variations +// + +trait T1[X] { + trait T2[Y] { + sealed trait Format + object Format { + case object Decimal extends Format + case object Integer extends Format + } + } +} + +object O1 extends T1[Any] { + object O2 extends T2[Any] { + + } +} + +case object Shorty extends O1.O2.Format + +class Test1 { + import O1.O2._ + val FI: Format.Integer.type = Format.Integer + def test( f: Format ) = { + val ff: f.type = f + ff match { + case FI => "Int" + case Format.Decimal => "Dec" + case Shorty => "Sho" + } + } +} diff --git a/tests/patmat/t6420.check b/tests/patmat/t6420.check new file mode 100644 index 000000000..c62b33d18 --- /dev/null +++ b/tests/patmat/t6420.check @@ -0,0 +1,5 @@ +./tests/patmat/t6420.scala:5: warning: match may not be exhaustive. +It would fail on the following input: (Nil, _), (List(_, _), _), (Nil, Nil), (Nil, List(_, _)), (List(_, _), Nil), (List(_, _), List(_, _)), (_, Nil), (_, List(_, _)) + def foo(x: List[Boolean], y: List[Boolean]) = (x,y) match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t6420.scala b/tests/patmat/t6420.scala new file mode 100644 index 000000000..80c0f90f6 --- /dev/null +++ b/tests/patmat/t6420.scala @@ -0,0 +1,11 @@ +object Test { + val c0 = false + val c1 = true + + def foo(x: List[Boolean], y: List[Boolean]) = (x,y) match { + case (`c0`::x, `c0`::y) => x + case (`c0`::x, `c1`::y) => y + case (`c1`::x, `c0`::y) => y + case (`c1`::x, `c1`::y) => x + } +} diff --git a/tests/patmat/t6450.scala b/tests/patmat/t6450.scala new file mode 100644 index 000000000..157f1ce81 --- /dev/null +++ b/tests/patmat/t6450.scala @@ -0,0 +1,9 @@ +sealed abstract class FoundNode[T] +// case class A[T](x: T) extends FoundNode[T] + +object Foo { + val v: (Some[_], FoundNode[_]) = (???, ???) + v match { + case (x: Some[t], _) => + } +} diff --git a/tests/patmat/t6582_exhaust_big.check b/tests/patmat/t6582_exhaust_big.check new file mode 100644 index 000000000..c244e5ba5 --- /dev/null +++ b/tests/patmat/t6582_exhaust_big.check @@ -0,0 +1,5 @@ +./tests/patmat/t6582_exhaust_big.scala:27: warning: match may not be exhaustive. 
+It would fail on the following input: Z.Z11() + def foo(z: Z) = z match { + ^ +one warning found diff --git a/tests/patmat/t6582_exhaust_big.scala b/tests/patmat/t6582_exhaust_big.scala new file mode 100644 index 000000000..dd639eb56 --- /dev/null +++ b/tests/patmat/t6582_exhaust_big.scala @@ -0,0 +1,32 @@ +sealed abstract class Z +object Z { + object Z0 extends Z + case class Z1() extends Z + object Z2 extends Z + case class Z3() extends Z + object Z4 extends Z + case class Z5() extends Z + object Z6 extends Z + case class Z7() extends Z + object Z8 extends Z + case class Z9() extends Z + object Z10 extends Z + case class Z11() extends Z + object Z12 extends Z + case class Z13() extends Z + object Z14 extends Z + case class Z15() extends Z + object Z16 extends Z + case class Z17() extends Z + object Z18 extends Z + case class Z19() extends Z +} + +object Test { + import Z._ + def foo(z: Z) = z match { + case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() | + Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19() + => + } +} diff --git a/tests/patmat/t6818.scala b/tests/patmat/t6818.scala new file mode 100644 index 000000000..2334095c4 --- /dev/null +++ b/tests/patmat/t6818.scala @@ -0,0 +1,11 @@ +object Test { + type Id[X] = X + + def foo(x:Id[Option[Int]]) = x match { + case Some(n) => "foo" + case None => "bar" + } + + foo(Some(3)) // "foo" + foo(None) // "bar" +}
\ No newline at end of file diff --git a/tests/patmat/t7020.check b/tests/patmat/t7020.check new file mode 100644 index 000000000..c091535ae --- /dev/null +++ b/tests/patmat/t7020.check @@ -0,0 +1,17 @@ +./tests/patmat/t7020.scala:3: warning: match may not be exhaustive. +It would fail on the following input: List(_, _) + List(5) match { + ^ +./tests/patmat/t7020.scala:10: warning: match may not be exhaustive. +It would fail on the following input: List(_, _) + List(5) match { + ^ +./tests/patmat/t7020.scala:17: warning: match may not be exhaustive. +It would fail on the following input: List(_, _) + List(5) match { + ^ +./tests/patmat/t7020.scala:24: warning: match may not be exhaustive. +It would fail on the following input: List(_, _) + List(5) match { + ^ +four warnings found diff --git a/tests/patmat/t7020.scala b/tests/patmat/t7020.scala new file mode 100644 index 000000000..cc5421bab --- /dev/null +++ b/tests/patmat/t7020.scala @@ -0,0 +1,30 @@ +object Test { + // warning was non-deterministic + List(5) match { + case 1 :: Nil | 2 :: Nil => + case (x@(4 | 5 | 6)) :: Nil => + case 7 :: Nil => + case Nil => + } + + List(5) match { + case 1 :: Nil | 2 :: Nil => + case (x@(4 | 5 | 6)) :: Nil => + case 7 :: Nil => + case Nil => + } + + List(5) match { + case 1 :: Nil | 2 :: Nil => + case (x@(4 | 5 | 6)) :: Nil => + case 7 :: Nil => + case Nil => + } + + List(5) match { + case 1 :: Nil | 2 :: Nil => + case (x@(4 | 5 | 6)) :: Nil => + case 7 :: Nil => + case Nil => + } +} diff --git a/tests/patmat/t7206.scala.ignore b/tests/patmat/t7206.scala.ignore new file mode 100644 index 000000000..0133f1808 --- /dev/null +++ b/tests/patmat/t7206.scala.ignore @@ -0,0 +1,19 @@ +object E extends Enumeration { + val V = Value +} + +sealed case class C(e: E.Value) + +class Test { + def foo(c: C) = { + c match { + case C(E.V) => {} + } + } + + def foo2(e: E.Value) = { + e match { + case E.V => {} + } + } +} diff --git a/tests/patmat/t7285.check b/tests/patmat/t7285.check new file 
mode 100644 index 000000000..703706cdc --- /dev/null +++ b/tests/patmat/t7285.check @@ -0,0 +1,13 @@ +./tests/patmat/t7285.scala:15: warning: match may not be exhaustive. +It would fail on the following input: (Up, Down) + (d1, d2) match { + ^ +./tests/patmat/t7285.scala:33: warning: match may not be exhaustive. +It would fail on the following input: Down + (d1) match { + ^ +./tests/patmat/t7285.scala:51: warning: match may not be exhaustive. +It would fail on the following input: (Base.Up, Base.Down) + (d1, d2) match { + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/t7285.scala b/tests/patmat/t7285.scala new file mode 100644 index 000000000..d40df7fe8 --- /dev/null +++ b/tests/patmat/t7285.scala @@ -0,0 +1,55 @@ +sealed abstract class Base + + +object Test1 { + sealed abstract class Base + + object Base { + case object Down extends Base { + } + + case object Up extends Base { + } + + def foo(d1: Base, d2: Base) = + (d1, d2) match { + case (Up, Up) | (Down, Down) => false + case (Down, Up) => true + } + } +} + +object Test2 { + sealed abstract class Base + + object Base { + case object Down extends Base { + } + + case object Up extends Base { + } + + def foo(d1: Base, d2: Base) = + (d1) match { + case Test2.Base.Up => false + } + } +} + + +object Test4 { + sealed abstract class Base + + object Base { + case object Down extends Base + + case object Up extends Base + } + + import Test4.Base._ + def foo(d1: Base, d2: Base) = + (d1, d2) match { + case (Up, Up) | (Down, Down) => false + case (Down, Test4.Base.Up) => true + } +} diff --git a/tests/patmat/t7285a.scala b/tests/patmat/t7285a.scala new file mode 100644 index 000000000..49f6b663b --- /dev/null +++ b/tests/patmat/t7285a.scala @@ -0,0 +1,83 @@ +sealed abstract class Base + +object Test { + case object Up extends Base + + def foo(d1: Base) = + d1 match { + case Up => + } + + // Sealed subtype: ModuleTypeRef <empty>.this.Test.Up.type + // Pattern: UniqueThisType Test.this.type +} + + +object Test1 { + sealed abstract class Base + + object Base { + case object Down extends Base { + } + + case object Up extends Base { + } + + def foo(d1: Base, d2: Base) = + (d1, d2) match { + case (Up, Up) | (Down, Down) => false + case (Down, Up) => true + case (Up, Down) => false + } + } +} + +object Test2 { + sealed abstract class Base + + object Base { + case object Down extends Base { + } + + case object Up extends Base { + } + + def foo(d1: Base, d2: Base) = + (d1) match { + case Up | Down => false + } + } +} + +object Test3 { + sealed 
abstract class Base + + object Base { + case object Down extends Base + + def foo(d1: Base, d2: Base) = + (d1, d2) match { + case (Down, Down) => false + } + } +} + +object Test4 { + sealed abstract class Base + + object Base { + case object Down extends Base { + } + + case object Up extends Base { + } + + } + import Test4.Base._ + def foo(d1: Base, d2: Base) = + (d1, d2) match { + case (Up, Up) | (Down, Down) => false + case (Down, Test4.Base.Up) => true + case (Up, Down) => false + } +} diff --git a/tests/patmat/t7298.scala b/tests/patmat/t7298.scala new file mode 100644 index 000000000..6fba5e120 --- /dev/null +++ b/tests/patmat/t7298.scala @@ -0,0 +1,11 @@ +sealed trait Bool + +object Bool { + case object FALSE extends Bool + case object TRUE extends Bool + + def show(b: Bool) = b match { + case FALSE => "1" + case TRUE => "2" + } +} diff --git a/tests/patmat/t7353.scala b/tests/patmat/t7353.scala new file mode 100644 index 000000000..7a8fea115 --- /dev/null +++ b/tests/patmat/t7353.scala @@ -0,0 +1,11 @@ +sealed trait EthernetType + +object EthernetType { + final case object Gigabit extends EthernetType + final case object FastEthernet extends EthernetType + + final def toInt(t: EthernetType) = t match { + case Gigabit => 1 + case FastEthernet => 2 + } +}
\ No newline at end of file diff --git a/tests/patmat/t7437.scala b/tests/patmat/t7437.scala new file mode 100644 index 000000000..b0c5dff7c --- /dev/null +++ b/tests/patmat/t7437.scala @@ -0,0 +1,17 @@ +sealed trait IntegralNumber +sealed trait FiniteNumber extends IntegralNumber + +object IntegralNumber { + + sealed abstract class BaseNumber extends IntegralNumber + sealed abstract class NonFinite extends BaseNumber + object NaN extends NonFinite + sealed abstract class FiniteNumberImpl[N](val value: N) extends BaseNumber with FiniteNumber + sealed class IntNumber(value: Int) extends FiniteNumberImpl[Int](value) + + def test(t: IntNumber, o: IntegralNumber) = o match { + case NaN => -1 + case o: IntNumber => t.value.compare(o.value) + } + +}
\ No newline at end of file diff --git a/tests/patmat/t7466.check b/tests/patmat/t7466.check new file mode 100644 index 000000000..8e575f6a2 --- /dev/null +++ b/tests/patmat/t7466.check @@ -0,0 +1,5 @@ +./tests/patmat/t7466.scala:8: warning: match may not be exhaustive. +It would fail on the following input: (_, _) + (b1, b2) match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t7466.scala b/tests/patmat/t7466.scala new file mode 100644 index 000000000..a74bf4ee2 --- /dev/null +++ b/tests/patmat/t7466.scala @@ -0,0 +1,17 @@ +object Test extends App { + val Yes1 = true + val Yes2 = true + val No1 = false + val No2 = false + + def test(b1: Boolean, b2: Boolean) = { + (b1, b2) match { + case (No1, No2) => println("1") + case (No1, Yes2) => println("2") + case (Yes1, No2) => println("3") + case (Yes1, Yes2) => println("4") + } + } + + test(No1, Yes2) +}
\ No newline at end of file diff --git a/tests/patmat/t7631.check b/tests/patmat/t7631.check new file mode 100644 index 000000000..ede3703e2 --- /dev/null +++ b/tests/patmat/t7631.check @@ -0,0 +1,5 @@ +./tests/patmat/t7631.scala:8: warning: match may not be exhaustive. +It would fail on the following input: TestB() + val x = input match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t7631.scala b/tests/patmat/t7631.scala new file mode 100644 index 000000000..13e74183f --- /dev/null +++ b/tests/patmat/t7631.scala @@ -0,0 +1,11 @@ +sealed trait Test +case class TestA() extends Test +case class TestB() extends Test + +object Tester { + val input : Test = TestA() + val num = 3 + val x = input match { + case TestA() if num == 3 => 2 + } +}
\ No newline at end of file diff --git a/tests/patmat/t7669.check b/tests/patmat/t7669.check new file mode 100644 index 000000000..2804dbf5c --- /dev/null +++ b/tests/patmat/t7669.check @@ -0,0 +1,5 @@ +./tests/patmat/t7669.scala:10: warning: match may not be exhaustive. +It would fail on the following input: NotHandled(_) + def exhausto(expr: Expr): Unit = expr match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t7669.scala b/tests/patmat/t7669.scala new file mode 100644 index 000000000..3aa74129e --- /dev/null +++ b/tests/patmat/t7669.scala @@ -0,0 +1,14 @@ +object Test { + + sealed abstract class Expr + // Change type of `arg` to `Any` and the exhaustiveness warning + // is issued below + case class Op(arg: Expr) extends Expr + case class NotHandled(num: Double) extends Expr + + + def exhausto(expr: Expr): Unit = expr match { + case Op(Op(_)) => + case Op(_) => + } +} diff --git a/tests/patmat/t7746.check b/tests/patmat/t7746.check new file mode 100644 index 000000000..be4c53570 --- /dev/null +++ b/tests/patmat/t7746.check @@ -0,0 +1,5 @@ +./tests/patmat/t7746.scala:2: warning: match may not be exhaustive. +It would fail on the following input: Some(_), None + def f[T](x: Option[T]) = x match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t7746.scala b/tests/patmat/t7746.scala new file mode 100644 index 000000000..91f3823a4 --- /dev/null +++ b/tests/patmat/t7746.scala @@ -0,0 +1,5 @@ +object Test { + def f[T](x: Option[T]) = x match { + case Some(Some(5)) => true + } +}
\ No newline at end of file diff --git a/tests/patmat/t8068.scala b/tests/patmat/t8068.scala new file mode 100644 index 000000000..9837b7381 --- /dev/null +++ b/tests/patmat/t8068.scala @@ -0,0 +1,14 @@ +trait K[A] { + sealed trait T + case class C(x: Int) extends T + case object O extends T +} + +object Hello { + def f[A](k: K[A])(t: k.T) = { + t match { + case k.C(x) => ??? + case k.O => ??? + } + } +} diff --git a/tests/patmat/t8178.check b/tests/patmat/t8178.check new file mode 100644 index 000000000..963845f53 --- /dev/null +++ b/tests/patmat/t8178.check @@ -0,0 +1,13 @@ +./tests/patmat/t8178.scala:6: warning: match may not be exhaustive. +It would fail on the following input: FailsChild2(_) + f match { + ^ +./tests/patmat/t8178.scala:14: warning: match may not be exhaustive. +It would fail on the following input: VarArgs1(_) + f match { + ^ +./tests/patmat/t8178.scala:27: warning: match may not be exhaustive. +It would fail on the following input: SeqArgs2(_) + f match { + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/t8178.scala b/tests/patmat/t8178.scala new file mode 100644 index 000000000..4fb39955b --- /dev/null +++ b/tests/patmat/t8178.scala @@ -0,0 +1,33 @@ +sealed trait Fails +case class VarArgs1(a: String*) extends Fails +case class FailsChild2(a: Seq[String]) extends Fails +object FailsTest { + def matchOnVarArgsFirstFails(f: Fails) = { + f match { + case VarArgs1(_) => ??? + // BUG: Without this line we should get a non-exhaustive match compiler error. + //case FailsChild2(_) => ??? + } + } + + def matchOnSeqArgsFirstWorks(f: Fails) = { + f match { + case FailsChild2(_) => ??? + // Without this line, the compiler reports a "match may not be exhaustive" error as expected. + // case VarArgs1(_) => ??? + } + } +} + +sealed trait Works +case class SeqArgs1(a: Seq[String]) extends Works +case class SeqArgs2(a: Seq[String]) extends Works +object WorksTest { + def matcher(f: Works) = { + f match { + case SeqArgs1(_) => ??? + // Without this line, the compiler reports a "match may not be exhaustive" error as expected. + // case SeqArgs2(_) => ??? + } + } +}
\ No newline at end of file diff --git a/tests/patmat/t8412.check b/tests/patmat/t8412.check new file mode 100644 index 000000000..b82b33999 --- /dev/null +++ b/tests/patmat/t8412.check @@ -0,0 +1,5 @@ +./tests/patmat/t8412.scala:7: warning: match may not be exhaustive. +It would fail on the following input: Lit(_) + tree match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t8412.scala b/tests/patmat/t8412.scala new file mode 100644 index 000000000..f4b2b6090 --- /dev/null +++ b/tests/patmat/t8412.scala @@ -0,0 +1,14 @@ +sealed trait Tree +case class Let(sth: List[Any]) extends Tree +case class Lit(sth: Any) extends Tree + +object Test { + def wroong(tree: Tree) = + tree match { + case Let(_ :: rest) => + ??? + case Let(Nil) => + ??? + // no warning for missing Lit(_) in 2.10 + } +} diff --git a/tests/patmat/t8430.check b/tests/patmat/t8430.check new file mode 100644 index 000000000..4493062bf --- /dev/null +++ b/tests/patmat/t8430.check @@ -0,0 +1,5 @@ +./tests/patmat/t8430.scala:15: warning: match may not be exhaustive. +It would fail on the following input: LetF, LetC, LetP, LetL(UnitLit), LetL(BooleanLit), LetL(IntLit) + def transform(tree: Tree) : Any = tree match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t8430.scala b/tests/patmat/t8430.scala new file mode 100644 index 000000000..ccd4585d9 --- /dev/null +++ b/tests/patmat/t8430.scala @@ -0,0 +1,19 @@ +sealed trait CL3Literal +case object IntLit extends CL3Literal +case object CharLit extends CL3Literal +case object BooleanLit extends CL3Literal +case object UnitLit extends CL3Literal + + +sealed trait Tree +case class LetL(value: CL3Literal) extends Tree +case object LetP extends Tree +case object LetC extends Tree +case object LetF extends Tree + +object Test { + def transform(tree: Tree) : Any = tree match { + case LetL(CharLit) => + ??? + } +} diff --git a/tests/patmat/t8511.check b/tests/patmat/t8511.check new file mode 100644 index 000000000..df07d019a --- /dev/null +++ b/tests/patmat/t8511.check @@ -0,0 +1,5 @@ +./tests/patmat/t8511.scala:18: warning: match may not be exhaustive. +It would fail on the following input: Baz(), Bar(_) + private def logic(head: Expr): String = head match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t8511.scala b/tests/patmat/t8511.scala new file mode 100644 index 000000000..bc7f64713 --- /dev/null +++ b/tests/patmat/t8511.scala @@ -0,0 +1,25 @@ +sealed trait Expr +final case class Foo(other: Option[String]) extends Expr +final case class Bar(someConstant: String) extends Expr +final case class Baz() extends Expr +final case class EatsExhaustiveWarning(other: Reference) extends Expr + +sealed trait Reference { + val value: String +} + +object Reference { + def unapply(reference: Reference): Option[(String)] = { + Some(reference.value) + } +} + +object EntryPoint { + private def logic(head: Expr): String = head match { + case Foo(_) => + ??? + // Commenting this line only causes the exhaustive search warning to be emitted + case EatsExhaustiveWarning(Reference(text)) => + ??? + } +}
\ No newline at end of file diff --git a/tests/patmat/t8546.scala b/tests/patmat/t8546.scala new file mode 100644 index 000000000..c39d749b4 --- /dev/null +++ b/tests/patmat/t8546.scala @@ -0,0 +1,49 @@ +package test + +class F1() { + private sealed abstract class T + private case class A(m: Int) extends T + private case class B() extends T + private case object C extends T + + // No warnings here + private def foo(t: T) = t match { + case A(m) => println("A:" + m) + case B() => println("B") + case C => println("C") + } + + def test(m: Int): Unit = { + foo(A(m)) + foo(B()) + foo(C) + } +} + +class F2[M]() { + private sealed abstract class T + private case class A(m: M) extends T + private case class B() extends T + private case object C extends T + + // match may not be exhaustive. It would fail on the following input: C + private def foo(t: T) = t match { + case A(m) => println("A:" + m) + case B() => println("B") + case C => println("C") + } + + def test(m: M): Unit = { + foo(A(m)) + foo(B()) + foo(C) + } + +} + +object Test { + def main(args: Array[String]): Unit = { + new F1().test(1) + new F2[Int]().test(1) + } +}
\ No newline at end of file diff --git a/tests/patmat/t8606.scala b/tests/patmat/t8606.scala new file mode 100644 index 000000000..9388c9f02 --- /dev/null +++ b/tests/patmat/t8606.scala @@ -0,0 +1,18 @@ +class Cl[T] { + + sealed trait A { + def foo = this match { + case AObj => 0 + case BObj => 0 + case ACls(x) => 0 + case BCls(x) => 0 + } + } + + case object AObj extends A + case class ACls(x: Int) extends A + + sealed trait B extends A + case object BObj extends B + case class BCls(x: Int) extends B +} diff --git a/tests/patmat/t8700a/Bar.scala b/tests/patmat/t8700a/Bar.scala new file mode 100644 index 000000000..33ad8e987 --- /dev/null +++ b/tests/patmat/t8700a/Bar.scala @@ -0,0 +1,9 @@ +object Bar { + def bar1(foo: Foo) = foo match { + case Foo.A => 1 + } + + def bar2(foo: Baz) = foo match { + case Baz.A => 1 + } +} diff --git a/tests/patmat/t8700a/Baz.java b/tests/patmat/t8700a/Baz.java new file mode 100644 index 000000000..49f15e121 --- /dev/null +++ b/tests/patmat/t8700a/Baz.java @@ -0,0 +1,11 @@ +public enum Baz { + A { + public void baz1() {} + }, + B { + public void baz1() {} + }; + + public abstract void baz1(); + public void baz2() {} +} diff --git a/tests/patmat/t8700a/Foo.java b/tests/patmat/t8700a/Foo.java new file mode 100644 index 000000000..cc8e9daf1 --- /dev/null +++ b/tests/patmat/t8700a/Foo.java @@ -0,0 +1,4 @@ +public enum Foo { + A, + B +} diff --git a/tests/patmat/t8700a/expected.check b/tests/patmat/t8700a/expected.check new file mode 100644 index 000000000..83f1c5a9e --- /dev/null +++ b/tests/patmat/t8700a/expected.check @@ -0,0 +1,9 @@ +./tests/patmat/t8700a/Bar.scala:2: warning: match may not be exhaustive. +It would fail on the following input: B + def bar1(foo: Foo) = foo match { + ^ +./tests/patmat/t8700a/Bar.scala:6: warning: match may not be exhaustive. 
+It would fail on the following input: B + def bar2(foo: Baz) = foo match { + ^ +two warnings found diff --git a/tests/patmat/t9129.check b/tests/patmat/t9129.check new file mode 100644 index 000000000..aa722a61a --- /dev/null +++ b/tests/patmat/t9129.check @@ -0,0 +1,5 @@ +./tests/patmat/t9129.scala:21: warning: match may not be exhaustive. +It would fail on the following input: Two(B2, A2), Two(_, A2) + def foo(c: C): Unit = c match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t9129.scala b/tests/patmat/t9129.scala new file mode 100644 index 000000000..89f08f0ac --- /dev/null +++ b/tests/patmat/t9129.scala @@ -0,0 +1,29 @@ +object Test { + + sealed abstract class A + + case object A1 extends A + + case object A2 extends A + + sealed abstract class B + + case object B1 extends B + + case object B2 extends B + + sealed abstract class C + + final case class One(a: A, b: B) extends C + + final case class Two(b: B, a: A) extends C + + def foo(c: C): Unit = c match { + case One(A1, B1) => + case One(A2, B1) => + case One(A1, B2) => + case One(A2, B2) => + case Two(B1, A1) => + case Two(B2, A1) => + } +}
\ No newline at end of file diff --git a/tests/patmat/t9232.check b/tests/patmat/t9232.check new file mode 100644 index 000000000..c3957c0ff --- /dev/null +++ b/tests/patmat/t9232.check @@ -0,0 +1,5 @@ +./tests/patmat/t9232.scala:13: warning: match may not be exhaustive. +It would fail on the following input: Node2() + def transformTree(tree: Tree): Any = tree match { + ^ +one warning found diff --git a/tests/patmat/t9232.scala b/tests/patmat/t9232.scala new file mode 100644 index 000000000..975ec58db --- /dev/null +++ b/tests/patmat/t9232.scala @@ -0,0 +1,16 @@ +final class Foo(val value: Int) + +object Foo { + def unapplySeq(foo: Foo): Some[Seq[Int]] = Some(List(foo.value)) + // def unapply(foo: Foo): Some[Int] = Some(foo.value) +} + +sealed trait Tree +case class Node1(foo: Foo) extends Tree +case class Node2() extends Tree + +object Test { + def transformTree(tree: Tree): Any = tree match { + case Node1(Foo(1)) => ??? + } +} diff --git a/tests/patmat/t9289.check b/tests/patmat/t9289.check new file mode 100644 index 000000000..5240988e2 --- /dev/null +++ b/tests/patmat/t9289.check @@ -0,0 +1,9 @@ +./tests/patmat/t9289.scala:9: warning: match may not be exhaustive. +It would fail on the following input: module.LetR() + def patmat(tree: module.Tree) = tree match { + ^ +./tests/patmat/t9289.scala:20: warning: match may not be exhaustive. +It would fail on the following input: module.LetR() + def patmat(tree: module.Tree) = tree match { + ^ +two warnings found
\ No newline at end of file diff --git a/tests/patmat/t9289.scala b/tests/patmat/t9289.scala new file mode 100644 index 000000000..714a4a0e3 --- /dev/null +++ b/tests/patmat/t9289.scala @@ -0,0 +1,28 @@ +trait Module { + sealed trait Tree + + case class LetL() extends Tree + case class LetR() extends Tree +} + +class Patmat[T <: Module](val module: T) { + def patmat(tree: module.Tree) = tree match { + case module.LetL() => ??? + } + + def exhaust(tree: module.Tree) = tree match { + case module.LetL() => ??? + case module.LetR() => ??? + } +} + +class Patmat2(val module: Module) { + def patmat(tree: module.Tree) = tree match { + case module.LetL() => ??? + } + + def exhaust(tree: module.Tree) = tree match { + case module.LetL() => ??? + case module.LetR() => ??? + } +} diff --git a/tests/patmat/t9351.check b/tests/patmat/t9351.check new file mode 100644 index 000000000..03b94c2c0 --- /dev/null +++ b/tests/patmat/t9351.check @@ -0,0 +1,13 @@ +./tests/patmat/t9351.scala:8: warning: match may not be exhaustive. +It would fail on the following input: _: A + a match { + ^ +./tests/patmat/t9351.scala:17: warning: match may not be exhaustive. +It would fail on the following input: (_, _), (_, None), (_, Some(_)) + (a, o) match { + ^ +./tests/patmat/t9351.scala:28: warning: match may not be exhaustive. +It would fail on the following input: (_, _) + (a, b) match { + ^ +three warnings found
\ No newline at end of file diff --git a/tests/patmat/t9351.scala b/tests/patmat/t9351.scala new file mode 100644 index 000000000..9b9bd4312 --- /dev/null +++ b/tests/patmat/t9351.scala @@ -0,0 +1,35 @@ +trait A {} +case object B extends A {} +case object C extends A {} + +class X { + def good = { + val a: A = B + a match { + case B => + case C => + } + } + + def bad = { + val a: A = B + val o: Option[Int] = None + (a, o) match { + case (B, None) => + case (B, Some(_)) => + case (C, None) => + case (C, Some(_)) => + } + } + + def alsoGood = { + val a: A = B + val b: A = C + (a, b) match { + case (B, B) => + case (B, C) => + case (C, B) => + case (C, C) => + } + } +} diff --git a/tests/patmat/t9398.check b/tests/patmat/t9398.check new file mode 100644 index 000000000..0efbf231d --- /dev/null +++ b/tests/patmat/t9398.check @@ -0,0 +1,5 @@ +./tests/patmat/t9398.scala:11: warning: match may not be exhaustive. +It would fail on the following input: CC(_, B2) + case CC(_, B) => () + ^ +one warning found diff --git a/tests/patmat/t9398.scala b/tests/patmat/t9398.scala new file mode 100644 index 000000000..6d4d6bd3b --- /dev/null +++ b/tests/patmat/t9398.scala @@ -0,0 +1,13 @@ +sealed abstract class TA +sealed abstract class TB extends TA +case object B extends TB +case object B2 extends TB + +case class CC(i: Int, tb: TB) + +object Test { + // Should warn that CC(_, B2) isn't matched + def foo: CC => Unit = { + case CC(_, B) => () + } +}
\ No newline at end of file diff --git a/tests/patmat/t9399.scala b/tests/patmat/t9399.scala new file mode 100644 index 000000000..89dbedd96 --- /dev/null +++ b/tests/patmat/t9399.scala @@ -0,0 +1,16 @@ +sealed abstract class TA +sealed abstract class TB extends TA +case object A extends TA +case object B extends TB + +sealed trait C +case class CTA(id: Int, da: TA) extends C +case class CTB(id: Int, da: TB) extends C + +object Test { + val test: C => Unit = { + case CTA(_, A) => + case CTA(_, B) => + case CTB(_, B) => + } +} diff --git a/tests/patmat/t9411a.scala b/tests/patmat/t9411a.scala new file mode 100644 index 000000000..d5264663e --- /dev/null +++ b/tests/patmat/t9411a.scala @@ -0,0 +1,27 @@ +object OhNoes { + + sealed trait F + sealed abstract class FA extends F + sealed abstract class FB extends F + + case object FA1 extends FA + case object FB1 extends FB + case object FB2 extends FB + + sealed trait G + case object G1 extends G + case object G2 extends G + + sealed trait H + case class H1(a: FB, b: G) extends H + case class H2(a: F) extends H + + val demo: H => Unit = { + case H1(FB1, G1) => + case H1(FB2, G2) => + case H2(_: FB) => + case H2(_: FA) => + case H1(FB1, G2) => + case H1(FB2, G1) => + } +} diff --git a/tests/patmat/t9411b.scala b/tests/patmat/t9411b.scala new file mode 100644 index 000000000..6888ba938 --- /dev/null +++ b/tests/patmat/t9411b.scala @@ -0,0 +1,36 @@ +object OhNoes { + + sealed trait F + sealed abstract class FA extends F + sealed abstract class FB extends F + + case object FA1 extends FA + case object FB1 extends FB + case object FB2 extends FB + + sealed trait G + case object G1 extends G + case object G2 extends G + + sealed trait H + case class H1(a: FB, b: G) extends H + case class H2(b: F) extends H + + val demo: H => Unit = { + case H1(FB1, G1) => + case H1(FB2, G2) => + case H2(_: FB) => + case H2(_: FA) => + case H1(FB1, G2) => + case H1(FB2, G1) => + } + + val demo2: H => Unit = { + case H2(_: FA) => + case H2(_: 
FB) => + case H1(FB1, G1) => + case H1(FB2, G1) => + case H1(FB1, G2) => + case H1(FB2, G2) => + } +} diff --git a/tests/patmat/t9573.check b/tests/patmat/t9573.check new file mode 100644 index 000000000..4ec379161 --- /dev/null +++ b/tests/patmat/t9573.check @@ -0,0 +1,5 @@ +./tests/patmat/t9573.scala:9: warning: match may not be exhaustive. +It would fail on the following input: Horse(_) + x match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t9573.scala b/tests/patmat/t9573.scala new file mode 100644 index 000000000..2a32c2599 --- /dev/null +++ b/tests/patmat/t9573.scala @@ -0,0 +1,13 @@ +class Foo { + + def foo = { + abstract sealed class Animal + case class Goat(age: Int) extends Animal + case class Horse(age: Int) extends Animal + + val x: Animal = Goat(1) + x match { + case Goat(_) => println("a goat") + } + } +}
\ No newline at end of file diff --git a/tests/patmat/t9630.scala b/tests/patmat/t9630.scala new file mode 100644 index 000000000..c846faa99 --- /dev/null +++ b/tests/patmat/t9630.scala @@ -0,0 +1,21 @@ +sealed trait OpError +sealed trait RequestErrorType +sealed trait ProcessingErrorType + +final case class InvalidEndpoint(reason: String) extends RequestErrorType +final case class InvalidParameters(reason: String) extends RequestErrorType + +final case class InvalidFormat(response: String) extends ProcessingErrorType +final case class EntityNotFound(id: Long) extends ProcessingErrorType + +final case class RequestError(errorType: RequestErrorType) extends OpError +final case class ProcessingError(errorType: ProcessingErrorType) extends OpError + +object Test{ + def printMatches(error: OpError): Unit = error match { + case RequestError(InvalidEndpoint(reason)) => //print something + case RequestError(InvalidParameters(reason)) => //print something + case ProcessingError(InvalidFormat(format)) => //print something + case ProcessingError(EntityNotFound(entityId)) => //print something + } +}
\ No newline at end of file diff --git a/tests/patmat/t9657.check b/tests/patmat/t9657.check new file mode 100644 index 000000000..d3e2ec73f --- /dev/null +++ b/tests/patmat/t9657.check @@ -0,0 +1,17 @@ +./tests/patmat/t9657.scala:29: warning: match may not be exhaustive. +It would fail on the following input: Bus(_) + def refuel2[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match { + ^ +./tests/patmat/t9657.scala:38: warning: match may not be exhaustive. +It would fail on the following input: Bus(_) + def foo2(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match { + ^ +./tests/patmat/t9657.scala:49: warning: match may not be exhaustive. +It would fail on the following input: Bus(_) + def bar2(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + ^ +./tests/patmat/t9657.scala:58: warning: match may not be exhaustive. +It would fail on the following input: Bus(_) + def qux2[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + ^ +four warnings found
\ No newline at end of file diff --git a/tests/patmat/t9657.scala b/tests/patmat/t9657.scala new file mode 100644 index 000000000..f9769574e --- /dev/null +++ b/tests/patmat/t9657.scala @@ -0,0 +1,62 @@ +sealed trait PowerSource + +case object Petrol extends PowerSource + +case object Pedal extends PowerSource + +sealed abstract class Vehicle { + type A <: PowerSource +} + +case object Bicycle extends Vehicle { + type A = Pedal.type +} + +case class Bus(fuel: Int) extends Vehicle { + type A = Petrol.type +} + +case class Car(fuel: Int) extends Vehicle { + type A = Petrol.type +} + +class Test { + def refuel[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + case Bus(_) => Bus(100) + } + + def refuel2[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + } + + def foo1(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match { + case Car(_) => Car(100) + case Bus(_) => Bus(100) + } + + def foo2(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match { + case Car(_) => Car(100) + } + + type P = Petrol.type + + def bar1(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + case Bus(_) => Bus(100) + } + + def bar2(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + } + + def qux1[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + case Bus(_) => Bus(100) + } + + def qux2[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match { + case Car(_) => Car(100) + } + +} diff --git a/tests/patmat/t9672.check b/tests/patmat/t9672.check new file mode 100644 index 000000000..3284d1df1 --- /dev/null +++ b/tests/patmat/t9672.check @@ -0,0 +1,5 @@ +./tests/patmat/t9672.scala:22: warning: match may not be exhaustive. 
+It would fail on the following input: SimpleExpr.IntExpr(_) + def func(expr: Expr) = expr match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t9672.scala b/tests/patmat/t9672.scala new file mode 100644 index 000000000..fe068f3d5 --- /dev/null +++ b/tests/patmat/t9672.scala @@ -0,0 +1,28 @@ +trait Hierarchy { + sealed trait Expr +} +trait If { + this: Hierarchy => + case class If(cond: Expr, yes: Expr, no: Expr) extends Expr +} +trait Word { + this: Hierarchy => + case class Word(name: String) extends Expr +} +trait IntExpr { + this: Hierarchy => + case class IntExpr(value : Int) extends Expr +} + +object SimpleExpr extends Hierarchy with If with Word with IntExpr +//object OtherExpr extends Hierarchy with If with IntExpr + +object Demo extends App { + import SimpleExpr._ + def func(expr: Expr) = expr match { + case If(cond, yes, no) => cond + case Word(name) => name + // compiler should emit warning "missing case statement" + // emits the wrong warning "unreachable code" + } +}
\ No newline at end of file diff --git a/tests/patmat/t9677.check b/tests/patmat/t9677.check new file mode 100644 index 000000000..f1e1817cb --- /dev/null +++ b/tests/patmat/t9677.check @@ -0,0 +1,4 @@ +./tests/patmat/t9677.scala:20: warning: unreachable code + case path: A => println("Not root") + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t9677.scala b/tests/patmat/t9677.scala new file mode 100644 index 000000000..1e9b1df5e --- /dev/null +++ b/tests/patmat/t9677.scala @@ -0,0 +1,23 @@ +sealed abstract class Base + +sealed trait A extends Base + +object A { + + case object Root extends Base + + def apply(param: String): A = { + new A {} + } +} + +object ExhaustiveMatchWarning { + + def test: Unit = { + val b: Base = A("blabla") + b match { + case A.Root => println("Root") + case path: A => println("Not root") + } + } +}
\ No newline at end of file diff --git a/tests/patmat/t9779.check b/tests/patmat/t9779.check new file mode 100644 index 000000000..0e0d8d5f4 --- /dev/null +++ b/tests/patmat/t9779.check @@ -0,0 +1,5 @@ +./tests/patmat/t9779.scala:10: warning: match may not be exhaustive. +It would fail on the following input: _: a.Elem + private def toLuaValue(eX: a.Elem[_]): String = eX match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/t9779.scala b/tests/patmat/t9779.scala new file mode 100644 index 000000000..9c418b0b1 --- /dev/null +++ b/tests/patmat/t9779.scala @@ -0,0 +1,13 @@ +trait Elems { + sealed class Elem[A] extends Dummy + + val UnitElement: Elem[Unit] + + trait Dummy +} + +class BadMatch[A <: Elems](a: A) { + private def toLuaValue(eX: a.Elem[_]): String = eX match { + case a.UnitElement => "" // type mismatch + } +}
\ No newline at end of file diff --git a/tests/patmat/try.scala b/tests/patmat/try.scala new file mode 100644 index 000000000..d7df24ee0 --- /dev/null +++ b/tests/patmat/try.scala @@ -0,0 +1,5 @@ +object Test { + try 2/0 catch { + case e: Exception => + } +}
\ No newline at end of file diff --git a/tests/patmat/tuple.scala b/tests/patmat/tuple.scala new file mode 100644 index 000000000..f33a5cfec --- /dev/null +++ b/tests/patmat/tuple.scala @@ -0,0 +1,5 @@ +object Test { + (4, (4, 6)) match { + case (x, (y, z)) => true + } +}
\ No newline at end of file diff --git a/tests/patmat/virtpatmat_apply.check b/tests/patmat/virtpatmat_apply.check new file mode 100644 index 000000000..d10d82165 --- /dev/null +++ b/tests/patmat/virtpatmat_apply.check @@ -0,0 +1,5 @@ +./tests/patmat/virtpatmat_apply.scala:2: warning: match may not be exhaustive. +It would fail on the following input: List(_) + List(1, 2, 3) match { + ^ +one warning found
\ No newline at end of file diff --git a/tests/patmat/virtpatmat_apply.scala b/tests/patmat/virtpatmat_apply.scala new file mode 100644 index 000000000..646d15f90 --- /dev/null +++ b/tests/patmat/virtpatmat_apply.scala @@ -0,0 +1,7 @@ +object Test { + List(1, 2, 3) match { + case Nil => println("FAIL") + case x :: y :: xs if xs.length == 2 => println("FAIL") + case x :: y :: xs if xs.length == 1 => println("OK "+ y) + } +} diff --git a/tests/patmat/virtpatmat_exhaust_compound.check b/tests/patmat/virtpatmat_exhaust_compound.check new file mode 100644 index 000000000..72e034068 --- /dev/null +++ b/tests/patmat/virtpatmat_exhaust_compound.check @@ -0,0 +1,15 @@ +virtpatmat_exhaust_compound.scala:14: warning: match may not be exhaustive. +It would fail on the following inputs: O1, O2, O4 + a match { + ^ +virtpatmat_exhaust_compound.scala:18: warning: match may not be exhaustive. +It would fail on the following input: O4 + def t1(a: Product with Base with Base2) = a match { + ^ +virtpatmat_exhaust_compound.scala:22: warning: match may not be exhaustive. +It would fail on the following input: O2 + def t2(a: Product with Base { def foo: Int }) = a match { + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+three warnings found +one error found diff --git a/tests/patmat/virtpatmat_exhaust_compound.scala.ignore b/tests/patmat/virtpatmat_exhaust_compound.scala.ignore new file mode 100644 index 000000000..4ff04dd06 --- /dev/null +++ b/tests/patmat/virtpatmat_exhaust_compound.scala.ignore @@ -0,0 +1,29 @@ +sealed trait Base +case object O1 extends Base +case object O2 extends Base { + def foo: Int = 0 +} + +sealed trait Base2 +case object O3 extends Base2 + +case object O4 extends Base with Base2 + +object Test { + val a /*: Product with Serializable with Base */ = if (true) O1 else O2 + a match { + case null => + } + + def t1(a: Product with Base with Base2) = a match { + case null => // O1..O3 should *not* be possible here + } + + def t2(a: Product with Base { def foo: Int }) = a match { + case null => // O2 in the domain + } + + def t3(a: Product with Base { def bar: Int }) = a match { + case null => // nothing in the domain + } +} diff --git a/tests/patmat/virtpatmat_reach_sealed_unsealed.check b/tests/patmat/virtpatmat_reach_sealed_unsealed.check new file mode 100644 index 000000000..ef5ec1a00 --- /dev/null +++ b/tests/patmat/virtpatmat_reach_sealed_unsealed.check @@ -0,0 +1,11 @@ +./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:16: warning: match may not be exhaustive. 
+It would fail on the following input: false + (true: Boolean) match { case true => } // not exhaustive, but reachable + ^ +./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:18: warning: unreachable code + (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable + ^ +./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code + (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable + ^ +three warnings found diff --git a/tests/patmat/virtpatmat_reach_sealed_unsealed.scala b/tests/patmat/virtpatmat_reach_sealed_unsealed.scala new file mode 100644 index 000000000..13911dbd7 --- /dev/null +++ b/tests/patmat/virtpatmat_reach_sealed_unsealed.scala @@ -0,0 +1,21 @@ +sealed abstract class X +sealed case class A(x: Int) extends X + +// test reachability on mixed sealed / non-sealed matches +object Test extends App { + val B: X = A(0) + val C: X = A(1) + + // all cases are reachable and the match is exhaustive + (C: X) match { + case B => + case C => + case A(_) => + } + + (true: Boolean) match { case true => } // not exhaustive, but reachable + (true: Boolean) match { case true => case false => } // exhaustive, reachable + (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable + (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable + (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable +}
\ No newline at end of file diff --git a/tests/pending/hkt/compiler.error b/tests/pending/hkt/compiler.error new file mode 100644 index 000000000..b31760891 --- /dev/null +++ b/tests/pending/hkt/compiler.error @@ -0,0 +1,6 @@ +$ scalac tests/pending/hkt/*.scala +$ ./bin/dotc tests/pending/hkt/*.scala +tests/pending/hkt/hkt.scala:14: error: method empty in object Child does not take type parameters + Child.empty[Int] + ^ +one error found diff --git a/tests/pending/hkt/hkt.scala b/tests/pending/hkt/hkt.scala new file mode 100644 index 000000000..34858cd95 --- /dev/null +++ b/tests/pending/hkt/hkt.scala @@ -0,0 +1,15 @@ +import scala.language.higherKinds +// Minimal reproduction for: +// scala.collection.mutable.ArrayStack.empty[Int] + +abstract class Super[C[_]] { + def empty[T]: C[T] = ??? +} + +class Child[T] + +object Child extends Super[Child] { + def empty: Child[Nothing] = new Child() + + Child.empty[Int] +} diff --git a/tests/pending/import-rewrite/compiler.error b/tests/pending/import-rewrite/compiler.error new file mode 100644 index 000000000..0832d33bb --- /dev/null +++ b/tests/pending/import-rewrite/compiler.error @@ -0,0 +1,6 @@ +$ scalac tests/pending/import-rewrite/*.scala +$ ./bin/dotc tests/pending/import-rewrite/*.scala +tests/pending/import-rewrite/rewrite.scala:5: error: value apply is not a member of java.io.File.type + Seq("").map(File.apply) + ^ +one error found diff --git a/tests/pending/import-rewrite/file.scala b/tests/pending/import-rewrite/file.scala new file mode 100644 index 000000000..e52581e81 --- /dev/null +++ b/tests/pending/import-rewrite/file.scala @@ -0,0 +1,10 @@ +package file + +class File private (val str: String) { + def name: String = "name" +} + +object File { + def apply(str: String): File = new File(str) +} + diff --git a/tests/pending/import-rewrite/rewrite.scala b/tests/pending/import-rewrite/rewrite.scala new file mode 100644 index 000000000..0bda02c5e --- /dev/null +++ b/tests/pending/import-rewrite/rewrite.scala @@ 
-0,0 +1,7 @@ +package file +import java.io.{File => JFile, _}, StreamTokenizer.{TT_EOF => eof} + +object Main { + Seq("").map(File.apply) + // def name(file: File) = file.name +} diff --git a/tests/pending/naming-resolution/callsite.scala b/tests/pending/naming-resolution/callsite.scala new file mode 100644 index 000000000..b6f2000c9 --- /dev/null +++ b/tests/pending/naming-resolution/callsite.scala @@ -0,0 +1,7 @@ +package naming.resolution + +import java.nio.file._ // Imports `Files` + +object Resolution { + def gimmeFiles: Files = Files.list(Paths.get(".")) +} diff --git a/tests/pending/naming-resolution/compiler.error b/tests/pending/naming-resolution/compiler.error new file mode 100644 index 000000000..81d6b3cfa --- /dev/null +++ b/tests/pending/naming-resolution/compiler.error @@ -0,0 +1,8 @@ +$ scalac tests/pending/naming-resolution/*.scala +$ ./bin/dotc tests/pending/naming-resolution/*.scala +tests/pending/naming-resolution/callsite.scala:6: error: type mismatch: + found : java.util.stream.Stream[java.nio.file.Path] + required: java.nio.file.Files + def gimmeFiles: Files = Files.list(Paths.get(".")) + ^ +one error found diff --git a/tests/pending/naming-resolution/package.scala b/tests/pending/naming-resolution/package.scala new file mode 100644 index 000000000..f0e26ee95 --- /dev/null +++ b/tests/pending/naming-resolution/package.scala @@ -0,0 +1,5 @@ +package naming + +package object resolution { + type Files = java.util.stream.Stream[java.nio.file.Path] +} diff --git a/tests/pos/i1444.scala b/tests/pos/i1444.scala new file mode 100644 index 000000000..da858d50f --- /dev/null +++ b/tests/pos/i1444.scala @@ -0,0 +1,14 @@ +object Test { + +class Cls(implicit x:X) +class ClsImpl extends Cls //this works + +trait Tr1(implicit x:X) +class TrtImpl extends Tr1 //Compiler: Error: parameterized trait Tr1 lacks argument list + +trait Tr2()(implicit x:X) +class Tr2Impl extends Tr2() //this works + +trait X +implicit object AnX extends X +} diff --git 
a/tests/run/i1263.scala b/tests/run/i1263.scala index 630e5758e..e97606ef6 100644 --- a/tests/run/i1263.scala +++ b/tests/run/i1263.scala @@ -2,10 +2,8 @@ object Test { trait Foo(val s: String) val foo1 = new Foo("bar") {} - val foo2 = new Foo { override val s = "bar" } def main(args: Array[String]): Unit = { assert(foo1.s == "bar") - assert(foo2.s == "bar") } } object Test1 { @@ -22,7 +20,6 @@ object Test2 { trait Foo(protected val s: String) val foo1 = new Foo("bar") {} - val foo2 = new Foo { override val s = "bar" } } object Test3 { trait Foo(final val s: String) |