From 3d5c675982803e3a17262245a05266b2f5b64bc3 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 09:01:10 -0800 Subject: Moved scaladoc code into src/scaladoc. This leverages the preceding several commits to push scaladoc specific code into src/scaladoc. It also renders some scanner code more comprehensible. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 29 +-- .../scala/tools/nsc/ast/parser/Scanners.scala | 155 +++++++------- .../scala/tools/nsc/javac/JavaScanners.scala | 10 - .../scala/tools/nsc/symtab/SymbolLoaders.scala | 12 +- .../tools/nsc/typechecker/SuperAccessors.scala | 12 -- .../scala/tools/nsc/doc/ScaladocAnalyzer.scala | 229 +++++++++++++++++++++ .../scala/tools/nsc/doc/ScaladocGlobal.scala | 99 ++------- .../scala/tools/partest/ScaladocModelTest.scala | 8 +- 8 files changed, 331 insertions(+), 223 deletions(-) create mode 100644 src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 522c45f9fa..9218ad3330 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -650,31 +650,10 @@ self => /* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */ - /** Join the comment associated with a definition. */ - def joinComment(trees: => List[Tree]): List[Tree] = { - val doc = in.flushDoc - if ((doc ne null) && doc.raw.length > 0) { - val joined = trees map { - t => - DocDef(doc, t) setPos { - if (t.pos.isDefined) { - val pos = doc.pos.withEnd(t.pos.endOrPoint) - // always make the position transparent - pos.makeTransparent - } else { - t.pos - } - } - } - joined.find(_.pos.isOpaqueRange) foreach { - main => - val mains = List(main) - joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) } - } - joined - } - else trees - } + /** A hook for joining the comment associated with a definition. + * Overridden by scaladoc. 
+ */ + def joinComment(trees: => List[Tree]): List[Tree] = trees /* ---------- TREE CONSTRUCTION ------------------------------------------- */ diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 78041fda08..6ad1c50075 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -83,6 +83,69 @@ trait Scanners extends ScannersCommon { abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon { private def isDigit(c: Char) = java.lang.Character isDigit c + private var openComments = 0 + protected def putCommentChar(): Unit = nextChar() + + @tailrec private def skipLineComment(): Unit = ch match { + case SU | CR | LF => + case _ => nextChar() ; skipLineComment() + } + private def maybeOpen() { + putCommentChar() + if (ch == '*') { + putCommentChar() + openComments += 1 + } + } + private def maybeClose(): Boolean = { + putCommentChar() + (ch == '/') && { + putCommentChar() + openComments -= 1 + openComments == 0 + } + } + @tailrec final def skipNestedComments(): Unit = ch match { + case '/' => maybeOpen() ; skipNestedComments() + case '*' => if (!maybeClose()) skipNestedComments() + case SU => incompleteInputError("unclosed comment") + case _ => putCommentChar() ; skipNestedComments() + } + def skipDocComment(): Unit = skipNestedComments() + def skipBlockComment(): Unit = skipNestedComments() + + private def skipToCommentEnd(isLineComment: Boolean) { + nextChar() + if (isLineComment) skipLineComment() + else { + openComments = 1 + val isDocComment = (ch == '*') && { nextChar(); true } + if (isDocComment) { + // Check for the amazing corner case of /**/ + if (ch == '/') + nextChar() + else + skipDocComment() + } + else skipBlockComment() + } + } + + /** @pre ch == '/' + * Returns true if a comment was skipped. + */ + def skipComment(): Boolean = ch match { + case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true + case _ => false + } + def flushDoc(): DocComment = null + + /** To prevent doc comments attached to expressions from leaking out of scope + * onto the next documentable entity, they are discarded upon passing a right + * brace, bracket, or parenthesis. + */ + def discardDocBuffer(): Unit = () + def isAtEnd = charOffset >= buf.length def resume(lastCode: Int) = { @@ -130,22 +193,6 @@ trait Scanners extends ScannersCommon { cbuf.clear() } - /** Should doc comments be built? 
*/ - def buildDocs: Boolean = forScaladoc - - /** holder for the documentation comment - */ - var docComment: DocComment = null - - def flushDoc: DocComment = { - val ret = docComment - docComment = null - ret - } - - protected def foundComment(value: String, start: Int, end: Int) = () - protected def foundDocComment(value: String, start: Int, end: Int) = () - private class TokenData0 extends TokenData /** we need one token lookahead and one token history @@ -218,12 +265,15 @@ trait Scanners extends ScannersCommon { case RBRACE => while (!sepRegions.isEmpty && sepRegions.head != RBRACE) sepRegions = sepRegions.tail - if (!sepRegions.isEmpty) sepRegions = sepRegions.tail - docComment = null + if (!sepRegions.isEmpty) + sepRegions = sepRegions.tail + + discardDocBuffer() case RBRACKET | RPAREN => if (!sepRegions.isEmpty && sepRegions.head == lastToken) sepRegions = sepRegions.tail - docComment = null + + discardDocBuffer() case ARROW => if (!sepRegions.isEmpty && sepRegions.head == lastToken) sepRegions = sepRegions.tail @@ -516,62 +566,6 @@ trait Scanners extends ScannersCommon { } } - private def skipComment(): Boolean = { - - if (ch == '/' || ch == '*') { - - val comment = new StringBuilder("/") - def appendToComment() = comment.append(ch) - - if (ch == '/') { - do { - appendToComment() - nextChar() - } while ((ch != CR) && (ch != LF) && (ch != SU)) - } else { - docComment = null - var openComments = 1 - appendToComment() - nextChar() - appendToComment() - var buildingDocComment = false - if (ch == '*' && buildDocs) { - buildingDocComment = true - } - while (openComments > 0) { - do { - do { - if (ch == '/') { - nextChar(); appendToComment() - if (ch == '*') { - nextChar(); appendToComment() - openComments += 1 - } - } - if (ch != '*' && ch != SU) { - nextChar(); appendToComment() - } - } while (ch != '*' && ch != SU) - while (ch == '*') { - nextChar(); appendToComment() - } - } while (ch != '/' && ch != SU) - if (ch == '/') nextChar() - else incompleteInputError("unclosed comment") - openComments -= 1 - } - - if (buildingDocComment) - foundDocComment(comment.toString, offset, charOffset - 2) - } - - foundComment(comment.toString, offset, charOffset - 2) - true - } else { - false - } - } - /** Can token start a statement? 
*/ def inFirstOfStat(token: Int) = token match { case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD | @@ -1281,17 +1275,6 @@ trait Scanners extends ScannersCommon { } } } - - override def foundComment(value: String, start: Int, end: Int) { - val pos = new RangePosition(unit.source, start, start, end) - unit.comment(pos, value) - } - - override def foundDocComment(value: String, start: Int, end: Int) { - val docPos = new RangePosition(unit.source, start, start, end) - docComment = new DocComment(value, docPos) - unit.comment(docPos, value) - } } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 3813736535..f9b1e57e66 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -235,16 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { cbuf.setLength(0) } - /** buffer for the documentation comment - */ - var docBuffer: StringBuilder = null - - /** add the given character to the documentation buffer - */ - protected def putDocChar(c: Char) { - if (docBuffer ne null) docBuffer.append(c) - } - private class JavaTokenData0 extends JavaTokenData /** we need one token lookahead diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index ffccc11474..61ac07d18f 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -32,14 +32,10 @@ abstract class SymbolLoaders { protected def signalError(root: Symbol, ex: Throwable) { if (settings.debug.value) ex.printStackTrace() - // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented - // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects - // that are not in their correct place (see bug for details) - if (!settings.isScaladoc) - globalError(ex.getMessage() match { - case null => "i/o error while loading " + root.name - case msg => "error while loading " + root.name + ", " + msg - }) + globalError(ex.getMessage() match { + case null => "i/o error while loading " + root.name + case msg => "error while loading " + root.name + ", " + msg + }) } /** Enter class with given `name` into scope of `root` diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 1d28add6e0..e8925ce2d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -186,18 +186,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym)) } } - if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) { - println("========== scaladoc of "+sym+" =============================") - println(toJavaDoc(expandedDocComment(sym))) - for (member <- sym.info.members) { - println(member+":"+sym.thisType.memberInfo(member)+"\n"+ - toJavaDoc(expandedDocComment(member, sym))) - for ((useCase, comment, pos) <- useCases(member, sym)) { - println("usecase "+useCase+":"+useCase.info) - println(toJavaDoc(comment)) - } - } - } super.transform(tree) } transformClassDef diff --git 
a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala new file mode 100644 index 0000000000..37d95a9d95 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -0,0 +1,229 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package doc + +import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch } +import scala.reflect.internal.Chars._ +import symtab._ +import reporters.Reporter +import typechecker.Analyzer +import scala.reflect.internal.util.{ BatchSourceFile, RangePosition } + +trait ScaladocAnalyzer extends Analyzer { + val global : Global // generally, a ScaladocGlobal + import global._ + + override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper + + trait ScaladocTyper extends Typer { + private def unit = context.unit + + override def canAdaptConstantTypeToLiteral = false + + override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { + val sym = docDef.symbol + + if ((sym ne null) && (sym ne NoSymbol)) { + val comment = docDef.comment + docComments(sym) = comment + comment.defineVariables(sym) + val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) + for (useCase <- comment.useCases) { + typer1.silent(_ => typer1 defineUseCases useCase) match { + case SilentTypeError(err) => + unit.warning(useCase.pos, err.errMsg) + case _ => + } + for (useCaseSym <- useCase.defined) { + if (sym.name != useCaseSym.name) + unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) + } + } + } + + super.typedDocDef(docDef, mode, pt) + } + + def defineUseCases(useCase: UseCase): List[Symbol] = { + def stringParser(str: String): syntaxAnalyzer.Parser = { + val file = new BatchSourceFile(context.unit.source.file, str) { + override def positionInUltimateSource(pos: Position) = { + pos.withSource(context.unit.source, useCase.pos.start) + } + } + newUnitParser(new CompilationUnit(file)) + } + + val trees = stringParser(useCase.body+";").nonLocalDefOrDcl + val enclClass = context.enclClass.owner + + def defineAlias(name: Name) = ( + if (context.scope.lookup(name) == NoSymbol) { + lookupVariable(name.toString.substring(1), enclClass) foreach { repl => + silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => + val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) + val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) + val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) + alias setInfo newInfo + context.scope.enter(alias) + } + } + } + ) + + for (tree <- trees; t <- tree) + t match { + case Ident(name) if name startsWith '$' => defineAlias(name) + case _ => + } + + useCase.aliases = context.scope.toList + namer.enterSyms(trees) + typedStats(trees, NoSymbol) + useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) + + if (settings.debug.value) + useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + + useCase.defined + } + } +} + +abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer { + import global._ + + class ScaladocJavaUnitParser(unit: CompilationUnit) extends { + override val in = new ScaladocJavaUnitScanner(unit) + } with JavaUnitParser(unit) { } + + class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) { + /** buffer for the 
documentation comment + */ + var docBuffer: StringBuilder = null + + /** add the given character to the documentation buffer + */ + protected def putDocChar(c: Char) { + if (docBuffer ne null) docBuffer.append(c) + } + + override protected def skipComment(): Boolean = { + if (in.ch == '/') { + do { + in.next + } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU)) + true + } else if (in.ch == '*') { + docBuffer = null + in.next + val scalaDoc = ("/**", "*/") + if (in.ch == '*') + docBuffer = new StringBuilder(scalaDoc._1) + do { + do { + if (in.ch != '*' && in.ch != SU) { + in.next; putDocChar(in.ch) + } + } while (in.ch != '*' && in.ch != SU) + while (in.ch == '*') { + in.next; putDocChar(in.ch) + } + } while (in.ch != '/' && in.ch != SU) + if (in.ch == '/') in.next + else incompleteInputError("unclosed comment") + true + } else { + false + } + } + } + + class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) { + + private var docBuffer: StringBuilder = null // buffer for comments + private var docPos: Position = NoPosition // last doc comment position + private var inDocComment = false + + override def discardDocBuffer() = { + val doc = flushDoc + if (doc ne null) + unit.warning(docPos, "discarding unmoored doc comment") + } + + override def flushDoc(): DocComment = { + if (docBuffer eq null) null + else try DocComment(docBuffer.toString, docPos) finally docBuffer = null + } + + override protected def putCommentChar() { + if (inDocComment) + docBuffer append ch + + nextChar() + } + override def skipDocComment(): Unit = { + inDocComment = true + docBuffer = new StringBuilder("/**") + super.skipDocComment() + } + override def skipBlockComment(): Unit = { + inDocComment = false + docBuffer = new StringBuilder("/*") + super.skipBlockComment() + } + override def skipComment(): Boolean = { + super.skipComment() && { + if (docBuffer ne null) { + if (inDocComment) + foundDocComment(docBuffer.toString, offset, charOffset - 2) + else + try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null + } + true + } + } + def foundComment(value: String, start: Int, end: Int) { + val pos = new RangePosition(unit.source, start, start, end) + unit.comment(pos, value) + } + def foundDocComment(value: String, start: Int, end: Int) { + docPos = new RangePosition(unit.source, start, start, end) + unit.comment(docPos, value) + } + } + class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) { + override def newScanner() = new ScaladocUnitScanner(unit, patches) + override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches) + + override def joinComment(trees: => List[Tree]): List[Tree] = { + val doc = in.flushDoc + if ((doc ne null) && doc.raw.length > 0) { + log(s"joinComment(doc=$doc)") + val joined = trees map { + t => + DocDef(doc, t) setPos { + if (t.pos.isDefined) { + val pos = doc.pos.withEnd(t.pos.endOrPoint) + // always make the position transparent + pos.makeTransparent + } else { + t.pos + } + } + } + joined.find(_.pos.isOpaqueRange) foreach { + main => + val mains = List(main) + joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) } + } + joined + } + else trees + } + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 021e59a879..20f24dc753 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ 
b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -6,93 +6,36 @@ package scala.tools.nsc package doc -import scala.util.control.ControlThrowable +import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch } +import scala.reflect.internal.Chars._ +import symtab._ import reporters.Reporter import typechecker.Analyzer -import scala.reflect.internal.util.BatchSourceFile +import scala.reflect.internal.util.{ BatchSourceFile, RangePosition } -trait ScaladocAnalyzer extends Analyzer { - val global : Global // generally, a ScaladocGlobal - import global._ +trait ScaladocGlobalTrait extends Global { + outer => - override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context) - - class ScaladocTyper(context0: Context) extends Typer(context0) { - private def unit = context.unit - - override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { - val sym = docDef.symbol - - if ((sym ne null) && (sym ne NoSymbol)) { - val comment = docDef.comment - fillDocComment(sym, comment) - val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) - for (useCase <- comment.useCases) { - typer1.silent(_ => typer1 defineUseCases useCase) match { - case SilentTypeError(err) => - unit.warning(useCase.pos, err.errMsg) - case _ => - } - for (useCaseSym <- useCase.defined) { - if (sym.name != useCaseSym.name) - unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) - } - } - } - - super.typedDocDef(docDef, mode, pt) - } - - def defineUseCases(useCase: UseCase): List[Symbol] = { - def stringParser(str: String): syntaxAnalyzer.Parser = { - val file = new BatchSourceFile(context.unit.source.file, str) { - override def positionInUltimateSource(pos: Position) = { - pos.withSource(context.unit.source, useCase.pos.start) - } - } - val unit = new CompilationUnit(file) - new syntaxAnalyzer.UnitParser(unit) - } - - val trees = stringParser(useCase.body+";").nonLocalDefOrDcl - val enclClass = context.enclClass.owner - - def defineAlias(name: Name) = ( - if (context.scope.lookup(name) == NoSymbol) { - lookupVariable(name.toString.substring(1), enclClass) foreach { repl => - silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => - val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) - val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) - val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) - alias setInfo newInfo - context.scope.enter(alias) - } - } - } - ) - - for (tree <- trees; t <- tree) - t match { - case Ident(name) if name startsWith '$' => defineAlias(name) - case _ => - } - - useCase.aliases = context.scope.toList - namer.enterSyms(trees) - typedStats(trees, NoSymbol) - useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - - if (settings.debug.value) - useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + override val useOffsetPositions = false + override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil) - useCase.defined + override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) { + val runsAfter = List[String]() + val runsRightAfter = None + } + override lazy val loaders = new SymbolLoaders { + val global: outer.type = outer + + // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented + // therefore, it will rummage through the classpath triggering errors 
whenever it encounters package objects + // that are not in their correct place (see bug for details) + override protected def signalError(root: Symbol, ex: Throwable) { + log(s"Suppressing error involving $root: $ex") } } } -class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends { - override val useOffsetPositions = false -} with Global(settings, reporter) { +class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait { override protected def computeInternalPhases() { phasesSet += syntaxAnalyzer phasesSet += analyzer.namerFactory diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala index 3db9f18484..f0a9caac15 100644 --- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala +++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala @@ -5,9 +5,10 @@ package scala.tools.partest +import scala.tools.nsc import scala.tools.nsc._ import scala.tools.nsc.util.CommandLineParser -import scala.tools.nsc.doc.{Settings, DocFactory, Universe} +import scala.tools.nsc.doc.{ DocFactory, Universe } import scala.tools.nsc.doc.model._ import scala.tools.nsc.doc.model.diagram._ import scala.tools.nsc.doc.base.comment._ @@ -78,11 +79,11 @@ abstract class ScaladocModelTest extends DirectTest { System.setErr(prevErr) } - private[this] var settings: Settings = null + private[this] var settings: doc.Settings = null // create a new scaladoc compiler private[this] def newDocFactory: DocFactory = { - settings = new Settings(_ => ()) + settings = new doc.Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! val args = extraSettings + " " + scaladocSettings new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think @@ -96,7 +97,6 @@ abstract class ScaladocModelTest extends DirectTest { // so we don't get the newSettings warning override def isDebug = false - // finally, enable easy navigation inside the entities object access { -- cgit v1.2.3
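
The core of this commit is a dependency inversion: the compiler proper keeps only no-op hooks (`joinComment` returning its trees unchanged, `flushDoc` returning null, `skipDocComment`/`discardDocBuffer` doing nothing), and the scaladoc-specific subclasses in `ScaladocAnalyzer.scala` override them to build and attach `DocComment`s. Below is a minimal, self-contained sketch of that pattern. It is not the compiler's code: the class names (`SketchScanner`, `ScaladocSketchParser`, ...) and the simplified `Tree`/`DocComment` types are illustrative stand-ins chosen only to show how the overridable hooks interact.

```scala
// Sketch of the hook pattern introduced by this commit (assumed simplifications,
// not the actual nsc classes): the base scanner/parser expose do-nothing hooks,
// and the scaladoc variants override them to capture doc comments.
object ScaladocHookSketch {
  final case class DocComment(raw: String)
  final case class Tree(label: String, doc: Option[DocComment] = None)

  // Stand-in for Scanners.Scanner: doc-comment handling reduced to overridable no-ops.
  class SketchScanner {
    def skipDocComment(body: String): Unit = ()   // scaladoc buffers the comment text here
    def flushDoc(): DocComment = null             // scaladoc returns (and clears) the buffer
    def discardDocBuffer(): Unit = ()             // called at `}`, `]`, `)` to drop unmoored docs
  }

  // Stand-in for Parsers.Parser#joinComment: a no-op hook in the compiler proper.
  class SketchParser(val in: SketchScanner) {
    def joinComment(trees: => List[Tree]): List[Tree] = trees
  }

  // Stand-ins for ScaladocUnitScanner / ScaladocUnitParser: the overrides do the real work.
  class ScaladocSketchScanner extends SketchScanner {
    private var docBuffer: StringBuilder = null
    override def skipDocComment(body: String): Unit =
      docBuffer = new StringBuilder("/**").append(body)
    override def flushDoc(): DocComment =
      if (docBuffer eq null) null
      else try DocComment(docBuffer.toString) finally docBuffer = null
    override def discardDocBuffer(): Unit =
      if (flushDoc() ne null) println("warning: discarding unmoored doc comment")
  }
  class ScaladocSketchParser(in0: ScaladocSketchScanner) extends SketchParser(in0) {
    override def joinComment(trees: => List[Tree]): List[Tree] = {
      val doc = in.flushDoc()
      if (doc ne null) trees.map(t => t.copy(doc = Some(doc))) else trees
    }
  }

  def main(args: Array[String]): Unit = {
    // Compiler proper: hooks are inert, so no doc comment is ever attached.
    val plain = new SketchParser(new SketchScanner)
    plain.in.skipDocComment(" ignored ")
    assert(plain.joinComment(List(Tree("def f"))).head.doc.isEmpty)

    // Scaladoc: the same call sites now buffer and attach the comment.
    val sdoc = new ScaladocSketchParser(new ScaladocSketchScanner)
    sdoc.in.skipDocComment(" returns the answer ")
    assert(sdoc.joinComment(List(Tree("def f"))).head.doc.nonEmpty)
  }
}
```

The payoff, visible in the diff above, is that the compiler proper no longer needs scaladoc-aware switches such as `buildDocs`/`forScaladoc` (removed from `Scanners`, `SuperAccessors`) or the `settings.isScaladoc` guard in `SymbolLoaders`; the scaladoc behavior lives entirely in the overrides under `src/scaladoc`.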