/* NSC -- new Scala compiler
 * Copyright 2007-2013 LAMP/EPFL
 * @author  Paul Phillips
 */

package scala.tools.nsc
package doc

import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
import typechecker.Analyzer
import scala.reflect.internal.Chars._
import scala.reflect.internal.util.{ BatchSourceFile, Position }
import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }

trait ScaladocAnalyzer extends Analyzer {
  val global : Global // generally, a ScaladocGlobal
  import global._

  override def newTyper(context: Context): Typer = if (context.unit.isJava) {
    new Typer(context) with ScaladocJavaTyper // Java sources get a doc-collecting typer, see ScaladocJavaTyper below
  } else {
    new Typer(context) with ScaladocTyper
  }

  /** A stripped-down typer for Java sources: it does not type check, it only collects
   *  doc comments so that Scaladoc can document Java-defined members.
   */
  trait ScaladocJavaTyper extends Typer {
    private def unit = context.unit

    val docCollector = new Traverser {
      override def traverse(tree: Tree): Unit = tree match {
        case dd: DocDef if dd.symbol != null && dd.symbol != NoSymbol =>
          debuglog("collected doc comment for " + dd.symbol + ": " + dd.comment)
          docComments(dd.symbol) = dd.comment
          dd.comment.defineVariables(dd.symbol)
        case dd: DocDef =>
          debuglog("doc comment with no attached symbol: " + dd.comment)
        case t =>
          debuglog("traversing " + t.getClass.getSimpleName)
          super.traverse(t)
      }
    }


    override def typed(tree: Tree): Tree = {
      debuglog("Scaladoc Java typer: collecting doc comments in " + unit.source.file.name)
      docCollector.traverse(tree)
      tree
    }

    override protected def macroImplementationNotFoundMessage(name: Name): String = (
        super.macroImplementationNotFoundMessage(name)
      + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
    )

    override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
      val sym = docDef.symbol

      if ((sym ne null) && (sym ne NoSymbol)) {
        val comment = docDef.comment
        docComments(sym) = comment
        comment.defineVariables(sym)
        val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
        for (useCase <- comment.useCases) {
          typer1.silent(_.asInstanceOf[ScaladocTyper].defineUseCases(useCase)) match {
            case SilentTypeError(err) =>
              reporter.warning(useCase.pos, err.errMsg)
            case _ =>
          }
          for (useCaseSym <- useCase.defined) {
            if (sym.name != useCaseSym.name)
              reporter.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
          }
        }
      }

      super.typedDocDef(docDef, mode, pt)
    }
  }

  trait ScaladocTyper extends Typer {
    private def unit = context.unit

    override def canAdaptConstantTypeToLiteral = false

    override protected def macroImplementationNotFoundMessage(name: Name): String = (
        super.macroImplementationNotFoundMessage(name)
      + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
    )

    override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
      val sym = docDef.symbol

      if ((sym ne null) && (sym ne NoSymbol)) {
        val comment = docDef.comment
        docComments(sym) = comment
        comment.defineVariables(sym)
        val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
        for (useCase <- comment.useCases) {
          typer1.silent(_.asInstanceOf[ScaladocTyper].defineUseCases(useCase)) match {
            case SilentTypeError(err) =>
              reporter.warning(useCase.pos, err.errMsg)
            case _ =>
          }
          for (useCaseSym <- useCase.defined) {
            if (sym.name != useCaseSym.name)
              reporter.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
          }
        }
      }

      super.typedDocDef(docDef, mode, pt)
    }

    def defineUseCases(useCase: UseCase): List[Symbol] = {
      def stringParser(str: String): syntaxAnalyzer.Parser = {
        val file = new BatchSourceFile(context.unit.source.file, str) {
          override def positionInUltimateSource(pos: Position) = {
            pos withSource context.unit.source withShift useCase.pos.start
          }
        }
        newUnitParser(new CompilationUnit(file))
      }

      val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
      val enclClass = context.enclClass.owner

      def defineAlias(name: Name) = (
        if (context.scope.lookup(name) == NoSymbol) {
          lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
            silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
              val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
              val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
              val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
              alias setInfo newInfo
              context.scope.enter(alias)
            }
          }
        }
      )

      for (tree <- trees; t <- tree)
        t match {
          case Ident(name) if name startsWith '$' => defineAlias(name)
          case _ =>
        }

      useCase.aliases = context.scope.toList
      namer.enterSyms(trees)
      typedStats(trees, NoSymbol)
      useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)

      if (settings.debug)
        useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))

      useCase.defined
    }
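
    // Illustrative sketch of the use-case machinery above (hypothetical source, not taken
    // from the compiler or its test suite). Given a doc variable on the enclosing class and
    // a use case in a member comment:
    //
    //   /** @define Coll Wrapper */
    //   class Wrapper[A] {
    //     /** Returns a $Coll with `f` applied to every element.
    //      *  @usecase def map[B](f: A => B): $Coll[B]
    //      */
    //     def map[B](f: A => B)(implicit dummy: DummyImplicit): Wrapper[B] = ???
    //   }
    //
    // the use-case body is re-parsed by stringParser, defineAlias spots the identifier
    // `$Coll`, resolves it on the enclosing class via lookupVariable and enters a type
    // alias for Wrapper, and the resulting `map` symbol ends up in useCase.defined. Had
    // the use case been spelled `def mapp[B](...)`, typedDocDef above would warn that
    // "@usecase mapp does not match commented symbol: map".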
  }
}

abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
  import global._

  class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
    override val in = new ScaladocJavaUnitScanner(unit)
  } with JavaUnitParser(unit) {

    override def joinComment(trees: => List[Tree]): List[Tree] = {
      val doc = in.flushDoc()

      if ((doc ne null) && doc.raw.length > 0) {
        log(s"joinComment(doc=$doc)")
        val joined = trees map { t =>
          DocDef(doc, t) setPos {
            if (t.pos.isDefined) {
              val pos = doc.pos.withEnd(t.pos.end)
              pos.makeTransparent
            } else {
              t.pos
            }
          }
        }
        joined.find(_.pos.isOpaqueRange) foreach { main =>
          val mains = List(main)
          joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
        }
        joined
      } else {
        trees
      }
    }


  }

  class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {

    private val docBuffer: StringBuilder = new StringBuilder
    private var inDocComment = false // true while the scanner is inside a doc comment
    private var docStart: Int = 0
    private var lastDoc: DocComment = null
    def flushDoc(): DocComment = try lastDoc finally lastDoc = null


    /** Add the current character to the documentation buffer. */
    override protected def putCommentChar(): Unit = {
      if (inDocComment) docBuffer append in.ch
      in.next
    }

    override protected def skipBlockComment(isDoc: Boolean): Unit = {
      if (!inDocComment && isDoc) {
        docBuffer append "/*" // at this point, in.ch == * and will be appended next
        docStart = currentPos.start
        inDocComment = true
      }
      super.skipBlockComment(isDoc)
    }

    override protected def skipComment(): Boolean = {
      val skipped = super.skipComment()
      if (skipped && inDocComment) {
        val raw = docBuffer.toString
        val position = Position.range(unit.source, docStart, docStart, in.cpos - 1)
        debuglog(Position.formatMessage(position, "Start of java doc comment: ", true))
        debuglog(Position.formatMessage(position.focusEnd, "End of java doc comment: ", true))
        lastDoc = DocComment(raw, position)
        signalParsedDocComment(raw, position)
        docBuffer.setLength(0) //clear buffer
        inDocComment = false
        true
      } else {
        skipped
      }
    }

  }

  class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {

    private var docBuffer: StringBuilder = null        // buffer for comments (non-null while scanning)
    private var inDocComment             = false       // if buffer contains double-star doc comment
    private var lastDoc: DocComment      = null        // last comment if it was double-star doc

    private object unmooredParser extends {                // minimalist comment parser
      val global: Global = ScaladocSyntaxAnalyzer.this.global
    }
    with CommentFactoryBase with MemberLookupBase {
      import global.{ settings, Symbol }
      def parseComment(comment: DocComment) = {
        val nowarnings = settings.nowarn.value
        settings.nowarn.value = true
        try parseAtSymbol(comment.raw, comment.raw, comment.pos)
        finally settings.nowarn.value = nowarnings
      }

      override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
      override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption.orNull
      override def toString(link: LinkTo): String = "No link"
      override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
      override def warnNoLink: Boolean = false
    }

    /**
     * Warn when discarding buffered doc at the end of a block.
     * This mechanism does not attempt to detect arbitrary unmoored doc elsewhere.
     * Always warn under -Xlint:doc-detached; otherwise warn only in the presence of
     * suspicious tags that appear to document API. Warnings are suppressed while parsing
     * the local comment, so that comments of the form `[at] Martin` do not trigger a warning.
     * Tags such as `see`, `todo`, `note` and `example` are deliberately left out of the
     * check and do not by themselves cause a warning.
     */
    override def discardDocBuffer() = {
      import scala.tools.nsc.doc.base.comment.Comment
      val doc = flushDoc
      // tags that make a local double-star comment look unclean, as though it were API
      def unclean(comment: Comment): Boolean = {
        import comment._
        authors.nonEmpty || result.nonEmpty || throws.nonEmpty || valueParams.nonEmpty ||
        typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
      }
      def isDirty = unclean(unmooredParser parseComment doc)
      if ((doc ne null) && (settings.warnDocDetached || isDirty))
        reporter.warning(doc.pos, "discarding unmoored doc comment")
    }
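
    // Illustrative sketch (hypothetical snippet, not from the compiler's test suite) of a
    // comment that ends up here: the doc below is never attached to a definition, and its
    // `@return` tag makes it look like API documentation, so "discarding unmoored doc
    // comment" is reported even without -Xlint:doc-detached.
    //
    //   def answer: Int = {
    //     /** @return the answer */
    //     42
    //   }
    //
    // A tag-free comment in the same position, e.g. `/** just a note */`, is discarded
    // silently unless -Xlint:doc-detached is enabled.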

    override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)

    override protected def putCommentChar(): Unit = {
      if (inDocComment)
        docBuffer append ch

      nextChar()
    }
    override def skipDocComment(): Unit = {
      inDocComment = true
      docBuffer = new StringBuilder("/**")
      super.skipDocComment()
    }
    override def skipBlockComment(): Unit = {
      inDocComment = false // plain block comment: putCommentChar only buffers while inDocComment, so its body is never collected
      docBuffer = new StringBuilder("/*")
      super.skipBlockComment()
    }
    override def skipComment(): Boolean = {
      // emit a block comment; if it's double-star, make Doc at this pos
      def foundStarComment(start: Int, end: Int) = try {
        val str = docBuffer.toString
        val pos = Position.range(unit.source, start, start, end)
        if (inDocComment) {
          signalParsedDocComment(str, pos)
          lastDoc = DocComment(str, pos)
        }
        true
      } finally {
        docBuffer    = null
        inDocComment = false
      }
      super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
    }
  }
  class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {

    override def newScanner() = new ScaladocUnitScanner(unit, patches)
    override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches)

    override def joinComment(trees: => List[Tree]): List[Tree] = {
      val doc = in.flushDoc
      if ((doc ne null) && doc.raw.length > 0) {
        log(s"joinComment(doc=$doc)")
        val joined = trees map { t =>
          DocDef(doc, t) setPos {
            if (t.pos.isDefined) {
              val pos = doc.pos.withEnd(t.pos.end)
              // always make the position transparent
              pos.makeTransparent
            } else {
              t.pos
            }
          }
        }
        joined.find(_.pos.isOpaqueRange) foreach { main =>
          val mains = List(main)
          joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
        }
        joined
      }
      else trees
    }
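
    // Illustrative sketch (hypothetical source, not from the compiler's test suite) of what
    // joinComment produces: for
    //
    //   /** Adds one. */
    //   def inc(x: Int) = x + 1
    //
    // the DefDef is wrapped as DocDef(DocComment("/** Adds one. */", pos), defDef), whose
    // position starts at the comment and ends at the definition but is made transparent,
    // so it never competes with the definition's own opaque range position.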
  }
}