author    Felix Mulder <felix.mulder@gmail.com>  2016-11-02 11:08:28 +0100
committer Guillaume Martres <smarter@ubuntu.com> 2016-11-22 01:35:07 +0100
commit    8a61ff432543a29234193cd1f7c14abd3f3d31a0 (patch)
tree      a8147561d307af862c295cfc8100d271063bb0dd /compiler/test/dotty
parent    6a455fe6da5ff9c741d91279a2dc6fe2fb1b472f (diff)
Move compiler and compiler tests to compiler dir
Diffstat (limited to 'compiler/test/dotty')
-rw-r--r-- compiler/test/dotty/partest/DPConfig.scala                               |  40
-rw-r--r-- compiler/test/dotty/partest/DPConsoleRunner.scala                        | 356
-rw-r--r-- compiler/test/dotty/partest/DPDirectCompiler.scala                       |  36
-rw-r--r-- compiler/test/dotty/tools/ContextEscapeDetection.java                    |  36
-rw-r--r-- compiler/test/dotty/tools/ContextEscapeDetector.java                     | 108
-rw-r--r-- compiler/test/dotty/tools/DottyTest.scala                                |  73
-rw-r--r-- compiler/test/dotty/tools/DottyTypeStealer.scala                         |  32
-rw-r--r-- compiler/test/dotty/tools/ShowClassTests.scala                           | 154
-rw-r--r-- compiler/test/dotty/tools/TypeStealer.scala                              |  21
-rw-r--r-- compiler/test/dotty/tools/backend/jvm/AsmConverters.scala                | 256
-rw-r--r-- compiler/test/dotty/tools/backend/jvm/AsmNode.scala                      |  61
-rw-r--r-- compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala            | 208
-rw-r--r-- compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala           | 188
-rw-r--r-- compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala          |  32
-rw-r--r-- compiler/test/dotty/tools/dotc/CompilerTest.scala                        | 623
-rw-r--r-- compiler/test/dotty/tools/dotc/EntryPointsTest.scala                     |  72
-rw-r--r-- compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala             |  72
-rw-r--r-- compiler/test/dotty/tools/dotc/ast/TreeInfoTest.scala                    |  30
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala                 |  96
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/DocstringTest.scala               |  34
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/DocstringTests.scala              | 491
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala        | 164
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/ParserTest.scala                  |  44
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala                 |  65
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala              |  28
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/parseFile.scala                   |  13
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/parsePackage.scala                |  83
-rw-r--r-- compiler/test/dotty/tools/dotc/parsing/showTree.scala                    |  26
-rw-r--r-- compiler/test/dotty/tools/dotc/repl/TestREPL.scala                       |  66
-rw-r--r-- compiler/test/dotty/tools/dotc/reporting/TestMessageLaziness.scala       |  37
-rw-r--r-- compiler/test/dotty/tools/dotc/reporting/TestReporter.scala              |  52
-rw-r--r-- compiler/test/dotty/tools/dotc/transform/CreateCompanionObjectsTest.scala | 128
-rw-r--r-- compiler/test/dotty/tools/dotc/transform/LazyValsTest.scala              | 361
-rw-r--r-- compiler/test/dotty/tools/dotc/transform/PostTyperTransformerTest.scala  | 132
-rw-r--r-- compiler/test/dotty/tools/dotc/transform/TreeTransformerTest.scala       | 198
-rw-r--r-- compiler/test/dotty/tools/showClass.scala                                |  17
36 files changed, 4433 insertions, 0 deletions
diff --git a/compiler/test/dotty/partest/DPConfig.scala b/compiler/test/dotty/partest/DPConfig.scala
new file mode 100644
index 000000000..5c493f465
--- /dev/null
+++ b/compiler/test/dotty/partest/DPConfig.scala
@@ -0,0 +1,40 @@
+package dotty.partest
+
+import scala.collection.JavaConversions._
+import scala.reflect.io.Path
+import java.io.File
+
+import scala.tools.partest.PartestDefaults
+
+
+/** Dotty Partest runs all tests in the provided testDirs located under
+ * testRoot. There can be several directories of pos and neg tests, as
+ * long as each directory name is prefixed with pos or neg.
+ *
+ * Each testDir can also have a __defaultFlags.flags file, which provides
+ * compiler flags and is used unless there's a specific flags file (e.g. for
+ * test pos/A.scala, if there's a pos/A.flags file those flags are used,
+ * otherwise pos/__defaultFlags.flags are used if the file exists).
+ */
+object DPConfig {
+ /** Options used for _running_ the run tests.
+ * Note that this is different from the options used when _compiling_ tests;
+ * those are determined by the sbt configuration.
+ */
+ val runJVMOpts = s"-Xms64M -Xmx1024M ${PartestDefaults.javaOpts}"
+
+ val testRoot = (Path("..") / Path("tests") / Path("partest-generated")).toString
+ val genLog = Path(testRoot) / Path("gen.log")
+
+ lazy val testDirs = {
+ val root = new File(testRoot)
+ val dirs = if (!root.exists) Array.empty[String] else root.listFiles.filter(_.isDirectory).map(_.getName)
+ if (dirs.isEmpty)
+ throw new Exception("Partest did not detect any generated sources")
+ dirs
+ }
+
+ // Tests finish faster when running in parallel, but console output is
+ // out of order and sometimes the compiler crashes
+ val runTestsInParallel = true
+}
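The DPConfig docstring above describes how compiler flags are resolved for a test: a test-specific flags file (pos/A.flags for pos/A.scala) wins over the directory's __defaultFlags.flags. A minimal sketch of that lookup in plain Scala; the helper name flagsFileFor is hypothetical and not part of this commit.

    // Hypothetical sketch of the flags-file resolution described in DPConfig's docstring:
    // for pos/A.scala, prefer pos/A.flags, otherwise fall back to pos/__defaultFlags.flags.
    import java.io.File

    def flagsFileFor(testFile: File): Option[File] = {
      val dir      = testFile.getParentFile
      val specific = new File(dir, testFile.getName.stripSuffix(".scala") + ".flags")
      val default  = new File(dir, "__defaultFlags.flags")
      if (specific.exists) Some(specific)
      else if (default.exists) Some(default)
      else None
    }

In the runner itself this split shows up as DPTestRunner.flagsForCompilation (test-specific flags) falling back to DPTestRunner.defaultFlags further below.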
diff --git a/compiler/test/dotty/partest/DPConsoleRunner.scala b/compiler/test/dotty/partest/DPConsoleRunner.scala
new file mode 100644
index 000000000..4552d1137
--- /dev/null
+++ b/compiler/test/dotty/partest/DPConsoleRunner.scala
@@ -0,0 +1,356 @@
+/* NOTE: Adapted from ScalaJSPartest.scala in
+ * https://github.com/scala-js/scala-js/
+ * TODO make partest configurable */
+
+package dotty.partest
+
+import scala.reflect.io.AbstractFile
+import scala.tools.partest._
+import scala.tools.partest.nest._
+import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
+import ClassPath.{ join, split }
+import FileManager.{ compareFiles, compareContents, joinPaths, withTempFile }
+import scala.util.matching.Regex
+import tools.nsc.io.{ File => NSCFile }
+import java.io.{ File, PrintStream, FileOutputStream, PrintWriter, FileWriter }
+import java.net.URLClassLoader
+
+/** Runs dotty partest from the Console, discovering test sources in
+ * DPConfig.testRoot that have been generated automatically by
+ * DPPrepJUnitRunner. Use `sbt partest` to run. If additional jars are
+ * required by some run tests, add them to partestDeps in the sbt Build.scala.
+ */
+object DPConsoleRunner {
+ def main(args: Array[String]): Unit = {
+ // unfortunately sbt runTask passes args as a single string
+ // extra jars for run tests are passed with -dottyJars <count> <jar1> <jar2> ...
+ val jarFinder = """-dottyJars (\d*) (.*)""".r
+ val (jarList, otherArgs) = args.toList.partition(jarFinder.findFirstIn(_).isDefined)
+ val (extraJars, moreArgs) = jarList match {
+ case Nil => sys.error("Error: DPConsoleRunner needs \"-dottyJars <jarCount> <jars>*\".")
+ case jarFinder(nr, jarString) :: Nil =>
+ val jars = jarString.split(" ").toList
+ val count = nr.toInt
+ if (jars.length < count)
+ sys.error("Error: DPConsoleRunner found wrong number of dottyJars: " + jars + ", expected: " + nr)
+ else (jars.take(count), jars.drop(count))
+ case list => sys.error("Error: DPConsoleRunner found several -dottyJars options: " + list)
+ }
+ new DPConsoleRunner((otherArgs ::: moreArgs) mkString (" "), extraJars).runPartest
+ }
+}
+
+// console runner has a suite runner which creates a test runner for each test
+class DPConsoleRunner(args: String, extraJars: List[String]) extends ConsoleRunner(args) {
+ override val suiteRunner = new DPSuiteRunner (
+ testSourcePath = optSourcePath getOrElse DPConfig.testRoot,
+ fileManager = new DottyFileManager(extraJars),
+ updateCheck = optUpdateCheck,
+ failed = optFailed,
+ consoleArgs = args)
+
+ override def run = {}
+ def runPartest = super.run
+}
+
+class DottyFileManager(extraJars: List[String]) extends FileManager(Nil) {
+ lazy val extraJarList = extraJars.map(NSCFile(_))
+ override lazy val libraryUnderTest = Path(extraJars.find(_.contains("scala-library")).getOrElse(""))
+ override lazy val reflectUnderTest = Path(extraJars.find(_.contains("scala-reflect")).getOrElse(""))
+ override lazy val compilerUnderTest = Path(extraJars.find(_.contains("dotty")).getOrElse(""))
+}
+
+class DPSuiteRunner(testSourcePath: String, // relative path, like "files", or "pending"
+ fileManager: DottyFileManager,
+ updateCheck: Boolean,
+ failed: Boolean,
+ consoleArgs: String,
+ javaCmdPath: String = PartestDefaults.javaCmd,
+ javacCmdPath: String = PartestDefaults.javacCmd,
+ scalacExtraArgs: Seq[String] = Seq.empty,
+ javaOpts: String = DPConfig.runJVMOpts)
+extends SuiteRunner(testSourcePath, fileManager, updateCheck, failed, javaCmdPath, javacCmdPath, scalacExtraArgs, javaOpts) {
+
+ if (!DPConfig.runTestsInParallel)
+ sys.props("partest.threads") = "1"
+
+ sys.props("partest.root") = "."
+
+ // override to provide Dotty banner
+ override def banner: String = {
+ s"""|Welcome to Partest for Dotty! Partest version: ${Properties.versionNumberString}
+ |Compiler under test: dotty.tools.dotc.Bench or dotty.tools.dotc.Main
+ |Generated test sources: ${PathSettings.srcDir}${File.separator}
+ |Test directories: ${DPConfig.testDirs.toList.mkString(", ")}
+ |Debugging: failed tests have compiler output in test-kind.clog, run output in test-kind.log, class files in test-kind.obj
+ |Parallel: ${DPConfig.runTestsInParallel}
+ |Options: (use partest --help for usage information) ${consoleArgs}
+ """.stripMargin
+ }
+
+ // override for DPTestRunner and redirecting compilation output to test.clog
+ override def runTest(testFile: File): TestState = {
+ val runner = new DPTestRunner(testFile, this)
+
+ val state =
+ try {
+ runner.run match {
+ // Append compiler output to transcript if compilation failed,
+ // printed with --verbose option
+ case TestState.Fail(f, r@"compilation failed", transcript) =>
+ TestState.Fail(f, r, transcript ++ runner.cLogFile.fileLines.dropWhile(_ == ""))
+ case res => res
+ }
+ } catch {
+ case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
+ }
+ reportTest(state)
+ runner.cleanup()
+
+ onFinishTest(testFile, state)
+ }
+
+ // override NestUI.reportTest because --show-diff doesn't work. The diff used
+ // seems to add each line to transcript separately, whereas NestUI assumes
+ // that the diff string was added as one entry in the transcript
+ def reportTest(state: TestState) = {
+ import NestUI._
+ import NestUI.color._
+
+ if (isTerse && state.isOk) {
+ NestUI.reportTest(state)
+ } else {
+ echo(statusLine(state))
+ if (!state.isOk && isDiffy) {
+ val differ = bold(red("% ")) + "diff "
+ state.transcript.dropWhile(s => !(s startsWith differ)) foreach (echo(_))
+ // state.transcript find (_ startsWith differ) foreach (echo(_)) // original
+ }
+ }
+ }
+}
+
+class DPTestRunner(testFile: File, suiteRunner: DPSuiteRunner) extends nest.Runner(testFile, suiteRunner) {
+ val cLogFile = SFile(logFile).changeExtension("clog")
+
+ // override to provide DottyCompiler
+ override def newCompiler = new dotty.partest.DPDirectCompiler(this)
+
+ // Adapted from nest.Runner#javac because:
+ // - Our classpath handling is different and we need to pass extraClassPath
+ // to java to get the scala-library which is required for some java tests
+ // - The compiler output should be redirected to cLogFile, like the output of
+ // dotty itself
+ override def javac(files: List[File]): TestState = {
+ import fileManager._
+ import suiteRunner._
+ import FileManager.joinPaths
+ // compile using command-line javac compiler
+ val args = Seq(
+ javacCmdPath,
+ "-d",
+ outDir.getAbsolutePath,
+ "-classpath",
+ joinPaths(outDir :: extraClasspath ++ testClassPath)
+ ) ++ files.map(_.getAbsolutePath)
+
+ pushTranscript(args mkString " ")
+
+ val captured = StreamCapture(runCommand(args, cLogFile))
+ if (captured.result) genPass() else {
+ cLogFile appendAll captured.stderr
+ cLogFile appendAll captured.stdout
+ genFail("java compilation failed")
+ }
+ }
+
+  // Overridden in order to recursively get all sources that should be handed to
+  // the compiler. Otherwise only sources in the top dir are compiled - this works
+ // because the compiler is on the classpath.
+ override def sources(file: File): List[File] =
+ if (file.isDirectory)
+ file.listFiles.toList.flatMap { f =>
+ if (f.isDirectory) sources(f)
+ else if (f.isJavaOrScala) List(f)
+ else Nil
+ }
+ else List(file)
+
+ // Enable me to "fix" the depth issue - remove once completed
+ //override def compilationRounds(file: File): List[CompileRound] = {
+ // val srcs = sources(file) match {
+ // case Nil =>
+ // System.err.println {
+ // s"""|================================================================================
+ // |Warning! You attempted to compile sources from:
+ // | $file
+ // |but partest was unable to find any sources - uncomment DPConsoleRunner#sources
+ // |================================================================================""".stripMargin
+ // }
+ // List(new File("./examples/hello.scala")) // "just compile some crap" - Guillaume
+ // case xs =>
+ // xs
+ // }
+ // (groupedFiles(srcs) map mixedCompileGroup).flatten
+ //}
+
+ // FIXME: This is copy-pasted from nest.Runner where it is private
+ // Remove this once https://github.com/scala/scala-partest/pull/61 is merged
+  /** Runs the command, redirecting standard out and
+   * standard error to the output file.
+ */
+ def runCommand(args: Seq[String], outFile: File): Boolean = {
+ import scala.sys.process.{ Process, ProcessLogger }
+ //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
+ val pl = ProcessLogger(outFile)
+ val nonzero = 17 // rounding down from 17.3
+ def run: Int = {
+ val p = Process(args) run pl
+ try p.exitValue
+ catch {
+ case e: InterruptedException =>
+ NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
+ p.destroy
+ nonzero
+ case t: Throwable =>
+ NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
+ p.destroy
+ throw t
+ }
+ finally pl.close()
+ }
+ (pl buffer run) == 0
+ }
+
+ // override to provide default dotty flags from file in directory
+ override def flagsForCompilation(sources: List[File]): List[String] = {
+ val specificFlags = super.flagsForCompilation(sources)
+ if (specificFlags.isEmpty) defaultFlags
+ else specificFlags
+ }
+
+ val defaultFlags = {
+ val defaultFile = parentFile.listFiles.toList.find(_.getName == "__defaultFlags.flags")
+ defaultFile.map({ file =>
+ SFile(file).safeSlurp.map({ content => words(content).filter(_.nonEmpty) }).getOrElse(Nil)
+ }).getOrElse(Nil)
+ }
+
+  // override to add the check for the number of compilation errors if there's a
+ // target.nerr file
+ override def runNegTest() = runInContext {
+ import scala.reflect.internal.FatalError
+
+ sealed abstract class NegTestState
+ // Don't get confused, the neg test passes when compilation fails for at
+ // least one round (optionally checking the number of compiler errors and
+ // compiler console output)
+ case class CompFailed() extends NegTestState
+ // the neg test fails when all rounds return either of these:
+ case class CompFailedButWrongNErr(expected: String, found: String) extends NegTestState
+ case class CompFailedButWrongDiff() extends NegTestState
+ case class CompSucceeded() extends NegTestState
+
+ def nerrIsOk(reason: String) = {
+ val nerrFinder = """compilation failed with (\d+) errors""".r
+ reason match {
+ case nerrFinder(found) =>
+ SFile(FileOps(testFile) changeExtension "nerr").safeSlurp match {
+ case Some(exp) if (exp != found) => CompFailedButWrongNErr(exp, found)
+ case _ => CompFailed
+ }
+ case _ => CompFailed
+ }
+ }
+
+ // we keep the partest semantics where only one round needs to fail
+ // compilation, not all
+ val compFailingRounds =
+ compilationRounds(testFile)
+ .map { round =>
+ val ok = round.isOk
+ setLastState(if (ok) genPass else genFail("compilation failed"))
+ (round.result, ok)
+ }
+ .filter { case (_, ok) => !ok }
+
+ val failureStates = compFailingRounds.map({ case (result, _) => result match {
+ // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
+ case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => CompSucceeded
+ case Fail(_, reason, _) => if (diffIsOk) nerrIsOk(reason) else CompFailedButWrongDiff
+ case _ => if (diffIsOk) CompFailed else CompFailedButWrongDiff
+ }})
+
+ if (failureStates.exists({ case CompFailed => true; case _ => false })) {
+ true
+ } else {
+ val existsNerr = failureStates.exists({
+ case CompFailedButWrongNErr(exp, found) =>
+ nextTestActionFailing(s"wrong number of compilation errors, expected: $exp, found: $found")
+ true
+ case _ =>
+ false
+ })
+
+ if (existsNerr) false
+ else {
+ val existsDiff = failureStates.exists({
+ case CompFailedButWrongDiff() =>
+ nextTestActionFailing(s"output differs")
+ true
+ case _ =>
+ false
+ })
+ if (existsDiff) false
+ else nextTestActionFailing("expected compilation failure")
+ }
+ }
+ }
+
+ // override to change check file updating to original file, not generated
+ override def diffIsOk: Boolean = {
+ // always normalize the log first
+ normalizeLog()
+ val diff = currentDiff
+ // if diff is not empty, is update needed?
+ val updating: Option[Boolean] = (
+ if (diff == "") None
+ else Some(suiteRunner.updateCheck)
+ )
+ pushTranscript(s"diff $logFile $checkFile")
+ nextTestAction(updating) {
+ case Some(true) =>
+ val origCheck = SFile(checkFile.changeExtension("checksrc").fileLines(1))
+ NestUI.echo("Updating original checkfile " + origCheck)
+ origCheck writeAll file2String(logFile)
+ genUpdated()
+ case Some(false) =>
+ // Get a word-highlighted diff from git if we can find it
+ val bestDiff = if (updating.isEmpty) "" else {
+ if (checkFile.canRead)
+ gitDiff(logFile, checkFile) getOrElse {
+ s"diff $logFile $checkFile\n$diff"
+ }
+ else diff
+ }
+ pushTranscript(bestDiff)
+ genFail("output differs")
+ case None => genPass() // redundant default case
+ } getOrElse true
+ }
+
+ // override to add dotty and scala jars to classpath
+ override def extraClasspath =
+ suiteRunner.fileManager.asInstanceOf[DottyFileManager].extraJarList ::: super.extraClasspath
+
+ // override to keep class files if failed and delete clog if ok
+ override def cleanup = if (lastState.isOk) try {
+ logFile.delete
+ cLogFile.delete
+ Directory(outDir).deleteRecursively
+ } catch {
+ case t: Throwable =>
+ println("whhhhhhhhhhhhhhhhhhhhhhhhhhhaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaat")
+ throw t
+ }
+}
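Two conventions used by this runner are worth spelling out. First, as the comment in DPConsoleRunner.main notes, sbt hands all arguments over as a single string, and the extra jars for run tests arrive in one "-dottyJars" chunk that the jarFinder regex picks apart. A small illustration with made-up jar names:

    // Illustrative only: how the jarFinder regex splits a "-dottyJars" argument chunk.
    val jarFinder = """-dottyJars (\d*) (.*)""".r
    "-dottyJars 2 dotty-library.jar dotty-compiler.jar" match {
      case jarFinder(nr, jarString) =>
        val jars = jarString.split(" ").toList.take(nr.toInt)
        println(jars) // List(dotty-library.jar, dotty-compiler.jar)
    }

Second, per runNegTest and nerrIsOk above, a neg test may ship a companion .nerr file whose entire content is the expected error count as plain text (for example a file containing just "2"); if present, the reported "compilation failed with N errors" must match it.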
diff --git a/compiler/test/dotty/partest/DPDirectCompiler.scala b/compiler/test/dotty/partest/DPDirectCompiler.scala
new file mode 100644
index 000000000..410dac338
--- /dev/null
+++ b/compiler/test/dotty/partest/DPDirectCompiler.scala
@@ -0,0 +1,36 @@
+package dotty.partest
+
+import dotty.tools.dotc.reporting.ConsoleReporter
+import scala.tools.partest.{ TestState, nest }
+import java.io.{ File, PrintWriter, FileWriter }
+
+
+/* NOTE: Adapted from partest.DirectCompiler */
+class DPDirectCompiler(runner: DPTestRunner) extends nest.DirectCompiler(runner) {
+
+ override def compile(opts0: List[String], sources: List[File]): TestState = {
+ val clogFWriter = new FileWriter(runner.cLogFile.jfile, true)
+ val clogWriter = new PrintWriter(clogFWriter, true)
+ clogWriter.println("\ncompiling " + sources.mkString(" ") + "\noptions: " + opts0.mkString(" "))
+
+ try {
+ val processor =
+ if (opts0.exists(_.startsWith("#"))) dotty.tools.dotc.Bench else dotty.tools.dotc.Main
+ val clogger = new ConsoleReporter(writer = clogWriter)
+ val reporter = processor.process((sources.map(_.toString) ::: opts0).toArray, clogger)
+ if (!reporter.hasErrors) runner.genPass()
+ else {
+ clogWriter.println(reporter.summary)
+ runner.genFail(s"compilation failed with ${reporter.errorCount} errors")
+ }
+ } catch {
+ case t: Throwable =>
+ t.printStackTrace
+ t.printStackTrace(clogWriter)
+ runner.genCrash(t)
+ } finally {
+ clogFWriter.close
+ clogWriter.close
+ }
+ }
+}
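The compile method above is essentially a thin wrapper around invoking the dotty compiler in-process and translating the reporter's outcome into a partest TestState. A stripped-down sketch of that core call outside the partest machinery; the source path and options are placeholders:

    // Sketch of the in-process compilation DPDirectCompiler is built around.
    import dotty.tools.dotc.reporting.ConsoleReporter
    import java.io.PrintWriter

    val log      = new PrintWriter(Console.err, true)
    val reporter = dotty.tools.dotc.Main.process(Array("tests/pos/A.scala", "-d", "out"), new ConsoleReporter(writer = log))
    if (reporter.hasErrors) println(s"compilation failed with ${reporter.errorCount} errors")

The only twist in the real implementation is the Bench-vs-Main switch: if any option starts with "#", compilation goes through dotty.tools.dotc.Bench instead of Main.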
diff --git a/compiler/test/dotty/tools/ContextEscapeDetection.java b/compiler/test/dotty/tools/ContextEscapeDetection.java
new file mode 100644
index 000000000..6a47de5c6
--- /dev/null
+++ b/compiler/test/dotty/tools/ContextEscapeDetection.java
@@ -0,0 +1,36 @@
+package dotty.tools;
+
+import dotty.tools.dotc.core.Contexts;
+import org.junit.*;
+
+import java.lang.ref.WeakReference;
+import java.util.LinkedList;
+import java.util.List;
+
+public abstract class ContextEscapeDetection {
+ public static class TestContext{
+ public TestContext(WeakReference<Contexts.Context> context, String testName) {
+ this.context = context;
+ this.testName = testName;
+ }
+
+ public final WeakReference<Contexts.Context> context;
+ public final String testName;
+
+ }
+ public static final List<TestContext> contexts = new LinkedList<TestContext>();
+
+ public abstract Contexts.Context getCtx();
+
+ public abstract void clearCtx();
+
+ @Before
+ public synchronized void stealContext() {
+ contexts.add(new TestContext(new WeakReference<Contexts.Context>(this.getCtx()), this.getClass().getName()));
+ }
+
+ @After
+ public synchronized void clearContext() {
+ this.clearCtx();
+ }
+}
diff --git a/compiler/test/dotty/tools/ContextEscapeDetector.java b/compiler/test/dotty/tools/ContextEscapeDetector.java
new file mode 100644
index 000000000..e19fc5a64
--- /dev/null
+++ b/compiler/test/dotty/tools/ContextEscapeDetector.java
@@ -0,0 +1,108 @@
+package dotty.tools;
+
+import org.junit.runner.Result;
+import org.junit.runner.notification.RunListener;
+import org.junit.Assert;
+import java.lang.ref.WeakReference;
+
+public class ContextEscapeDetector extends RunListener {
+
+ //context can be captured by objects, eg NoDenotation
+ public static final int CONTEXTS_ALLOWED = 1;
+
+ @Override
+ public void testRunFinished(Result result) throws Exception {
+ if (contextsAlive() > CONTEXTS_ALLOWED) {
+ forceGCHeuristic0();
+ if (contextsAlive() > CONTEXTS_ALLOWED) {
+ forceGCHeuristic1();
+ if (contextsAlive() > CONTEXTS_ALLOWED) {
+ forceGCHeuristic2();
+ forceGCHeuristic1();
+ int contextAlive = contextsAlive();
+ if (contextAlive > CONTEXTS_ALLOWED) {
+ StringBuilder names = new StringBuilder();
+ for (ContextEscapeDetection.TestContext ref : ContextEscapeDetection.contexts) {
+ if (ref.context.get() != null) names.append(ref.testName).append(' ');
+ }
+ Assert.fail("Multiple contexts survived test suite: " + names.toString());
+ }
+ }
+ }
+ }
+ super.testRunFinished(result);
+ }
+
+ private static synchronized int contextsAlive() {
+ int count = 0;
+ for (ContextEscapeDetection.TestContext ref : ContextEscapeDetection.contexts) {
+ if (ref.context.get() != null) count++;
+ }
+ return count;
+ }
+
+ @SuppressWarnings("unused")
+ private static volatile Object o = null;
+
+ private static synchronized void forceGCHeuristic0() {
+ System.gc();
+ Runtime.getRuntime().gc();
+ System.gc();
+ Runtime.getRuntime().gc();
+ System.gc();
+ Runtime.getRuntime().gc();
+ System.gc();
+ Runtime.getRuntime().gc();
+ System.gc();
+ }
+
+ private static synchronized void forceGCHeuristic1() {
+ Object obj = new Object();
+ WeakReference<Object> ref = new WeakReference<>(obj);
+ obj = null;
+ while (ref.get() != null) {
+ System.gc();
+ }
+ }
+
+ private static synchronized void forceGCHeuristic2() {
+ try {
+      Object[] arr = new Object[1024]; // up to 8 GB
+ WeakReference<Object> ref = new WeakReference<>(arr);
+ o = arr; // make sure array isn't optimized away
+
+ Runtime runtime = Runtime.getRuntime();
+      // allocate memory until no more than 64MB is left
+ for (int i = 0; i < 1024 &&
+ runtime.totalMemory() != runtime.maxMemory() ||
+ runtime.freeMemory() < 1024 * 1024 * 64; i++) {
+ int[] data = new int[1024 * 1024]; // 8MB
+ for (int j = 0; j < 1024 * 1024; j++) {
+ data[j] = j; // force actual pages allocation
+ }
+ arr[i] = data;
+ }
+ o = null;
+ arr = new Object[128];
+ o = arr;
+ // allocate 1 more GB
+ for (int i = 0; i < 128; i++) {
+ int[] data = new int[1024 * 1024]; // 8MB
+ for (int j = 0; j < 1024 * 1024; j++) {
+ data[j] = j; // force actual pages allocation
+ }
+ arr[i] = data;
+ }
+ o = null;
+ arr = null;
+
+ forceGCHeuristic0();
+ while (ref.get() != null) {
+ System.gc();
+ }
+ } catch (OutOfMemoryError e) {
+ o = null;
+ // just swallow
+ }
+ }
+}
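ContextEscapeDetection (above) parks a WeakReference to each test's Context in a shared list, and this RunListener checks after the whole run, with several GC nudges, that at most CONTEXTS_ALLOWED of them are still reachable. The listener only has an effect if it is registered with the JUnit runner; how the build wires that up is not part of this diff, but driving JUnit programmatically it would look roughly like this:

    // Sketch: registering the leak detector when running JUnit programmatically.
    import org.junit.runner.JUnitCore

    val core = new JUnitCore
    core.addListener(new dotty.tools.ContextEscapeDetector)
    core.run(classOf[dotty.tools.backend.jvm.TestBCode]) // any test class extending ContextEscapeDetection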
diff --git a/compiler/test/dotty/tools/DottyTest.scala b/compiler/test/dotty/tools/DottyTest.scala
new file mode 100644
index 000000000..77dc97bec
--- /dev/null
+++ b/compiler/test/dotty/tools/DottyTest.scala
@@ -0,0 +1,73 @@
+package dotty
+package tools
+
+import dotc.core._
+import dotc.core.Contexts._
+import dotc.core.Symbols._
+import dotc.core.Flags._
+import Types._, Symbols._, Decorators._
+import dotc.printing.Texts._
+import dotc.reporting.ConsoleReporter
+import dotc.core.Decorators._
+import dotc.ast.tpd
+import dotc.Compiler
+
+import dotc.core.Phases.Phase
+
+class DottyTest extends ContextEscapeDetection{
+
+ dotc.parsing.Scanners // initialize keywords
+
+ implicit var ctx: Contexts.Context = {
+ val base = new ContextBase {}
+ import base.settings._
+ val ctx = base.initialCtx.fresh
+ ctx.setSetting(ctx.settings.encoding, "UTF8")
+ ctx.setSetting(
+ ctx.settings.classpath,
+ "../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar"
+ )
+ // when classpath is changed in ctx, we need to re-initialize to get the
+ // correct classpath from PathResolver
+ base.initialize()(ctx)
+ ctx
+ }
+
+ override def getCtx: Context = ctx
+ override def clearCtx() = {
+ ctx = null
+ }
+
+ private def compilerWithChecker(phase: String)(assertion:(tpd.Tree, Context) => Unit) = new Compiler {
+ override def phases = {
+ val allPhases = super.phases
+ val targetPhase = allPhases.flatten.find(p => p.phaseName == phase).get
+ val groupsBefore = allPhases.takeWhile(x => !x.contains(targetPhase))
+ val lastGroup = allPhases.find(x => x.contains(targetPhase)).get.takeWhile(x => !(x eq targetPhase))
+ val checker = new Phase {
+ def phaseName = "assertionChecker"
+ override def run(implicit ctx: Context): Unit = assertion(ctx.compilationUnit.tpdTree, ctx)
+ }
+ val lastGroupAppended = List(lastGroup ::: targetPhase :: Nil)
+
+ groupsBefore ::: lastGroupAppended ::: List(List(checker))
+ }
+ }
+
+ def checkCompile(checkAfterPhase: String, source: String)(assertion: (tpd.Tree, Context) => Unit): Unit = {
+ val c = compilerWithChecker(checkAfterPhase)(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(source)
+ }
+
+ def checkCompile(checkAfterPhase: String, sources:List[String])(assertion:(tpd.Tree, Context) => Unit): Unit = {
+ val c = compilerWithChecker(checkAfterPhase)(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(sources)
+ }
+
+ def methType(names: String*)(paramTypes: Type*)(resultType: Type = defn.UnitType) =
+ MethodType(names.toList map (_.toTermName), paramTypes.toList, resultType)
+}
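checkCompile compiles the given source and then runs the supplied assertion as an extra "assertionChecker" phase scheduled right after the named phase. A minimal hypothetical test built on it ("frontend" is the phase name also used by DottyTypeStealer below):

    // Hypothetical example test built on DottyTest.checkCompile.
    import org.junit.Test
    import dotty.tools.dotc.ast.tpd
    import dotty.tools.dotc.core.Contexts.Context

    class HelloCompileTest extends dotty.tools.DottyTest {
      @Test def producesTypedTree =
        checkCompile("frontend", "class A { val x = 1 }") { (tree: tpd.Tree, context: Context) =>
          implicit val ctx = context
          assert(!tree.isEmpty, "expected a typed tree after the frontend phase")
        }
    }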
diff --git a/compiler/test/dotty/tools/DottyTypeStealer.scala b/compiler/test/dotty/tools/DottyTypeStealer.scala
new file mode 100644
index 000000000..819f19d25
--- /dev/null
+++ b/compiler/test/dotty/tools/DottyTypeStealer.scala
@@ -0,0 +1,32 @@
+package dotty.tools
+
+import dotc.ast.tpd
+import dotc.core.Names._
+import dotc.ast.tpd._
+import dotc.core.Contexts.Context
+import dotc.core.Decorators._
+import dotc.core.Types.Type
+
+object DottyTypeStealer {
+ def stealType(source: String, typeStrings: String*): (Context, List[Type]) = {
+ val dummyName = "x_x_x"
+ val vals = typeStrings.zipWithIndex.map{case (s, x)=> s"val ${dummyName}$x: $s = ???"}.mkString("\n")
+ val gatheredSource = s" ${source}\n object A$dummyName {$vals}"
+ var scontext : Context = null
+ var tp: List[Type] = null
+ new DottyTest().checkCompile("frontend",gatheredSource) {
+ (tree, context) =>
+ implicit val ctx = context
+ val findValDef: (List[ValDef], tpd.Tree) => List[ValDef] =
+ (acc , tree) => { tree match {
+ case t: ValDef if t.name.startsWith(dummyName.toTermName) => t :: acc
+ case _ => acc
+ }
+ }
+ val d = new DeepFolder[List[ValDef]](findValDef).foldOver(Nil, tree)
+ tp = d.map(_.tpe.widen).reverse
+ scontext = context
+ }
+ (scontext, tp)
+ }
+}
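In other words, stealType wraps each type string in a synthetic val, compiles them together with the provided source, and hands back the typer Context plus the widened dotc Types in the same order. A short usage sketch; the type strings are arbitrary examples:

    // Grab dotc Types for two type strings; keep the returned Context implicit for further inspection.
    val (stolenCtx, tpes) = dotty.tools.DottyTypeStealer.stealType("class Dummy", "List[Int]", "Int => Int")
    implicit val ctx = stolenCtx
    // tpes(0) is the Type of List[Int], tpes(1) the Type of Int => Int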
diff --git a/compiler/test/dotty/tools/ShowClassTests.scala b/compiler/test/dotty/tools/ShowClassTests.scala
new file mode 100644
index 000000000..3c730b716
--- /dev/null
+++ b/compiler/test/dotty/tools/ShowClassTests.scala
@@ -0,0 +1,154 @@
+package dotty.tools
+
+import dotc.core._
+import dotc.core.Contexts._
+import dotc.core.Symbols._
+import dotc.core.Flags._
+import dotc.core.Types._
+import dotc.printing.Texts._
+import NameOps._
+import dotc.core.Decorators._
+import org.junit.Test
+
+class ShowClassTests extends DottyTest {
+ ctx = {
+ val base = new ContextBase
+ import base.settings._
+ val ctx = base.initialCtx.fresh
+ ctx.setSetting(ctx.settings.encoding, "UTF8")
+ ctx.setSetting(
+ ctx.settings.classpath,
+ "../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar" +
+ ":../interfaces/target/dotty-interfaces-0.1-SNAPSHOT.jar"
+ )
+ base.initialize()(ctx)
+ ctx
+ }
+
+ def debug_println(msg: => Any) = {
+ if (sys.props.isDefinedAt("test.ShowClassTests.verbose"))
+ println(msg)
+ }
+
+ private val blackList = List(
+ // the following classes cannot be read correctly because they
+ // contain illegally pickled @throws annotations
+ "scala.actors.remote.Proxy",
+ "scala.actors.remote.Serializer",
+ "scala.actors.remote.JavaSerializer",
+ "scala.build.genprod",
+ "scala.tools.nsc.symtab.classfile.AbstractFileReader",
+ "scala.remoting.Channel",
+ "scala.runtime.remoting.RegistryDelegate",
+ "scala.concurrent.Future",
+ "scala.concurrent.impl.Future",
+ "scala.concurrent.Await",
+ "scala.concurrent.Awaitable",
+ "scala.concurrent.impl.Promise",
+ // the following class cannot be read because it does not exist anymore
+ "scala.reflect.macros.Context",
+ // the following packages and classes cannot be read because
+ // they refer to external libraries which are not available
+ // (apache.ant, usually)
+ "scala.tools.ant",
+ "scala.tools.partest.PartestTask",
+ "dotty.tools.dotc.core.pickling.AbstractFileReader")
+
+ def doTwice(test: Context => Unit)(implicit ctx: Context): Unit = {
+ test(ctx.fresh.setSetting(ctx.base.settings.debug, true))
+ test(ctx.fresh.setSetting(ctx.base.settings.debug, false))
+ }
+
+ def showPackage(pkg: TermSymbol)(implicit ctx: Context): Unit = {
+ val path = pkg.fullName.toString
+ if (blackList contains path)
+ debug_println(s"blacklisted package: $path")
+ else {
+ for (
+ sym <- pkg.info.decls if sym.owner == pkg.moduleClass && !(sym.name contains '$')
+ ) {
+ debug_println(s"showing $sym in ${pkg.fullName}")
+ if (sym is PackageVal) showPackage(sym.asTerm)
+ else if (sym.isClass && !(sym is Module)) showClass(sym)
+ else if (sym is ModuleVal) showClass(sym.moduleClass)
+ }
+ }
+ }
+
+ def showPackage(path: String, expectedStubs: Int)(implicit ctx: Context): Unit = doTwice { implicit ctx =>
+ showPackage(ctx.requiredPackage(path))
+ val nstubs = Symbols.stubs.length
+ debug_println(s"$nstubs stubs")
+ assert(nstubs <= expectedStubs, s"stubs found: $nstubs, expected: $expectedStubs\nstubs: ${Symbols.stubs.mkString(",")}")
+ }
+
+ def showClass(cls: Symbol)(implicit ctx: Context) = {
+ val path = cls.fullName.stripModuleClassSuffix.toString
+ if (blackList contains path)
+ debug_println(s"blacklisted: $path")
+ else {
+ debug_println(s"showing $path -> ${cls.denot}")
+ val cinfo = cls.info
+ val infoStr = if (cinfo.exists) cinfo.show else " is missing"
+ debug_println("======================================")
+ debug_println(cls.show + infoStr)
+ }
+ }
+
+ def showClasses(path: String)(implicit ctx: Context): Unit = doTwice { implicit ctx =>
+ debug_println(s"showing file $path")
+ val cls = ctx.requiredClass(path.toTypeName)
+ showClass(cls)
+ showClass(cls.linkedClass)
+ }
+/*
+ @Test
+ def loadSimpleClasses() = {
+ showClasses("scala.Array")
+ showClasses("scala.math.Ordering")
+ }
+
+ @Test
+ def loadMoreClasses() = {
+ showClasses("scala.collection.JavaConversions")
+ showClasses("scala.collection.convert.Wrappers")
+ showClasses("scala.collection.mutable.WeakHashMap")
+ showClasses("scala.collection.GenIterable")
+ showClasses("scala.collection.Traversable")
+ showClasses("scala.collection.LinearSeqLike")
+ showClasses("scala.collection.immutable.List")
+ showClasses("scala.collection.convert.Wrappers")
+ showClasses("scala.collection.generic.package")
+ showClasses("scala.collection.MapLike")
+ showClasses("scala.Function1")
+ }
+
+ @Test
+ def loadScalaReflect() = {
+ showPackage(ctx.requiredPackage("scala.reflect"))
+ }
+
+ @Test
+ def loadScalaCollection() = {
+ showPackage(ctx.requiredPackage("scala.collection"))
+ }
+*/
+ /*@Test
+ def showScala() = {
+ showPackage("scala", 1)
+ } */
+ // ping @odersky dotty.tools.dotc.core.Types$CyclicReference: cyclic reference involving class AnyVals, took 1.303 sec
+ //
+
+ @Test
+ def loadDotty() = {
+ showPackage("dotty", 5)
+ }
+
+
+ /*
+ * @Test
+ def showReflectAliases() = { // tests for cycles during findMember
+ showClasses("scala.reflect.macros.runtime.Aliases")
+ }*/
+}
diff --git a/compiler/test/dotty/tools/TypeStealer.scala b/compiler/test/dotty/tools/TypeStealer.scala
new file mode 100644
index 000000000..ccaf2d41e
--- /dev/null
+++ b/compiler/test/dotty/tools/TypeStealer.scala
@@ -0,0 +1,21 @@
+package dotty.tools
+
+import scala.tools.nsc.interpreter._
+import scala.tools.nsc.Settings
+
+object TypeStealer {
+ def main(args: Array[String]): Unit = {
+ def repl = new ILoop {}
+
+ val settings = new Settings
+ settings.Yreplsync.value = true
+
+ //use when launching normally outside SBT
+ settings.usejavacp.value = true
+
+ //an alternative to 'usejavacp' setting, when launching from within SBT
+ //settings.embeddedDefaults[Repl.type]
+
+ repl.process(settings)
+ }
+}
diff --git a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala
new file mode 100644
index 000000000..499037c47
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala
@@ -0,0 +1,256 @@
+package dotty.tools.backend.jvm
+
+import scala.tools.asm
+import asm._
+import asm.tree._
+import scala.collection.JavaConverters._
+
+/** Makes using ASM from tests more convenient.
+ *
+ * Wraps ASM instructions in case classes so that equals and toString work
+ * for the purpose of bytecode diffing and pretty printing.
+ */
+object ASMConverters {
+ import asm.{tree => t}
+
+ /**
+ * Transform the instructions of an ASM Method into a list of [[Instruction]]s.
+ */
+ def instructionsFromMethod(meth: t.MethodNode): List[Instruction] = new AsmToScala(meth).instructions
+
+ def convertMethod(meth: t.MethodNode): Method = new AsmToScala(meth).method
+
+ implicit class RichInstructionLists(val self: List[Instruction]) extends AnyVal {
+ def === (other: List[Instruction]) = equivalentBytecode(self, other)
+
+ def dropLinesFrames = self.filterNot(i => i.isInstanceOf[LineNumber] || i.isInstanceOf[FrameEntry])
+
+ private def referencedLabels(instruction: Instruction): Set[Instruction] = instruction match {
+ case Jump(op, label) => Set(label)
+ case LookupSwitch(op, dflt, keys, labels) => (dflt :: labels).toSet
+ case TableSwitch(op, min, max, dflt, labels) => (dflt :: labels).toSet
+ case LineNumber(line, start) => Set(start)
+ case _ => Set.empty
+ }
+
+ def dropStaleLabels = {
+ val definedLabels: Set[Instruction] = self.filter(_.isInstanceOf[Label]).toSet
+ val usedLabels: Set[Instruction] = self.flatMap(referencedLabels)(collection.breakOut)
+ self.filterNot(definedLabels diff usedLabels)
+ }
+
+ def dropNonOp = dropLinesFrames.dropStaleLabels
+
+ def summary: List[Any] = dropNonOp map {
+ case i: Invoke => i.name
+ case i => i.opcode
+ }
+
+ def summaryText: String = {
+ def comment(i: Instruction) = i match {
+ case j: Jump => s" /*${j.label.offset}*/"
+ case l: Label => s" /*${l.offset}*/"
+ case _ => ""
+ }
+ dropNonOp.map({
+ case i: Invoke => s""""${i.name}""""
+ case ins => opcodeToString(ins.opcode, ins.opcode) + comment(ins)
+ }).mkString("List(", ", ", ")")
+ }
+ }
+
+ def opcodeToString(op: Int, default: Any = "?"): String = {
+ import scala.tools.asm.util.Printer.OPCODES
+ if (OPCODES.isDefinedAt(op)) OPCODES(op) else default.toString
+ }
+
+ sealed abstract class Instruction extends Product {
+ def opcode: Int
+
+ // toString such that the first field, "opcode: Int", is printed textually.
+ final override def toString() = {
+ val printOpcode = opcode != -1
+ productPrefix + (
+ if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1)
+ else productIterator
+ ).mkString("(", ", ", ")")
+ }
+ }
+
+ case class Method(instructions: List[Instruction], handlers: List[ExceptionHandler], localVars: List[LocalVariable])
+
+ case class Field (opcode: Int, owner: String, name: String, desc: String) extends Instruction
+ case class Incr (opcode: Int, `var`: Int, incr: Int) extends Instruction
+ case class Op (opcode: Int) extends Instruction
+ case class IntOp (opcode: Int, operand: Int) extends Instruction
+ case class Jump (opcode: Int, label: Label) extends Instruction
+ case class Ldc (opcode: Int, cst: Any) extends Instruction
+ case class LookupSwitch (opcode: Int, dflt: Label, keys: List[Int], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: Int, min: Int, max: Int, dflt: Label, labels: List[Label]) extends Instruction
+ case class Invoke (opcode: Int, owner: String, name: String, desc: String, itf: Boolean) extends Instruction
+ case class InvokeDynamic(opcode: Int, name: String, desc: String, bsm: MethodHandle, bsmArgs: List[AnyRef]) extends Instruction
+ case class NewArray (opcode: Int, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: Int, desc: String) extends Instruction
+ case class VarOp (opcode: Int, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: Int = -1 }
+ case class FrameEntry (`type`: Int, local: List[Any], stack: List[Any]) extends Instruction { def opcode: Int = -1 }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: Int = -1 }
+
+ case class MethodHandle(tag: Int, owner: String, name: String, desc: String)
+
+ case class ExceptionHandler(start: Label, end: Label, handler: Label, desc: Option[String])
+ case class LocalVariable(name: String, desc: String, signature: Option[String], start: Label, end: Label, index: Int)
+
+ class AsmToScala(asmMethod: t.MethodNode) {
+
+ def instructions: List[Instruction] = asmMethod.instructions.iterator.asScala.toList map apply
+
+ def method: Method = Method(instructions, convertHandlers(asmMethod), convertLocalVars(asmMethod))
+
+ private def labelIndex(l: t.LabelNode): Int = asmMethod.instructions.indexOf(l)
+
+ private def op(i: t.AbstractInsnNode): Int = i.getOpcode
+
+ private def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+
+ // Heterogeneous List[Any] is used in FrameNode: type information about locals / stack values
+  // is stored in a List[Any] (Integer, String or LabelNode), see Javadoc of MethodNode#visitFrame.
+ // Opcodes (eg Opcodes.INTEGER) and Reference types (eg "java/lang/Object") are returned unchanged,
+ // LabelNodes are mapped to their LabelEntry.
+ private def mapOverFrameTypes(is: List[Any]): List[Any] = is map {
+ case i: t.LabelNode => applyLabel(i)
+ case x => x
+ }
+
+ // avoids some casts
+ private def applyLabel(l: t.LabelNode) = this(l: t.AbstractInsnNode).asInstanceOf[Label]
+
+ private def apply(x: t.AbstractInsnNode): Instruction = x match {
+ case i: t.FieldInsnNode => Field (op(i), i.owner, i.name, i.desc)
+ case i: t.IincInsnNode => Incr (op(i), i.`var`, i.incr)
+ case i: t.InsnNode => Op (op(i))
+ case i: t.IntInsnNode => IntOp (op(i), i.operand)
+ case i: t.JumpInsnNode => Jump (op(i), applyLabel(i.label))
+ case i: t.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: t.LookupSwitchInsnNode => LookupSwitch (op(i), applyLabel(i.dflt), lst(i.keys) map (x => x: Int), lst(i.labels) map applyLabel)
+ case i: t.TableSwitchInsnNode => TableSwitch (op(i), i.min, i.max, applyLabel(i.dflt), lst(i.labels) map applyLabel)
+ case i: t.MethodInsnNode => Invoke (op(i), i.owner, i.name, i.desc, i.itf)
+ case i: t.InvokeDynamicInsnNode => InvokeDynamic(op(i), i.name, i.desc, convertMethodHandle(i.bsm), convertBsmArgs(i.bsmArgs))
+ case i: t.MultiANewArrayInsnNode => NewArray (op(i), i.desc, i.dims)
+ case i: t.TypeInsnNode => TypeOp (op(i), i.desc)
+ case i: t.VarInsnNode => VarOp (op(i), i.`var`)
+ case i: t.LabelNode => Label (labelIndex(i))
+ case i: t.FrameNode => FrameEntry (i.`type`, mapOverFrameTypes(lst(i.local)), mapOverFrameTypes(lst(i.stack)))
+ case i: t.LineNumberNode => LineNumber (i.line, applyLabel(i.start))
+ }
+
+ private def convertBsmArgs(a: Array[Object]): List[Object] = a.map({
+ case h: asm.Handle => convertMethodHandle(h)
+    case x => x // can be: Class, method Type, primitive constant
+ })(collection.breakOut)
+
+ private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc)
+
+ private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = {
+ method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut)
+ }
+
+ private def convertLocalVars(method: t.MethodNode): List[LocalVariable] = {
+ method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index))(collection.breakOut)
+ }
+ }
+
+ import collection.mutable.{Map => MMap}
+
+ /**
+ * Bytecode is equal modulo local variable numbering and label numbering.
+ */
+ def equivalentBytecode(as: List[Instruction], bs: List[Instruction], varMap: MMap[Int, Int] = MMap(), labelMap: MMap[Int, Int] = MMap()): Boolean = {
+ def same(v1: Int, v2: Int, m: MMap[Int, Int]) = {
+ if (m contains v1) m(v1) == v2
+ else if (m.valuesIterator contains v2) false // v2 is already associated with some different value v1
+ else { m(v1) = v2; true }
+ }
+ def sameVar(v1: Int, v2: Int) = same(v1, v2, varMap)
+ def sameLabel(l1: Label, l2: Label) = same(l1.offset, l2.offset, labelMap)
+ def sameLabels(ls1: List[Label], ls2: List[Label]) = (ls1 corresponds ls2)(sameLabel)
+
+ def sameFrameTypes(ts1: List[Any], ts2: List[Any]) = (ts1 corresponds ts2) {
+ case (t1: Label, t2: Label) => sameLabel(t1, t2)
+ case (x, y) => x == y
+ }
+
+ if (as.isEmpty) bs.isEmpty
+ else if (bs.isEmpty) false
+ else ((as.head, bs.head) match {
+ case (VarOp(op1, v1), VarOp(op2, v2)) => op1 == op2 && sameVar(v1, v2)
+ case (Incr(op1, v1, inc1), Incr(op2, v2, inc2)) => op1 == op2 && sameVar(v1, v2) && inc1 == inc2
+
+ case (l1 @ Label(_), l2 @ Label(_)) => sameLabel(l1, l2)
+ case (Jump(op1, l1), Jump(op2, l2)) => op1 == op2 && sameLabel(l1, l2)
+ case (LookupSwitch(op1, l1, keys1, ls1), LookupSwitch(op2, l2, keys2, ls2)) => op1 == op2 && sameLabel(l1, l2) && keys1 == keys2 && sameLabels(ls1, ls2)
+ case (TableSwitch(op1, min1, max1, l1, ls1), TableSwitch(op2, min2, max2, l2, ls2)) => op1 == op2 && min1 == min2 && max1 == max2 && sameLabel(l1, l2) && sameLabels(ls1, ls2)
+ case (LineNumber(line1, l1), LineNumber(line2, l2)) => line1 == line2 && sameLabel(l1, l2)
+ case (FrameEntry(tp1, loc1, stk1), FrameEntry(tp2, loc2, stk2)) => tp1 == tp2 && sameFrameTypes(loc1, loc2) && sameFrameTypes(stk1, stk2)
+
+    // this case needs to go after the ones above. For example, Label(1) may not equal Label(1)
+    // if the left-hand 1 was previously associated with a different right-hand index.
+ case (a, b) if a == b => true
+
+ case _ => false
+ }) && equivalentBytecode(as.tail, bs.tail, varMap, labelMap)
+ }
+
+ def applyToMethod(method: t.MethodNode, instructions: List[Instruction]): Unit = {
+ val asmLabel = createLabelNodes(instructions)
+ instructions.foreach(visitMethod(method, _, asmLabel))
+ }
+
+ /**
+ * Convert back a [[Method]] to ASM land. The code is emitted into the parameter `asmMethod`.
+ */
+ def applyToMethod(asmMethod: t.MethodNode, method: Method): Unit = {
+ val asmLabel = createLabelNodes(method.instructions)
+ method.instructions.foreach(visitMethod(asmMethod, _, asmLabel))
+ method.handlers.foreach(h => asmMethod.visitTryCatchBlock(asmLabel(h.start), asmLabel(h.end), asmLabel(h.handler), h.desc.orNull))
+ method.localVars.foreach(v => asmMethod.visitLocalVariable(v.name, v.desc, v.signature.orNull, asmLabel(v.start), asmLabel(v.end), v.index))
+ }
+
+ private def createLabelNodes(instructions: List[Instruction]): Map[Label, asm.Label] = {
+ val labels = instructions collect {
+ case l: Label => l
+ }
+ assert(labels.distinct == labels, s"Duplicate labels in: $labels")
+ labels.map(l => (l, new asm.Label())).toMap
+ }
+
+ private def frameTypesToAsm(l: List[Any], asmLabel: Map[Label, asm.Label]): List[Object] = l map {
+ case l: Label => asmLabel(l)
+ case x => x.asInstanceOf[Object]
+ }
+
+ def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc)
+ def unconvertBsmArgs(a: List[Object]): Array[Object] = a.map({
+ case h: MethodHandle => unconvertMethodHandle(h)
+ case o => o
+ })(collection.breakOut)
+
+ private def visitMethod(method: t.MethodNode, instruction: Instruction, asmLabel: Map[Label, asm.Label]): Unit = instruction match {
+ case Field(op, owner, name, desc) => method.visitFieldInsn(op, owner, name, desc)
+ case Incr(op, vr, incr) => method.visitIincInsn(vr, incr)
+ case Op(op) => method.visitInsn(op)
+ case IntOp(op, operand) => method.visitIntInsn(op, operand)
+ case Jump(op, label) => method.visitJumpInsn(op, asmLabel(label))
+ case Ldc(op, cst) => method.visitLdcInsn(cst)
+ case LookupSwitch(op, dflt, keys, labels) => method.visitLookupSwitchInsn(asmLabel(dflt), keys.toArray, (labels map asmLabel).toArray)
+ case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray: _*)
+ case Invoke(op, owner, name, desc, itf) => method.visitMethodInsn(op, owner, name, desc, itf)
+ case InvokeDynamic(op, name, desc, bsm, bsmArgs) => method.visitInvokeDynamicInsn(name, desc, unconvertMethodHandle(bsm), unconvertBsmArgs(bsmArgs))
+ case NewArray(op, desc, dims) => method.visitMultiANewArrayInsn(desc, dims)
+ case TypeOp(op, desc) => method.visitTypeInsn(op, desc)
+ case VarOp(op, vr) => method.visitVarInsn(op, vr)
+ case l: Label => method.visitLabel(asmLabel(l))
+ case FrameEntry(tp, local, stack) => method.visitFrame(tp, local.length, frameTypesToAsm(local, asmLabel).toArray, stack.length, frameTypesToAsm(stack, asmLabel).toArray)
+ case LineNumber(line, start) => method.visitLineNumber(line, asmLabel(start))
+ }
+}
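As the equivalentBytecode docstring says, comparison is modulo local-variable and label numbering, so two methods that differ only in register or label indices still compare equal. A sketch of how a test might use this API, assuming MethodNodes m1 and m2 were obtained elsewhere (e.g. via loadClassNode and getMethod in DottyBytecodeTest):

    // Sketch: comparing two methods' instruction lists with ASMConverters.
    import dotty.tools.backend.jvm.ASMConverters._

    def assertEquivalent(m1: scala.tools.asm.tree.MethodNode, m2: scala.tools.asm.tree.MethodNode): Unit = {
      val isa = instructionsFromMethod(m1)
      val isb = instructionsFromMethod(m2)
      assert(isa === isb, s"bytecode differs:\n${isa.summaryText}\n${isb.summaryText}") // === delegates to equivalentBytecode
    }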
diff --git a/compiler/test/dotty/tools/backend/jvm/AsmNode.scala b/compiler/test/dotty/tools/backend/jvm/AsmNode.scala
new file mode 100644
index 000000000..ac3f34258
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/AsmNode.scala
@@ -0,0 +1,61 @@
+package dotty.tools.backend.jvm
+
+import java.lang.reflect.Modifier
+import scala.tools.asm
+import asm._
+import asm.tree._
+import scala.collection.JavaConverters._
+
+sealed trait AsmNode[+T] {
+ def node: T
+ def access: Int
+ def desc: String
+ def name: String
+ def signature: String
+ def attrs: List[Attribute]
+ def visibleAnnotations: List[AnnotationNode]
+ def invisibleAnnotations: List[AnnotationNode]
+ def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+ def erasedCharacteristics = f"$name%15s $desc%-30s$accessString"
+
+ private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
+ private def sigString = if (signature == null) "" else " " + signature
+ override def toString = characteristics
+}
+
+object AsmNode {
+ type AsmMethod = AsmNode[MethodNode]
+ type AsmField = AsmNode[FieldNode]
+ type AsmMember = AsmNode[_]
+
+ implicit class ClassNodeOps(val node: ClassNode) {
+ def fieldsAndMethods: List[AsmMember] = {
+ val xs: List[AsmMember] = (
+ node.methods.asScala.toList.map(x => (x: AsmMethod))
+ ++ node.fields.asScala.toList.map(x => (x: AsmField))
+ )
+ xs sortBy (_.characteristics)
+ }
+ }
+ implicit class AsmMethodNode(val node: MethodNode) extends AsmNode[MethodNode] {
+ def access: Int = node.access
+ def desc: String = node.desc
+ def name: String = node.name
+ def signature: String = node.signature
+ def attrs: List[Attribute] = node.attrs.asScala.toList
+ def visibleAnnotations: List[AnnotationNode] = node.visibleAnnotations.asScala.toList
+ def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+ }
+ implicit class AsmFieldNode(val node: FieldNode) extends AsmNode[FieldNode] {
+ def access: Int = node.access
+ def desc: String = node.desc
+ def name: String = node.name
+ def signature: String = node.signature
+ def attrs: List[Attribute] = node.attrs.asScala.toList
+ def visibleAnnotations: List[AnnotationNode] = node.visibleAnnotations.asScala.toList
+ def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+ }
+
+ def apply(node: MethodNode): AsmMethodNode = new AsmMethodNode(node)
+ def apply(node: FieldNode): AsmFieldNode = new AsmFieldNode(node)
+}
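AsmNode gives fields and methods one common shape so class members can be listed and compared by a single "characteristics" string (name, descriptor, access flags, signature). A small sketch using the implicit ClassNodeOps:

    // Sketch: dumping all members of a loaded ClassNode, sorted, one characteristics line each.
    import dotty.tools.backend.jvm.AsmNode._

    def dumpMembers(cls: scala.tools.asm.tree.ClassNode): Unit =
      cls.fieldsAndMethods.foreach(m => println(m.characteristics))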
diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala
new file mode 100644
index 000000000..fc9853691
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTest.scala
@@ -0,0 +1,208 @@
+package dotty.tools
+package backend.jvm
+
+import dotc.core.Contexts.{Context, ContextBase}
+import dotc.core.Phases.Phase
+import dotc.Compiler
+
+import scala.reflect.io.{VirtualDirectory => Directory}
+import scala.tools.asm
+import asm._
+import asm.tree._
+import scala.collection.JavaConverters._
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.tree._
+import java.io.{File => JFile, InputStream}
+
+class TestGenBCode(val outDir: String) extends GenBCode {
+ override def phaseName: String = "testGenBCode"
+ val virtualDir = new Directory(outDir, None)
+ override def outputDir(implicit ctx: Context) = virtualDir
+}
+
+trait DottyBytecodeTest extends DottyTest {
+ import AsmNode._
+ import ASMConverters._
+
+ protected object Opcode {
+ val newarray = 188
+ val anewarray = 189
+ val multianewarray = 197
+
+ val boolean = 4
+ val char = 5
+ val float = 6
+ val double = 7
+ val byte = 8
+ val short = 9
+ val int = 10
+ val long = 11
+
+ val boxedUnit = "scala/runtime/BoxedUnit"
+ val javaString = "java/lang/String"
+ }
+
+ private def bCodeCheckingComp(phase: TestGenBCode)(check: Directory => Unit) =
+ new Compiler {
+ override def phases = {
+ val updatedPhases = {
+ def replacePhase: Phase => Phase =
+ { p => if (p.phaseName == "genBCode") phase else p }
+
+ for (phaseList <- super.phases) yield phaseList.map(replacePhase)
+ }
+
+ val checkerPhase = List(List(new Phase {
+ def phaseName = "assertionChecker"
+ override def run(implicit ctx: Context): Unit =
+ check(phase.virtualDir)
+ }))
+
+ updatedPhases ::: checkerPhase
+ }
+ }
+
+ private def outPath(obj: Any) =
+ "/genBCodeTest" + math.abs(obj.hashCode) + System.currentTimeMillis
+
+ /** Checks source code from raw string */
+ def checkBCode(source: String)(assertion: Directory => Unit) = {
+ val comp = bCodeCheckingComp(new TestGenBCode(outPath(source)))(assertion)
+ comp.rootContext(ctx)
+ comp.newRun.compile(source)
+ }
+
+ /** Checks actual _files_ referenced in `sources` list */
+ def checkBCode(sources: List[String])(assertion: Directory => Unit) = {
+ val comp = bCodeCheckingComp(new TestGenBCode(outPath(sources)))(assertion)
+ comp.rootContext(ctx)
+ comp.newRun.compile(sources)
+ }
+
+ protected def loadClassNode(input: InputStream, skipDebugInfo: Boolean = true): ClassNode = {
+ val cr = new ClassReader(input)
+ val cn = new ClassNode()
+ cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+ cn
+ }
+
+ protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+ classNode.methods.asScala.find(_.name == name) getOrElse
+ sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+ def diffInstructions(isa: List[Instruction], isb: List[Instruction]): String = {
+ val len = Math.max(isa.length, isb.length)
+ val sb = new StringBuilder
+ if (len > 0 ) {
+ val width = isa.map(_.toString.length).max
+ val lineWidth = len.toString.length
+ (1 to len) foreach { line =>
+ val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+ val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+ val a = isaPadded(line-1)
+ val b = isbPadded(line-1)
+
+ sb append (s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b\n""")
+ }
+ }
+ sb.toString
+ }
+
+ /**************************** Comparison Methods ****************************/
+ def verifySwitch(method: MethodNode, shouldFail: Boolean = false, debug: Boolean = false): Boolean = {
+ val instructions = instructionsFromMethod(method)
+
+ val succ = instructions
+ .collect {
+ case x: TableSwitch => x
+ case x: LookupSwitch => x
+ }
+ .length > 0
+
+ if (debug || !succ && !shouldFail || succ && shouldFail)
+ instructions.foreach(Console.err.println)
+
+ succ && !shouldFail || shouldFail && !succ
+ }
+
+ def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+ val isa = instructionsFromMethod(methA)
+ val isb = instructionsFromMethod(methB)
+ assert(isa == isb, s"Bytecode wasn't same:\n${diffInstructions(isa, isb)}")
+ }
+
+ def similarBytecode(
+ methA: MethodNode,
+ methB: MethodNode,
+ similar: (List[Instruction], List[Instruction]) => Boolean
+ ) = {
+ val isa = instructionsFromMethod(methA)
+ val isb = instructionsFromMethod(methB)
+ assert(
+ similar(isa, isb),
+ s"""|Bytecode wasn't similar according to the provided predicate:
+ |${diffInstructions(isa, isb)}""".stripMargin)
+ }
+
+ def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.characteristics)
+
+ /**
+ * Same as sameMethodAndFieldSignatures, but ignoring generic signatures.
+ * This allows for methods which receive the same descriptor but differing
+ * generic signatures. In particular, this happens with value classes, which
+ * get a generic signature where a method written in terms of the underlying
+ * values does not.
+ */
+ def sameMethodAndFieldDescriptors(clazzA: ClassNode, clazzB: ClassNode): Unit = {
+ val (succ, msg) = sameCharacteristics(clazzA, clazzB)(_.erasedCharacteristics)
+ assert(succ, msg)
+ }
+
+ private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): (Boolean, String) = {
+ val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
+ val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
+ val name1 = clazzA.name
+ val name2 = clazzB.name
+
+ if (ms1.length != ms2.length) {
+ (false, s"Different member counts in $name1 and $name2")
+ } else {
+ val msg = new StringBuilder
+ val success = (ms1, ms2).zipped forall { (m1, m2) =>
+ val c1 = f(m1)
+ val c2 = f(m2).replaceAllLiterally(name2, name1)
+ if (c1 == c2)
+ msg append (s"[ok] $m1")
+ else
+ msg append (s"[fail]\n in $name1: $c1\n in $name2: $c2")
+
+ c1 == c2
+ }
+
+ (success, msg.toString)
+ }
+ }
+
+ def correctNumberOfNullChecks(expectedChecks: Int, insnList: InsnList) = {
+    /** Is the given instruction a null check?
+ *
+ * This will detect direct null comparison as in
+ * if (x == null) ...
+ * and not indirect as in
+ * val foo = null
+ * if (x == foo) ...
+ */
+ def isNullCheck(node: asm.tree.AbstractInsnNode): Boolean = {
+ val opcode = node.getOpcode
+ (opcode == asm.Opcodes.IFNULL) || (opcode == asm.Opcodes.IFNONNULL)
+ }
+ val actualChecks = insnList.iterator.asScala.count(isNullCheck)
+ assert(expectedChecks == actualChecks,
+ s"Wrong number of null checks ($actualChecks), expected: $expectedChecks"
+ )
+ }
+}
diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
new file mode 100644
index 000000000..ce71ef3cb
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
@@ -0,0 +1,188 @@
+package dotty.tools.backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+
+class TestBCode extends DottyBytecodeTest {
+ import ASMConverters._
+ @Test def nullChecks = {
+ val source = """
+ |class Foo {
+ | def foo(x: AnyRef): Int = {
+ | val bool = x == null
+ | if (x != null) 1
+ | else 0
+ | }
+ |}
+ """.stripMargin
+
+ checkBCode(source) { dir =>
+ val clsIn = dir.lookupName("Foo.class", directory = false).input
+ val clsNode = loadClassNode(clsIn)
+ val methodNode = getMethod(clsNode, "foo")
+ correctNumberOfNullChecks(2, methodNode.instructions)
+ }
+ }
+
+ /** This test verifies that simple matches are transformed if possible
+ * despite no annotation
+ */
+ @Test def basicTransformNonAnnotated = {
+ val source = """
+ |object Foo {
+ | def foo(i: Int) = i match {
+ | case 2 => println(2)
+ | case 1 => println(1)
+ | }
+ |}""".stripMargin
+
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Foo$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val methodNode = getMethod(moduleNode, "foo")
+ assert(verifySwitch(methodNode))
+ }
+ }
+
+ /** This test verifies that simple matches with `@switch` annotations are
+ * indeed transformed to a switch
+ */
+ @Test def basicTransformAnnotated = {
+ val source = """
+ |object Foo {
+ | import scala.annotation.switch
+ | def foo(i: Int) = (i: @switch) match {
+ | case 2 => println(2)
+ | case 1 => println(1)
+ | }
+ |}""".stripMargin
+
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Foo$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val methodNode = getMethod(moduleNode, "foo")
+ assert(verifySwitch(methodNode))
+ }
+ }
+
+ @Test def failTransform = {
+ val source = """
+ |object Foo {
+ | import scala.annotation.switch
+ | def foo(i: Any) = (i: @switch) match {
+ | case x: String => println("string!")
+ | case x :: xs => println("list!")
+ | }
+ |}""".stripMargin
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Foo$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val methodNode = getMethod(moduleNode, "foo")
+
+ assert(verifySwitch(methodNode, shouldFail = true))
+ }
+ }
+
+ /** Make sure that creating multidim arrays reduces to the "multianewarray"
+ * instruction
+ */
+ @Test def multidimArraysFromOfDim = {
+ val source = """
+ |object Arr {
+ | def arr = Array.ofDim[Int](2, 1)
+ |}""".stripMargin
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Arr$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val method = getMethod(moduleNode, "arr")
+
+ val hadCorrectInstr =
+ instructionsFromMethod(method)
+ .collect {
+ case x @ NewArray(op, _, dims)
+ if op == Opcode.multianewarray && dims == 2 => x
+ }
+ .length > 0
+
+ assert(hadCorrectInstr,
+ "Did not contain \"multianewarray\" instruction in:\n" +
+ instructionsFromMethod(method).mkString("\n"))
+ }
+ }
+
+ @Test def arraysFromOfDim = {
+ val source = """
+ |object Arr {
+ | def arr1 = Array.ofDim[Int](2)
+ | def arr2 = Array.ofDim[Unit](2)
+ | def arr3 = Array.ofDim[String](2)
+ | def arr4 = Array.ofDim[Map[String, String]](2)
+ |}""".stripMargin
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Arr$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val arr1 = getMethod(moduleNode, "arr1")
+ val arr2 = getMethod(moduleNode, "arr2")
+ val arr3 = getMethod(moduleNode, "arr3")
+
+ val arr1CorrectInstr =
+ instructionsFromMethod(arr1)
+ .collect {
+ case x @ IntOp(op, oprnd)
+ if op == Opcode.newarray && oprnd == Opcode.int => x
+ }
+ .length > 0
+
+ assert(arr1CorrectInstr,
+ "arr1 bytecode did not contain correct `newarray` instruction:\n" +
+ instructionsFromMethod(arr1).mkString("\n"))
+
+ val arr2CorrectInstr =
+ instructionsFromMethod(arr2)
+ .collect {
+ case x @ TypeOp(op, oprnd)
+ if op == Opcode.anewarray && oprnd == Opcode.boxedUnit => x
+ }
+ .length > 0
+
+ assert(arr2CorrectInstr,
+ "arr2 bytecode did not contain correct `anewarray` instruction:\n" +
+ instructionsFromMethod(arr2).mkString("\n"))
+
+ val arr3CorrectInstr =
+ instructionsFromMethod(arr3)
+ .collect {
+ case x @ TypeOp(op, oprnd)
+ if op == Opcode.anewarray && oprnd == Opcode.javaString => x
+ }
+ .length > 0
+
+ assert(arr3CorrectInstr,
+ "arr3 bytecode did not contain correct `anewarray` instruction:\n" +
+ instructionsFromMethod(arr3).mkString("\n"))
+ }
+ }
+
+ @Test def arraysFromDimAndFromNewEqual = {
+ val source = """
+ |object Arr {
+ | def arr1 = Array.ofDim[Int](2)
+ | def arr2 = new Array[Int](2)
+ |}""".stripMargin
+
+ checkBCode(source) { dir =>
+ val moduleIn = dir.lookupName("Arr$.class", directory = false)
+ val moduleNode = loadClassNode(moduleIn.input)
+ val arr1 = getMethod(moduleNode, "arr1")
+ val arr2 = getMethod(moduleNode, "arr2")
+
+ // First two instructions of `arr1` fetch the static reference to `Array`
+ val instructions1 = instructionsFromMethod(arr1).drop(2)
+ val instructions2 = instructionsFromMethod(arr2)
+
+ assert(instructions1 == instructions2,
+ "Creating arrays using `Array.ofDim[Int](2)` did not equal bytecode for `new Array[Int](2)`\n" +
+ diffInstructions(instructions1, instructions2))
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala
new file mode 100644
index 000000000..033783303
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala
@@ -0,0 +1,32 @@
+package dotty.tools.backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+
+class InlineBytecodeTests extends DottyBytecodeTest {
+ import ASMConverters._
+ @Test def inlineUnit = {
+ val source = """
+ |class Foo {
+ | inline def foo: Int = 1
+ |
+ | def meth1: Unit = foo
+ | def meth2: Unit = 1
+ |}
+ """.stripMargin
+
+ checkBCode(source) { dir =>
+ val clsIn = dir.lookupName("Foo.class", directory = false).input
+ val clsNode = loadClassNode(clsIn)
+ val meth1 = getMethod(clsNode, "meth1")
+ val meth2 = getMethod(clsNode, "meth2")
+
+ val instructions1 = instructionsFromMethod(meth1)
+ val instructions2 = instructionsFromMethod(meth2)
+
+ assert(instructions1 == instructions2,
+ "`foo` was not properly inlined in `meth1`\n" +
+ diffInstructions(instructions1, instructions2))
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/CompilerTest.scala b/compiler/test/dotty/tools/dotc/CompilerTest.scala
new file mode 100644
index 000000000..fbec9003a
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/CompilerTest.scala
@@ -0,0 +1,623 @@
+package dotty.tools.dotc
+
+import repl.TestREPL
+import core.Contexts._
+import dotty.partest.DPConfig
+import interfaces.Diagnostic.ERROR
+import reporting._
+import diagnostic.MessageContainer
+import util.SourcePosition
+import config.CompilerCommand
+import dotty.tools.io.PlainFile
+import scala.collection.mutable.ListBuffer
+import scala.reflect.io.{ Path, Directory, File => SFile, AbstractFile }
+import scala.tools.partest.nest.{ FileManager, NestUI }
+import scala.annotation.tailrec
+import java.io.{ RandomAccessFile, File => JFile }
+
+
+/** This class has two modes: it can directly run compiler tests, or it can
+ * generate the necessary file structure for partest in the directory
+ * DPConfig.testRoot. Both modes are regular JUnit tests. Which mode is used
+ * depends on the existence of the tests/locks/partest-ppid.lock file which is
+ * created by sbt to trigger partest generation. Sbt will then run partest on
+ * the generated sources.
+ *
+ * Through overriding the partestableXX methods, tests can always be run as
+ * JUnit compiler tests. Run tests cannot be run by JUnit, only by partest.
+ *
+ * A test can either be a file or a directory. Partest will generate a
+ * <test>-<kind>.log file with output of failed tests. Partest reads compiler
+ * flags and the number of errors expected from a neg test from <test>.flags
+ * and <test>.nerr files (also generated). The test is in a parent directory
+ * that determines the kind of test:
+ * - pos: checks that compilation succeeds
+ * - neg: checks that compilation fails with the given number of errors
+ * - run: compilation succeeds, partest: test run generates the output in
+ * <test>.check. Run tests always need to be:
+ * object Test { def main(args: Array[String]): Unit = ... }
+ * Classpath jars can be added to partestDeps in the sbt Build.scala.
+ */
+abstract class CompilerTest {
+
+ /** Override with output dir of test so it can be patched. Partest expects
+ * classes to be in partest-generated/[kind]/[testname]-[kind].obj/ */
+ val defaultOutputDir: String
+
+ /** Override to filter out tests that should not be run by partest. */
+ def partestableFile(prefix: String, fileName: String, extension: String, args: List[String]) = true
+ def partestableDir(prefix: String, dirName: String, args: List[String]) = true
+ def partestableList(testName: String, files: List[String], args: List[String]) = true
+
+ val generatePartestFiles = {
+ /* Because we fork in test, the JVM in which this JUnit test runs has a
+ * different pid from the one that started the partest. But the forked VM
+ * receives the pid of the parent as system property. If the lock file
+ * exists, the parent is requesting partest generation. This mechanism
+ * allows one sbt instance to run test (JUnit only) and another partest.
+ * We cannot run two instances of partest at the same time, because they're
+ * writing to the same directories. The sbt lock file generation prevents
+ * this.
+ */
+ val pid = System.getProperty("partestParentID")
+ if (pid == null)
+ false
+ else
+ new JFile(".." + JFile.separator + "tests" + JFile.separator + "locks" + JFile.separator + s"partest-$pid.lock").exists
+ }
+
+ // Delete generated files from previous run and create new log
+ val logFile = if (!generatePartestFiles) None else Some(CompilerTest.init)
+
+ /** Always run with JUnit. */
+ def compileLine(cmdLine: String)(implicit defaultOptions: List[String]): Unit = {
+ if (generatePartestFiles)
+ log("WARNING: compileLine will always run with JUnit, no partest files generated.")
+ compileArgs(cmdLine.split("\n"), Nil)
+ }
+
+ /** Compiles the given code file.
+ *
+ * @param prefix the parent directory (including separator at the end)
+ * @param fileName the filename, by default without extension
+ * @param args arguments to the compiler
+ * @param extension the file extension, .scala by default
+ * @param defaultOptions more arguments to the compiler
+ */
+ def compileFile(prefix: String, fileName: String, args: List[String] = Nil, extension: String = ".scala", runTest: Boolean = false)
+ (implicit defaultOptions: List[String]): Unit = {
+ val filePath = s"$prefix$fileName$extension"
+ val expErrors = expectedErrors(filePath)
+ if (!generatePartestFiles || !partestableFile(prefix, fileName, extension, args ++ defaultOptions)) {
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$fileName$extension")
+ if (args.contains("-rewrite")) {
+ val file = new PlainFile(filePath)
+ val data = file.toByteArray
+ // compile with rewrite
+ compileArgs((filePath :: args).toArray, expErrors)
+ // compile again, check that file now compiles without -language:Scala2
+ val plainArgs = args.filter(arg => arg != "-rewrite" && arg != "-language:Scala2")
+ compileFile(prefix, fileName, plainArgs, extension, runTest)
+ // restore original test file
+ val out = file.output
+ out.write(data)
+ out.close()
+ }
+ else compileArgs((filePath :: args).toArray, expErrors)
+ } else {
+ val kind = testKind(prefix, runTest)
+ log(s"generating partest files for test file: $prefix$fileName$extension of kind $kind")
+
+ val sourceFile = new JFile(prefix + fileName + extension)
+ if (sourceFile.exists) {
+ val firstDest = SFile(DPConfig.testRoot + JFile.separator + kind + JFile.separator + fileName + extension)
+ val xerrors = expErrors.map(_.totalErrors).sum
+ computeDestAndCopyFiles(sourceFile, firstDest, kind, args ++ defaultOptions, xerrors.toString)
+ } else {
+ throw new java.io.FileNotFoundException(s"Unable to locate test file $prefix$fileName")
+ }
+ }
+ }
+ def runFile(prefix: String, fileName: String, args: List[String] = Nil, extension: String = ".scala")
+ (implicit defaultOptions: List[String]): Unit = {
+ compileFile(prefix, fileName, args, extension, true)
+ }
+
+ def findJarFromRuntime(partialName: String): String = {
+ val urls = ClassLoader.getSystemClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.map(_.getFile.toString)
+ urls.find(_.contains(partialName)).getOrElse {
+ throw new java.io.FileNotFoundException(
+ s"""Unable to locate $partialName on classpath:\n${urls.toList.mkString("\n")}"""
+ )
+ }
+ }
+
+ private def compileWithJavac(
+ fs: Array[String],
+ args: Array[String]
+ )(implicit defaultOptions: List[String]): Boolean = {
+ val scalaLib = findJarFromRuntime("scala-library")
+ val fullArgs = Array(
+ "javac",
+ "-classpath",
+ s".:$scalaLib"
+ ) ++ args ++ defaultOptions.dropWhile("-d" != _).take(2) ++ fs
+
+ Runtime.getRuntime.exec(fullArgs).waitFor() == 0
+ }
+
+ /** Compiles the code files in the given directory together. If args starts
+ * with "-deep", all files in subdirectories (and so on) are included. */
+ def compileDir(prefix: String, dirName: String, args: List[String] = Nil, runTest: Boolean = false)
+ (implicit defaultOptions: List[String]): Unit = {
+ def computeFilePathsAndExpErrors = {
+ val dir = Directory(prefix + dirName)
+ val (files, normArgs) = args match {
+ case "-deep" :: args1 => (dir.deepFiles, args1)
+ case _ => (dir.files, args)
+ }
+ val (filePaths, javaFilePaths) = files
+ .toArray.map(_.toString)
+ .foldLeft((Array.empty[String], Array.empty[String])) { case (acc @ (fp, jfp), name) =>
+ if (name endsWith ".scala") (name +: fp, jfp)
+ else if (name endsWith ".java") (fp, name +: jfp)
+ else (fp, jfp)
+ }
+ val expErrors = expectedErrors(filePaths.toList)
+ (filePaths, javaFilePaths, normArgs, expErrors)
+ }
+ if (!generatePartestFiles || !partestableDir(prefix, dirName, args ++ defaultOptions)) {
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$dirName")
+ val (filePaths, javaFilePaths, normArgs, expErrors) = computeFilePathsAndExpErrors
+ compileWithJavac(javaFilePaths, Array.empty) // javac needs to run first on dotty-library
+ compileArgs(javaFilePaths ++ filePaths ++ normArgs, expErrors)
+ } else {
+ val (sourceDir, flags, deep) = args match {
+ case "-deep" :: args1 => (flattenDir(prefix, dirName), args1 ++ defaultOptions, "deep")
+ case _ => (new JFile(prefix + dirName), args ++ defaultOptions, "shallow")
+ }
+ val kind = testKind(prefix, runTest)
+ log(s"generating partest files for test directory ($deep): $prefix$dirName of kind $kind")
+
+ if (sourceDir.exists) {
+ val firstDest = Directory(DPConfig.testRoot + JFile.separator + kind + JFile.separator + dirName)
+ val xerrors = if (isNegTest(prefix)) {
+ val (_, _, _, expErrors) = computeFilePathsAndExpErrors
+ expErrors.map(_.totalErrors).sum
+ } else 0
+ computeDestAndCopyFiles(sourceDir, firstDest, kind, flags, xerrors.toString)
+ if (deep == "deep")
+ Directory(sourceDir).deleteRecursively
+ } else {
+ throw new java.io.FileNotFoundException(s"Unable to locate test dir $prefix$dirName")
+ }
+ }
+ }
+ def runDir(prefix: String, dirName: String, args: List[String] = Nil)
+ (implicit defaultOptions: List[String]): Unit =
+ compileDir(prefix, dirName, args, true)
+
+ /** Compiles each source in the directory path separately by calling
+ * compileFile for files and compileDir for subdirectories. */
+ def compileFiles(path: String, args: List[String] = Nil, verbose: Boolean = true, runTest: Boolean = false,
+ compileSubDirs: Boolean = true)(implicit defaultOptions: List[String]): Unit = {
+ val dir = Directory(path)
+ val fileNames = dir.files.toArray.map(_.jfile.getName).filter(name => (name endsWith ".scala") || (name endsWith ".java"))
+ for (name <- fileNames) {
+ if (verbose) log(s"testing $path$name")
+ compileFile(path, name, args, "", runTest)
+ }
+ if (compileSubDirs)
+ for (subdir <- dir.dirs) {
+ if (verbose) log(s"testing $subdir")
+ compileDir(path, subdir.jfile.getName, args, runTest)
+ }
+ }
+ def runFiles(path: String, args: List[String] = Nil, verbose: Boolean = true)
+ (implicit defaultOptions: List[String]): Unit =
+ compileFiles(path, args, verbose, true)
+
+ /** Compiles the given list of code files. */
+ def compileList(testName: String, files: List[String], args: List[String] = Nil)
+ (implicit defaultOptions: List[String]): Unit = {
+ if (!generatePartestFiles || !partestableList(testName, files, args ++ defaultOptions)) {
+ val expErrors = expectedErrors(files)
+ compileArgs((files ++ args).toArray, expErrors)
+ } else {
+ val destDir = Directory(DPConfig.testRoot + JFile.separator + testName)
+ files.foreach({ file =>
+ val jfile = new JFile(file)
+ recCopyFiles(jfile, destDir / jfile.getName)
+ })
+ compileDir(DPConfig.testRoot + JFile.separator, testName, args)
+ destDir.deleteRecursively
+ }
+ }
+
+ def replFile(prefix: String, fileName: String): Unit = {
+ val path = s"$prefix$fileName"
+ val f = new PlainFile(path)
+ val repl = new TestREPL(new String(f.toCharArray))
+ repl.process(Array[String]())
+ repl.check()
+ }
+
+ def replFiles(path: String): Unit = {
+ val dir = Directory(path)
+ val fileNames = dir.files.toArray.map(_.jfile.getName).filter(_ endsWith ".check")
+ for (name <- fileNames) {
+ log(s"testing $path$name")
+ replFile(path, name)
+ }
+ }
+
+ // ========== HELPERS =============
+
+ private def expectedErrors(filePaths: List[String]): List[ErrorsInFile] = if (filePaths.exists(isNegTest(_))) filePaths.map(getErrors(_)) else Nil
+
+ private def expectedErrors(filePath: String): List[ErrorsInFile] = expectedErrors(List(filePath))
+
+ private def isNegTest(testPath: String) = testPath.contains("/neg/")
+
+ private def compileArgs(args: Array[String], expectedErrorsPerFile: List[ErrorsInFile])
+ (implicit defaultOptions: List[String]): Unit = {
+ val allArgs = args ++ defaultOptions
+ //println(s"""all args: ${allArgs.mkString("\n")}""")
+ val processor = if (allArgs.exists(_.startsWith("#"))) Bench else Main
+ val storeReporter = new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
+ private val consoleReporter = new ConsoleReporter()
+ private val innerStoreReporter = new StoreReporter(consoleReporter)
+ def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+ if (m.level == ERROR) {
+ innerStoreReporter.flush()
+ consoleReporter.doReport(m)
+ }
+ else if (errorCount > 0) consoleReporter.doReport(m)
+ else innerStoreReporter.doReport(m)
+ }
+ }
+ val reporter = processor.process(allArgs, storeReporter)
+
+ val nerrors = reporter.errorCount
+ val xerrors = (expectedErrorsPerFile map {_.totalErrors}).sum
+ def expectedErrorFiles =
+ expectedErrorsPerFile.collect{
+ case er if er.totalErrors > 0 => er.fileName
+ }
+ assert(nerrors == xerrors,
+ s"""Wrong # of errors. Expected: $xerrors, found: $nerrors
+ |Files with expected errors: $expectedErrorFiles
+ |errors:
+ """.stripMargin)
+ // NEG TEST
+ if (xerrors > 0) {
+ val errorLines = reporter.allErrors.map(_.pos)
+ // reporter didn't record as many errors as its errorCount says
+ assert(errorLines.length == nerrors, s"Not enough errors recorded.")
+
+ // Some compiler errors have an associated source position. Each error
+ // needs to correspond to a "// error" marker on that line in the source
+ // file and vice versa.
+ // Other compiler errors don't have an associated source position. Their
+ // number should correspond to the total count of "// nopos-error"
+ // markers in all files
+ val (errorsByFile, errorsWithoutPos) = errorLines.groupBy(_.source.file).toList.partition(_._1.toString != "<no source>")
+
+ // check errors with source position
+ val foundErrorsPerFile = errorsByFile.map({ case (fileName, errorList) =>
+ val posErrorLinesToNr = errorList.groupBy(_.line).toList.map({ case (line, list) => (line, list.length) }).sortBy(_._1)
+ ErrorsInFile(fileName.toString, 0, posErrorLinesToNr)
+ })
+ val expectedErrorsPerFileZeroed = expectedErrorsPerFile.map({
+ case ErrorsInFile(fileName, _, posErrorLinesToNr) =>
+ ErrorsInFile(fileName.toString, 0, posErrorLinesToNr)
+ })
+ checkErrorsWithPosition(expectedErrorsPerFileZeroed, foundErrorsPerFile)
+
+ // check errors without source position
+ val expectedNoPos = expectedErrorsPerFile.map(_.noposErrorNr).sum
+ val foundNoPos = errorsWithoutPos.map(_._2.length).sum
+ assert(foundNoPos == expectedNoPos,
+ s"Wrong # of errors without source position. Expected (all files): $expectedNoPos, found (compiler): $foundNoPos")
+ }
+ }
+
+ // ========== NEG TEST HELPERS =============
+
+ /** Captures the number of nopos-errors in the given file and the number of
+ * errors with a position, represented as a tuple of source line and number
+ * of errors on that line. */
+ case class ErrorsInFile(fileName: String, noposErrorNr: Int, posErrorLinesToNr: List[(Int, Int)]) {
+ def totalErrors = noposErrorNr + posErrorLinesToNr.map(_._2).sum
+ }
+
+ /** Extracts the errors expected for the given neg test file. */
+ def getErrors(fileName: String): ErrorsInFile = {
+ val content = SFile(fileName).slurp
+ val (line, rest) = content.span(_ != '\n')
+
+ @tailrec
+ def checkLine(line: String, rest: String, index: Int, noposAcc: Int, posAcc: List[(Int, Int)]): ErrorsInFile = {
+ val posErrors = "// ?error".r.findAllIn(line).length
+ val newPosAcc = if (posErrors > 0) (index, posErrors) :: posAcc else posAcc
+ val newNoPosAcc = noposAcc + "// ?nopos-error".r.findAllIn(line).length
+ val (newLine, newRest) = rest.span(_ != '\n')
+ if (newRest.isEmpty)
+ ErrorsInFile(fileName.toString, newNoPosAcc, newPosAcc.reverse)
+ else
+ checkLine(newLine, newRest.tail, index + 1, newNoPosAcc, newPosAcc) // skip leading '\n'
+ }
+
+ checkLine(line, rest.tail, 0, 0, Nil) // skip leading '\n'
+ }
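+
+ // Sketch of how the markers are counted, for a hypothetical neg-test file:
+ // if the first source line ends in "// error", the second line contains
+ // "// error" twice, and the third line is "// nopos-error", then getErrors
+ // returns ErrorsInFile(fileName, 1, List((0, 1), (1, 2))), i.e. one
+ // positioned error expected on line 0, two on line 1, and one error
+ // without a source position.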
+
+ /** Asserts that the expected and found number of errors correspond, and
+ * otherwise throws an error with the filename, plus optionally a line
+ * number if available. */
+ def errorMsg(fileName: String, lineNumber: Option[Int], exp: Int, found: Int) = {
+ val i = lineNumber.map({ i => ":" + (i + 1) }).getOrElse("")
+ assert(found == exp, s"Wrong # of errors for $fileName$i. Expected (file): $exp, found (compiler): $found")
+ }
+
+ /** Compares the expected with the found errors and creates a nice error
+ * message if they don't agree. */
+ def checkErrorsWithPosition(expected: List[ErrorsInFile], found: List[ErrorsInFile]): Unit = {
+ // create nice error messages
+ expected.diff(found) match {
+ case Nil => // nothing missing
+ case ErrorsInFile(fileName, _, expectedLines) :: xs =>
+ found.find(_.fileName == fileName) match {
+ case None =>
+ // expected some errors, but none found for this file
+ errorMsg(fileName, None, expectedLines.map(_._2).sum, 0)
+ case Some(ErrorsInFile(_,_,foundLines)) =>
+ // found wrong number/location of markers for this file
+ compareLines(fileName, expectedLines, foundLines)
+ }
+ }
+
+ found.diff(expected) match {
+ case Nil => // nothing missing
+ case ErrorsInFile(fileName, _, foundLines) :: xs =>
+ expected.find(_.fileName == fileName) match {
+ case None =>
+ // found some errors, but none expected for this file
+ errorMsg(fileName, None, 0, foundLines.map(_._2).sum)
+ case Some(ErrorsInFile(_,_,expectedLines)) =>
+ // found wrong number/location of markers for this file
+ compareLines(fileName, expectedLines, foundLines)
+ }
+ }
+ }
+
+ /** Gives an error message for one line where the expected number of errors and
+ * the number of compiler errors differ. */
+ def compareLines(fileName: String, expectedLines: List[(Int, Int)], foundLines: List[(Int, Int)]) = {
+ expectedLines.foreach({ case (line, expNr) =>
+ foundLines.find(_._1 == line) match {
+ case Some((_, `expNr`)) => // this line is ok
+ case Some((_, foundNr)) => errorMsg(fileName, Some(line), expNr, foundNr)
+ case None => errorMsg(fileName, Some(line), expNr, 0)
+ }
+ })
+ foundLines.foreach({ case (line, foundNr) =>
+ expectedLines.find(_._1 == line) match {
+ case Some((_, `foundNr`)) => // this line is ok
+ case Some((_, expNr)) => errorMsg(fileName, Some(line), expNr, foundNr)
+ case None => errorMsg(fileName, Some(line), 0, foundNr)
+ }
+ })
+ }
+
+ // ========== PARTEST HELPERS =============
+
+ // In particular, don't copy flags from scalac tests
+ private val extensionsToCopy = scala.collection.immutable.HashSet("scala", "java")
+
+ /** Determines what kind of test to run. */
+ private def testKind(prefixDir: String, runTest: Boolean) = {
+ if (runTest) "run"
+ else if (isNegTest(prefixDir)) "neg"
+ else if (prefixDir.endsWith("run" + JFile.separator)) {
+ log("WARNING: test is being run as pos test despite being in a run directory. " +
+ "Use runFile/runDir instead of compileFile/compileDir to do a run test")
+ "pos"
+ } else "pos"
+ }
+
+ /** The three possibilities: no generated sources exist yet, the same sources
+ * exist already, different sources exist. */
+ object Difference extends Enumeration {
+ type Difference = Value
+ val NotExists, ExistsSame, ExistsDifferent = Value
+ }
+ import Difference._
+
+ /** The same source might be used for several partest test cases (e.g. with
+ * different flags). Detects existing versions and computes the path to be
+ * used for this version, e.g. testname_v1 for the first alternative. */
+ private def computeDestAndCopyFiles(source: JFile, dest: Path, kind: String, oldFlags: List[String], nerr: String,
+ nr: Int = 0, oldOutput: String = defaultOutputDir): Unit = {
+
+ val partestOutput = dest.jfile.getParentFile + JFile.separator + dest.stripExtension + "-" + kind + ".obj"
+
+ val altOutput =
+ source.getParentFile.getAbsolutePath.map(x => if (x == JFile.separatorChar) '_' else x)
+
+ val (beforeCp, remaining) = oldFlags
+ .map(f => if (f == oldOutput) partestOutput else f)
+ .span(_ != "-classpath")
+ val flags = beforeCp ++ List("-classpath", (partestOutput :: remaining.drop(1)).mkString(":"))
+
+ val difference = getExisting(dest).isDifferent(source, flags, nerr)
+ difference match {
+ case NotExists => copyFiles(source, dest, partestOutput, flags, nerr, kind)
+ case ExistsSame => // nothing else to do
+ case ExistsDifferent =>
+ val nextDest = dest.parent / (dest match {
+ case d: Directory =>
+ val newVersion = replaceVersion(d.name, nr).getOrElse(altOutput)
+ Directory(newVersion)
+ case f =>
+ val newVersion = replaceVersion(f.stripExtension, nr).getOrElse(altOutput)
+ SFile(newVersion).addExtension(f.extension)
+ })
+ computeDestAndCopyFiles(source, nextDest, kind, flags, nerr, nr + 1, partestOutput)
+ }
+ }
+
+ /** Copies the test sources. Creates flags, nerr, check and output files. */
+ private def copyFiles(sourceFile: Path, dest: Path, partestOutput: String, flags: List[String], nerr: String, kind: String) = {
+ recCopyFiles(sourceFile, dest)
+
+ new JFile(partestOutput).mkdirs
+
+ if (flags.nonEmpty)
+ dest.changeExtension("flags").createFile(true).writeAll(flags.mkString(" "))
+ if (nerr != "0")
+ dest.changeExtension("nerr").createFile(true).writeAll(nerr)
+ sourceFile.changeExtension("check").ifFile({ check =>
+ if (kind == "run") {
+ FileManager.copyFile(check.jfile, dest.changeExtension("check").jfile)
+ dest.changeExtension("checksrc").createFile(true).writeAll("check file generated from source:\n" + check.toString)
+ } else {
+ log(s"WARNING: ignoring $check for test kind $kind")
+ }
+ })
+
+ }
+
+ /** Recursively copy over source files and directories, excluding extensions
+ * that aren't in extensionsToCopy. */
+ private def recCopyFiles(sourceFile: Path, dest: Path): Unit = {
+
+ def copyfile(file: SFile, bytewise: Boolean): Unit = {
+ if (bytewise) {
+ val in = file.inputStream()
+ val out = SFile(dest).outputStream()
+ val buffer = new Array[Byte](1024)
+ def loop(available: Int): Unit = {
+ if (available < 0) ()
+ else {
+ out.write(buffer, 0, available)
+ val read = in.read(buffer)
+ loop(read)
+ }
+ }
+ loop(0)
+ in.close()
+ out.close()
+ } else {
+ try {
+ SFile(dest)(scala.io.Codec.UTF8).writeAll((s"/* !!!!! WARNING: DO NOT MODIFY. Original is at: $file !!!!! */").replace("\\", "/"), file.slurp("UTF-8"))
+ } catch {
+ case unmappable: java.nio.charset.MalformedInputException =>
+ copyfile(file, true) //there are bytes that can't be mapped with UTF-8. Bail and just do a straight byte-wise copy without the warning header.
+ }
+ }
+ }
+
+ processFileDir(sourceFile, { sf =>
+ if (extensionsToCopy.contains(sf.extension)) {
+ dest.parent.jfile.mkdirs
+ copyfile(sf, false)
+ } else {
+ log(s"WARNING: ignoring $sf")
+ }
+ }, { sdir =>
+ dest.jfile.mkdirs
+ sdir.list.foreach(path => recCopyFiles(path, dest / path.name))
+ }, Some("DPCompilerTest.recCopyFiles: sourceFile not found: " + sourceFile))
+ }
+
+ /** Reads the existing files for the given test source if any. */
+ private def getExisting(dest: Path): ExistingFiles = {
+ val content: Option[Option[String]] = processFileDir(dest, f => try Some(f.slurp("UTF8")) catch {case io: java.io.IOException => Some(io.toString())}, d => Some(""))
+ if (content.isDefined && content.get.isDefined) {
+ val flags = (dest changeExtension "flags").toFile.safeSlurp
+ val nerr = (dest changeExtension "nerr").toFile.safeSlurp
+ ExistingFiles(content.get, flags, nerr)
+ } else ExistingFiles()
+ }
+
+ /** Encapsulates existing generated test files. */
+ case class ExistingFiles(genSrc: Option[String] = None, flags: Option[String] = None, nerr: Option[String] = None) {
+ def isDifferent(sourceFile: JFile, otherFlags: List[String], otherNerr: String): Difference = {
+ if (!genSrc.isDefined) {
+ NotExists
+ } else {
+ val source = processFileDir(sourceFile, { f => try Some(f.slurp("UTF8")) catch {case _: java.io.IOException => None} }, { d => Some("") },
+ Some("DPCompilerTest sourceFile doesn't exist: " + sourceFile)).get
+ if (source == genSrc) {
+ nerr match {
+ case Some(n) if (n != otherNerr) => ExistsDifferent
+ case None if (otherNerr != "0") => ExistsDifferent
+ case _ if (flags.map(_ == otherFlags.mkString(" ")).getOrElse(otherFlags.isEmpty)) => ExistsSame
+ case _ => ExistsDifferent
+ }
+ } else ExistsDifferent
+ }
+ }
+ }
+
+ import scala.util.matching.Regex
+ val nrFinder = """(.*_v)(\d+)""".r
+ /** Changes the version number suffix in the name (without extension). */
+ private def replaceVersion(name: String, nr: Int): Option[String] = {
+ val nrString = nr.toString
+ name match {
+ case nrFinder(prefix, `nrString`) => Some(prefix + (nr + 1))
+ case _ if nr != 0 => None
+ case _ => Some(name + "_v1")
+ }
+ }
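+
+ // For example: replaceVersion("t", 0) == Some("t_v1"),
+ // replaceVersion("t_v1", 1) == Some("t_v2"), and
+ // replaceVersion("t_v1", 2) == None (the existing suffix does not match nr).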
+
+ /** Returns None if the given path doesn't exist, otherwise returns Some of
+ * applying either processFile or processDir, depending on what the path
+ * refers to in the file system. If failMsgOnNone is defined, this function
+ * asserts that the file exists using the provided message. */
+ private def processFileDir[T](input: Path, processFile: SFile => T, processDir: Directory => T, failMsgOnNone: Option[String] = None): Option[T] = {
+ val res = input.ifFile(f => processFile(f)).orElse(input.ifDirectory(d => processDir(d)))
+ (failMsgOnNone, res) match {
+ case (Some(msg), None) => assert(false, msg); None
+ case _ => res
+ }
+ }
+
+ /** Creates a temporary directory and copies all (deep) files over, thus
+ * flattening the directory structure. */
+ private def flattenDir(prefix: String, dirName: String): JFile = {
+ val destDir = Directory(DPConfig.testRoot + JFile.separator + "_temp")
+ Directory(prefix + dirName).deepFiles.foreach(source => recCopyFiles(source, destDir / source.name))
+ destDir.jfile
+ }
+
+ /** Write either to console (JUnit) or log file (partest). */
+ private def log(msg: String) = logFile.map(_.appendAll(msg + "\n")).getOrElse(println(msg))
+}
+
+object CompilerTest extends App {
+
+ /** Deletes generated partest sources from a previous run, recreates
+ * directory and returns the freshly created log file. */
+ lazy val init: SFile = {
+ scala.reflect.io.Directory(DPConfig.testRoot).deleteRecursively
+ new JFile(DPConfig.testRoot).mkdirs
+ val log = DPConfig.genLog.createFile(true)
+ println(s"CompilerTest is generating tests for partest, log: $log")
+ log
+ }
+
+// val dotcDir = "/Users/odersky/workspace/dotty/src/dotty/"
+
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "CompilationUnit")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Compiler")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Driver")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Main")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
+
+// new CompilerTest().compileDir(dotcDir + "tools/dotc")
+ // new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
+}
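+
+// A rough sketch of how a concrete JUnit suite would use this class; the
+// paths, options and test names below are illustrative, not the project's
+// actual configuration:
+//
+// class MyTests extends CompilerTest {
+// val defaultOutputDir = "../out/"
+// implicit val defaultOptions: List[String] = List("-d", defaultOutputDir)
+//
+// @org.junit.Test def pos = compileFile("../tests/pos/", "HelloWorld")
+// @org.junit.Test def neg = compileFile("../tests/neg/", "someNegTest") // error count read from "// error" markers
+// @org.junit.Test def run = runFile("../tests/run/", "someRunTest") // fully exercised only under partest
+// }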
diff --git a/compiler/test/dotty/tools/dotc/EntryPointsTest.scala b/compiler/test/dotty/tools/dotc/EntryPointsTest.scala
new file mode 100644
index 000000000..4a87bbcb5
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/EntryPointsTest.scala
@@ -0,0 +1,72 @@
+package dotty.tools
+package dotc
+
+import org.junit.Test
+import org.junit.Assert._
+import dotty.tools.dotc.interfaces.{CompilerCallback, SourceFile}
+import reporting._
+import reporting.diagnostic.MessageContainer
+import core.Contexts._
+import java.io.File
+import scala.collection.mutable.ListBuffer
+
+/** Test the compiler entry points that depend on dotty
+ *
+ * This file also serves as an example of using [[dotty.tools.dotc.Driver#process]].
+ *
+ * @see [[InterfaceEntryPointTest]]
+ */
+class EntryPointsTest {
+ private val sources =
+ List("../tests/pos/HelloWorld.scala").map(p => new java.io.File(p).getPath())
+ private val dottyInterfaces =
+ new java.io.File("../interfaces/dotty-interfaces-0.1-SNAPSHOT.jar").getPath
+ private val dottyLibrary =
+ new java.io.File("../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar").getPath
+ private val args =
+ sources ++
+ List("-d", "../out/") ++
+ List("-classpath", dottyInterfaces + ":" + dottyLibrary)
+
+ @Test def runCompiler = {
+ val reporter = new CustomReporter
+ val callback = new CustomCompilerCallback
+
+ Main.process(args.toArray, reporter, callback)
+
+ assertEquals("Number of errors", false, reporter.hasErrors)
+ assertEquals("Number of warnings", false, reporter.hasWarnings)
+ assertEquals("Compiled sources", sources, callback.paths)
+ }
+
+ @Test def runCompilerWithContext = {
+ val reporter = new CustomReporter
+ val callback = new CustomCompilerCallback
+ val context = (new ContextBase).initialCtx.fresh
+ .setReporter(reporter)
+ .setCompilerCallback(callback)
+
+ Main.process(args.toArray, context)
+
+ assertEquals("Number of errors", false, reporter.hasErrors)
+ assertEquals("Number of warnings", false, reporter.hasWarnings)
+ assertEquals("Compiled sources", sources, callback.paths)
+ }
+
+ private class CustomReporter extends Reporter
+ with UniqueMessagePositions
+ with HideNonSensicalMessages {
+ def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+ }
+ }
+
+ private class CustomCompilerCallback extends CompilerCallback {
+ private val pathsBuffer = new ListBuffer[String]
+ def paths = pathsBuffer.toList
+
+ override def onSourceCompiled(source: SourceFile): Unit = {
+ if (source.jfile.isPresent)
+ pathsBuffer += source.jfile.get.getPath
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala b/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala
new file mode 100644
index 000000000..b36ea2955
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala
@@ -0,0 +1,72 @@
+package dotty.tools.dotc
+
+import org.junit.Test
+import org.junit.Assert._
+import interfaces._
+import scala.collection.mutable.ListBuffer
+
+/** Test that demonstrates how to use dotty-interfaces
+ *
+ * This test requires:
+ * - dotty-interfaces to be present at compile-time
+ * - dotty-interfaces and dotty to be present at run-time
+ *
+ * Since the ABI of dotty-interfaces is stable, this means that users can write
+ * code that works with multiple versions of dotty without recompilation.
+ *
+ * @see [[EntryPointsTest]]
+ */
+class InterfaceEntryPointTest {
+ @Test def runCompilerFromInterface = {
+ val sources =
+ List("../tests/pos/HelloWorld.scala").map(p => new java.io.File(p).getPath())
+ val dottyInterfaces =
+ new java.io.File("../interfaces/dotty-interfaces-0.1-SNAPSHOT.jar").getPath
+ val dottyLibrary =
+ new java.io.File("../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar").getPath
+
+ val args =
+ sources ++
+ List("-d", "../out/") ++
+ List("-classpath", dottyInterfaces + ":" + dottyLibrary)
+
+ val mainClass = Class.forName("dotty.tools.dotc.Main")
+ val process = mainClass.getMethod("process",
+ classOf[Array[String]], classOf[SimpleReporter], classOf[CompilerCallback])
+
+ val reporter = new CustomSimpleReporter
+ val callback = new CustomCompilerCallback
+
+ // Run the compiler by calling dotty.tools.dotc.Main.process
+ process.invoke(null, args.toArray, reporter, callback)
+
+ assertEquals("Number of errors", 0, reporter.errorCount)
+ assertEquals("Number of warnings", 0, reporter.warningCount)
+ assertEquals("Compiled sources", sources, callback.paths)
+ }
+
+ private class CustomSimpleReporter extends SimpleReporter {
+ var errorCount = 0
+ var warningCount = 0
+
+ def report(diag: Diagnostic): Unit = {
+ if (diag.level == Diagnostic.ERROR)
+ errorCount += 1
+ if (diag.level == Diagnostic.WARNING)
+ warningCount += 1
+
+ println(diag.message)
+ }
+ }
+
+ private class CustomCompilerCallback extends CompilerCallback {
+ private val pathsBuffer = new ListBuffer[String]
+ def paths = pathsBuffer.toList
+
+ override def onSourceCompiled(source: SourceFile): Unit = {
+ if (source.jfile.isPresent)
+ pathsBuffer += source.jfile.get.getPath
+ }
+ }
+}
+
diff --git a/compiler/test/dotty/tools/dotc/ast/TreeInfoTest.scala b/compiler/test/dotty/tools/dotc/ast/TreeInfoTest.scala
new file mode 100644
index 000000000..a55973c43
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/ast/TreeInfoTest.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package ast
+
+import org.junit.Test
+import core.Names._
+import core.Types._
+import core.Symbols._
+import org.junit.Assert._
+
+class TreeInfoTest extends DottyTest {
+
+ import tpd._
+
+ @Test
+ def testDefPath = checkCompile("frontend", "class A { def bar = { val x = { val z = 0; 0} }} ") {
+ (tree, context) =>
+ implicit val ctx = context
+ val xTree = tree.find(tree => tree.symbol.name == termName("x")).get
+ val path = defPath(xTree.symbol, tree)
+ assertEquals(List(
+ ("PackageDef", EMPTY_PACKAGE),
+ ("TypeDef", typeName("A")),
+ ("Template", termName("<local A>")),
+ ("DefDef", termName("bar")),
+ ("Block", NoSymbol.name),
+ ("ValDef", termName("x"))
+ ), path.map(x => (x.productPrefix, x.symbol.name)))
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala
new file mode 100644
index 000000000..1f79c2cf6
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala
@@ -0,0 +1,96 @@
+package dotty.tools
+package dotc
+package parsing
+
+import Tokens._, Parsers._
+import scala.reflect.io._
+import util._
+import core._
+import ast.Trees._
+import ast.desugar
+import ast.desugar._
+import core.Mode
+import Contexts.Context
+
+import scala.collection.mutable.ListBuffer
+
+class DeSugarTest extends ParserTest {
+
+ import dotty.tools.dotc.ast.untpd._
+
+ import Mode._
+
+ val Expr = Mode(0)
+
+ object DeSugar extends UntypedTreeMap {
+ var curMode: Mode = Expr
+ def withMode[T](mode: Mode)(op: => T) = {
+ val saved = curMode
+ curMode = mode
+ try op
+ finally curMode = saved
+ }
+
+ def transform(tree: Tree, mode: Mode)(implicit ctx: Context): Tree = withMode(mode) { transform(tree) }
+ def transform(trees: List[Tree], mode: Mode)(implicit ctx: Context): List[Tree] = withMode(mode) { transform(trees) }
+
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = {
+ val tree1 = desugar(tree)(ctx.withModeBits(curMode))
+ tree1 match {
+ case TypedSplice(t) =>
+ tree1
+ case PostfixOp(od, op) =>
+ PostfixOp(transform(od), op)
+ case Select(qual, name) =>
+ cpy.Select(tree1)(transform(qual, Expr), name)
+ case Apply(fn, args) =>
+ cpy.Apply(tree1)(transform(fn, Expr), transform(args))
+ case TypeApply(fn, args) =>
+ cpy.TypeApply(tree1)(transform(fn, Expr), transform(args, Type))
+ case New(tpt) =>
+ cpy.New(tree1)(transform(tpt, Type))
+ case Typed(expr, tpt) =>
+ cpy.Typed(tree1)(transform(expr), transform(tpt, Type))
+ case CaseDef(pat, guard, body) =>
+ cpy.CaseDef(tree1)(transform(pat, Pattern), transform(guard), transform(body))
+ case SeqLiteral(elems, elemtpt) =>
+ cpy.SeqLiteral(tree1)(transform(elems), transform(elemtpt))
+ case UnApply(fun, implicits, patterns) =>
+ cpy.UnApply(tree1)(transform(fun, Expr), transform(implicits), transform(patterns))
+ case tree1 @ ValDef(name, tpt, _) =>
+ cpy.ValDef(tree1)(name, transform(tpt, Type), transform(tree1.rhs))
+ case tree1 @ DefDef(name, tparams, vparamss, tpt, _) =>
+ cpy.DefDef(tree1)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt, Type), transform(tree1.rhs))
+ case tree1 @ TypeDef(name, rhs) =>
+ cpy.TypeDef(tree1)(name, transform(rhs, Type))
+ case impl @ Template(constr, parents, self, _) =>
+ cpy.Template(tree1)(transformSub(constr), transform(parents), transformSub(self), transform(impl.body, Expr))
+ case Thicket(trees) =>
+ Thicket(flatten(trees mapConserve super.transform))
+ case tree1 =>
+ super.transform(tree1)
+ }
+ }
+ }
+
+ def firstClass(stats: List[Tree]): String = stats match {
+ case Nil => "<empty>"
+ case TypeDef(name, _) :: _ => name.toString
+ case ModuleDef(name, _) :: _ => name.toString
+ case (pdef: PackageDef) :: _ => firstClass(pdef)
+ case stat :: stats => firstClass(stats)
+ }
+
+ def firstClass(tree: Tree): String = tree match {
+ case PackageDef(pid, stats) =>
+ pid.show + "." + firstClass(stats)
+ case _ => "??? " + tree.getClass
+ }
+
+ def desugarTree(tree: Tree): Tree = {
+ //println("***** desugaring " + firstClass(tree))
+ DeSugar.transform(tree)
+ }
+
+ def desugarAll() = parsedTrees foreach (desugarTree(_).show)
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/DocstringTest.scala b/compiler/test/dotty/tools/dotc/parsing/DocstringTest.scala
new file mode 100644
index 000000000..30e885f70
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/DocstringTest.scala
@@ -0,0 +1,34 @@
+package dotty.tools
+package dotc
+package parsing
+
+import ast.Trees._
+import core.Contexts.Context
+
+trait DocstringTest extends DottyTest {
+ ctx = ctx.fresh.setSetting(ctx.settings.YkeepComments, true)
+
+ def checkDocString(actual: Option[String], expected: String): Unit = actual match {
+ case Some(str) =>
+ assert(str == expected, s"""Docstring: "$str" didn't match expected "$expected"""")
+ case None =>
+ assert(false, s"""No docstring found, expected: "$expected"""")
+ }
+
+ def expectNoDocString(doc: Option[String]): Unit =
+ doc.fold(()) { d => assert(false, s"""Expected not to find a docstring, but found: "$d"""") }
+
+ def defaultAssertion: PartialFunction[Any, Unit] = {
+ case t: Tree[Untyped] =>
+ assert(false, s"Couldn't match resulting AST to expected AST in: ${t.show}")
+ case x =>
+ assert(false, s"Couldn't match resulting AST to expected AST in: $x")
+ }
+
+ def checkFrontend(source: String)(docAssert: PartialFunction[Tree[Untyped], Unit]) = {
+ checkCompile("frontend", source) { (_, ctx) =>
+ implicit val c = ctx
+ (docAssert orElse defaultAssertion)(ctx.compilationUnit.untpdTree)
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/DocstringTests.scala b/compiler/test/dotty/tools/dotc/parsing/DocstringTests.scala
new file mode 100644
index 000000000..930ec117a
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/DocstringTests.scala
@@ -0,0 +1,491 @@
+package dotty.tools
+package dotc
+package parsing
+
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.ast.Trees._
+
+import org.junit.Assert._
+import org.junit.Test
+
+class DocstringTests extends DocstringTest {
+
+ @Test def noComment = {
+ import dotty.tools.dotc.ast.untpd._
+ val source = "class Class"
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(c: TypeDef)) =>
+ assert(c.rawComment.map(_.raw) == None, "Should not have a comment, mainly used for exhaustive tests")
+ }
+ }
+
+ @Test def singleClassInPackage = {
+ val source =
+ """
+ |package a
+ |
+ |/** Hello world! */
+ |class Class(val x: String)
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
+ checkDocString(t.rawComment.map(_.raw), "/** Hello world! */")
+ }
+ }
+
+ @Test def multipleOpenedOnSingleClassInPackage = {
+ val source =
+ """
+ |package a
+ |
+ |/** Hello /* multiple open */ world! */
+ |class Class(val x: String)
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
+ checkDocString(t.rawComment.map(_.raw), "/** Hello /* multiple open */ world! */")
+ }
+ }
+ @Test def multipleClassesInPackage = {
+ val source =
+ """
+ |package a
+ |
+ |/** Class1 docstring */
+ |class Class1(val x: String)
+ |
+ |/** Class2 docstring */
+ |class Class2(val x: String)
+ """.stripMargin
+
+ checkCompile("frontend", source) { (_, ctx) =>
+ ctx.compilationUnit.untpdTree match {
+ case PackageDef(_, Seq(c1 @ TypeDef(_,_), c2 @ TypeDef(_,_))) => {
+ checkDocString(c1.rawComment.map(_.raw), "/** Class1 docstring */")
+ checkDocString(c2.rawComment.map(_.raw), "/** Class2 docstring */")
+ }
+ }
+ }
+ }
+
+ @Test def singleCaseClassWithoutPackage = {
+ val source =
+ """
+ |/** Class without package */
+ |case class Class(val x: Int)
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.raw), "/** Class without package */")
+ }
+ }
+
+ @Test def singleTraitWithoutPackage = {
+ val source = "/** Trait docstring */\ntrait Trait"
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.raw), "/** Trait docstring */")
+ }
+ }
+
+ @Test def multipleTraitsWithoutPackage = {
+ val source =
+ """
+ |/** Trait1 docstring */
+ |trait Trait1
+ |
+ |/** Trait2 docstring */
+ |trait Trait2
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t1 @ TypeDef(_,_), t2 @ TypeDef(_,_))) => {
+ checkDocString(t1.rawComment.map(_.raw), "/** Trait1 docstring */")
+ checkDocString(t2.rawComment.map(_.raw), "/** Trait2 docstring */")
+ }
+ }
+ }
+
+ @Test def multipleMixedEntitiesWithPackage = {
+ val source =
+ """
+ |/** Trait1 docstring */
+ |trait Trait1
+ |
+ |/** Class2 docstring */
+ |class Class2(val x: Int)
+ |
+ |/** CaseClass3 docstring */
+ |case class CaseClass3()
+ |
+ |case class NoComment()
+ |
+ |/** AbstractClass4 docstring */
+ |abstract class AbstractClass4(val x: Int)
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(t1 @ TypeDef(_,_), c2 @ TypeDef(_,_), cc3 @ TypeDef(_,_), _, ac4 @ TypeDef(_,_))) => {
+ checkDocString(t1.rawComment.map(_.raw), "/** Trait1 docstring */")
+ checkDocString(c2.rawComment.map(_.raw), "/** Class2 docstring */")
+ checkDocString(cc3.rawComment.map(_.raw), "/** CaseClass3 docstring */")
+ checkDocString(ac4.rawComment.map(_.raw), "/** AbstractClass4 docstring */")
+ }
+ }
+ }
+
+ @Test def nestedClass = {
+ val source =
+ """
+ |/** Outer docstring */
+ |class Outer {
+ | /** Inner docstring */
+ | class Inner(val x: Int)
+ |}
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(outer @ TypeDef(_, tpl @ Template(_,_,_,_)))) => {
+ checkDocString(outer.rawComment.map(_.raw), "/** Outer docstring */")
+ tpl.body match {
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.raw), "/** Inner docstring */")
+ case _ => assert(false, "Couldn't find inner class")
+ }
+ }
+ }
+ }
+
+ @Test def nestedClassThenOuter = {
+ val source =
+ """
+ |/** Outer1 docstring */
+ |class Outer1 {
+ | /** Inner docstring */
+ | class Inner(val x: Int)
+ |}
+ |
+ |/** Outer2 docstring */
+ |class Outer2
+ """.stripMargin
+
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o1 @ TypeDef(_, tpl @ Template(_,_,_,_)), o2 @ TypeDef(_,_))) => {
+ checkDocString(o1.rawComment.map(_.raw), "/** Outer1 docstring */")
+ checkDocString(o2.rawComment.map(_.raw), "/** Outer2 docstring */")
+ tpl.body match {
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.raw), "/** Inner docstring */")
+ case _ => assert(false, "Couldn't find inner class")
+ }
+ }
+ }
+ }
+
+ @Test def objects = {
+ val source =
+ """
+ |package p
+ |
+ |/** Object1 docstring */
+ |object Object1
+ |
+ |/** Object2 docstring */
+ |object Object2
+ """.stripMargin
+
+ checkFrontend(source) {
+ case p @ PackageDef(_, Seq(o1: MemberDef[Untyped], o2: MemberDef[Untyped])) => {
+ assertEquals(o1.name.toString, "Object1")
+ checkDocString(o1.rawComment.map(_.raw), "/** Object1 docstring */")
+ assertEquals(o2.name.toString, "Object2")
+ checkDocString(o2.rawComment.map(_.raw), "/** Object2 docstring */")
+ }
+ }
+ }
+
+ @Test def objectsNestedClass = {
+ val source =
+ """
+ |package p
+ |
+ |/** Object1 docstring */
+ |object Object1
+ |
+ |/** Object2 docstring */
+ |object Object2 {
+ | class A1
+ | /** Inner docstring */
+ | class Inner
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case p @ PackageDef(_, Seq(o1: ModuleDef, o2: ModuleDef)) => {
+ assert(o1.name.toString == "Object1")
+ checkDocString(o1.rawComment.map(_.raw), "/** Object1 docstring */")
+ assert(o2.name.toString == "Object2")
+ checkDocString(o2.rawComment.map(_.raw), "/** Object2 docstring */")
+
+ o2.impl.body match {
+ case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.raw), "/** Inner docstring */")
+ case _ => assert(false, "Couldn't find inner class")
+ }
+ }
+ }
+ }
+
+ @Test def packageObject = {
+ val source =
+ """
+ |/** Package object docstring */
+ |package object foo {
+ | /** Boo docstring */
+ | case class Boo()
+ |
+ | /** Trait docstring */
+ | trait Trait
+ |
+ | /** InnerObject docstring */
+ | object InnerObject {
+ | /** InnerClass docstring */
+ | class InnerClass
+ | }
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(p: ModuleDef)) => {
+ checkDocString(p.rawComment.map(_.raw), "/** Package object docstring */")
+
+ p.impl.body match {
+ case (b: TypeDef) :: (t: TypeDef) :: (o: ModuleDef) :: Nil => {
+ checkDocString(b.rawComment.map(_.raw), "/** Boo docstring */")
+ checkDocString(t.rawComment.map(_.raw), "/** Trait docstring */")
+ checkDocString(o.rawComment.map(_.raw), "/** InnerObject docstring */")
+ checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment.map(_.raw), "/** InnerClass docstring */")
+ }
+ case _ => assert(false, "Incorrect structure inside package object")
+ }
+ }
+ }
+ }
+
+ @Test def multipleDocStringsBeforeEntity = {
+ val source =
+ """
+ |/** First comment */
+ |/** Second comment */
+ |/** Real comment */
+ |class Class
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(c: TypeDef)) =>
+ checkDocString(c.rawComment.map(_.raw), "/** Real comment */")
+ }
+ }
+
+ @Test def multipleDocStringsBeforeAndAfter = {
+ val source =
+ """
+ |/** First comment */
+ |/** Second comment */
+ |/** Real comment */
+ |class Class
+ |/** Following comment 1 */
+ |/** Following comment 2 */
+ |/** Following comment 3 */
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(c: TypeDef)) =>
+ checkDocString(c.rawComment.map(_.raw), "/** Real comment */")
+ }
+ }
+
+ @Test def valuesWithDocString = {
+ val source =
+ """
+ |object Object {
+ | /** val1 */
+ | val val1 = 1
+ |
+ | /** val2 */
+ | val val2: Int = 2
+ | /** bogus docstring */
+ |
+ | /** bogus docstring */
+ | /** val3 */
+ | val val3: List[Int] = 1 :: 2 :: 3 :: Nil
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o: ModuleDef)) => {
+ o.impl.body match {
+ case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
+ checkDocString(v1.rawComment.map(_.raw), "/** val1 */")
+ checkDocString(v2.rawComment.map(_.raw), "/** val2 */")
+ checkDocString(v3.rawComment.map(_.raw), "/** val3 */")
+ }
+ case _ => assert(false, "Incorrect structure inside object")
+ }
+ }
+ }
+ }
+
+ @Test def varsWithDocString = {
+ val source =
+ """
+ |object Object {
+ | /** var1 */
+ | var var1 = 1
+ |
+ | /** var2 */
+ | var var2: Int = 2
+ | /** bogus docstring */
+ |
+ | /** bogus docstring */
+ | /** var3 */
+ | var var3: List[Int] = 1 :: 2 :: 3 :: Nil
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o: ModuleDef)) => {
+ o.impl.body match {
+ case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
+ checkDocString(v1.rawComment.map(_.raw), "/** var1 */")
+ checkDocString(v2.rawComment.map(_.raw), "/** var2 */")
+ checkDocString(v3.rawComment.map(_.raw), "/** var3 */")
+ }
+ case _ => assert(false, "Incorrect structure inside object")
+ }
+ }
+ }
+ }
+
+ @Test def defsWithDocString = {
+ val source =
+ """
+ |object Object {
+ | /** def1 */
+ | def def1 = 1
+ |
+ | /** def2 */
+ | def def2: Int = 2
+ | /** bogus docstring */
+ |
+ | /** bogus docstring */
+ | /** def3 */
+ | def def3: List[Int] = 1 :: 2 :: 3 :: Nil
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o: ModuleDef)) => {
+ o.impl.body match {
+ case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
+ checkDocString(v1.rawComment.map(_.raw), "/** def1 */")
+ checkDocString(v2.rawComment.map(_.raw), "/** def2 */")
+ checkDocString(v3.rawComment.map(_.raw), "/** def3 */")
+ }
+ case _ => assert(false, "Incorrect structure inside object")
+ }
+ }
+ }
+ }
+
+ @Test def typesWithDocString = {
+ val source =
+ """
+ |object Object {
+ | /** type1 */
+ | type T1 = Int
+ |
+ | /** type2 */
+ | type T2 = String
+ | /** bogus docstring */
+ |
+ | /** bogus docstring */
+ | /** type3 */
+ | type T3 = T2
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o: ModuleDef)) => {
+ o.impl.body match {
+ case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
+ checkDocString(v1.rawComment.map(_.raw), "/** type1 */")
+ checkDocString(v2.rawComment.map(_.raw), "/** type2 */")
+ checkDocString(v3.rawComment.map(_.raw), "/** type3 */")
+ }
+ case _ => assert(false, "Incorrect structure inside object")
+ }
+ }
+ }
+ }
+
+ @Test def defInnerClass = {
+ val source =
+ """
+ |object Foo {
+ | def foo() = {
+ | /** Innermost */
+ | class Innermost
+ | }
+ |}
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case PackageDef(_, Seq(o: ModuleDef)) =>
+ o.impl.body match {
+ case (foo: MemberDef) :: Nil =>
+ expectNoDocString(foo.rawComment.map(_.raw))
+ case _ => assert(false, "Incorrect structure inside object")
+ }
+ }
+ }
+
+ @Test def withExtends = {
+ val source =
+ """
+ |trait Trait1
+ |/** Class1 */
+ |class Class1 extends Trait1
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case p @ PackageDef(_, Seq(_, c: TypeDef)) =>
+ checkDocString(c.rawComment.map(_.raw), "/** Class1 */")
+ }
+ }
+
+ @Test def withAnnotation = {
+ val source =
+ """
+ |/** Class1 */
+ |@SerialVersionUID(1)
+ |class Class1
+ """.stripMargin
+
+ import dotty.tools.dotc.ast.untpd._
+ checkFrontend(source) {
+ case p @ PackageDef(_, Seq(c: TypeDef)) =>
+ checkDocString(c.rawComment.map(_.raw), "/** Class1 */")
+ }
+ }
+} /* End class */
diff --git a/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala b/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
new file mode 100644
index 000000000..e31ef2160
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
@@ -0,0 +1,164 @@
+package dotty.tools
+package dotc
+package parsing
+
+import org.junit.Test
+import org.junit.Assert._
+
+import ast.untpd.modsDeco
+import ast.untpd._
+import ast.{ Trees => d }
+import Parsers.Parser
+import util.SourceFile
+import core.Contexts.ContextBase
+import core.Flags
+
+object ModifiersParsingTest {
+ implicit val ctx = (new ContextBase).initialCtx
+
+ implicit def parse(code: String): Tree = {
+ val (_, stats) = new Parser(new SourceFile("<meta>", code.toCharArray)).templateStatSeq()
+ stats match { case List(stat) => stat; case stats => Thicket(stats) }
+ }
+
+ implicit class TreeDeco(val code: Tree) extends AnyVal {
+ def firstConstrValDef: ValDef = code match {
+ case d.TypeDef(_, d.Template(constr, _, _, _)) =>
+ constr.vparamss.head.head
+ }
+
+ def firstTypeParam: TypeDef = code match {
+ case d.TypeDef(_, d.Template(constr, _, _, _)) =>
+ constr.tparams.head
+ }
+
+ def defParam(i: Int): ValDef = code match {
+ case d.DefDef(_, _, vparamss, _, _) =>
+ vparamss.head.toArray.apply(i)
+ }
+
+ def defParam(i: Int, j: Int): ValDef = code match {
+ case d.DefDef(_, _, vparamss, _, _) =>
+ vparamss.toArray.apply(i).toArray.apply(j)
+ }
+
+ def funParam(i: Int): Tree = code match {
+ case Function(params, _) =>
+ params.toArray.apply(i)
+ }
+
+ def field(i: Int): Tree = code match {
+ case d.TypeDef(_, t: Template) =>
+ t.body.toArray.apply(i)
+ }
+
+ def field(name: String): Tree = code match {
+ case d.TypeDef(_, t: Template) =>
+ t.body.find({
+ case m: MemberDef => m.name.show == name
+ case _ => false
+ }).get
+ }
+
+ def stat(i: Int): Tree = code match {
+ case d.Block(stats, expr) =>
+ if (i < stats.length) stats.toArray.apply(i)
+ else expr
+ }
+
+ def modifiers: List[Mod] = code match {
+ case t: MemberDef => t.mods.mods
+ }
+ }
+}
+
+
+class ModifiersParsingTest {
+ import ModifiersParsingTest._
+
+
+ @Test def valDef = {
+ var source: Tree = "class A(var a: Int)"
+ assert(source.firstConstrValDef.modifiers == List(Mod.Var()))
+
+ source = "class A(val a: Int)"
+ assert(source.firstConstrValDef.modifiers == List(Mod.Val()))
+
+ source = "class A(private val a: Int)"
+ assert(source.firstConstrValDef.modifiers == List(Mod.Private(), Mod.Val()))
+
+ source = "class A(protected var a: Int)"
+ assert(source.firstConstrValDef.modifiers == List(Mod.Protected(), Mod.Var()))
+
+ source = "class A(protected implicit val a: Int)"
+ assert(source.firstConstrValDef.modifiers == List(Mod.Protected(), Mod.Implicit(), Mod.Val()))
+
+ source = "class A[T]"
+ assert(source.firstTypeParam.modifiers == List())
+
+ source = "class A[type T]"
+ assert(source.firstTypeParam.modifiers == List(Mod.Type()))
+ }
+
+ @Test def typeDef = {
+ var source: Tree = "class A"
+ assert(source.modifiers == List())
+
+ source = "sealed class A"
+ assert(source.modifiers == List(Mod.Sealed()))
+
+ source = "implicit class A"
+ assert(source.modifiers == List(Mod.Implicit()))
+
+ source = "abstract sealed class A"
+ assert(source.modifiers == List(Mod.Abstract(), Mod.Sealed()))
+ }
+
+ @Test def fieldDef = {
+ val source: Tree =
+ """
+ | class A {
+ | lazy var a = ???
+ | lazy private val b = ???
+ | final val c = ???
+ |
+ | abstract override def f: Boolean
+ | inline def g(n: Int) = ???
+ | }
+ """.stripMargin
+
+ assert(source.field("a").modifiers == List(Mod.Lazy(), Mod.Var()))
+ assert(source.field("b").modifiers == List(Mod.Lazy(), Mod.Private(), Mod.Val()))
+ assert(source.field("c").modifiers == List(Mod.Final(), Mod.Val()))
+ assert(source.field("f").modifiers == List(Mod.Abstract(), Mod.Override()))
+ assert(source.field("g").modifiers == List(Mod.Inline()))
+ }
+
+ @Test def paramDef = {
+ var source: Tree = "def f(inline a: Int) = ???"
+ assert(source.defParam(0).modifiers == List(Mod.Inline()))
+
+ source = "def f(implicit a: Int, b: Int) = ???"
+ assert(source.defParam(0).modifiers == List(Mod.Implicit(Flags.Implicit)))
+ assert(source.defParam(1).modifiers == List(Mod.Implicit(Flags.Implicit)))
+
+ source = "def f(x: Int, y: Int)(implicit a: Int, b: Int) = ???"
+ assert(source.defParam(0, 0).modifiers == List())
+ assert(source.defParam(1, 0).modifiers == List(Mod.Implicit(Flags.Implicit)))
+ }
+
+ @Test def blockDef = {
+ var source: Tree = "implicit val x : A = ???"
+ assert(source.modifiers == List(Mod.Implicit(), Mod.Val()))
+
+ source = "implicit var x : A = ???"
+ assert(source.modifiers == List(Mod.Implicit(), Mod.Var()))
+
+ source = "{ implicit var x : A = ??? }"
+ assert(source.stat(0).modifiers == List(Mod.Implicit(), Mod.Var()))
+
+ source = "{ implicit x => x * x }"
+ assert(source.stat(0).funParam(0).modifiers == List(Mod.Implicit()))
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala b/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala
new file mode 100644
index 000000000..a89b34512
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/ParserTest.scala
@@ -0,0 +1,44 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.reflect.io._
+import util._
+import core._
+import scala.io.Codec
+import Tokens._, Parsers._
+import ast.untpd._
+import org.junit.Test
+import scala.collection.mutable.ListBuffer
+
+class ParserTest extends DottyTest {
+
+ def parse(name: String): Tree = parse(new PlainFile(name))
+
+ var parsed = 0
+ val parsedTrees = new ListBuffer[Tree]
+
+ def reset() = {
+ parsed = 0
+ parsedTrees.clear()
+ }
+
+ def parse(file: PlainFile): Tree = {
+ //println("***** parsing " + file)
+ val source = new SourceFile(file, Codec.UTF8)
+ val parser = new Parser(source)
+ val tree = parser.parse()
+ parsed += 1
+ parsedTrees += tree
+ tree
+ }
+
+ def parseDir(path: String): Unit = parseDir(Directory(path))
+
+ def parseDir(dir: Directory): Unit = {
+ for (f <- dir.files)
+ if (f.name.endsWith(".scala")) parse(new PlainFile(f))
+ for (d <- dir.dirs)
+ parseDir(d.path)
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala
new file mode 100644
index 000000000..b024a63db
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/ScannerTest.scala
@@ -0,0 +1,65 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.reflect.io._
+import scala.io.Codec
+import util._
+import Tokens._, Scanners._
+import org.junit.Test
+
+class ScannerTest extends DottyTest {
+
+ val blackList = List(
+ "/scaladoc/scala/tools/nsc/doc/html/page/Index.scala",
+ "/scaladoc/scala/tools/nsc/doc/html/page/Template.scala"
+ )
+
+ def scan(name: String): Unit = scan(new PlainFile(name))
+
+ def scan(file: PlainFile): Unit = {
+ //println("***** scanning " + file)
+ val source = new SourceFile(file, Codec.UTF8)
+ val scanner = new Scanner(source)
+ var i = 0
+ while (scanner.token != EOF) {
+// print("[" + scanner.token.show +"]")
+ scanner.nextToken
+// i += 1
+// if (i % 10 == 0) println()
+ }
+ }
+
+ def scanDir(path: String): Unit = scanDir(Directory(path))
+
+ def scanDir(dir: Directory): Unit = {
+ if (blackList exists (dir.jfile.toString endsWith _))
+ println(s"blacklisted package: ${dir.jfile.getAbsolutePath}")
+ else
+ for (f <- dir.files)
+ if (f.name.endsWith(".scala"))
+ if (blackList exists (f.jfile.toString endsWith _))
+ println(s"blacklisted file: ${f.jfile.getAbsolutePath}")
+ else
+ scan(new PlainFile(f))
+ for (d <- dir.dirs)
+ scanDir(d.path)
+ }
+
+ @Test
+ def scanList() = {
+ println(System.getProperty("user.dir"))
+ scan("./src/dotty/tools/dotc/core/Symbols.scala")
+ scan("./src/dotty/tools/dotc/core/Symbols.scala")
+ }
+
+ @Test
+ def scanDotty() = {
+ scanDir("src")
+ }
+
+ @Test
+ def scanScala() = {
+ scanDir("../scala-scala/src")
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala b/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala
new file mode 100644
index 000000000..84bf7a2d1
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/desugarPackage.scala
@@ -0,0 +1,28 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core._, ast._
+import Trees._
+
+object desugarPackage extends DeSugarTest {
+
+ def test() = {
+ reset()
+ val start = System.nanoTime()
+ val startNodes = Trees.ntrees
+ parseDir("./src")
+ parseDir("./scala-scala/src")
+ val ms1 = (System.nanoTime() - start)/1000000
+ val nodes = Trees.ntrees
+ val buf = parsedTrees map desugarTree
+ val ms2 = (System.nanoTime() - start)/1000000
+ println(s"$parsed files parsed in ${ms1}ms, ${nodes - startNodes} nodes desugared in ${ms2-ms1}ms, total trees created = ${Trees.ntrees - startNodes}")
+ ctx.reporter.printSummary(ctx)
+ }
+
+ def main(args: Array[String]): Unit = {
+// parse("/Users/odersky/workspace/scala/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala")
+ for (i <- 0 until 10) test()
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/parseFile.scala b/compiler/test/dotty/tools/dotc/parsing/parseFile.scala
new file mode 100644
index 000000000..c41a286cb
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/parseFile.scala
@@ -0,0 +1,13 @@
+package dotty.tools.dotc.parsing
+
+object parseFile extends ParserTest {
+
+ def main(args: Array[String]): Unit = {
+    if (args.isEmpty) println("usage: scala dotty.tools.dotc.parsing.parseFile file1.scala ... fileN.scala")
+ for (arg <- args) {
+ val tree = parse(arg)
+ println("parsed: " + arg)
+ println(tree.show)
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala b/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala
new file mode 100644
index 000000000..df5368ffe
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/parsePackage.scala
@@ -0,0 +1,83 @@
+package dotty.tools
+package dotc
+package parsing
+
+import dotty.tools.dotc._
+import core._, ast._
+import Trees._
+import Contexts.Context
+
+object parsePackage extends ParserTest {
+
+ import ast.untpd._
+
+ var nodes = 0
+
+ val transformer = new UntypedTreeMap {
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = {
+ nodes += 1
+ tree match {
+ case Ident(name) =>
+ Ident(name)
+ case This(name) =>
+ This(name)
+ case TypedSplice(t) =>
+ TypedSplice(t)
+ case SymbolLit(str) =>
+ tree
+ case InterpolatedString(id, segments) =>
+ InterpolatedString(id, segments map transform)
+ case mdef @ ModuleDef(name, impl) =>
+ ModuleDef(name, transformSub(impl)).withMods(mdef.mods)
+ case Function(params, body) =>
+ Function(params map transform, body)
+ case InfixOp(l, o, r) =>
+ InfixOp(transform(l), o, transform(r))
+ case PostfixOp(l, o) =>
+ PostfixOp(transform(l), o)
+ case PrefixOp(o, t) =>
+ PrefixOp(o, transform(t))
+ case Parens(t) =>
+ Parens(transform(t))
+ case Tuple(ts) =>
+ Tuple(ts map transform)
+ case WhileDo(cond, body) =>
+ WhileDo(transform(cond), transform(body))
+ case DoWhile(body, cond) =>
+ DoWhile(transform(body), transform(cond))
+ case ForYield(enums, expr) =>
+ ForYield(enums map transform, transform(expr))
+ case ForDo(enums, expr) =>
+ ForDo(enums map transform, transform(expr))
+ case GenFrom(pat, expr) =>
+ GenFrom(transform(pat), transform(expr))
+ case GenAlias(pat, expr) =>
+ GenAlias(transform(pat), transform(expr))
+ case PatDef(mods, pats, tpt, expr) =>
+ PatDef(mods, pats map transform, transform(tpt), transform(expr))
+ case ContextBounds(bounds, cxBounds) =>
+ ContextBounds(transformSub(bounds), cxBounds map transform)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ }
+
+ def test() = {
+ reset()
+ nodes = 0
+ val start = System.nanoTime()
+ parseDir("./src")
+ parseDir("./scala-scala/src")
+ val ms1 = (System.nanoTime() - start)/1000000
+ val buf = parsedTrees map transformer.transform
+ val ms2 = (System.nanoTime() - start)/1000000
+ println(s"$parsed files parsed in ${ms1}ms, $nodes nodes transformed in ${ms2-ms1}ms, total trees created = ${Trees.ntrees}")
+ ctx.reporter.printSummary(ctx)
+ }
+
+ def main(args: Array[String]): Unit = {
+// parse("/Users/odersky/workspace/scala/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala")
+ for (i <- 0 until 10) test()
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/parsing/showTree.scala b/compiler/test/dotty/tools/dotc/parsing/showTree.scala
new file mode 100644
index 000000000..18b2203d5
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/parsing/showTree.scala
@@ -0,0 +1,26 @@
+package dotty.tools
+package dotc
+package parsing
+
+import ast.Trees._
+import ast.desugar
+import ast.desugar._
+import core.Mode
+
+object showTree extends DeSugarTest {
+
+ import dotty.tools.dotc.ast.untpd._
+
+ import Mode._
+
+ def test(arg: String) = {
+ val tree: Tree = parse(arg)
+ println("result = " + tree.show)
+ println("desugared = " + DeSugar.transform(tree).show)
+ }
+
+ def main(args: Array[String]): Unit = {
+ test("src/dotty/tools/dotc/core/Types.scala")
+ for (arg <- args) test(arg)
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/repl/TestREPL.scala b/compiler/test/dotty/tools/dotc/repl/TestREPL.scala
new file mode 100644
index 000000000..2263e85a0
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/repl/TestREPL.scala
@@ -0,0 +1,66 @@
+package dotty.tools.dotc
+package repl
+
+import core.Contexts.Context
+import collection.mutable
+import java.io.StringWriter
+
+/** A subclass of REPL used for testing.
+ * It takes a transcript of a REPL session in `script`. The transcript
+ * starts with the first input prompt `scala> ` and ends with `scala> :quit` and a newline.
+ * Invoking `process()` on the `TestREPL` runs all input lines and
+ *  collects them, interleaved with the REPL output, in a string writer `out`.
+ * Invoking `check()` checks that the collected output matches the original
+ * `script`.
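+ *
+ *  A minimal usage sketch; the transcript below is hypothetical and the
+ *  no-argument `process()` call simply follows the description above:
+ *  {{{
+ *  val session =
+ *    """scala> 1 + 1
+ *      |res0: Int = 2
+ *      |scala> :quit
+ *      |""".stripMargin
+ *  val repl = new TestREPL(session)
+ *  repl.process()  // runs the input lines, collecting output in `out`
+ *  repl.check()    // asserts that the collected output matches `session`
+ *  }}}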
+ */
+class TestREPL(script: String) extends REPL {
+
+ private val out = new StringWriter()
+
+ override lazy val config = new REPL.Config {
+ override val output = new NewLinePrintWriter(out)
+
+ override def context(ctx: Context) = {
+ val fresh = ctx.fresh
+ fresh.setSetting(ctx.settings.color, "never")
+ fresh.setSetting(
+ ctx.settings.classpath,
+ "../library/target/scala-2.11/dotty-library_2.11-0.1-SNAPSHOT.jar"
+ )
+ fresh.initialize()(fresh)
+ fresh
+ }
+
+ override def input(in: Interpreter)(implicit ctx: Context) = new InteractiveReader {
+ val lines = script.lines.buffered
+ def readLine(prompt: String): String = {
+ val line = lines.next
+ val buf = new StringBuilder
+ if (line.startsWith(prompt)) {
+ output.println(line)
+ buf append line.drop(prompt.length)
+ while (lines.hasNext && lines.head.startsWith(continuationPrompt)) {
+ val continued = lines.next
+ output.println(continued)
+ buf append System.lineSeparator()
+ buf append continued.drop(continuationPrompt.length)
+ }
+ buf.toString
+ }
+ else readLine(prompt)
+ }
+ val interactive = false
+ }
+ }
+
+ def check() = {
+ out.close()
+ val printed = out.toString
+ val transcript = printed.drop(printed.indexOf(config.prompt))
+    if (transcript.lines.toList != script.lines.toList) {
+ println("input differs from transcript:")
+ println(transcript)
+ assert(false)
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/reporting/TestMessageLaziness.scala b/compiler/test/dotty/tools/dotc/reporting/TestMessageLaziness.scala
new file mode 100644
index 000000000..6892739e8
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/reporting/TestMessageLaziness.scala
@@ -0,0 +1,37 @@
+package dotty.tools
+package dotc
+package reporting
+
+import org.junit.Assert._
+import org.junit.Test
+
+import core.Contexts._
+
+import test.DottyTest
+
+import diagnostic.{ Message, MessageContainer, ExtendMessage }
+
+class TestMessageLaziness extends DottyTest {
+ ctx = ctx.fresh.setReporter(new NonchalantReporter)
+
+ class NonchalantReporter(implicit ctx: Context) extends Reporter
+ with UniqueMessagePositions with HideNonSensicalMessages {
+ def doReport(m: MessageContainer)(implicit ctx: Context) = ???
+
+ override def report(m: MessageContainer)(implicit ctx: Context) = ()
+ }
+
+ case class LazyError() extends Message(1000) {
+ throw new Error("Didn't stay lazy.")
+
+ val kind = "Test"
+ val msg = "Please don't blow up"
+ val explanation = ""
+ }
+
+ @Test def assureLazy =
+ ctx.error(LazyError())
+
+ @Test def assureLazyExtendMessage =
+ ctx.strictWarning(LazyError())
+}
diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
new file mode 100644
index 000000000..70d18d031
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
@@ -0,0 +1,52 @@
+package dotty.tools
+package dotc
+package reporting
+
+import scala.collection.mutable
+import util.SourcePosition
+import core.Contexts._
+import Reporter._
+import java.io.PrintWriter
+import scala.reflect.internal.util._
+import diagnostic.{ Message, MessageContainer, NoExplanation }
+import diagnostic.messages._
+
+class TestReporter(writer: PrintWriter) extends Reporter
+with UniqueMessagePositions with HideNonSensicalMessages {
+
+ import MessageContainer._
+
+  /** Maximum number of error messages to be printed. */
+ protected def ErrorLimit = 100
+
+ def printPos(pos: SourcePosition): Unit =
+ if (pos.exists) {
+ if (pos.outer.exists) {
+ writer.println(s"\ninlined at ${pos.outer}:\n")
+ printPos(pos.outer)
+ }
+ }
+
+ /** Prints the message with the given position indication. */
+ def printMessageAndPos(msg: String, pos: SourcePosition)(implicit ctx: Context): Unit = {
+ val posStr = s"${pos.line + 1}: "
+ writer.println(posStr + msg)
+ printPos(pos)
+ }
+
+ override def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+    // Add extra information that we want to report for specific error messages
+ val extra = m.contained match {
+ case pm: PatternMatchExhaustivity => s": ${pm.uncovered}"
+ case _ => ""
+ }
+
+ m match {
+ case m: Error =>
+ printMessageAndPos(m.contained.kind + extra, m.pos)
+ case w: Warning =>
+ printMessageAndPos(w.contained.kind + extra, w.pos)
+ case _ =>
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/transform/CreateCompanionObjectsTest.scala b/compiler/test/dotty/tools/dotc/transform/CreateCompanionObjectsTest.scala
new file mode 100644
index 000000000..18acb2105
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/transform/CreateCompanionObjectsTest.scala
@@ -0,0 +1,128 @@
+package dotty.tools
+package dotc
+package transform
+
+import org.junit.{Assert, Test}
+import core._
+import ast.{tpd, Trees}
+import Contexts._
+import Flags._
+import Denotations._
+import NameOps._
+import Symbols._
+import Types._
+import Decorators._
+import Trees._
+import TreeTransforms.{TreeTransform, TreeTransformer}
+
+
+class CreateCompanionObjectsTest extends DottyTest {
+ /* FIXME: re-enable after adapting to new scheme
+
+ import tpd._
+
+ type PostTyperTransformer = TreeTransformer // FIXME do without
+
+ @Test
+ def shouldCreateNonExistingObjectsInPackage = checkCompile("frontend", "class A{} ") {
+ (tree, context) =>
+ implicit val ctx = context
+
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new CreateCompanionObjects {
+
+ override def name: String = "create all companion objects"
+ override def predicate(cts: TypeDef)(implicit ctx:Context): Boolean = true
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ })
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module <synthetic>,,List()),A$"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should create non-existing objects in package",
+ classPos < modulePos
+ )
+ }
+
+ @Test
+ def shouldCreateNonExistingObjectsInBlock = checkCompile("frontend", "class D {def p = {class A{}; 1}} ") {
+ (tree, context) =>
+ implicit val ctx = context
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new CreateCompanionObjects {
+
+ override def name: String = "create all companion modules"
+ override def predicate(cts: TypeDef)(implicit ctx:Context): Boolean = true
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ })
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module <synthetic>,,List()),A$"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should create non-existing objects in block",
+ classPos < modulePos
+ )
+ }
+
+ @Test
+ def shouldCreateNonExistingObjectsInTemplate = checkCompile("frontend", "class D {class A{}; } ") {
+ (tree, context) =>
+ implicit val ctx = context
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new CreateCompanionObjects {
+ override def name: String = "create all companion modules"
+ override def predicate(cts: TypeDef)(implicit ctx:Context): Boolean = true
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ })
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module <synthetic>,,List()),A$"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should create non-existing objects in template",
+ classPos < modulePos
+ )
+ }
+
+ @Test
+ def shouldCreateOnlyIfAskedFor = checkCompile("frontend", "class DONT {class CREATE{}; } ") {
+ (tree, context) =>
+ implicit val ctx = context
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new CreateCompanionObjects {
+ override def name: String = "create all companion modules"
+ override def predicate(cts: TypeDef)(implicit ctx:Context): Boolean = cts.name.toString.contains("CREATE")
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ })
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module <synthetic>,,List()),CREATE$"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ val notCreatedModulePattern = "TypeDef(Modifiers(final module <synthetic>,,List()),DONT"
+ val notCreatedPos = transformed.indexOf(notCreatedModulePattern)
+
+ Assert.assertTrue("should create non-existing objects in template",
+ classPos < modulePos && (notCreatedPos < 0)
+ )
+ }
+ */
+}
diff --git a/compiler/test/dotty/tools/dotc/transform/LazyValsTest.scala b/compiler/test/dotty/tools/dotc/transform/LazyValsTest.scala
new file mode 100644
index 000000000..96298c571
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/transform/LazyValsTest.scala
@@ -0,0 +1,361 @@
+package dotty.tools
+package dotc
+package transform
+
+import org.junit.Test
+import org.junit.Assert
+
+class LazyValsTest extends DottyTest {
+ /* FIXME: re-enable after adapting to new scheme
+ @Test
+ def doNotRewriteObjects = {
+ checkCompile("LazyVals", "object O"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy shouldn't rewrite module instance definitions", tree.toString.contains(
+ "ValDef(Modifiers(final module <stable>,,List()),O,"
+ ))
+ }
+ }
+
+ @Test
+ def localInt = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = 1; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy int rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyInt)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyInt)]),<init>),List(Literal(Constant(1)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localLong = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = 1L; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy long rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyLong)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyLong)]),<init>),List(Literal(Constant(1)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localFloat = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = 1.0f; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy float rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyFloat)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyFloat)]),<init>),List(Literal(Constant(1.0)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localDouble = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = 1.0; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy double rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyDouble)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyDouble)]),<init>),List(Literal(Constant(1.0)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localBoolean = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = true; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy boolean rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyBoolean)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyBoolean)]),<init>),List(Literal(Constant(true)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localChar = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = 'a'; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy char rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyChar)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyChar)]),<init>),List(Literal(Constant(a)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localByte = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s:Byte = 1; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy byte rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyByte)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyByte)]),<init>),List(Literal(Constant(1)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localShort = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s:Short = 1; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy short rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyShort)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyShort)]),<init>),List(Literal(Constant(1)))))"
+ ))
+ }
+ }
+
+ @Test
+ def localRef = {
+ checkCompile("LazyVals", "class LocalLV { def m = { lazy val s = \"string\"; s }}"){ (tree, ctx) =>
+ Assert.assertTrue("local lazy ref rewritten to class creation", tree.toString.contains(
+ "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class runtime),LazyRef)],Apply(Select(New(TypeTree[TypeRef(ThisType(module class runtime),LazyRef)]),<init>),List(Literal(Constant(string)))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldRef = {
+ checkCompile("LazyVals", "class LV { lazy val s = \"string\" }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy int rewritten to class creation", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class lang),String)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(string)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldInt = {
+ checkCompile("LazyVals", "class LV { lazy val s = 1 }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy int rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Int)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldLong = {
+ checkCompile("LazyVals", "class LV { lazy val s = 1L }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy long rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Long)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldShort = {
+ checkCompile("LazyVals", "class LV { lazy val s:Short = 1 }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy short rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Short)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldByte = {
+ checkCompile("LazyVals", "class LV { lazy val s:Byte = 1 }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy byte rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Byte)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldBoolean = {
+ checkCompile("LazyVals", "class LV { lazy val s = true }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy boolean rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Boolean)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(true)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldDouble = {
+ checkCompile("LazyVals", "class LV { lazy val s = 1.0 }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy double rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Double)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1.0)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldFloat = {
+ checkCompile("LazyVals", "class LV { lazy val s = 1.0f }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy float rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Float)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(1.0)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def fieldChar = {
+ checkCompile("LazyVals", "class LV { lazy val s = 'a' }"){ (tree, ctx) =>
+ Assert.assertTrue("field lazy char rewritten", tree.toString.contains(
+ "DefDef(Modifiers(,,List()),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Char)],If(Ident(sbitmap$1),Ident(s$lzy1),Block(List(Assign(Ident(sbitmap$1),Literal(Constant(true))), Assign(Ident(s$lzy1),Literal(Constant(a)))),Ident(s$lzy1))))"
+ ))
+ }
+ }
+
+ @Test
+ def volatileFieldRef = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = \"a\" }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class lang),String)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class lang),String)],Literal(Constant(null))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(a))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class lang),String)],Literal(Constant(null))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ //println(treeS)
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldInt = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = 1 }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Int)],Literal(Constant(0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Int)],Literal(Constant(0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldLong = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = 1L }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Long)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldFloat = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = 1.0f }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Float)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Float)],Literal(Constant(0.0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1.0))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Float)],Literal(Constant(0.0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldDouble = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = 1.0 }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Double)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Double)],Literal(Constant(0.0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1.0))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Double)],Literal(Constant(0.0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldBoolean = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = true }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Boolean)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(false))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(true))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(false))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldByte = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s:Byte = 1 }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Byte)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Byte)],Literal(Constant(0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Byte)],Literal(Constant(0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldShort = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s:Short = 1 }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Short)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Short)],Literal(Constant(0))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(1))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(TermRef(ThisType(module class <root>),scala),Short)],Literal(Constant(0))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
+ @Test
+ def volatileFieldChar = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val s = 'a' }") {
+ (tree, ctx) =>
+ val accessor = "DefDef(Modifiers(,,List(Apply(Select(New(Ident(volatile)),<init>),List()))),s,List(),List(),TypeTree[TypeRef(ThisType(module class scala),Char)],Block(List(ValDef(Modifiers(,,List()),result,TypeTree[TypeRef(ThisType(module class scala),Char)],Literal(Constant(\u0000))), ValDef(Modifiers(,,List()),retry,TypeTree[TypeRef(ThisType(module class scala),Boolean)],Literal(Constant(true))), ValDef(Modifiers(,,List()),flag,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0))), WhileDo(Ident(retry),Block(List(Assign(Ident(flag),Apply(Select(Ident(LazyVals),get),List(This(LV), Select(Ident(LV),OFFSET$0))))),Match(Apply(Select(Ident(LazyVals),STATE),List(Ident(flag), Literal(Constant(0)))),List(CaseDef(Literal(Constant(0)),EmptyTree,If(Apply(Select(Ident(LazyVals),CAS),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(1)), Literal(Constant(0)))),Block(List(Try(Assign(Ident(result),Literal(Constant(a))),Block(List(DefDef(Modifiers(,,List()),$anonfun,List(),List(List(ValDef(Modifiers(,,List()),x$1,TypeTree[TypeRef(ThisType(module class lang),Throwable)],EmptyTree))),TypeTree[TypeRef(ThisType(module class scala),Int)],Block(List(Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(0)), Literal(Constant(0))))),Throw(Ident(x$1))))),Closure(List(),Ident($anonfun),EmptyTree)),EmptyTree), Assign(Ident(s$lzy1),Ident(result)), Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(0)))), Assign(Ident(retry),Literal(Constant(false)))),Literal(Constant(()))),Literal(Constant(())))), CaseDef(Literal(Constant(1)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(2)),EmptyTree,Apply(Select(Ident(LazyVals),wait4Notification),List(This(LV), Select(Ident(LV),OFFSET$0), Ident(flag), Literal(Constant(0))))), CaseDef(Literal(Constant(3)),EmptyTree,Block(List(Assign(Ident(retry),Literal(Constant(false))), Assign(Ident(result),Ident(s$lzy1))),Literal(Constant(()))))))))),Ident(result)))"
+ val fields = "ValDef(Modifiers(,,List()),s$lzy1,TypeTree[TypeRef(ThisType(module class scala),Char)],Literal(Constant(\u0000))), ValDef(Modifiers(,,List()),bitmap$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Literal(Constant(0)))"
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(accessor) && treeS.contains(fields) && treeS.contains(moduleField))
+ }
+ }
+
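+ // Two @volatile lazy vals in the same class should share bitmap$0; the second one is expected to use slot 1
+ // of that bitmap, which the trailing Constant(1) in the setFlag pattern below checks for.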
+ @Test
+ def volatilesReuseBitmaps = {
+ checkCompile("LazyVals", "class LV { @volatile lazy val a = 'a'; @volatile lazy val b = 'b'; }") {
+ (tree, ctx) =>
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val reuseFieldPattern = "Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$0), Literal(Constant(3)), Literal(Constant(1))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation\n" + treeS,
+ treeS.contains(moduleField) && treeS.contains(reuseFieldPattern))
+ }
+ }
+
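+ // 34 @volatile lazy vals do not fit into a single Long bitmap, so the phase is expected to allocate a
+ // second bitmap$1 with its own OFFSET$1, as the expected moduleField below shows.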
+ @Test
+ def volatilesCreateNewBitmaps = {
+ checkCompile("LazyVals",
+ """
+ | class LV {
+ | @volatile lazy val a1 = '1';
+ | @volatile lazy val a2 = '1';
+ | @volatile lazy val a3 = '1';
+ | @volatile lazy val a4 = '1';
+ | @volatile lazy val a5 = '1';
+ | @volatile lazy val a6 = '1';
+ | @volatile lazy val a7 = '1';
+ | @volatile lazy val a8 = '1';
+ | @volatile lazy val a9 = '1';
+ | @volatile lazy val a10 = '1';
+ | @volatile lazy val a11 = '1';
+ | @volatile lazy val a12 = '1';
+ | @volatile lazy val a13 = '1';
+ | @volatile lazy val a14 = '1';
+ | @volatile lazy val a15 = '1';
+ | @volatile lazy val a16 = '1';
+ | @volatile lazy val a17 = '1';
+ | @volatile lazy val a18 = '1';
+ | @volatile lazy val a19 = '1';
+ | @volatile lazy val a20 = '1';
+ | @volatile lazy val a21 = '1';
+ | @volatile lazy val a22 = '1';
+ | @volatile lazy val a23 = '1';
+ | @volatile lazy val a24 = '1';
+ | @volatile lazy val a25 = '1';
+ | @volatile lazy val a26 = '1';
+ | @volatile lazy val a27 = '1';
+ | @volatile lazy val a28 = '1';
+ | @volatile lazy val a29 = '1';
+ | @volatile lazy val a30 = '1';
+ | @volatile lazy val a31 = '1';
+ | @volatile lazy val a32 = '1';
+ | @volatile lazy val a33 = '1';
+ | @volatile lazy val a34 = '1';
+ | }
+ """.stripMargin ){
+ (tree, ctx) =>
+ val moduleField = "TypeDef(Modifiers(final module <synthetic>,,List()),LV$,Template(DefDef(Modifiers(,,List()),<init>,List(),List(List()),TypeTree[TypeRef(ThisType(module class <empty>),LV$)],EmptyTree),List(Apply(Select(New(TypeTree[TypeRef(ThisType(module class lang),Object)]),<init>),List())),ValDef(Modifiers(,,List()),_,TypeTree[TermRef(ThisType(module class <empty>),LV)],EmptyTree),List(ValDef(Modifiers(,,List()),OFFSET$1,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$1))))), ValDef(Modifiers(,,List()),OFFSET$0,TypeTree[TypeRef(ThisType(module class scala),Long)],Apply(Select(Ident(LazyVals),getOffset),List(This(LV), Literal(Constant(bitmap$0))))))))"
+ val reuseFieldPattern = "Apply(Select(Ident(LazyVals),setFlag),List(This(LV), Select(Ident(LV),OFFSET$1), Literal(Constant(3)), Literal(Constant(1))))"
+ val treeS = tree.toString
+ Assert.assertTrue("volatile field lazy ref rewritten to class creation",
+ treeS.contains(moduleField) && treeS.contains(reuseFieldPattern))
+ }
+ }*/
+}
diff --git a/compiler/test/dotty/tools/dotc/transform/PostTyperTransformerTest.scala b/compiler/test/dotty/tools/dotc/transform/PostTyperTransformerTest.scala
new file mode 100644
index 000000000..03d6d9b36
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/transform/PostTyperTransformerTest.scala
@@ -0,0 +1,132 @@
+package dotty.tools
+package dotc
+package transform
+
+import org.junit.{Assert, Test}
+import core._
+import ast.Trees
+import Contexts._
+import Flags._
+import Denotations._
+import NameOps._
+import Symbols._
+import Types._
+import Decorators._
+import Trees._
+import TreeTransforms.{TreeTransform, TreeTransformer}
+
+class PostTyperTransformerTest extends DottyTest {
+ /* FIXME: re-enable after adapting to new scheme
+
+ @Test
+ def shouldStripImports = checkCompile("frontend", "class A{ import scala.collection.mutable._; val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends TreeTransform {
+ override def name: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new EmptyTransform)
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree)
+
+ Assert.assertTrue("should strip imports",
+ !transformed.toString.toLowerCase.contains("import")
+ )
+ }
+
+ @Test
+ def shouldStripNamedArgs = checkCompile("frontend", "class A{ def p(x:Int, y:Int= 2) = 1; p(1, y = 2)}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends TreeTransform {
+ override def name: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new EmptyTransform)
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree)
+
+ Assert.assertTrue("should string named arguments",
+ !transformed.toString.contains("NamedArg")
+ )
+ }
+
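+ // The synthetic module class A$ should be emitted after its companion class A, so the class pattern
+ // must occur before the module-class pattern in the printed tree.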
+ @Test
+ def shouldReorderExistingObjectsInPackage = checkCompile("frontend", "object A{}; class A{} ") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends TreeTransform {
+ override def name: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new EmptyTransform)
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module,,List()),A$,"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should reorder existing objects in package",
+ classPos < modulePos
+ )
+ }
+
+ @Test
+ def shouldReorderExistingObjectsInBlock = checkCompile("frontend", "class D {def p = {object A{}; class A{}; 1}} ") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends TreeTransform {
+ override def name: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new EmptyTransform)
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module,,List()),A$,"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should reorder existing objects in block",
+ classPos < modulePos
+ )
+ }
+
+ @Test
+ def shouldReorderExistingObjectsInTemplate = checkCompile("frontend", "class D {object A{}; class A{}; } ") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends TreeTransform {
+ override def name: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new PostTyperTransformer {
+ override def transformations = Array(new EmptyTransform)
+
+ override def name: String = "test"
+ }
+ val transformed = transformer.transform(tree).toString
+ val classPattern = "TypeDef(Modifiers(,,List()),A,"
+ val classPos = transformed.indexOf(classPattern)
+ val moduleClassPattern = "TypeDef(Modifiers(final module,,List()),A$,"
+ val modulePos = transformed.indexOf(moduleClassPattern)
+
+ Assert.assertTrue("should reorder existing objects in template",
+ classPos < modulePos
+ )
+ }*/
+}
diff --git a/compiler/test/dotty/tools/dotc/transform/TreeTransformerTest.scala b/compiler/test/dotty/tools/dotc/transform/TreeTransformerTest.scala
new file mode 100644
index 000000000..d72980d80
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/transform/TreeTransformerTest.scala
@@ -0,0 +1,198 @@
+package dotty.tools
+package dotc
+package transform
+
+import org.junit.{Assert, Test}
+import TreeTransforms.{TransformerInfo, TreeTransformer, MiniPhaseTransform}
+import ast.tpd
+import core.Constants.Constant
+import core.Contexts.Context
+
+class TreeTransformerTest extends DottyTest {
+
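+ // A mini phase with no overridden transform methods should leave the tree alone;
+ // the transformer is expected to return the identical instance (checked with eq).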
+ @Test
+ def shouldReturnSameTreeIfUnchanged = checkCompile("frontend", "class A{ val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class EmptyTransform extends MiniPhaseTransform {
+ override def phaseName: String = "empty"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new TreeTransformer {
+ override def miniPhases = Array(new EmptyTransform)
+
+ override def phaseName: String = "test"
+ }
+ val transformed = transformer.macroTransform(tree)
+
+ Assert.assertTrue("returns same tree if unmodified",
+ tree eq transformed
+ )
+ }
+
+ // Disabled, awaiting resolution. @Test
+ def canReplaceConstant = checkCompile("frontend", "class A{ val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class ConstantTransform extends MiniPhaseTransform {
+
+ override def transformLiteral(tree: tpd.Literal)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = tpd.Literal(Constant(2))
+ override def phaseName: String = "canReplaceConstant"
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new TreeTransformer {
+ override def miniPhases = Array(new ConstantTransform)
+
+ override def phaseName: String = "test"
+ }
+ val transformed = transformer.macroTransform(tree)
+
+ Assert.assertTrue("returns same tree if unmodified",
+ transformed.toString.contains("List(ValDef(Modifiers(,,List()),d,TypeTree[TypeRef(ThisType(module class scala),Int)],Literal(Constant(2)))")
+ )
+ }
+
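+ // Children are transformed first: the literal becomes -1 before transformValDef runs,
+ // which then overwrites the rhs with 2.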
+ @Test
+ def canOverwrite = checkCompile("frontend", "class A{ val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class Transformation extends MiniPhaseTransform {
+
+ override def transformLiteral(tree: tpd.Literal)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = tpd.Literal(Constant(-1))
+ override def phaseName: String = "canOverwrite"
+
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo): tpd.ValDef = {
+ Assert.assertTrue("transformation of children succeeded",
+ tree.rhs.toString == "Literal(Constant(-1))"
+ )
+ tpd.cpy.ValDef(tree)(rhs = tpd.Literal(Constant(2)))
+ }
+
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new TreeTransformer {
+ override def miniPhases = Array(new Transformation)
+
+ override def phaseName: String = "test"
+
+ }
+ val tr = transformer.macroTransform(tree).toString
+
+ Assert.assertTrue("node can rewrite children",
+ tr.contains("Literal(Constant(2))") && !tr.contains("Literal(Constant(-1))")
+ )
+ }
+
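+ // Phases earlier in miniPhases see a node first: Transformation1 rewrites the literal and the ValDef,
+ // then Transformation2 sees the ValDef already carrying Constant(2) and rewrites it to 3.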
+ @Test
+ def transformationOrder = checkCompile("frontend", "class A{ val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ class Transformation1 extends MiniPhaseTransform {
+ override def phaseName: String = "transformationOrder1"
+
+ override def transformLiteral(tree: tpd.Literal)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ Assert.assertTrue("correct constant",
+ tree.const.toString == "Constant(1)"
+ )
+ tpd.cpy.Literal(tree)(Constant(-1))
+ }
+
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo): tpd.ValDef = {
+ Assert.assertTrue("transformation of children succeeded",
+ tree.rhs.toString == "Literal(Constant(-1))"
+ )
+ tpd.cpy.ValDef(tree)(rhs = tpd.Literal(Constant(2)))
+ }
+
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ class Transformation2 extends MiniPhaseTransform {
+ override def phaseName: String = "transformationOrder2"
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo): tpd.ValDef = {
+ Assert.assertTrue("transformation of children succeeded",
+ tree.rhs.toString == "Literal(Constant(2))"
+ )
+ tpd.cpy.ValDef(tree)(rhs = tpd.Literal(Constant(3)))
+ }
+
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new TreeTransformer {
+ override def miniPhases = Array(new Transformation1, new Transformation2)
+
+ override def phaseName: String = "test"
+ }
+ val tr = transformer.macroTransform(tree).toString
+
+ Assert.assertTrue("node can rewrite children",
+ tr.contains("Literal(Constant(3))")
+ )
+ }
+
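+ // Counts hook invocations: Transformation1 should fire once for the literal and once for the ValDef (2 calls),
+ // while Transformation2 also sees the literal re-submitted through transformFollowing (3 calls).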
+ @Test
+ def invocationCount = checkCompile("frontend", "class A{ val d = 1}") {
+ (tree, context) =>
+ implicit val ctx = context
+ var transformed1 = 0
+ class Transformation1 extends MiniPhaseTransform {
+ override def phaseName: String = "invocationCount1"
+ override def transformLiteral(tree: tpd.Literal)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ transformed1 += 1
+ Assert.assertTrue("correct constant",
+ tree.const.toString == "Constant(1)"
+ )
+ tpd.cpy.Literal(tree)(Constant(-1))
+ }
+
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo) = {
+ transformed1 += 1
+ Assert.assertTrue("transformation of children succeeded",
+ tree.rhs.toString == "Literal(Constant(-3))"
+ )
+ tpd.cpy.ValDef(tree)(rhs = transformFollowing(tpd.Literal(Constant(2))))
+ }
+
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ var transformed2 = 0
+ class Transformation2 extends MiniPhaseTransform {
+ var constantsSeen = 0
+ override def phaseName: String = "invocationCount2"
+ override def transformLiteral(tree: tpd.Literal)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ transformed2 += 1
+ constantsSeen match {
+ case 0 =>
+ Assert.assertTrue("correct constant",
+ tree.const.toString == "Constant(-1)"
+ )
+ case 1 =>
+ Assert.assertTrue("correct constant",
+ tree.const.toString == "Constant(2)"
+ )
+ case _ => Assert.fail("to many constants seen")
+ }
+ constantsSeen += 1
+ tpd.cpy.Literal(tree)(Constant(-3))
+ }
+
+ override def transformValDef(tree: tpd.ValDef)(implicit ctx: Context, info: TransformerInfo) = {
+ transformed2 += 1
+ Assert.assertTrue("transformation of children succeeded",
+ tree.rhs.toString == "Literal(Constant(-3))"
+ )
+ transformFollowing(tpd.cpy.ValDef(tree)(rhs = tpd.Literal(Constant(3))))
+ }
+
+ init(ctx, ctx.period.firstPhaseId, ctx.period.lastPhaseId)
+ }
+ val transformer = new TreeTransformer {
+ override def miniPhases = Array(new Transformation1, new Transformation2)
+
+ override def phaseName: String = "test"
+ }
+ val tr = transformer.macroTransform(tree).toString
+ Assert.assertTrue("transformations aren't invoked multiple times",
+ transformed1 == 2 && transformed2 == 3
+ )
+ }
+}
diff --git a/compiler/test/dotty/tools/showClass.scala b/compiler/test/dotty/tools/showClass.scala
new file mode 100644
index 000000000..012f5f59e
--- /dev/null
+++ b/compiler/test/dotty/tools/showClass.scala
@@ -0,0 +1,17 @@
+package dotty.tools
+
+import dotc.core.Decorators._
+
+object showClass extends ShowClassTests {
+
+ def main(args: Array[String]) = {
+ for (arg <- args) showPackage(ctx.requiredPackage(arg))
+// showClasses("test.SyncOps")
+// showClasses("scala.concurrent.forkjoin.LinkedTransferQueue")
+// showPackage("scala.reflect")
+// showPackage("scala.collection")
+
+ showPackage("dotty", 1)
+ showPackage("scala", 2)
+ }
+}