aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFelix Mulder <felix.mulder@gmail.com>2017-03-27 14:11:23 +0200
committerFelix Mulder <felix.mulder@gmail.com>2017-03-29 10:33:26 +0200
commitf7e3b7002d1eefbeaac3970be4ac729843d6a939 (patch)
tree30858a140e475153c9b6e18a10a5e5159ba615be
parent1ab7c038e7daed1f05aafa000a284d76ddacb381 (diff)
downloaddotty-f7e3b7002d1eefbeaac3970be4ac729843d6a939.tar.gz
dotty-f7e3b7002d1eefbeaac3970be4ac729843d6a939.tar.bz2
dotty-f7e3b7002d1eefbeaac3970be4ac729843d6a939.zip
Restore partest and enable it to run alongside new partest
-rw-r--r--.drone.yml4
-rw-r--r--.drone.yml.sig2
-rw-r--r--compiler/test/dotc/tests.scala374
-rw-r--r--compiler/test/dotty/partest/DPConfig.scala40
-rw-r--r--compiler/test/dotty/partest/DPConsoleRunner.scala411
-rw-r--r--compiler/test/dotty/partest/DPDirectCompiler.scala36
-rw-r--r--compiler/test/dotty/tools/dotc/CompilationTests.scala4
-rw-r--r--compiler/test/dotty/tools/dotc/CompilerTest.scala613
-rw-r--r--project/Build.scala87
9 files changed, 1568 insertions, 3 deletions
diff --git a/.drone.yml b/.drone.yml
index 6d49aace5..eb36e65d8 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -36,3 +36,7 @@ matrix:
TEST:
- ;test;dotty-bin-tests/test
- ;publishLocal;dotty-bootstrapped/test
+ - partest-only-no-bootstrap --show-diff --verbose
+ - partest-only --show-diff --verbose
+ - ;set testOptions in LocalProject("dotty-compiler") := Seq() ;dotty-compiler/testOnly dotty.tools.dotc.CompilationTests
+ - ;publishLocal ;set testOptions in LocalProject("dotty-compiler-bootstrapped") := Seq() ;dotty-bootstrapped/testOnly dotty.tools.dotc.CompilationTests
diff --git a/.drone.yml.sig b/.drone.yml.sig
index e6c378c5b..b823296fa 100644
--- a/.drone.yml.sig
+++ b/.drone.yml.sig
@@ -1 +1 @@
-eyJhbGciOiJIUzI1NiJ9.cGlwZWxpbmU6CiAgdGVzdDoKICAgIGltYWdlOiBsYW1wZXBmbC9kb3R0eTpsYXRlc3QKICAgIHB1bGw6IHRydWUKICAgIGNvbW1hbmRzOgogICAgICAtIGxuIC1zIC92YXIvY2FjaGUvZHJvbmUvc2NhbGEtc2NhbGEgc2NhbGEtc2NhbGEKICAgICAgLSBsbiAtcyAvdmFyL2NhY2hlL2Ryb25lL2l2eTIgIiRIT01FLy5pdnkyIgogICAgICAtIC4vc2NyaXB0cy91cGRhdGUtc2NhbGEtbGlicmFyeQogICAgICAtIHNidCAtSi1YbXg0MDk2bSAtSi1YWDpSZXNlcnZlZENvZGVDYWNoZVNpemU9NTEybSAtSi1YWDpNYXhNZXRhc3BhY2VTaXplPTEwMjRtIC1EZG90dHkuZHJvbmUubWVtPTQwOTZtICIke1RFU1R9IgogICAgd2hlbjoKICAgICAgYnJhbmNoOgogICAgICAgIGV4Y2x1ZGU6IGdoLXBhZ2VzCgogIGRvY3VtZW50YXRpb246CiAgICBpbWFnZTogbGFtcGVwZmwvZG90dHk6bGF0ZXN0CiAgICBwdWxsOiB0cnVlCiAgICBjb21tYW5kczoKICAgICAgLSAuL3Byb2plY3Qvc2NyaXB0cy9nZW5Eb2NzICIke1RFU1R9IiAkQk9UX1BBU1MKICAgIHdoZW46CiAgICAgIGJyYW5jaDogbWFzdGVyCgogIGdpdHRlcjoKICAgIGltYWdlOiBwbHVnaW5zL2dpdHRlcgogICAgd2hlbjoKICAgICAgYnJhbmNoOiBtYXN0ZXIKICAgICAgc3RhdHVzOiBjaGFuZ2VkCgogIHNsYWNrOgogICAgaW1hZ2U6IHBsdWdpbnMvc2xhY2sKICAgIGNoYW5uZWw6IGRvdHR5CiAgICB3aGVuOgogICAgICBicmFuY2g6IG1hc3RlcgogICAgICBzdGF0dXM6IGNoYW5nZWQKCm1hdHJpeDoKICBURVNUOgogICAgLSA7dGVzdDtkb3R0eS1iaW4tdGVzdHMvdGVzdAogICAgLSA7cHVibGlzaExvY2FsO2RvdHR5LWJvb3RzdHJhcHBlZC90ZXN0Cg.jh9DiIPPWc33AN8J2-GRV-PThSGGTFis5HmG9AgOTV8 \ No newline at end of file
+eyJhbGciOiJIUzI1NiJ9.cGlwZWxpbmU6CiAgdGVzdDoKICAgIGltYWdlOiBsYW1wZXBmbC9kb3R0eTpsYXRlc3QKICAgIHB1bGw6IHRydWUKICAgIGNvbW1hbmRzOgogICAgICAtIGxuIC1zIC92YXIvY2FjaGUvZHJvbmUvc2NhbGEtc2NhbGEgc2NhbGEtc2NhbGEKICAgICAgLSBsbiAtcyAvdmFyL2NhY2hlL2Ryb25lL2l2eTIgIiRIT01FLy5pdnkyIgogICAgICAtIC4vc2NyaXB0cy91cGRhdGUtc2NhbGEtbGlicmFyeQogICAgICAtIHNidCAtSi1YbXg0MDk2bSAtSi1YWDpSZXNlcnZlZENvZGVDYWNoZVNpemU9NTEybSAtSi1YWDpNYXhNZXRhc3BhY2VTaXplPTEwMjRtIC1EZG90dHkuZHJvbmUubWVtPTQwOTZtICIke1RFU1R9IgogICAgd2hlbjoKICAgICAgYnJhbmNoOgogICAgICAgIGV4Y2x1ZGU6IGdoLXBhZ2VzCgogIGRvY3VtZW50YXRpb246CiAgICBpbWFnZTogbGFtcGVwZmwvZG90dHk6bGF0ZXN0CiAgICBwdWxsOiB0cnVlCiAgICBjb21tYW5kczoKICAgICAgLSAuL3Byb2plY3Qvc2NyaXB0cy9nZW5Eb2NzICIke1RFU1R9IiAkQk9UX1BBU1MKICAgIHdoZW46CiAgICAgIGJyYW5jaDogbWFzdGVyCgogIGdpdHRlcjoKICAgIGltYWdlOiBwbHVnaW5zL2dpdHRlcgogICAgd2hlbjoKICAgICAgYnJhbmNoOiBtYXN0ZXIKICAgICAgc3RhdHVzOiBjaGFuZ2VkCgogIHNsYWNrOgogICAgaW1hZ2U6IHBsdWdpbnMvc2xhY2sKICAgIGNoYW5uZWw6IGRvdHR5CiAgICB3aGVuOgogICAgICBicmFuY2g6IG1hc3RlcgogICAgICBzdGF0dXM6IGNoYW5nZWQKCm1hdHJpeDoKICBURVNUOgogICAgLSA7dGVzdDtkb3R0eS1iaW4tdGVzdHMvdGVzdAogICAgLSA7cHVibGlzaExvY2FsO2RvdHR5LWJvb3RzdHJhcHBlZC90ZXN0CiAgICAtIHBhcnRlc3Qtb25seS1uby1ib290c3RyYXAgLS1zaG93LWRpZmYgLS12ZXJib3NlCiAgICAtIHBhcnRlc3Qtb25seSAtLXNob3ctZGlmZiAtLXZlcmJvc2UKICAgIC0gO3NldCB0ZXN0T3B0aW9ucyBpbiBMb2NhbFByb2plY3QoImRvdHR5LWNvbXBpbGVyIikgOj0gU2VxKCkgO2RvdHR5LWNvbXBpbGVyL3Rlc3RPbmx5IGRvdHR5LnRvb2xzLmRvdGMuQ29tcGlsYXRpb25UZXN0cwogICAgLSA7cHVibGlzaExvY2FsIDtzZXQgdGVzdE9wdGlvbnMgaW4gTG9jYWxQcm9qZWN0KCJkb3R0eS1jb21waWxlci1ib290c3RyYXBwZWQiKSA6PSBTZXEoKSA7ZG90dHktYm9vdHN0cmFwcGVkL3Rlc3RPbmx5IGRvdHR5LnRvb2xzLmRvdGMuQ29tcGlsYXRpb25UZXN0cwo.qsDrUBsZtyXeEeRXf9CnC0Rh5FF0lZpKCgf2iZvPckE \ No newline at end of file
diff --git a/compiler/test/dotc/tests.scala b/compiler/test/dotc/tests.scala
new file mode 100644
index 000000000..1c80767ee
--- /dev/null
+++ b/compiler/test/dotc/tests.scala
@@ -0,0 +1,374 @@
+package dotc
+
+import dotty.Jars
+import dotty.tools.dotc.CompilerTest
+import dotty.tools.StdLibSources
+import org.junit.{Before, Test}
+import org.junit.Assert._
+
+import java.io.{ File => JFile }
+import scala.reflect.io.Directory
+import scala.io.Source
+
+// tests that match regex '(pos|dotc|run|java|compileStdLib)\.*' would be executed as benchmarks.
+class tests extends CompilerTest {
+
+ def isRunByJenkins: Boolean = sys.props.isDefinedAt("dotty.jenkins.build")
+
+ val defaultOutputDir = "../out/"
+
+ val noCheckOptions = List(
+// "-verbose",
+// "-Ylog:frontend",
+// "-Xprompt",
+// "-explaintypes",
+// "-Yshow-suppressed-errors",
+ "-pagewidth", "120",
+ "-d", defaultOutputDir
+ )
+
+ val checkOptions = List(
+ "-Yno-deep-subtypes",
+ "-Yno-double-bindings",
+ "-Yforce-sbt-phases",
+ "-color:never"
+ )
+
+ val classPath = {
+ val paths = Jars.dottyTestDeps map { p =>
+ val file = new JFile(p)
+ assert(
+ file.exists,
+ s"""|File "$p" couldn't be found. Run `packageAll` from build tool before
+ |testing.
+ |
+ |If running without sbt, test paths need to be setup environment variables:
+ |
+ | - DOTTY_LIBRARY
+ | - DOTTY_COMPILER
+ | - DOTTY_INTERFACES
+ | - DOTTY_EXTRAS
+ |
+ |Where these all contain locations, except extras which is a colon
+ |separated list of jars.
+ |
+ |When compiling with eclipse, you need the sbt-interfaces jar, put
+ |it in extras."""
+ )
+ file.getAbsolutePath
+ } mkString (":")
+
+ List("-classpath", paths)
+ }
+
+ implicit val defaultOptions: List[String] = noCheckOptions ++ {
+ if (isRunByJenkins) List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef") // should be Ycheck:all, but #725
+ else List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef")
+ } ++ checkOptions ++ classPath
+
+ val testPickling = List("-Xprint-types", "-Ytest-pickler", "-Ystop-after:pickler", "-Yprintpos")
+
+ val twice = List("#runs", "2")
+ val staleSymbolError: List[String] = List()
+
+ val allowDeepSubtypes = defaultOptions diff List("-Yno-deep-subtypes")
+ val allowDoubleBindings = defaultOptions diff List("-Yno-double-bindings")
+ val scala2mode = List("-language:Scala2")
+
+ val explicitUTF8 = List("-encoding", "UTF8")
+ val explicitUTF16 = List("-encoding", "UTF16")
+
+ val testsDir = "../tests/"
+ val posDir = testsDir + "pos/"
+ val posSpecialDir = testsDir + "pos-special/"
+ val posScala2Dir = testsDir + "pos-scala2/"
+ val negDir = testsDir + "neg/"
+ val runDir = testsDir + "run/"
+ val newDir = testsDir + "new/"
+ val javaDir = testsDir + "pos-java-interop/"
+
+ val sourceDir = "./src/"
+ val dottyDir = sourceDir + "dotty/"
+ val toolsDir = dottyDir + "tools/"
+ val backendDir = toolsDir + "backend/"
+ val dotcDir = toolsDir + "dotc/"
+ val coreDir = dotcDir + "core/"
+ val parsingDir = dotcDir + "parsing/"
+ val dottyReplDir = dotcDir + "repl/"
+ val typerDir = dotcDir + "typer/"
+ val libDir = "../library/src/"
+
+ def dottyBootedLib = compileDir(libDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ::: defaultOptions)(allowDeepSubtypes) // note the -deep argument
+ def dottyDependsOnBootedLib = compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ::: defaultOptions)(allowDeepSubtypes) // note the -deep argument
+
+ @Before def cleanup(): Unit = {
+ // remove class files from stdlib and tests compilation
+ Directory(defaultOutputDir + "scala").deleteRecursively()
+ Directory(defaultOutputDir + "java").deleteRecursively()
+ }
+
+ @Test def pickle_pickleOK = compileFiles(testsDir + "pickling/", testPickling)
+// This directory doesn't exist anymore
+// @Test def pickle_pickling = compileDir(coreDir, "pickling", testPickling)
+ @Test def pickle_ast = compileDir(dotcDir, "ast", testPickling)
+ @Test def pickle_inf = compileFile(posDir, "pickleinf", testPickling)
+
+ //@Test def pickle_core = compileDir(dotcDir, "core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps
+
+ @Test def pos_arraycopy =
+ compileFile(runDir, "arraycopy", List("-Ylog-classpath"))
+ @Test def pos_t2168_pat = compileFile(posDir, "t2168", twice)
+ @Test def pos_erasure = compileFile(posDir, "erasure", twice)
+ @Test def pos_Coder() = compileFile(posDir, "Coder", twice)
+ @Test def pos_blockescapes() = compileFile(posDir, "blockescapes", twice)
+ @Test def pos_collections() = compileFile(posDir, "collections", twice)
+ @Test def pos_functions1() = compileFile(posDir, "functions1", twice)
+ @Test def pos_implicits1() = compileFile(posDir, "implicits1", twice)
+ @Test def pos_inferred() = compileFile(posDir, "inferred", twice)
+ @Test def pos_Patterns() = compileFile(posDir, "Patterns", twice)
+ @Test def pos_selftypes() = compileFile(posDir, "selftypes", twice)
+ @Test def pos_varargs() = compileFile(posDir, "varargs", twice)
+ @Test def pos_vararg_patterns() = compileFile(posDir, "vararg-pattern", twice)
+ @Test def pos_opassign() = compileFile(posDir, "opassign", twice)
+ @Test def pos_typedapply() = compileFile(posDir, "typedapply", twice)
+ @Test def pos_nameddefaults() = compileFile(posDir, "nameddefaults", twice)
+ @Test def pos_desugar() = compileFile(posDir, "desugar", twice)
+ @Test def pos_sigs() = compileFile(posDir, "sigs", twice)
+ @Test def pos_typers() = compileFile(posDir, "typers", twice)
+ @Test def pos_typedIdents() = compileDir(posDir, "typedIdents", twice)
+ @Test def pos_assignments() = compileFile(posDir, "assignments", twice)
+ @Test def pos_packageobject() = compileFile(posDir, "packageobject", twice)
+ @Test def pos_overloaded() = compileFile(posDir, "overloaded", twice)
+ @Test def pos_overrides() = compileFile(posDir, "overrides", twice)
+ @Test def pos_javaOverride() = compileDir(posDir, "java-override", twice)
+ @Test def pos_templateParents() = compileFile(posDir, "templateParents", twice)
+ @Test def pos_overloadedAccess = compileFile(posDir, "overloadedAccess", twice)
+ @Test def pos_approximateUnion = compileFile(posDir, "approximateUnion", twice)
+ @Test def pos_tailcall = compileDir(posDir, "tailcall", twice)
+ @Test def pos_valueclasses = compileFiles(posDir + "pos_valueclasses/", twice)
+ @Test def pos_nullarify = compileFile(posDir, "nullarify", args = "-Ycheck:nullarify" :: Nil)
+ @Test def pos_subtyping = compileFile(posDir, "subtyping", twice)
+ @Test def pos_packageObj = compileFile(posDir, "i0239", twice)
+ @Test def pos_anonClassSubtyping = compileFile(posDir, "anonClassSubtyping", twice)
+ @Test def pos_extmethods = compileFile(posDir, "extmethods", twice)
+ @Test def pos_companions = compileFile(posDir, "companions", twice)
+ @Test def posVarargsT1625 = compileFiles(posDir + "varargsInMethodsT1625/")
+
+ @Test def pos_all = compileFiles(posDir) // twice omitted to make tests run faster
+
+ @Test def pos_scala2_all = compileFiles(posScala2Dir, scala2mode)
+
+ @Test def rewrites = compileFile(posScala2Dir, "rewrites", "-rewrite" :: scala2mode)
+
+ @Test def pos_t8146a = compileFile(posSpecialDir, "t8146a")(allowDeepSubtypes)
+
+ @Test def pos_t5545 = {
+ // compile by hand in two batches, since junit lacks the infrastructure to
+ // compile files in multiple batches according to _1, _2, ... suffixes.
+ compileFile(posSpecialDir, "spec-t5545/S_1")
+ compileFile(posSpecialDir, "spec-t5545/S_2")
+ }
+ @Test def pos_utf8 = compileFile(posSpecialDir, "utf8encoded", explicitUTF8)
+ @Test def pos_utf16 = compileFile(posSpecialDir, "utf16encoded", explicitUTF16)
+
+ @Test def new_all = compileFiles(newDir, twice)
+
+ @Test def neg_all = compileFiles(negDir, verbose = true, compileSubDirs = false)
+ @Test def neg_typedIdents() = compileDir(negDir, "typedIdents")
+
+ @Test def negVarargsT1625 = compileFiles(negDir + "varargsInMethodsT1625/")
+
+ val negCustomArgs = negDir + "customArgs/"
+
+ @Test def neg_typers() = compileFile(negCustomArgs, "typers")(allowDoubleBindings)
+ @Test def neg_overrideClass = compileFile(negCustomArgs, "overrideClass", scala2mode)
+ @Test def neg_autoTupling = compileFile(negCustomArgs, "autoTuplingTest", args = "-language:noAutoTupling" :: Nil)
+ @Test def neg_i1050 = compileFile(negCustomArgs, "i1050", List("-strict"))
+ @Test def neg_i1240 = compileFile(negCustomArgs, "i1240")(allowDoubleBindings)
+ @Test def neg_i2002 = compileFile(negCustomArgs, "i2002")(allowDoubleBindings)
+
+ val negTailcallDir = negDir + "tailcall/"
+ @Test def neg_tailcall_t1672b = compileFile(negTailcallDir, "t1672b")
+ @Test def neg_tailcall_t3275 = compileFile(negTailcallDir, "t3275")
+ @Test def neg_tailcall_t6574 = compileFile(negTailcallDir, "t6574")
+ @Test def neg_tailcall = compileFile(negTailcallDir, "tailrec")
+ @Test def neg_tailcall2 = compileFile(negTailcallDir, "tailrec-2")
+ @Test def neg_tailcall3 = compileFile(negTailcallDir, "tailrec-3")
+
+ @Test def neg_nopredef = compileFile(negCustomArgs, "nopredef", List("-Yno-predef"))
+ @Test def neg_noimports = compileFile(negCustomArgs, "noimports", List("-Yno-imports"))
+ @Test def neg_noimpots2 = compileFile(negCustomArgs, "noimports2", List("-Yno-imports"))
+
+ @Test def run_all = runFiles(runDir)
+
+ private val stdlibFiles: List[String] = StdLibSources.whitelisted
+
+ @Test def compileStdLib = compileList("compileStdLib", stdlibFiles, "-migration" :: "-Yno-inline" :: scala2mode)
+ @Test def compileMixed = compileLine(
+ """../tests/pos/B.scala
+ |../scala-scala/src/library/scala/collection/immutable/Seq.scala
+ |../scala-scala/src/library/scala/collection/parallel/ParSeq.scala
+ |../scala-scala/src/library/scala/package.scala
+ |../scala-scala/src/library/scala/collection/GenSeqLike.scala
+ |../scala-scala/src/library/scala/collection/SeqLike.scala
+ |../scala-scala/src/library/scala/collection/generic/GenSeqFactory.scala""".stripMargin)
+ @Test def compileIndexedSeq = compileLine("../scala-scala/src/library/scala/collection/immutable/IndexedSeq.scala")
+ @Test def compileParSetLike = compileLine("../scala-scala/src/library/scala/collection/parallel/mutable/ParSetLike.scala")
+ @Test def compileParSetSubset = compileLine(
+ """../scala-scala/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+ |../scala-scala/src/library/scala/collection/parallel/mutable/ParSet.scala
+ |../scala-scala/src/library/scala/collection/mutable/SetLike.scala""".stripMargin)(scala2mode ++ defaultOptions)
+
+ @Test def dotty = {
+ dottyBootedLib
+ dottyDependsOnBootedLib
+ }
+
+ @Test def dotc_ast = compileDir(dotcDir, "ast")
+ @Test def dotc_config = compileDir(dotcDir, "config")
+ @Test def dotc_core = compileDir(dotcDir, "core")(allowDeepSubtypes)// twice omitted to make tests run faster
+ @Test def dotc_core_nocheck = compileDir(dotcDir, "core")(noCheckOptions ++ classPath)
+
+// This directory doesn't exist anymore
+// @Test def dotc_core_pickling = compileDir(coreDir, "pickling")(allowDeepSubtypes)// twice omitted to make tests run faster
+
+ @Test def dotc_transform = compileDir(dotcDir, "transform")(allowDeepSubtypes)// twice omitted to make tests run faster
+
+ @Test def dotc_parsing = compileDir(dotcDir, "parsing") // twice omitted to make tests run faster
+
+ @Test def dotc_printing = compileDir(dotcDir, "printing") // twice omitted to make tests run faster
+
+ @Test def dotc_reporting = compileDir(dotcDir, "reporting") // twice omitted to make tests run faster
+
+ @Test def dotc_typer = compileDir(dotcDir, "typer")// twice omitted to make tests run faster
+ // error: error while loading Checking$$anon$2$,
+ // class file 'target/scala-2.11/dotty_2.11-0.1.1-SNAPSHOT.jar(dotty/tools/dotc/typer/Checking$$anon$2.class)'
+ // has location not matching its contents: contains class $anon
+
+ @Test def dotc_util = compileDir(dotcDir, "util") // twice omitted to make tests run faster
+
+ @Test def tools_io = compileDir(toolsDir, "io") // inner class has symbol <none>
+
+ @Test def helloWorld = compileFile(posDir, "HelloWorld")
+ @Test def labels = compileFile(posDir, "Labels", twice)
+ //@Test def tools = compileDir(dottyDir, "tools", "-deep" :: Nil)(allowDeepSubtypes)
+
+ @Test def testNonCyclic = compileList("testNonCyclic", List(
+ dotcDir + "CompilationUnit.scala",
+ coreDir + "Types.scala",
+ dotcDir + "ast/Trees.scala"
+ ), List("-Xprompt") ++ staleSymbolError ++ twice)
+
+ @Test def testIssue_34 = compileList("testIssue_34", List(
+ dotcDir + "config/Properties.scala",
+ dotcDir + "config/PathResolver.scala"
+ ), List(/* "-Ylog:frontend", */ "-Xprompt") ++ staleSymbolError ++ twice)
+
+ @Test def java_all = compileFiles(javaDir, twice)
+ //@Test def dotc_compilercommand = compileFile(dotcDir + "config/", "CompilerCommand")
+
+ //TASTY tests
+ @Test def tasty_new_all = compileFiles(newDir, testPickling)
+
+ @Test def tasty_dotty = compileDir(sourceDir, "dotty", testPickling)
+
+ // Disabled because we get stale symbol errors on the SourceFile annotation, which is normal.
+ // @Test def tasty_annotation_internal = compileDir(s"${dottyDir}annotation/", "internal", testPickling)
+
+ @Test def tasty_runtime = compileDir(s"${libDir}dotty/", "runtime", testPickling)
+ @Test def tasty_runtime_vc = compileDir(s"${libDir}dotty/runtime/", "vc", testPickling)
+
+ @Test def tasty_tools = compileDir(dottyDir, "tools", testPickling)
+
+ //TODO: issue with ./src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+ @Test def tasty_backend_jvm = compileList("tasty_backend_jvm", List(
+ "CollectEntryPoints.scala", "GenBCode.scala", "LabelDefs.scala",
+ "scalaPrimitives.scala"
+ ) map (s"${backendDir}jvm/" + _), testPickling)
+
+ //@Test def tasty_backend_sjs = compileDir(s"${backendDir}", "sjs", testPickling)
+
+ @Test def tasty_dotc = compileDir(toolsDir, "dotc", testPickling)
+ @Test def tasty_dotc_ast = compileDir(dotcDir, "ast", testPickling)
+ @Test def tasty_dotc_config = compileDir(dotcDir, "config", testPickling)
+
+ //TODO: issue with ./src/dotty/tools/dotc/core/Types.scala
+ @Test def tasty_core = compileList("tasty_core", List(
+ "Annotations.scala", "Constants.scala", "Constraint.scala", "ConstraintHandling.scala",
+ "ConstraintRunInfo.scala", "Contexts.scala", "Decorators.scala", "Definitions.scala",
+ "DenotTransformers.scala", "Denotations.scala", "Flags.scala", "Hashable.scala",
+ "NameOps.scala", "Names.scala", "OrderingConstraint.scala", "Periods.scala",
+ "Phases.scala", "Scopes.scala", "Signature.scala", "StdNames.scala",
+ "Substituters.scala", "SymDenotations.scala", "SymbolLoaders.scala", "Symbols.scala",
+ "TypeApplications.scala", "TypeComparer.scala", "TypeErasure.scala", "TypeOps.scala",
+ "TyperState.scala", "Uniques.scala"
+ ) map (coreDir + _), testPickling)
+
+ @Test def tasty_classfile = compileDir(coreDir, "classfile", testPickling)
+ @Test def tasty_tasty = compileDir(coreDir, "tasty", testPickling)
+ @Test def tasty_unpickleScala2 = compileDir(coreDir, "unpickleScala2", testPickling)
+
+ //TODO: issue with ./src/dotty/tools/dotc/parsing/Parsers.scala
+ @Test def tasty_dotc_parsing = compileList("tasty_dotc_parsing", List(
+ "CharArrayReader.scala", "JavaParsers.scala", "JavaScanners.scala", "JavaTokens.scala",
+ "MarkupParserCommon.scala", "MarkupParsers.scala", "package.scala" ,"Scanners.scala",
+ "ScriptParsers.scala", "SymbolicXMLBuilder.scala", "Tokens.scala", "Utility.scala"
+ ) map (parsingDir + _), testPickling)
+
+ @Test def tasty_dotc_printing = compileDir(dotcDir, "printing", testPickling)
+
+ @Test def tasty_dotc_repl = compileDir(dotcDir, "repl", testPickling)
+
+ //@Test def tasty_dotc_reporting = compileDir(dotcDir, "reporting", testPickling)
+ @Test def tasty_dotc_rewrite = compileDir(dotcDir, "rewrite", testPickling)
+
+ //TODO: issues with LazyVals.scala, PatternMatcher.scala
+ @Test def tasty_dotc_transform = compileList("tasty_dotc_transform", List(
+ "AugmentScala2Traits.scala", "CapturedVars.scala", "CheckReentrant.scala", "CheckStatic.scala",
+ "ClassOf.scala", "CollectEntryPoints.scala", "Constructors.scala", "CrossCastAnd.scala",
+ "CtxLazy.scala", "ElimByName.scala", "ElimErasedValueType.scala", "ElimRepeated.scala",
+ "ElimStaticThis.scala", "Erasure.scala", "ExpandPrivate.scala", "ExpandSAMs.scala",
+ "ExplicitOuter.scala", "ExtensionMethods.scala", "FirstTransform.scala",
+ "Flatten.scala", "FullParameterization.scala", "FunctionalInterfaces.scala", "GetClass.scala",
+ "Getters.scala", "InterceptedMethods.scala", "LambdaLift.scala", "LiftTry.scala", "LinkScala2ImplClasses.scala",
+ "MacroTransform.scala", "Memoize.scala", "Mixin.scala", "MixinOps.scala", "NonLocalReturns.scala",
+ "NormalizeFlags.scala", "OverridingPairs.scala", "ParamForwarding.scala", "Pickler.scala", "PostTyper.scala",
+ "ResolveSuper.scala", "RestoreScopes.scala", "SeqLiterals.scala", "Splitter.scala", "SuperAccessors.scala",
+ "SymUtils.scala", "SyntheticMethods.scala", "TailRec.scala", "TreeChecker.scala", "TreeExtractors.scala",
+ "TreeGen.scala", "TreeTransform.scala", "TypeTestsCasts.scala", "TypeUtils.scala", "ValueClasses.scala",
+ "VCElideAllocations.scala", "VCInlineMethods.scala"
+ ) map (s"${dotcDir}transform/" + _), testPickling)
+
+ //TODO: issue with ./src/dotty/tools/dotc/typer/Namer.scala
+ @Test def tasty_typer = compileList("tasty_typer", List(
+ "Applications.scala", "Checking.scala", "ConstFold.scala", "ErrorReporting.scala",
+ "EtaExpansion.scala", "FrontEnd.scala", "Implicits.scala", "ImportInfo.scala",
+ "Inferencing.scala", "ProtoTypes.scala", "ReTyper.scala", "RefChecks.scala",
+ "TypeAssigner.scala", "Typer.scala", "VarianceChecker.scala", "Variances.scala"
+ ) map (typerDir + _), testPickling)
+
+ @Test def tasty_dotc_util = compileDir(dotcDir, "util", testPickling)
+ @Test def tasty_tools_io = compileDir(toolsDir, "io", testPickling)
+
+ @Test def tasty_bootstrap = {
+ val logging = if (false) List("-Ylog-classpath", "-verbose") else Nil
+ val opt = List("-priorityclasspath", defaultOutputDir) ++ logging
+ // first compile dotty
+ compileDir(dottyDir, ".", List("-deep", "-Ycheck-reentrant", "-strict") ++ logging)(allowDeepSubtypes)
+
+ compileDir(libDir, "dotty", "-deep" :: opt)
+ compileDir(libDir, "scala", "-deep" :: opt)
+ compileDir(dottyDir, "tools", opt)
+ compileDir(toolsDir, "dotc", opt)
+ compileDir(dotcDir, "ast", opt)
+ compileDir(dotcDir, "config", opt)
+ compileDir(dotcDir, "parsing", opt)
+ compileDir(dotcDir, "printing", opt)
+ compileDir(dotcDir, "repl", opt)
+ compileDir(dotcDir, "reporting", opt)
+ compileDir(dotcDir, "rewrite", opt)
+ compileDir(dotcDir, "transform", opt)
+ compileDir(dotcDir, "typer", opt)
+ compileDir(dotcDir, "util", opt)
+ }
+}
diff --git a/compiler/test/dotty/partest/DPConfig.scala b/compiler/test/dotty/partest/DPConfig.scala
new file mode 100644
index 000000000..5c493f465
--- /dev/null
+++ b/compiler/test/dotty/partest/DPConfig.scala
@@ -0,0 +1,40 @@
+package dotty.partest
+
+import scala.collection.JavaConversions._
+import scala.reflect.io.Path
+import java.io.File
+
+import scala.tools.partest.PartestDefaults
+
+
+/** Dotty Partest runs all tests in the provided testDirs located under
+ * testRoot. There can be several directories with pos resp. neg tests, as
+ * long as the prefix is pos/neg.
+ *
+ * Each testDir can also have a __defaultFlags.flags file, which provides
+ * compiler flags and is used unless there's a specific flags file (e.g. for
+ * test pos/A.scala, if there's a pos/A.flags file those flags are used,
+ * otherwise pos/__defaultFlags.flags are used if the file exists).
+ */
+object DPConfig {
+ /** Options used for _running_ the run tests.
+ * Note that this is different from the options used when _compiling_ tests,
+ * those are determined by the sbt configuration.
+ */
+ val runJVMOpts = s"-Xms64M -Xmx1024M ${PartestDefaults.javaOpts}"
+
+ val testRoot = (Path("..") / Path("tests") / Path("partest-generated")).toString
+ val genLog = Path(testRoot) / Path("gen.log")
+
+ lazy val testDirs = {
+ val root = new File(testRoot)
+ val dirs = if (!root.exists) Array.empty[String] else root.listFiles.filter(_.isDirectory).map(_.getName)
+ if (dirs.isEmpty)
+ throw new Exception("Partest did not detect any generated sources")
+ dirs
+ }
+
+ // Tests finish faster when running in parallel, but console output is
+ // out of order and sometimes the compiler crashes
+ val runTestsInParallel = true
+}
diff --git a/compiler/test/dotty/partest/DPConsoleRunner.scala b/compiler/test/dotty/partest/DPConsoleRunner.scala
new file mode 100644
index 000000000..3362d7a59
--- /dev/null
+++ b/compiler/test/dotty/partest/DPConsoleRunner.scala
@@ -0,0 +1,411 @@
+/* NOTE: Adapted from ScalaJSPartest.scala in
+ * https://github.com/scala-js/scala-js/
+ * TODO make partest configurable */
+
+package dotty.partest
+
+import dotty.tools.FatalError
+import scala.reflect.io.AbstractFile
+import scala.tools.partest._
+import scala.tools.partest.nest._
+import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
+import ClassPath.{ join, split }
+import FileManager.{ compareFiles, compareContents, joinPaths, withTempFile }
+import scala.util.matching.Regex
+import tools.nsc.io.{ File => NSCFile }
+import java.io.{ File, PrintStream, FileOutputStream, PrintWriter, FileWriter }
+import java.net.URLClassLoader
+
+/** Runs dotty partest from the Console, discovering test sources in
+ * DPConfig.testRoot that have been generated automatically by
+ * DPPrepJUnitRunner. Use `sbt partest` to run. If additional jars are
+ * required by some run tests, add them to partestDeps in the sbt Build.scala.
+ */
+object DPConsoleRunner {
+ def main(args: Array[String]): Unit = {
+ // unfortunately sbt runTask passes args as single string
+ // extra jars for run tests are passed with -dottyJars <count> <jar1> <jar2> ...
+ val jarFinder = """-dottyJars (\d*) (.*)""".r
+ val (jarList, otherArgs) = args.toList.partition(jarFinder.findFirstIn(_).isDefined)
+ val (extraJars, moreArgs) = jarList match {
+ case Nil => sys.error("Error: DPConsoleRunner needs \"-dottyJars <jarCount> <jars>*\".")
+ case jarFinder(nr, jarString) :: Nil =>
+ val jars = jarString.split(" ").toList
+ val count = nr.toInt
+ if (jars.length < count)
+ sys.error("Error: DPConsoleRunner found wrong number of dottyJars: " + jars + ", expected: " + nr)
+ else (jars.take(count), jars.drop(count))
+ case list => sys.error("Error: DPConsoleRunner found several -dottyJars options: " + list)
+ }
+ new DPConsoleRunner((otherArgs ::: moreArgs) mkString (" "), extraJars).runPartest
+ }
+}
+
+// console runner has a suite runner which creates a test runner for each test
+class DPConsoleRunner(args: String, extraJars: List[String]) extends ConsoleRunner(args) {
+ override val suiteRunner = new DPSuiteRunner (
+ testSourcePath = optSourcePath getOrElse DPConfig.testRoot,
+ fileManager = new DottyFileManager(extraJars),
+ updateCheck = optUpdateCheck,
+ failed = optFailed,
+ consoleArgs = args)
+
+ override def run = {}
+ def runPartest = super.run
+}
+
+class DottyFileManager(extraJars: List[String]) extends FileManager(Nil) {
+ lazy val extraJarList = extraJars.map(NSCFile(_))
+ override lazy val libraryUnderTest = Path(extraJars.find(_.contains("scala-library")).getOrElse(""))
+ override lazy val reflectUnderTest = Path(extraJars.find(_.contains("scala-reflect")).getOrElse(""))
+ override lazy val compilerUnderTest = Path(extraJars.find(_.contains("dotty")).getOrElse(""))
+}
+
+class DPSuiteRunner(testSourcePath: String, // relative path, like "files", or "pending"
+ fileManager: DottyFileManager,
+ updateCheck: Boolean,
+ failed: Boolean,
+ consoleArgs: String,
+ javaCmdPath: String = PartestDefaults.javaCmd,
+ javacCmdPath: String = PartestDefaults.javacCmd,
+ scalacExtraArgs: Seq[String] = Seq.empty,
+ javaOpts: String = DPConfig.runJVMOpts)
+extends SuiteRunner(testSourcePath, fileManager, updateCheck, failed, javaCmdPath, javacCmdPath, scalacExtraArgs, javaOpts) {
+
+ if (!DPConfig.runTestsInParallel)
+ sys.props("partest.threads") = "1"
+
+ sys.props("partest.root") = "."
+
+ // override to provide Dotty banner
+ override def banner: String = {
+ s"""|Welcome to Partest for Dotty! Partest version: ${Properties.versionNumberString}
+ |Compiler under test: dotty.tools.dotc.Bench or dotty.tools.dotc.Main
+ |Generated test sources: ${PathSettings.srcDir}${File.separator}
+ |Test directories: ${DPConfig.testDirs.toList.mkString(", ")}
+ |Debugging: failed tests have compiler output in test-kind.clog, run output in test-kind.log, class files in test-kind.obj
+ |Parallel: ${DPConfig.runTestsInParallel}
+ |Options: (use partest --help for usage information) ${consoleArgs}
+ """.stripMargin
+ }
+
+ /** Some tests require a limitation of resources, tests which are compiled
+ * with one or more of the flags in this list will be run with
+ * `limitedThreads`. This is necessary because some test flags require a lot
+ * of memory when running the compiler and may exhaust the available memory
+ * when run in parallel with too many other tests.
+ *
+ * This number could be increased on the CI, but might fail locally if
+ * scaled too extreme - override with:
+ *
+ * ```
+ * -Ddotty.tests.limitedThreads=X
+ * ```
+ */
+ def limitResourceFlags = List("-Ytest-pickler")
+ private val limitedThreads = sys.props.get("dotty.tests.limitedThreads").getOrElse("2")
+
+ override def runTestsForFiles(kindFiles: Array[File], kind: String): Array[TestState] = {
+ val (limitResourceTests, parallelTests) =
+ kindFiles partition { kindFile =>
+ val flags = kindFile.changeExtension("flags").fileContents
+ limitResourceFlags.exists(seqFlag => flags.contains(seqFlag))
+ }
+
+ val seqResults =
+ if (!limitResourceTests.isEmpty) {
+ val savedThreads = sys.props("partest.threads")
+ sys.props("partest.threads") = {
+ assert(
+ savedThreads == null || limitedThreads.toInt <= savedThreads.toInt,
+ """|Should not use more threads than the default, when the point
+ |is to limit the amount of resources""".stripMargin
+ )
+ limitedThreads
+ }
+
+ NestUI.echo(s"## we will run ${limitResourceTests.length} tests using ${PartestDefaults.numThreads} thread(s) in parallel")
+ val res = super.runTestsForFiles(limitResourceTests, kind)
+
+ if (savedThreads != null)
+ sys.props("partest.threads") = savedThreads
+ else
+ sys.props.remove("partest.threads")
+
+ res
+ } else Array[TestState]()
+
+ val parResults =
+ if (!parallelTests.isEmpty) {
+ NestUI.echo(s"## we will run ${parallelTests.length} tests in parallel using ${PartestDefaults.numThreads} thread(s)")
+ super.runTestsForFiles(parallelTests, kind)
+ } else Array[TestState]()
+
+ seqResults ++ parResults
+ }
+
+ // override for DPTestRunner and redirecting compilation output to test.clog
+ override def runTest(testFile: File): TestState = {
+ val runner = new DPTestRunner(testFile, this)
+
+ val state =
+ try {
+ runner.run match {
+ // Append compiler output to transcript if compilation failed,
+ // printed with --verbose option
+ case TestState.Fail(f, r@"compilation failed", transcript) =>
+ TestState.Fail(f, r, transcript ++ runner.cLogFile.fileLines.dropWhile(_ == ""))
+ case res => res
+ }
+ } catch {
+ case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
+ }
+ // Report before cleanup: DPTestRunner#cleanup deletes the log files when
+ // the test passed, so reporting must read them first.
+ reportTest(state)
+ runner.cleanup()
+
+ onFinishTest(testFile, state)
+ }
+
+ // override NestUI.reportTest because --show-diff doesn't work. The diff used
+ // seems to add each line to transcript separately, whereas NestUI assumes
+ // that the diff string was added as one entry in the transcript
+ def reportTest(state: TestState) = {
+ import NestUI._
+ import NestUI.color._
+
+ if (isTerse && state.isOk) {
+ NestUI.reportTest(state)
+ } else {
+ echo(statusLine(state))
+ if (!state.isOk && isDiffy) {
+ // Echo everything from the first "% diff " marker onwards, since the
+ // diff lines were pushed to the transcript one by one.
+ val differ = bold(red("% ")) + "diff "
+ state.transcript.dropWhile(s => !(s startsWith differ)) foreach (echo(_))
+ // state.transcript find (_ startsWith differ) foreach (echo(_)) // original
+ }
+ }
+ }
+}
+
+// Runner for a single partest test, adapted for dotty: compiles with the
+// dotty compiler (see newCompiler), writes compiler output to <test>.clog,
+// and customizes javac invocation, neg-test checking, diffing and cleanup.
+class DPTestRunner(testFile: File, suiteRunner: DPSuiteRunner) extends nest.Runner(testFile, suiteRunner) {
+ // Compiler log, sibling of partest's logFile with extension "clog".
+ val cLogFile = SFile(logFile).changeExtension("clog")
+
+ // override to provide DottyCompiler
+ override def newCompiler = new dotty.partest.DPDirectCompiler(this)
+
+ // Adapted from nest.Runner#javac because:
+ // - Our classpath handling is different and we need to pass extraClassPath
+ // to java to get the scala-library which is required for some java tests
+ // - The compiler output should be redirected to cLogFile, like the output of
+ // dotty itself
+ override def javac(files: List[File]): TestState = {
+ import fileManager._
+ import suiteRunner._
+ import FileManager.joinPaths
+ // compile using command-line javac compiler
+ val args = Seq(
+ suiteRunner.javacCmdPath, // FIXME: Dotty deviation just writing "javacCmdPath" doesn't work
+ "-d",
+ outDir.getAbsolutePath,
+ "-classpath",
+ joinPaths(outDir :: extraClasspath ++ testClassPath)
+ ) ++ files.map(_.getAbsolutePath)
+
+ pushTranscript(args mkString " ")
+
+ // Captured stdout/stderr is only persisted to the clog on failure.
+ val captured = StreamCapture(runCommand(args, cLogFile))
+ if (captured.result) genPass() else {
+ cLogFile appendAll captured.stderr
+ cLogFile appendAll captured.stdout
+ genFail("java compilation failed")
+ }
+ }
+
+ // Overriden in order to recursively get all sources that should be handed to
+ // the compiler. Otherwise only sources in the top dir is compiled - works
+ // because the compiler is on the classpath.
+ override def sources(file: File): List[File] =
+ if (file.isDirectory)
+ file.listFiles.toList.flatMap { f =>
+ if (f.isDirectory) sources(f)
+ else if (f.isJavaOrScala) List(f)
+ else Nil
+ }
+ else List(file)
+
+ // Enable me to "fix" the depth issue - remove once completed
+ //override def compilationRounds(file: File): List[CompileRound] = {
+ // val srcs = sources(file) match {
+ // case Nil =>
+ // System.err.println {
+ // s"""|================================================================================
+ // |Warning! You attempted to compile sources from:
+ // | $file
+ // |but partest was unable to find any sources - uncomment DPConsoleRunner#sources
+ // |================================================================================""".stripMargin
+ // }
+ // List(new File("./tests/pos/HelloWorld.scala")) // "just compile some crap" - Guillaume
+ // case xs =>
+ // xs
+ // }
+ // (groupedFiles(srcs) map mixedCompileGroup).flatten
+ //}
+
+ // FIXME: This is copy-pasted from nest.Runner where it is private
+ // Remove this once https://github.com/scala/scala-partest/pull/61 is merged
+ /** Runs command redirecting standard out and
+ * error out to output file. Returns true iff the process exits with 0.
+ */
+ def runCommand(args: Seq[String], outFile: File): Boolean = {
+ import scala.sys.process.{ Process, ProcessLogger }
+ //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
+ val pl = ProcessLogger(outFile)
+ // Arbitrary non-zero exit code used when the wait is interrupted.
+ val nonzero = 17 // rounding down from 17.3
+ def run: Int = {
+ val p = Process(args) run pl
+ try p.exitValue
+ catch {
+ case e: InterruptedException =>
+ NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
+ p.destroy
+ nonzero
+ case t: Throwable =>
+ NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
+ p.destroy
+ throw t
+ }
+ finally pl.close()
+ }
+ (pl buffer run) == 0
+ }
+
+ // override to provide default dotty flags from file in directory
+ override def flagsForCompilation(sources: List[File]): List[String] = {
+ val specificFlags = super.flagsForCompilation(sources)
+ if (specificFlags.isEmpty) defaultFlags
+ else specificFlags
+ }
+
+ // Flags read from an optional __defaultFlags.flags file next to the test;
+ // empty when the file is absent or unreadable.
+ val defaultFlags = {
+ val defaultFile = parentFile.listFiles.toList.find(_.getName == "__defaultFlags.flags")
+ defaultFile.map({ file =>
+ SFile(file).safeSlurp.map({ content => words(content).filter(_.nonEmpty) }).getOrElse(Nil)
+ }).getOrElse(Nil)
+ }
+
+ // override to add the check for nr of compilation errors if there's a
+ // target.nerr file
+ override def runNegTest() = runInContext {
+ sealed abstract class NegTestState
+ // Don't get confused, the neg test passes when compilation fails for at
+ // least one round (optionally checking the number of compiler errors and
+ // compiler console output)
+ case object CompFailed extends NegTestState
+ // the neg test fails when all rounds return either of these:
+ case class CompFailedButWrongNErr(expected: String, found: String) extends NegTestState
+ case object CompFailedButWrongDiff extends NegTestState
+ case object CompSucceeded extends NegTestState
+
+ // Checks the reported error count against the optional <test>.nerr file.
+ def nerrIsOk(reason: String) = {
+ val nerrFinder = """compilation failed with (\d+) errors""".r
+ reason match {
+ case nerrFinder(found) =>
+ SFile(FileOps(testFile) changeExtension "nerr").safeSlurp match {
+ case Some(exp) if (exp != found) => CompFailedButWrongNErr(exp, found)
+ case _ => CompFailed
+ }
+ case _ => CompFailed
+ }
+ }
+
+ // we keep the partest semantics where only one round needs to fail
+ // compilation, not all
+ val compFailingRounds =
+ compilationRounds(testFile)
+ .map { round =>
+ val ok = round.isOk
+ setLastState(if (ok) genPass else genFail("compilation failed"))
+ (round.result, ok)
+ }
+ .filter { case (_, ok) => !ok }
+
+ val failureStates = compFailingRounds.map({ case (result, _) => result match {
+ // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
+ case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => CompSucceeded
+ case Fail(_, reason, _) => if (diffIsOk) nerrIsOk(reason) else CompFailedButWrongDiff
+ case _ => if (diffIsOk) CompFailed else CompFailedButWrongDiff
+ }})
+
+ // One properly failing round is enough for the neg test to pass;
+ // otherwise report the most specific failure found.
+ if (failureStates.exists({ case CompFailed => true; case _ => false })) {
+ true
+ } else {
+ val existsNerr = failureStates.exists({
+ case CompFailedButWrongNErr(exp, found) =>
+ nextTestActionFailing(s"wrong number of compilation errors, expected: $exp, found: $found")
+ true
+ case _ =>
+ false
+ })
+
+ if (existsNerr) false
+ else {
+ val existsDiff = failureStates.exists({
+ case CompFailedButWrongDiff =>
+ nextTestActionFailing(s"output differs")
+ true
+ case _ =>
+ false
+ })
+ if (existsDiff) false
+ else nextTestActionFailing("expected compilation failure")
+ }
+ }
+ }
+
+ // override to change check file updating to original file, not generated
+ override def diffIsOk: Boolean = {
+ // always normalize the log first
+ normalizeLog()
+ val diff = currentDiff
+ // if diff is not empty, is update needed?
+ val updating: Option[Boolean] = (
+ if (diff == "") None
+ else Some(suiteRunner.updateCheck)
+ )
+ pushTranscript(s"diff $logFile $checkFile")
+ nextTestAction(updating) {
+ case Some(true) =>
+ // The original check file path is recorded on the second line of the
+ // generated .checksrc file (see CompilerTest.copyFiles).
+ val origCheck = SFile(checkFile.changeExtension("checksrc").fileLines(1))
+ NestUI.echo("Updating original checkfile " + origCheck)
+ origCheck writeAll file2String(logFile)
+ genUpdated()
+ case Some(false) =>
+ // Get a word-highlighted diff from git if we can find it
+ val bestDiff = if (updating.isEmpty) "" else {
+ if (checkFile.canRead)
+ gitDiff(logFile, checkFile) getOrElse {
+ s"diff $logFile $checkFile\n$diff"
+ }
+ else diff
+ }
+ pushTranscript(bestDiff)
+ genFail("output differs")
+ case None => genPass() // redundant default case
+ } getOrElse true
+ }
+
+ // override to add dotty and scala jars to classpath
+ override def extraClasspath =
+ suiteRunner.fileManager.asInstanceOf[DottyFileManager].extraJarList ::: super.extraClasspath
+
+
+ // FIXME: Dotty deviation: error if return type is omitted:
+ // overriding method cleanup in class Runner of type ()Unit;
+ // method cleanup of type => Boolean | Unit has incompatible type
+
+ // override to keep class files if failed and delete clog if ok
+ override def cleanup: Unit = if (lastState.isOk) {
+ logFile.delete
+ cLogFile.delete
+ Directory(outDir).deleteRecursively
+ }
+}
diff --git a/compiler/test/dotty/partest/DPDirectCompiler.scala b/compiler/test/dotty/partest/DPDirectCompiler.scala
new file mode 100644
index 000000000..410dac338
--- /dev/null
+++ b/compiler/test/dotty/partest/DPDirectCompiler.scala
@@ -0,0 +1,36 @@
+package dotty.partest
+
+import dotty.tools.dotc.reporting.ConsoleReporter
+import scala.tools.partest.{ TestState, nest }
+import java.io.{ File, PrintWriter, FileWriter }
+
+
+/* NOTE: Adapted from partest.DirectCompiler */
+class DPDirectCompiler(runner: DPTestRunner) extends nest.DirectCompiler(runner) {
+
+ /** Compiles the given sources with dotty, appending all compiler output to
+ * the runner's .clog file. Returns pass/fail/crash as a TestState. */
+ override def compile(opts0: List[String], sources: List[File]): TestState = {
+ // FileWriter is opened in append mode so multiple rounds accumulate in
+ // the same clog file.
+ val clogFWriter = new FileWriter(runner.cLogFile.jfile, true)
+ val clogWriter = new PrintWriter(clogFWriter, true)
+ clogWriter.println("\ncompiling " + sources.mkString(" ") + "\noptions: " + opts0.mkString(" "))
+
+ try {
+ // An option starting with "#" selects the benchmarking entry point.
+ val processor =
+ if (opts0.exists(_.startsWith("#"))) dotty.tools.dotc.Bench else dotty.tools.dotc.Main
+ val clogger = new ConsoleReporter(writer = clogWriter)
+ val reporter = processor.process((sources.map(_.toString) ::: opts0).toArray, clogger)
+ if (!reporter.hasErrors) runner.genPass()
+ else {
+ clogWriter.println(reporter.summary)
+ runner.genFail(s"compilation failed with ${reporter.errorCount} errors")
+ }
+ } catch {
+ case t: Throwable =>
+ t.printStackTrace
+ t.printStackTrace(clogWriter)
+ runner.genCrash(t)
+ } finally {
+ // Always release the log writers, also on crash.
+ clogFWriter.close
+ clogWriter.close
+ }
+ }
+}
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index 7f0f84049..83713748c 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -5,6 +5,10 @@ package dotc
import org.junit.Test
import java.io.{ File => JFile }
+import org.junit.experimental.categories.Category
+
+
+@Category(Array(classOf[ParallelTesting]))
class CompilationTests extends ParallelTesting {
import CompilationTests._
diff --git a/compiler/test/dotty/tools/dotc/CompilerTest.scala b/compiler/test/dotty/tools/dotc/CompilerTest.scala
new file mode 100644
index 000000000..f35f9f919
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/CompilerTest.scala
@@ -0,0 +1,613 @@
+package dotty.tools.dotc
+
+import repl.TestREPL
+import core.Contexts._
+import dotty.partest.DPConfig
+import interfaces.Diagnostic.ERROR
+import reporting._
+import diagnostic.MessageContainer
+import util.SourcePosition
+import config.CompilerCommand
+import dotty.tools.io.PlainFile
+import scala.collection.mutable.ListBuffer
+import scala.reflect.io.{ Path, Directory, File => SFile, AbstractFile }
+import scala.tools.partest.nest.{ FileManager, NestUI }
+import scala.annotation.tailrec
+import java.io.{ RandomAccessFile, File => JFile }
+
+
+/** This class has two modes: it can directly run compiler tests, or it can
+ * generate the necessary file structure for partest in the directory
+ * DPConfig.testRoot. Both modes are regular JUnit tests. Which mode is used
+ * depends on the existence of the tests/locks/partest-ppid.lock file which is
+ * created by sbt to trigger partest generation. Sbt will then run partest on
+ * the generated sources.
+ *
+ * Through overriding the partestableXX methods, tests can always be run as
+ * JUnit compiler tests. Run tests cannot be run by JUnit, only by partest.
+ *
+ * A test can either be a file or a directory. Partest will generate a
+ * <test>-<kind>.log file with output of failed tests. Partest reads compiler
+ * flags and the number of errors expected from a neg test from <test>.flags
+ * and <test>.nerr files (also generated). The test is in a parent directory
+ * that determines the kind of test:
+ * - pos: checks that compilation succeeds
+ * - neg: checks that compilation fails with the given number of errors
+ * - run: compilation succeeds, partest: test run generates the output in
+ * <test>.check. Run tests always need to be:
+ * object Test { def main(args: Array[String]): Unit = ... }
+ * Classpath jars can be added to partestDeps in the sbt Build.scala.
+ */
+abstract class CompilerTest {
+
+ /** Override with output dir of test so it can be patched. Partest expects
+ * classes to be in partest-generated/[kind]/[testname]-[kind].obj/ */
+ val defaultOutputDir: String
+
+ /** Override to filter out tests that should not be run by partest. */
+ def partestableFile(prefix: String, fileName: String, extension: String, args: List[String]) = true
+ def partestableDir(prefix: String, dirName: String, args: List[String]) = true
+ def partestableList(testName: String, files: List[String], args: List[String]) = true
+
+ // True iff this JVM should generate partest sources instead of compiling
+ // directly; decided by the presence of the parent sbt process' lock file.
+ val generatePartestFiles = {
+ /* Because we fork in test, the JVM in which this JUnit test runs has a
+ * different pid from the one that started the partest. But the forked VM
+ * receives the pid of the parent as system property. If the lock file
+ * exists, the parent is requesting partest generation. This mechanism
+ * allows one sbt instance to run test (JUnit only) and another partest.
+ * We cannot run two instances of partest at the same time, because they're
+ * writing to the same directories. The sbt lock file generation prevents
+ * this.
+ */
+ val pid = System.getProperty("partestParentID")
+ if (pid == null)
+ false
+ else
+ new JFile(".." + JFile.separator + "tests" + JFile.separator + "locks" + JFile.separator + s"partest-$pid.lock").exists
+ }
+
+ // Delete generated files from previous run and create new log
+ // (None in plain-JUnit mode, where no partest files are produced).
+ val logFile = if (!generatePartestFiles) None else Some(CompilerTest.init)
+
+ /** Always run with JUnit. */
+ def compileLine(cmdLine: String)(implicit defaultOptions: List[String]): Unit = {
+ if (generatePartestFiles)
+ log("WARNING: compileLine will always run with JUnit, no partest files generated.")
+ compileArgs(cmdLine.split("\n"), Nil)
+ }
+
+ /** Compiles the given code file.
+ *
+ * @param prefix the parent directory (including separator at the end)
+ * @param fileName the filename, by default without extension
+ * @param args arguments to the compiler
+ * @param extension the file extension, .scala by default
+ * @param runTest whether this is a run test (only partest can execute it)
+ * @param defaultOptions more arguments to the compiler
+ */
+ def compileFile(prefix: String, fileName: String, args: List[String] = Nil, extension: String = ".scala", runTest: Boolean = false)
+ (implicit defaultOptions: List[String]): Unit = {
+ val filePath = s"$prefix$fileName$extension"
+ val expErrors = expectedErrors(filePath)
+ if (!generatePartestFiles || !partestableFile(prefix, fileName, extension, args ++ defaultOptions)) {
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$fileName$extension")
+ if (args.contains("-rewrite")) {
+ // -rewrite mutates the source file in place; keep a copy of the
+ // original bytes and restore them after the second compilation.
+ val file = new PlainFile(filePath)
+ val data = file.toByteArray
+ // compile with rewrite
+ compileArgs((filePath :: args).toArray, expErrors)
+ // compile again, check that file now compiles without -language:Scala2
+ val plainArgs = args.filter(arg => arg != "-rewrite" && arg != "-language:Scala2")
+ compileFile(prefix, fileName, plainArgs, extension, runTest)
+ // restore original test file
+ val out = file.output
+ out.write(data)
+ out.close()
+ }
+ else compileArgs((filePath :: args).toArray, expErrors)
+ } else {
+ val kind = testKind(prefix, runTest)
+ log(s"generating partest files for test file: $prefix$fileName$extension of kind $kind")
+
+ val sourceFile = new JFile(prefix + fileName + extension)
+ if (sourceFile.exists) {
+ val firstDest = SFile(DPConfig.testRoot + JFile.separator + kind + JFile.separator + fileName + extension)
+ val xerrors = expErrors.map(_.totalErrors).sum
+ computeDestAndCopyFiles(sourceFile, firstDest, kind, args ++ defaultOptions, xerrors.toString)
+ } else {
+ throw new java.io.FileNotFoundException(s"Unable to locate test file $prefix$fileName")
+ }
+ }
+ }
+ /** Convenience wrapper: compileFile as a run test. */
+ def runFile(prefix: String, fileName: String, args: List[String] = Nil, extension: String = ".scala")
+ (implicit defaultOptions: List[String]): Unit = {
+ compileFile(prefix, fileName, args, extension, true)
+ }
+
+ /** Finds a jar on the JVM's system classpath whose path contains
+ * `partialName`; throws FileNotFoundException when absent. */
+ def findJarFromRuntime(partialName: String): String = {
+ val urls = ClassLoader.getSystemClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.map(_.getFile.toString)
+ urls.find(_.contains(partialName)).getOrElse {
+ throw new java.io.FileNotFoundException(
+ s"""Unable to locate $partialName on classpath:\n${urls.toList.mkString("\n")}"""
+ )
+ }
+ }
+
+ // Compiles the given files with the javac found on PATH, passing the
+ // scala-library jar plus any "-d <dir>" pair from defaultOptions.
+ // NOTE(review): the ":" classpath separator is POSIX-specific — Windows
+ // would need File.pathSeparator; confirm this only runs on Unix CI.
+ private def compileWithJavac(
+ fs: Array[String],
+ args: Array[String]
+ )(implicit defaultOptions: List[String]): Boolean = {
+ val scalaLib = findJarFromRuntime("scala-library")
+ val fullArgs = Array(
+ "javac",
+ "-classpath",
+ s".:$scalaLib"
+ ) ++ args ++ defaultOptions.dropWhile("-d" != _).take(2) ++ fs
+
+ Runtime.getRuntime.exec(fullArgs).waitFor() == 0
+ }
+
+ /** Compiles the code files in the given directory together. If args starts
+ * with "-deep", all files in subdirectories (and so on) are included. */
+ def compileDir(prefix: String, dirName: String, args: List[String] = Nil, runTest: Boolean = false)
+ (implicit defaultOptions: List[String]): Unit = {
+ // Collects .scala/.java file paths (optionally recursive for "-deep")
+ // together with the normalized args and the expected neg-test errors.
+ def computeFilePathsAndExpErrors = {
+ val dir = Directory(prefix + dirName)
+ val (files, normArgs) = args match {
+ case "-deep" :: args1 => (dir.deepFiles, args1)
+ case _ => (dir.files, args)
+ }
+ val (filePaths, javaFilePaths) = files
+ .toArray.map(_.toString)
+ .foldLeft((Array.empty[String], Array.empty[String])) { case (acc @ (fp, jfp), name) =>
+ if (name endsWith ".scala") (name +: fp, jfp)
+ else if (name endsWith ".java") (fp, name +: jfp)
+ else (fp, jfp)
+ }
+ val expErrors = expectedErrors(filePaths.toList)
+ (filePaths, javaFilePaths, normArgs, expErrors)
+ }
+ if (!generatePartestFiles || !partestableDir(prefix, dirName, args ++ defaultOptions)) {
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$dirName")
+ val (filePaths, javaFilePaths, normArgs, expErrors) = computeFilePathsAndExpErrors
+ compileWithJavac(javaFilePaths, Array.empty) // javac needs to run first on dotty-library
+ compileArgs(javaFilePaths ++ filePaths ++ normArgs, expErrors)
+ } else {
+ // "-deep" flattens the directory into a temporary one that is deleted
+ // again after the partest files have been generated.
+ val (sourceDir, flags, deep) = args match {
+ case "-deep" :: args1 => (flattenDir(prefix, dirName), args1 ++ defaultOptions, "deep")
+ case _ => (new JFile(prefix + dirName), args ++ defaultOptions, "shallow")
+ }
+ val kind = testKind(prefix, runTest)
+ log(s"generating partest files for test directory ($deep): $prefix$dirName of kind $kind")
+
+ if (sourceDir.exists) {
+ val firstDest = Directory(DPConfig.testRoot + JFile.separator + kind + JFile.separator + dirName)
+ val xerrors = if (isNegTest(prefix)) {
+ val (_, _, _, expErrors) = computeFilePathsAndExpErrors
+ expErrors.map(_.totalErrors).sum
+ } else 0
+ computeDestAndCopyFiles(sourceDir, firstDest, kind, flags, xerrors.toString)
+ if (deep == "deep")
+ Directory(sourceDir).deleteRecursively
+ } else {
+ throw new java.io.FileNotFoundException(s"Unable to locate test dir $prefix$dirName")
+ }
+ }
+ }
+ /** Convenience wrapper: compileDir as a run test. */
+ def runDir(prefix: String, dirName: String, args: List[String] = Nil)
+ (implicit defaultOptions: List[String]): Unit =
+ compileDir(prefix, dirName, args, true)
+
+ /** Compiles each source in the directory path separately by calling
+ * compileFile resp. compileDir. */
+ def compileFiles(path: String, args: List[String] = Nil, verbose: Boolean = true, runTest: Boolean = false,
+ compileSubDirs: Boolean = true)(implicit defaultOptions: List[String]): Unit = {
+ val dir = Directory(path)
+ val fileNames = dir.files.toArray.map(_.jfile.getName).filter(name => (name endsWith ".scala") || (name endsWith ".java"))
+ for (name <- fileNames) {
+ if (verbose) log(s"testing $path$name")
+ // Extension is "" because the names above already include it.
+ compileFile(path, name, args, "", runTest)
+ }
+ if (compileSubDirs)
+ for (subdir <- dir.dirs) {
+ if (verbose) log(s"testing $subdir")
+ compileDir(path, subdir.jfile.getName, args, runTest)
+ }
+ }
+ /** Convenience wrapper: compileFiles as run tests. */
+ def runFiles(path: String, args: List[String] = Nil, verbose: Boolean = true)
+ (implicit defaultOptions: List[String]): Unit =
+ compileFiles(path, args, verbose, true)
+
+ /** Compiles the given list of code files. */
+ def compileList(testName: String, files: List[String], args: List[String] = Nil)
+ (implicit defaultOptions: List[String]): Unit = {
+ if (!generatePartestFiles || !partestableList(testName, files, args ++ defaultOptions)) {
+ val expErrors = expectedErrors(files)
+ compileArgs((files ++ args).toArray, expErrors)
+ } else {
+ // Copy all sources into a temporary directory, generate the partest
+ // files from it as a directory test, then delete the copy.
+ val destDir = Directory(DPConfig.testRoot + JFile.separator + testName)
+ files.foreach({ file =>
+ val sourceFile = new JFile(file)
+ val destFile = destDir / (if (file.startsWith("../")) file.substring(3) else file)
+ recCopyFiles(sourceFile, destFile)
+ })
+ compileDir(DPConfig.testRoot + JFile.separator, testName, args)
+ destDir.deleteRecursively
+ }
+ }
+
+ // ========== HELPERS =============
+
+ // Expected errors are only computed when at least one path is a neg test.
+ private def expectedErrors(filePaths: List[String]): List[ErrorsInFile] = if (filePaths.exists(isNegTest(_))) filePaths.map(getErrors(_)) else Nil
+
+ private def expectedErrors(filePath: String): List[ErrorsInFile] = expectedErrors(List(filePath))
+
+ private def isNegTest(testPath: String) = testPath.contains("/neg/")
+
+ /** Runs the compiler on the given arguments and asserts that the number and
+ * positions of reported errors match the "// error" / "// nopos-error"
+ * markers collected in `expectedErrorsPerFile`. */
+ private def compileArgs(args: Array[String], expectedErrorsPerFile: List[ErrorsInFile])
+ (implicit defaultOptions: List[String]): Unit = {
+ val allArgs = args ++ defaultOptions
+ val verbose = allArgs.contains("-verbose")
+ //println(s"""all args: ${allArgs.mkString("\n")}""")
+ val processor = if (allArgs.exists(_.startsWith("#"))) Bench else Main
+ // Buffers non-error messages; errors (or everything, with -verbose) are
+ // flushed to the console so failures are visible in the test output.
+ val storeReporter = new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
+ private val consoleReporter = new ConsoleReporter()
+ private val innerStoreReporter = new StoreReporter(consoleReporter)
+ def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+ if (m.level == ERROR || verbose) {
+ innerStoreReporter.flush()
+ consoleReporter.doReport(m)
+ }
+ else if (errorCount > 0) consoleReporter.doReport(m)
+ else innerStoreReporter.doReport(m)
+ }
+ }
+ val reporter = processor.process(allArgs, storeReporter)
+
+ val nerrors = reporter.errorCount
+ val xerrors = (expectedErrorsPerFile map {_.totalErrors}).sum
+ def expectedErrorFiles =
+ expectedErrorsPerFile.collect{
+ case er if er.totalErrors > 0 => er.fileName
+ }
+ assert(nerrors == xerrors,
+ s"""Wrong # of errors. Expected: $xerrors, found: $nerrors
+ |Files with expected errors: $expectedErrorFiles
+ |errors:
+ """.stripMargin)
+ // NEG TEST
+ if (xerrors > 0) {
+ val errorLines = reporter.allErrors.map(_.pos)
+ // reporter didn't record as many errors as its errorCount says
+ assert(errorLines.length == nerrors, s"Not enough errors recorded.")
+
+ // Some compiler errors have an associated source position. Each error
+ // needs to correspond to a "// error" marker on that line in the source
+ // file and vice versa.
+ // Other compiler errors don't have an associated source position. Their
+ // number should correspond to the total count of "// nopos-error"
+ // markers in all files
+ val (errorsByFile, errorsWithoutPos) = errorLines.groupBy(_.source.file).toList.partition(_._1.toString != "<no source>")
+
+ // check errors with source position
+ val foundErrorsPerFile = errorsByFile.map({ case (fileName, errorList) =>
+ val posErrorLinesToNr = errorList.groupBy(_.line).toList.map({ case (line, list) => (line, list.length) }).sortBy(_._1)
+ ErrorsInFile(fileName.toString, 0, posErrorLinesToNr)
+ })
+ // Zero out the nopos counts so only positioned errors are compared here.
+ val expectedErrorsPerFileZeroed = expectedErrorsPerFile.map({
+ case ErrorsInFile(fileName, _, posErrorLinesToNr) =>
+ ErrorsInFile(fileName.toString, 0, posErrorLinesToNr)
+ })
+ checkErrorsWithPosition(expectedErrorsPerFileZeroed, foundErrorsPerFile)
+
+ // check errors without source position
+ val expectedNoPos = expectedErrorsPerFile.map(_.noposErrorNr).sum
+ val foundNoPos = errorsWithoutPos.map(_._2.length).sum
+ assert(foundNoPos == expectedNoPos,
+ s"Wrong # of errors without source position. Expected (all files): $expectedNoPos, found (compiler): $foundNoPos")
+ }
+ }
+
+ // ========== NEG TEST HELPERS =============
+
+ /** Captures the number of nopos-errors in the given file and the number of
+ * errors with a position, represented as a tuple of source line and number
+ * of errors on that line. */
+ case class ErrorsInFile(fileName: String, noposErrorNr: Int, posErrorLinesToNr: List[(Int, Int)]) {
+ def totalErrors = noposErrorNr + posErrorLinesToNr.map(_._2).sum
+ }
+
+ /** Extracts the errors expected for the given neg test file.
+ * Scans line by line for "// error" and "// nopos-error" markers
+ * (the regexes tolerate a missing space after "//"). */
+ def getErrors(fileName: String): ErrorsInFile = {
+ val content = SFile(fileName).slurp
+ val (line, rest) = content.span(_ != '\n')
+
+ @tailrec
+ def checkLine(line: String, rest: String, index: Int, noposAcc: Int, posAcc: List[(Int, Int)]): ErrorsInFile = {
+ val posErrors = "// ?error".r.findAllIn(line).length
+ val newPosAcc = if (posErrors > 0) (index, posErrors) :: posAcc else posAcc
+ val newNoPosAcc = noposAcc + "// ?nopos-error".r.findAllIn(line).length
+ val (newLine, newRest) = rest.span(_ != '\n')
+ if (newRest.isEmpty)
+ ErrorsInFile(fileName.toString, newNoPosAcc, newPosAcc.reverse)
+ else
+ checkLine(newLine, newRest.tail, index + 1, newNoPosAcc, newPosAcc) // skip leading '\n'
+ }
+
+ checkLine(line, rest.tail, 0, 0, Nil) // skip leading '\n'
+ }
+
+ /** Asserts that the expected and found number of errors correspond, and
+ * otherwise throws an error with the filename, plus optionally a line
+ * number if available. */
+ def errorMsg(fileName: String, lineNumber: Option[Int], exp: Int, found: Int) = {
+ // Line numbers are 0-based internally; +1 for the human-readable message.
+ val i = lineNumber.map({ i => ":" + (i + 1) }).getOrElse("")
+ assert(found == exp, s"Wrong # of errors for $fileName$i. Expected (file): $exp, found (compiler): $found")
+ }
+
+ /** Compares the expected with the found errors and creates a nice error
+ * message if they don't agree. Checks both directions: expected-but-not-
+ * found and found-but-not-expected. */
+ def checkErrorsWithPosition(expected: List[ErrorsInFile], found: List[ErrorsInFile]): Unit = {
+ // create nice error messages
+ expected.diff(found) match {
+ case Nil => // nothing missing
+ case ErrorsInFile(fileName, _, expectedLines) :: xs =>
+ found.find(_.fileName == fileName) match {
+ case None =>
+ // expected some errors, but none found for this file
+ errorMsg(fileName, None, expectedLines.map(_._2).sum, 0)
+ case Some(ErrorsInFile(_,_,foundLines)) =>
+ // found wrong number/location of markers for this file
+ compareLines(fileName, expectedLines, foundLines)
+ }
+ }
+
+ found.diff(expected) match {
+ case Nil => // nothing missing
+ case ErrorsInFile(fileName, _, foundLines) :: xs =>
+ expected.find(_.fileName == fileName) match {
+ case None =>
+ // found some errors, but none expected for this file
+ errorMsg(fileName, None, 0, foundLines.map(_._2).sum)
+ case Some(ErrorsInFile(_,_,expectedLines)) =>
+ // found wrong number/location of markers for this file
+ compareLines(fileName, expectedLines, foundLines)
+ }
+ }
+ }
+
+ /** Gives an error message for one line where the expected number of errors and
+ * the number of compiler errors differ. */
+ def compareLines(fileName: String, expectedLines: List[(Int, Int)], foundLines: List[(Int, Int)]) = {
+ expectedLines foreach{
+ case (line, expNr) =>
+ foundLines.find(_._1 == line) match {
+ case Some((_, `expNr`)) => // this line is ok
+ case Some((_, foundNr)) => errorMsg(fileName, Some(line), expNr, foundNr)
+ case None =>
+ println(s"expected lines = $expectedLines%, %")
+ println(s"found lines = $foundLines%, %")
+ errorMsg(fileName, Some(line), expNr, 0)
+ }
+ }
+ foundLines foreach {
+ case (line, foundNr) =>
+ expectedLines.find(_._1 == line) match {
+ case Some((_, `foundNr`)) => // this line is ok
+ case Some((_, expNr)) => errorMsg(fileName, Some(line), expNr, foundNr)
+ case None => errorMsg(fileName, Some(line), 0, foundNr)
+ }
+ }
+ }
+
+ // ========== PARTEST HELPERS =============
+
+ // In particular, don't copy flags from scalac tests
+ private val extensionsToCopy = scala.collection.immutable.HashSet("scala", "java")
+
+ /** Determines what kind of test to run. */
+ private def testKind(prefixDir: String, runTest: Boolean) = {
+ if (runTest) "run"
+ else if (isNegTest(prefixDir)) "neg"
+ else if (prefixDir.endsWith("run" + JFile.separator)) {
+ log("WARNING: test is being run as pos test despite being in a run directory. " +
+ "Use runFile/runDir instead of compileFile/compileDir to do a run test")
+ "pos"
+ } else "pos"
+ }
+
+ /** The three possibilities: no generated sources exist yet, the same sources
+ * exist already, different sources exist. */
+ object Difference extends Enumeration {
+ type Difference = Value
+ val NotExists, ExistsSame, ExistsDifferent = Value
+ }
+ import Difference._
+
+ /** The same source might be used for several partest test cases (e.g. with
+ * different flags). Detects existing versions and computes the path to be
+ * used for this version, e.g. testname_v1 for the first alternative. */
+ private def computeDestAndCopyFiles(source: JFile, dest: Path, kind: String, oldFlags: List[String], nerr: String,
+ nr: Int = 0, oldOutput: String = defaultOutputDir): Unit = {
+
+ val partestOutput = dest.jfile.getParentFile + JFile.separator + dest.stripExtension + "-" + kind + ".obj"
+
+ // Fallback name derived from the source's absolute path with separators
+ // replaced by '_', used when the version suffix cannot be bumped.
+ val altOutput =
+ source.getParentFile.getAbsolutePath.map(x => if (x == JFile.separatorChar) '_' else x)
+
+ // Rewrite the output dir in the flags and splice partestOutput into the
+ // -classpath entry (if any).
+ val (beforeCp, remaining) = oldFlags
+ .map(f => if (f == oldOutput) partestOutput else f)
+ .span(_ != "-classpath")
+ val flags = beforeCp ++ List("-classpath", (partestOutput :: remaining.drop(1)).mkString(":"))
+
+ val difference = getExisting(dest).isDifferent(source, flags, nerr)
+ difference match {
+ case NotExists => copyFiles(source, dest, partestOutput, flags, nerr, kind)
+ case ExistsSame => // nothing else to do
+ case ExistsDifferent =>
+ // Recurse with a bumped version suffix until a free or identical
+ // destination is found.
+ val nextDest = dest.parent / (dest match {
+ case d: Directory =>
+ val newVersion = replaceVersion(d.name, nr).getOrElse(altOutput)
+ Directory(newVersion)
+ case f =>
+ val newVersion = replaceVersion(f.stripExtension, nr).getOrElse(altOutput)
+ SFile(newVersion).addExtension(f.extension)
+ })
+ computeDestAndCopyFiles(source, nextDest, kind, flags, nerr, nr + 1, partestOutput)
+ }
+ }
+
+ /** Copies the test sources. Creates flags, nerr, check and output files. */
+ private def copyFiles(sourceFile: Path, dest: Path, partestOutput: String, flags: List[String], nerr: String, kind: String) = {
+ recCopyFiles(sourceFile, dest)
+
+ new JFile(partestOutput).mkdirs
+
+ if (flags.nonEmpty)
+ dest.changeExtension("flags").createFile(true).writeAll(flags.mkString(" "))
+ if (nerr != "0")
+ dest.changeExtension("nerr").createFile(true).writeAll(nerr)
+ sourceFile.changeExtension("check").ifFile({ check =>
+ if (kind == "run") {
+ FileManager.copyFile(check.jfile, dest.changeExtension("check").jfile)
+ dest.changeExtension("checksrc").createFile(true).writeAll("check file generated from source:\n" + check.toString)
+ } else {
+ log(s"WARNING: ignoring $check for test kind $kind")
+ }
+ })
+
+ }
+
+ /** Recursively copy over source files and directories, excluding extensions
+  * that aren't in extensionsToCopy. */
+ private def recCopyFiles(sourceFile: Path, dest: Path): Unit = {
+
+ // Copies a single file to `dest`. By default the file is re-written as UTF-8
+ // with a "do not modify" warning header; if its bytes cannot be decoded as
+ // UTF-8 we retry with bytewise = true and do a raw byte copy (no header).
+ @tailrec def copyfile(file: SFile, bytewise: Boolean): Unit = {
+ if (bytewise) {
+ // NOTE(review): the streams are not closed if an exception is thrown
+ // mid-copy (no try/finally) — tolerable for a test harness, but worth
+ // confirming this never runs long enough to leak handles.
+ val in = file.inputStream()
+ val out = SFile(dest).outputStream()
+ val buffer = new Array[Byte](1024)
+ // Pump `in` to `out`; `available` is the byte count of the previous read,
+ // negative on end-of-stream. The first iteration writes 0 bytes.
+ @tailrec def loop(available: Int):Unit = {
+ if (available < 0) {()}
+ else {
+ out.write(buffer, 0, available)
+ val read = in.read(buffer)
+ loop(read)
+ }
+ }
+ loop(0)
+ in.close()
+ out.close()
+ } else {
+ try {
+ SFile(dest)(scala.io.Codec.UTF8).writeAll((s"/* !!!!! WARNING: DO NOT MODIFY. Original is at: $file !!!!! */").replace("\\", "/"), file.slurp("UTF-8"))
+ } catch {
+ case unmappable: java.nio.charset.MalformedInputException =>
+ copyfile(file, true) //there are bytes that can't be mapped with UTF-8. Bail and just do a straight byte-wise copy without the warning header.
+ }
+ }
+ }
+
+ // Files: copy only whitelisted extensions. Directories: recreate the
+ // directory and recurse into every child. A missing source path asserts.
+ processFileDir(sourceFile, { sf =>
+ if (extensionsToCopy.contains(sf.extension)) {
+ dest.parent.jfile.mkdirs
+ copyfile(sf, false)
+ } else {
+ log(s"WARNING: ignoring $sf")
+ }
+ }, { sdir =>
+ dest.jfile.mkdirs
+ sdir.list.foreach(path => recCopyFiles(path, dest / path.name))
+ }, Some("DPCompilerTest.recCopyFiles: sourceFile not found: " + sourceFile))
+ }
+
+ /** Reads the existing files for the given test source if any. */
+ private def getExisting(dest: Path): ExistingFiles = {
+ // Outer Option: whether `dest` exists at all; inner Option: its content
+ // (a directory reads as Some("")). An unreadable file stores the exception
+ // text instead, so it will compare as different and be regenerated.
+ val content: Option[Option[String]] = processFileDir(dest, f => try Some(f.slurp("UTF8")) catch {case io: java.io.IOException => Some(io.toString())}, d => Some(""))
+ if (content.isDefined && content.get.isDefined) {
+ // Sibling metadata files may legitimately be absent (safeSlurp -> None).
+ val flags = (dest changeExtension "flags").toFile.safeSlurp
+ val nerr = (dest changeExtension "nerr").toFile.safeSlurp
+ ExistingFiles(content.get, flags, nerr)
+ } else ExistingFiles()
+ }
+
+ /** Encapsulates existing generated test files.
+  *
+  * @param genSrc generated source content, None if the test does not exist yet
+  * @param flags  content of the .flags file, if present
+  * @param nerr   content of the .nerr file, if present
+  */
+ case class ExistingFiles(genSrc: Option[String] = None, flags: Option[String] = None, nerr: Option[String] = None) {
+ /** Compares these generated files against a candidate source/flags/nerr
+  * triple to decide whether the existing test can be reused as-is. */
+ def isDifferent(sourceFile: JFile, otherFlags: List[String], otherNerr: String): Difference = {
+ if (!genSrc.isDefined) {
+ NotExists
+ } else {
+ // Read the candidate source (directories compare as ""); a missing
+ // source file asserts via processFileDir's failMsgOnNone.
+ val source = processFileDir(sourceFile, { f => try Some(f.slurp("UTF8")) catch {case _: java.io.IOException => None} }, { d => Some("") },
+ Some("DPCompilerTest sourceFile doesn't exist: " + sourceFile)).get
+ if (source == genSrc) {
+ // Same source: ExistsSame only when nerr AND flags also match.
+ // An absent .nerr file is equivalent to "0", an absent .flags file
+ // is equivalent to an empty flag list.
+ nerr match {
+ case Some(n) if (n != otherNerr) => ExistsDifferent
+ case None if (otherNerr != "0") => ExistsDifferent
+ case _ if (flags.map(_ == otherFlags.mkString(" ")).getOrElse(otherFlags.isEmpty)) => ExistsSame
+ case _ => ExistsDifferent
+ }
+ } else ExistsDifferent
+ }
+ }
+ }
+
+ import scala.util.matching.Regex
+ // Matches a name ending in "_v<digits>", capturing the prefix and the number.
+ val nrFinder = """(.*_v)(\d+)""".r
+ /** Changes the version number suffix in the name (without extension).
+  * Returns None when `name` carries a version suffix other than `nr`
+  * (the caller then falls back to an alternative output name). */
+ private def replaceVersion(name: String, nr: Int): Option[String] = {
+ val nrString = nr.toString
+ name match {
+ // Backquoted `nrString` matches only when the captured suffix equals nr;
+ // in that case bump the suffix to nr + 1.
+ case nrFinder(prefix, `nrString`) => Some(prefix + (nr + 1))
+ case _ if nr != 0 => None
+ // nr == 0 and no version suffix yet: start the sequence at _v1.
+ case _ => Some(name + "_v1")
+ }
+ }
+
+ /** Returns None if the given path doesn't exist, otherwise returns Some of
+  * applying either processFile or processDir, depending on what the path
+  * refers to in the file system. If failMsgOnNone is defined, this function
+  * asserts that the file exists using the provided message. */
+ private def processFileDir[T](input: Path, processFile: SFile => T, processDir: Directory => T, failMsgOnNone: Option[String] = None): Option[T] = {
+ // ifFile/ifDirectory each yield None when `input` is not of that kind.
+ val res = input.ifFile(f => processFile(f)).orElse(input.ifDirectory(d => processDir(d)))
+ (failMsgOnNone, res) match {
+ // Missing path with a failure message: abort the test run via assert.
+ case (Some(msg), None) => assert(false, msg); None
+ case _ => res
+ }
+ }
+
+ /** Creates a temporary directory and copies all (deep) files over, thus
+  * flattening the directory structure.
+  *
+  * NOTE(review): flattening by `source.name` means two deep files with the
+  * same filename overwrite each other in _temp — presumably the test sources
+  * guarantee unique names; confirm before relying on this for new suites. */
+ private def flattenDir(prefix: String, dirName: String): JFile = {
+ val destDir = Directory(DPConfig.testRoot + JFile.separator + "_temp")
+ Directory(prefix + dirName).deepFiles.foreach(source => recCopyFiles(source, destDir / source.name))
+ destDir.jfile
+ }
+
+ /** Write either to console (JUnit) or log file (partest).
+  * When `logFile` is defined the message is appended there; otherwise it is
+  * printed to stdout. A trailing newline is always added. */
+ private def log(msg: String) = logFile.map(_.appendAll(msg + "\n")).getOrElse(println(msg))
+}
+}
+
+/** Entry point used when (re)generating partest sources.
+ * NOTE(review): extends App — fine for this throwaway generator, but be aware
+ * of the usual App initialization-order caveats if logic is added here. */
+object CompilerTest extends App {
+
+ /** Deletes generated partest sources from a previous run, recreates
+  * directory and returns the freshly created log file.
+  * `lazy` so the cleanup runs exactly once, on first access — presumably
+  * triggered the first time a test logs; verify against the callers. */
+ lazy val init: SFile = {
+ scala.reflect.io.Directory(DPConfig.testRoot).deleteRecursively
+ new JFile(DPConfig.testRoot).mkdirs
+ val log = DPConfig.genLog.createFile(true)
+ println(s"CompilerTest is generating tests for partest, log: $log")
+ log
+ }
+
+// Example invocations kept for manual debugging:
+// val dotcDir = "/Users/odersky/workspace/dotty/src/dotty/"
+
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "CompilationUnit")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Compiler")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Driver")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Main")
+// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
+
+// new CompilerTest().compileDir(dotcDir + "tools/dotc")
+ // new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
+}
diff --git a/project/Build.scala b/project/Build.scala
index ae7672248..baebaedf5 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -131,7 +131,27 @@ object Build {
settings(
triggeredMessage in ThisBuild := Watched.clearWhenTriggered,
- addCommandAlias("run", "dotty-compiler/run")
+ addCommandAlias("run", "dotty-compiler/run") ++
+ addCommandAlias(
+ "partest",
+ ";publishLocal" + // Non-bootstrapped dotty needs to be published first
+ ";dotty-compiler-bootstrapped/lockPartestFile" +
+ ";dotty-compiler-bootstrapped/test:test" +
+ ";dotty-compiler-bootstrapped/runPartestRunner"
+ ) ++
+ addCommandAlias(
+ "partest-only",
+ ";publishLocal" + // Non-bootstrapped dotty needs to be published first
+ ";dotty-compiler-bootstrapped/lockPartestFile" +
+ ";dotty-compiler-bootstrapped/test:test-only dotc.tests" +
+ ";dotty-compiler-bootstrapped/runPartestRunner"
+ ) ++
+ addCommandAlias(
+ "partest-only-no-bootstrap",
+ ";dotty-compiler/lockPartestFile" +
+ ";dotty-compiler/test:test-only dotc.tests" +
+ ";dotty-compiler/runPartestRunner"
+ )
).
settings(publishing)
@@ -264,6 +284,44 @@ object Build {
"org.scala-lang" % "scala-reflect" % scalacVersion,
"org.scala-lang" % "scala-library" % scalacVersion % "test"),
+ // start partest specific settings:
+ libraryDependencies += "org.scala-lang.modules" %% "scala-partest" % "1.0.11" % "test",
+ testOptions in Test += Tests.Cleanup({ () => partestLockFile.delete }),
+ // this option is needed so that partest doesn't run
+ testOptions += Tests.Argument(TestFrameworks.JUnit, "--exclude-categories=dotty.tools.dotc.ParallelTesting"),
+ partestDeps := Seq(
+ scalaCompiler,
+ "org.scala-lang" % "scala-reflect" % scalacVersion,
+ "org.scala-lang" % "scala-library" % scalacVersion % "test"
+ ),
+ lockPartestFile := {
+ // When this file is present, running `test` generates the files for
+ // partest. Otherwise it just executes the tests directly.
+ val lockDir = partestLockFile.getParentFile
+ lockDir.mkdirs
+ // Cannot have concurrent partests as they write to the same directory.
+ if (lockDir.list.size > 0)
+ throw new RuntimeException("ERROR: sbt partest: another partest is already running, pid in lock file: " + lockDir.list.toList.mkString(" "))
+ partestLockFile.createNewFile
+ partestLockFile.deleteOnExit
+ },
+ runPartestRunner := Def.inputTaskDyn {
+ // Magic! This is both an input task and a dynamic task. Apparently
+ // command line arguments get passed to the last task in an aliased
+ // sequence (see partest alias below), so this works.
+ val args = Def.spaceDelimited("<arg>").parsed
+ val jars = List(
+ (packageBin in Compile).value.getAbsolutePath,
+ packageAll.value("dotty-library"),
+ packageAll.value("dotty-interfaces")
+ ) ++ getJarPaths(partestDeps.value, ivyPaths.value.ivyHome)
+ val dottyJars =
+ s"""-dottyJars ${jars.length + 2} dotty.jar dotty-lib.jar ${jars.mkString(" ")}"""
+ // Provide the jars required on the classpath of run tests
+ runTask(Test, "dotty.partest.DPConsoleRunner", dottyJars + " " + args.mkString(" "))
+ }.evaluated,
+ // end partest specific settings
+
// enable improved incremental compilation algorithm
incOptions := incOptions.value.withNameHashing(true),
@@ -403,10 +461,35 @@ object Build {
"-Ddotty.tests.classes.compiler=" + pA("dotty-compiler")
)
- jars ::: tuning ::: agentOptions ::: ci_build ::: path.toList
+ ("-DpartestParentID=" + pid) :: jars ::: tuning ::: agentOptions ::: ci_build ::: path.toList
}
)
+ // Partest tasks
+ /** Modules whose jars must be resolved onto the partest classpath. */
+ lazy val partestDeps =
+ SettingKey[Seq[ModuleID]]("partestDeps", "Finds jars for partest dependencies")
+ /** Input task (accepts command-line args) that launches DPConsoleRunner. */
+ lazy val runPartestRunner =
+ InputKey[Unit]("runPartestRunner", "Runs partest")
+ /** Creates the lock file that switches `test` into generate-for-partest mode. */
+ lazy val lockPartestFile =
+ TaskKey[Unit]("lockPartestFile", "Creates the lock file at ./tests/locks/partest-<pid>.lock")
+ // One lock file per sbt JVM; its presence prevents concurrent partest runs.
+ lazy val partestLockFile =
+ new File("." + File.separator + "tests" + File.separator + "locks" + File.separator + s"partest-$pid.lock")
+
+ /** Current JVM process id, parsed from the RuntimeMXBean name ("pid@host"). */
+ def pid = java.lang.Long.parseLong(java.lang.management.ManagementFactory.getRuntimeMXBean().getName().split("@")(0))
+
+ /** Resolves each module to its jar inside the local ivy cache.
+  * Fails fast with a RuntimeException when a jar is missing or ivyHome is
+  * undefined, so a misconfigured partest run aborts immediately. */
+ def getJarPaths(modules: Seq[ModuleID], ivyHome: Option[File]): Seq[String] = ivyHome match {
+ case Some(home) =>
+ modules.map({ module =>
+ // Standard ivy cache layout: <home>/cache/<org>/<name>/jars/<name>-<rev>.jar
+ val file = Path(home) / Path("cache") /
+ Path(module.organization) / Path(module.name) / Path("jars") /
+ Path(module.name + "-" + module.revision + ".jar")
+ if (!file.isFile) throw new RuntimeException("ERROR: sbt getJarPaths: dependency jar not found: " + file)
+ else file.jfile.getAbsolutePath
+ })
+ case None => throw new RuntimeException("ERROR: sbt getJarPaths: ivyHome not defined")
+ }
+ // end partest tasks
+
lazy val `dotty-compiler` = project.in(file("compiler")).
dependsOn(`dotty-interfaces`).
dependsOn(`dotty-library`).