-rw-r--r--  .drone.yml  |  16
-rw-r--r--  .drone.yml.sig  |  2
-rw-r--r--  AUTHORS.md  |  23
-rw-r--r--  compiler/foo  |  0
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala  |  149
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/ClassPath.scala  |  60
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala  |  83
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala  |  245
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/FileUtils.scala  |  74
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala  |  26
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala  |  52
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala  |  176
-rw-r--r--  compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala  |  68
-rw-r--r--  compiler/src/dotty/tools/dotc/config/JavaPlatform.scala  |  14
-rw-r--r--  compiler/src/dotty/tools/dotc/config/PathResolver.scala  |  21
-rw-r--r--  compiler/src/dotty/tools/dotc/config/ScalaSettings.scala  |  2
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Definitions.scala  |  4
-rw-r--r--  compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala  |  52
-rw-r--r--  compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala  |  36
-rw-r--r--  compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala  |  2
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/LambdaLift.scala  |  12
-rw-r--r--  compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala  |  2
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Applications.scala  |  7
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Namer.scala  |  2
-rw-r--r--  compiler/src/dotty/tools/dotc/typer/Typer.scala  |  50
-rw-r--r--  compiler/src/dotty/tools/io/ClassPath.scala  |  425
-rw-r--r--  compiler/src/dotty/tools/io/PlainFile.scala  |  170
-rw-r--r--  compiler/src/dotty/tools/io/package.scala  |  2
-rw-r--r--  compiler/test/dotc/comptest.scala  |  7
-rw-r--r--  compiler/test/dotc/tests.scala  |  18
-rw-r--r--  compiler/test/dotty/Jars.scala  |  33
-rw-r--r--  compiler/test/dotty/Properties.scala  |  49
-rw-r--r--  compiler/test/dotty/partest/DPConfig.scala  |  40
-rw-r--r--  compiler/test/dotty/partest/DPConsoleRunner.scala  |  411
-rw-r--r--  compiler/test/dotty/partest/DPDirectCompiler.scala  |  36
-rw-r--r--  compiler/test/dotty/tools/dotc/CompilationTests.scala  |  86
-rw-r--r--  compiler/test/dotty/tools/dotc/CompilerTest.scala  |  231
-rw-r--r--  compiler/test/dotty/tools/dotc/ParallelSummaryReport.java  |  73
-rw-r--r--  compiler/test/dotty/tools/dotc/reporting/TestReporter.scala  |  69
-rw-r--r--  compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala  |  3
-rw-r--r--  compiler/test/dotty/tools/vulpix/ChildJVMMain.java  |  34
-rw-r--r--  compiler/test/dotty/tools/vulpix/ParallelTesting.scala (renamed from compiler/test/dotty/tools/dotc/ParallelTesting.scala)  |  374
-rw-r--r--  compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala  |  196
-rw-r--r--  compiler/test/dotty/tools/vulpix/Status.scala  |  7
-rw-r--r--  compiler/test/dotty/tools/vulpix/SummaryReport.scala  |  145
-rw-r--r--  compiler/test/dotty/tools/vulpix/TestConfiguration.scala  |  67
-rw-r--r--  compiler/test/dotty/tools/vulpix/VulpixTests.scala (renamed from compiler/test/dotty/tools/dotc/ParallelTestTests.scala)  |  28
-rw-r--r--  docs/docs/contributing/testing.md  |  7
-rw-r--r--  docs/docs/contributing/workflow.md  |  15
-rw-r--r--  project/Build.scala  |  105
-rw-r--r--  sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala  |  2
-rw-r--r--  tests/disabled/run/t5293.scala (renamed from tests/run/t5293.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/WeakHashSetTest.scala (renamed from tests/run/WeakHashSetTest.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/shortClass.scala (renamed from tests/run/shortClass.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/showraw_nosymbol.scala (renamed from tests/run/showraw_nosymbol.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/sm-interpolator.scala (renamed from tests/run/sm-interpolator.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/structural.scala (renamed from tests/run/structural.scala)  |  0
-rw-r--r--  tests/disabled/scalac-dependent/t6732.scala (renamed from tests/run/t6732.scala)  |  0
-rw-r--r--  tests/partest-test/deadlock.scala  |  44
-rw-r--r--  tests/partest-test/infinite.scala  |  9
-rw-r--r--  tests/partest-test/infiniteAlloc.scala  |  9
-rw-r--r--  tests/partest-test/infiniteTail.scala  |  7
-rw-r--r--  tests/pos/i2212.scala  |  19
-rw-r--r--  tests/pos/i2218.scala  |  9
-rw-r--r--  tests/run/1938-2.scala  |  37
-rw-r--r--  tests/run/i2163.scala  |  21
-rw-r--r--  tests/run/t429.scala  |  1
-rw-r--r--  tests/run/t5857.scala  |  5
-rw-r--r--  tests/run/t9915/C_1.java  |  20
-rw-r--r--  tests/run/t9915/Test_2.scala  |  12
70 files changed, 2409 insertions, 1595 deletions
diff --git a/.drone.yml b/.drone.yml
index 75f7d89de..22ce99c3a 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -1,3 +1,9 @@
+# After updating this file, you need to re-sign it:
+#
+# - Install [drone-cli](http://readme.drone.io/usage/getting-started-cli/)
+# - Copy your token from http://dotty-ci.epfl.ch/account (Click SHOW TOKEN)
+# - (export DRONE_TOKEN=your-token; export DRONE_SERVER=http://dotty-ci.epfl.ch; drone sign lampepfl/dotty)
+
pipeline:
test:
image: lampepfl/dotty:latest
@@ -34,9 +40,7 @@ pipeline:
matrix:
TEST:
- - ;set testOptions in LocalProject("dotty-compiler") += Tests.Argument(TestFrameworks.JUnit, "--exclude-categories=dotty.tools.dotc.ParallelTesting") ;test ;dotty-bin-tests/test
- - ;set testOptions in LocalProject("dotty-compiler-bootstrapped") += Tests.Argument(TestFrameworks.JUnit, "--exclude-categories=dotty.tools.dotc.ParallelTesting") ;publishLocal ;dotty-bootstrapped/test
- - ;set testOptions in LocalProject("dotty-compiler") += Tests.Argument(TestFrameworks.JUnit, "--exclude-categories=dotty.tools.dotc.ParallelTesting") ;partest-only-no-bootstrap --show-diff --verbose
- - ;set testOptions in LocalProject("dotty-compiler-bootstrapped") += Tests.Argument(TestFrameworks.JUnit, "--exclude-categories=dotty.tools.dotc.ParallelTesting") ;partest-only --show-diff --verbose
- - ;dotty-compiler/testOnly dotty.tools.dotc.CompilationTests
- - ;publishLocal ;dotty-bootstrapped/testOnly dotty.tools.dotc.CompilationTests
+ - dotty-bin-tests/test
+ - legacyTests
+ - test
+ - ;publishLocal ;dotty-bootstrapped/test
diff --git a/.drone.yml.sig b/.drone.yml.sig
index 7f5049f14..e071da484 100644
--- a/.drone.yml.sig
+++ b/.drone.yml.sig
@@ -1 +1 @@
-eyJhbGciOiJIUzI1NiJ9.cGlwZWxpbmU6CiAgdGVzdDoKICAgIGltYWdlOiBsYW1wZXBmbC9kb3R0eTpsYXRlc3QKICAgIHB1bGw6IHRydWUKICAgIGNvbW1hbmRzOgogICAgICAtIGxuIC1zIC92YXIvY2FjaGUvZHJvbmUvc2NhbGEtc2NhbGEgc2NhbGEtc2NhbGEKICAgICAgLSBsbiAtcyAvdmFyL2NhY2hlL2Ryb25lL2l2eTIgIiRIT01FLy5pdnkyIgogICAgICAtIC4vc2NyaXB0cy91cGRhdGUtc2NhbGEtbGlicmFyeQogICAgICAtIHNidCAtSi1YbXg0MDk2bSAtSi1YWDpSZXNlcnZlZENvZGVDYWNoZVNpemU9NTEybSAtSi1YWDpNYXhNZXRhc3BhY2VTaXplPTEwMjRtIC1EZG90dHkuZHJvbmUubWVtPTQwOTZtICIke1RFU1R9IgogICAgd2hlbjoKICAgICAgYnJhbmNoOgogICAgICAgIGV4Y2x1ZGU6IGdoLXBhZ2VzCgogIGRvY3VtZW50YXRpb246CiAgICBpbWFnZTogbGFtcGVwZmwvZG90dHk6bGF0ZXN0CiAgICBwdWxsOiB0cnVlCiAgICBjb21tYW5kczoKICAgICAgLSAuL3Byb2plY3Qvc2NyaXB0cy9nZW5Eb2NzICIke1RFU1R9IiAkQk9UX1BBU1MKICAgIHdoZW46CiAgICAgIGJyYW5jaDogbWFzdGVyCgogIGdpdHRlcjoKICAgIGltYWdlOiBwbHVnaW5zL2dpdHRlcgogICAgd2hlbjoKICAgICAgYnJhbmNoOiBtYXN0ZXIKICAgICAgc3RhdHVzOiBjaGFuZ2VkCgogIHNsYWNrOgogICAgaW1hZ2U6IHBsdWdpbnMvc2xhY2sKICAgIGNoYW5uZWw6IGRvdHR5CiAgICB3aGVuOgogICAgICBicmFuY2g6IG1hc3RlcgogICAgICBzdGF0dXM6IGNoYW5nZWQKCm1hdHJpeDoKICBURVNUOgogICAgLSA7c2V0IHRlc3RPcHRpb25zIGluIExvY2FsUHJvamVjdCgiZG90dHktY29tcGlsZXIiKSArPSBUZXN0cy5Bcmd1bWVudChUZXN0RnJhbWV3b3Jrcy5KVW5pdCwgIi0tZXhjbHVkZS1jYXRlZ29yaWVzPWRvdHR5LnRvb2xzLmRvdGMuUGFyYWxsZWxUZXN0aW5nIikgO3Rlc3QgO2RvdHR5LWJpbi10ZXN0cy90ZXN0CiAgICAtIDtzZXQgdGVzdE9wdGlvbnMgaW4gTG9jYWxQcm9qZWN0KCJkb3R0eS1jb21waWxlci1ib290c3RyYXBwZWQiKSArPSBUZXN0cy5Bcmd1bWVudChUZXN0RnJhbWV3b3Jrcy5KVW5pdCwgIi0tZXhjbHVkZS1jYXRlZ29yaWVzPWRvdHR5LnRvb2xzLmRvdGMuUGFyYWxsZWxUZXN0aW5nIikgO3B1Ymxpc2hMb2NhbCA7ZG90dHktYm9vdHN0cmFwcGVkL3Rlc3QKICAgIC0gO3NldCB0ZXN0T3B0aW9ucyBpbiBMb2NhbFByb2plY3QoImRvdHR5LWNvbXBpbGVyIikgKz0gVGVzdHMuQXJndW1lbnQoVGVzdEZyYW1ld29ya3MuSlVuaXQsICItLWV4Y2x1ZGUtY2F0ZWdvcmllcz1kb3R0eS50b29scy5kb3RjLlBhcmFsbGVsVGVzdGluZyIpIDtwYXJ0ZXN0LW9ubHktbm8tYm9vdHN0cmFwIC0tc2hvdy1kaWZmIC0tdmVyYm9zZQogICAgLSA7c2V0IHRlc3RPcHRpb25zIGluIExvY2FsUHJvamVjdCgiZG90dHktY29tcGlsZXItYm9vdHN0cmFwcGVkIikgKz0gVGVzdHMuQXJndW1lbnQoVGVzdEZyYW1ld29ya3MuSlVuaXQsICItLWV4Y2x1ZGUtY2F0ZWdvcmllcz1kb3R0eS50b29scy5kb3RjLlBhcmFsbGVsVGVzdGluZyIpIDtwYXJ0ZXN0LW9ubHkgLS1zaG93LWRpZmYgLS12ZXJib3NlCiAgICAtIDtkb3R0eS1jb21waWxlci90ZXN0T25seSBkb3R0eS50b29scy5kb3RjLkNvbXBpbGF0aW9uVGVzdHMKICAgIC0gO3B1Ymxpc2hMb2NhbCA7ZG90dHktYm9vdHN0cmFwcGVkL3Rlc3RPbmx5IGRvdHR5LnRvb2xzLmRvdGMuQ29tcGlsYXRpb25UZXN0cwo.b9x4iSh27OqWMUT8eR6uiK0OH_eERpnKIaMglF8hKYA \ No newline at end of file
+eyJhbGciOiJIUzI1NiJ9.IyBBZnRlciB1cGRhdGluZyB0aGlzIGZpbGUsIHlvdSBuZWVkIHRvIHJlLXNpZ24gaXQ6CiMKIyAtIEluc3RhbGwgW2Ryb25lLWNsaV0oaHR0cDovL3JlYWRtZS5kcm9uZS5pby91c2FnZS9nZXR0aW5nLXN0YXJ0ZWQtY2xpLykKIyAtIENvcHkgeW91ciB0b2tlbiBmcm9tICBodHRwOi8vZG90dHktY2kuZXBmbC5jaC9hY2NvdW50IChDbGljayBTSE9XIFRPS0VOKQojIC0gKGV4cG9ydCBEUk9ORV9UT0tFTj15b3VyLXRva2VuOyBleHBvcnQgRFJPTkVfU0VSVkVSPWh0dHA6Ly9kb3R0eS1jaS5lcGZsLmNoOyBkcm9uZSBzaWduIGxhbXBlcGZsL2RvdHR5KQoKcGlwZWxpbmU6CiAgdGVzdDoKICAgIGltYWdlOiBsYW1wZXBmbC9kb3R0eTpsYXRlc3QKICAgIHB1bGw6IHRydWUKICAgIGNvbW1hbmRzOgogICAgICAtIGxuIC1zIC92YXIvY2FjaGUvZHJvbmUvc2NhbGEtc2NhbGEgc2NhbGEtc2NhbGEKICAgICAgLSBsbiAtcyAvdmFyL2NhY2hlL2Ryb25lL2l2eTIgIiRIT01FLy5pdnkyIgogICAgICAtIC4vc2NyaXB0cy91cGRhdGUtc2NhbGEtbGlicmFyeQogICAgICAtIHNidCAtSi1YbXg0MDk2bSAtSi1YWDpSZXNlcnZlZENvZGVDYWNoZVNpemU9NTEybSAtSi1YWDpNYXhNZXRhc3BhY2VTaXplPTEwMjRtIC1EZG90dHkuZHJvbmUubWVtPTQwOTZtICIke1RFU1R9IgogICAgd2hlbjoKICAgICAgYnJhbmNoOgogICAgICAgIGV4Y2x1ZGU6IGdoLXBhZ2VzCgogIGRvY3VtZW50YXRpb246CiAgICBpbWFnZTogbGFtcGVwZmwvZG90dHk6bGF0ZXN0CiAgICBwdWxsOiB0cnVlCiAgICBjb21tYW5kczoKICAgICAgLSAuL3Byb2plY3Qvc2NyaXB0cy9nZW5Eb2NzICIke1RFU1R9IiAkQk9UX1BBU1MKICAgIHdoZW46CiAgICAgIGJyYW5jaDogbWFzdGVyCgogIGdpdHRlcjoKICAgIGltYWdlOiBwbHVnaW5zL2dpdHRlcgogICAgd2hlbjoKICAgICAgYnJhbmNoOiBtYXN0ZXIKICAgICAgc3RhdHVzOiBjaGFuZ2VkCgogIHNsYWNrOgogICAgaW1hZ2U6IHBsdWdpbnMvc2xhY2sKICAgIGNoYW5uZWw6IGRvdHR5CiAgICB3aGVuOgogICAgICBicmFuY2g6IG1hc3RlcgogICAgICBzdGF0dXM6IGNoYW5nZWQKCm1hdHJpeDoKICBURVNUOgogICAgLSBkb3R0eS1iaW4tdGVzdHMvdGVzdAogICAgLSBsZWdhY3lUZXN0cwogICAgLSB0ZXN0CiAgICAtIDtwdWJsaXNoTG9jYWwgO2RvdHR5LWJvb3RzdHJhcHBlZC90ZXN0Cg.bGW0VXWjWrro4w7rn_6Aq2veQaxXr7x3KJJCaF3X8V8 \ No newline at end of file
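
The signature has to change in lockstep with the YAML: .drone.yml.sig appears to be a JWT-style token (header.payload.signature, base64url-encoded) whose payload is the full .drone.yml content, and its header decodes to {"alg":"HS256"}. A small sketch for inspecting it locally; the file name and location are assumptions:

    import java.util.Base64

    object DroneSigInspect {
      def main(args: Array[String]): Unit = {
        // header.payload.signature, each part base64url-encoded
        val Array(header, payload, _) =
          scala.io.Source.fromFile(".drone.yml.sig").mkString.trim.split('.')
        val dec = Base64.getUrlDecoder
        println(new String(dec.decode(header)))  // {"alg":"HS256"}
        println(new String(dec.decode(payload))) // the signed .drone.yml content
      }
    }
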
diff --git a/AUTHORS.md b/AUTHORS.md
index db2f34a56..e197a6c5c 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -1,23 +1,28 @@
-The dotty compiler frontend has been developed since November 2012 by Martin Odersky. It is expected and hoped for
+The dotty compiler frontend has been developed since November 2012 by Martin Odersky. It is expected and hoped for
that the list of contributors to the codebase will grow quickly. Dotty draws inspiration and code from the original
-Scala compiler "nsc", which is developed at [scala/scala](https://github.com/scala/scala).
+Scala compiler "nsc", which is developed at [scala/scala](https://github.com/scala/scala).
The majority of the dotty codebase is new code, with the exception of the components mentioned below. We have for each component tried to come up with a list of the original authors in the [scala/scala](https://github.com/scala/scala) codebase. Apologies if some major authors were omitted by oversight.
`dotty.tools.dotc.ast`
-> The syntax tree handling is mostly new, but some elements, such as the idea of tree copiers and the `TreeInfo` module,
-> were adopted from [scala/scala](https://github.com/scala/scala).
+> The syntax tree handling is mostly new, but some elements, such as the idea of tree copiers and the `TreeInfo` module,
+> were adopted from [scala/scala](https://github.com/scala/scala).
> The original authors of these parts include Martin Odersky, Paul Phillips, Adriaan Moors, and Matthias Zenger.
+`dotty.tools.dotc.classpath`
+
+> The classpath handling is taken mostly as is from [scala/scala](https://github.com/scala/scala).
+> The original authors were Grzegorz Kossakowski, MichaΕ‚ Pociecha, Lukas Rytz, Jason Zaugg and others.
+
`dotty.tools.dotc.config`
-> The configuration components were adapted and extended from [scala/scala](https://github.com/scala/scala).
+> The configuration components were adapted and extended from [scala/scala](https://github.com/scala/scala).
> The original sources were authored by Paul Phillips with contributions from Martin Odersky, Miguel Garcia and others.
-
+
`dotty.tools.dotc.core`
-> The core data structures and operations are mostly new. Some parts (e.g. those dealing with names) were adapted from [scala/scala](https://github.com/scala/scala).
+> The core data structures and operations are mostly new. Some parts (e.g. those dealing with names) were adapted from [scala/scala](https://github.com/scala/scala).
> These were originally authored by Martin Odersky, Adriaan Moors, Jason Zaugg, Paul Phillips, Eugene Burmako and others.
`dotty.tools.dotc.core.pickling`
@@ -41,7 +46,7 @@ The majority of the dotty codebase is new code, with the exception of the compon
> The utilities package is a mix of new and adapted components. The files in [scala/scala](https://github.com/scala/scala) were originally authored by many people,
> including Paul Phillips, Martin Odersky, Sean McDirmid, and others.
-
+
`dotty.tools.io`
> The I/O support library was adapted from current Scala compiler. Original authors were Paul Phillips and others.
@@ -53,7 +58,7 @@ The majority of the dotty codebase is new code, with the exception of the compon
> the needs of dotty. Original authors include: Adrian Moors, Lukas Rytz,
> Grzegorz Kossakowski, Paul Phillips
-`dotty.tools.dotc.sbt and everything in bridge/`
+`dotty.tools.dotc.sbt and everything in sbt-bridge/`
> The sbt compiler phases are based on
> https://github.com/adriaanm/scala/tree/sbt-api-consolidate/src/compiler/scala/tools/sbt
diff --git a/compiler/foo b/compiler/foo
deleted file mode 100644
index e69de29bb..000000000
--- a/compiler/foo
+++ /dev/null
diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala
new file mode 100644
index 000000000..ec3e8fdf4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import java.net.URL
+import scala.annotation.tailrec
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.FatalError
+import scala.reflect.io.AbstractFile
+import dotty.tools.io.ClassPath
+import dotty.tools.io.ClassRepresentation
+
+/**
+ * A classpath unifying multiple class- and sourcepath entries.
+ * The classpath can obtain entries for classes and sources independently,
+ * so it tries to perform operations optimally - iterating only the collections
+ * that are actually needed at a given moment, and only as far as necessary.
+ *
+ * @param aggregates classpath instances containing entries which this class processes
+ */
+case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath {
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ @tailrec
+ def find(aggregates: Seq[ClassPath]): Option[AbstractFile] =
+ if (aggregates.nonEmpty) {
+ val classFile = aggregates.head.findClassFile(className)
+ if (classFile.isDefined) classFile
+ else find(aggregates.tail)
+ } else None
+
+ find(aggregates)
+ }
+
+ override def findClass(className: String): Option[ClassRepresentation] = {
+ @tailrec
+ def findEntry(aggregates: Seq[ClassPath], isSource: Boolean): Option[ClassRepresentation] =
+ if (aggregates.nonEmpty) {
+ val entry = aggregates.head.findClass(className) match {
+ case s @ Some(_: SourceFileEntry) if isSource => s
+ case s @ Some(_: ClassFileEntry) if !isSource => s
+ case _ => None
+ }
+ if (entry.isDefined) entry
+ else findEntry(aggregates.tail, isSource)
+ } else None
+
+ val classEntry = findEntry(aggregates, isSource = false)
+ val sourceEntry = findEntry(aggregates, isSource = true)
+
+ (classEntry, sourceEntry) match {
+ case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file))
+ case (c @ Some(_), _) => c
+ case (_, s) => s
+ }
+ }
+
+ override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs)
+
+ override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct
+
+ override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*)
+
+ override private[dotty] def packages(inPackage: String): Seq[PackageEntry] = {
+ val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct
+ aggregatedPackages
+ }
+
+ override private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] =
+ getDistinctEntries(_.classes(inPackage))
+
+ override private[dotty] def sources(inPackage: String): Seq[SourceFileEntry] =
+ getDistinctEntries(_.sources(inPackage))
+
+ override private[dotty] def list(inPackage: String): ClassPathEntries = {
+ val (packages, classesAndSources) = aggregates.map { cp =>
+ try {
+ cp.list(inPackage)
+ } catch {
+ case ex: java.io.IOException =>
+ val e = new FatalError(ex.getMessage)
+ e.initCause(ex)
+ throw e
+ }
+ }.unzip
+ val distinctPackages = packages.flatten.distinct
+ val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
+ ClassPathEntries(distinctPackages, distinctClassesAndSources)
+ }
+
+ /**
+ * Returns only one entry for each name. If there's both a source and a class entry, it
+ * creates an entry containing both of them. If there is more than one class or source
+ * entry for the same class, the first entry of each type found on the classpath is used.
+ */
+ private def mergeClassesAndSources(entries: Seq[ClassRepresentation]*): Seq[ClassRepresentation] = {
+ // based on the implementation from MergedClassPath
+ var count = 0
+ val indices = collection.mutable.HashMap[String, Int]()
+ val mergedEntries = new ArrayBuffer[ClassRepresentation](1024)
+
+ for {
+ partOfEntries <- entries
+ entry <- partOfEntries
+ } {
+ val name = entry.name
+ if (indices contains name) {
+ val index = indices(name)
+ val existing = mergedEntries(index)
+
+ if (existing.binary.isEmpty && entry.binary.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get)
+ if (existing.source.isEmpty && entry.source.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get)
+ }
+ else {
+ indices(name) = count
+ mergedEntries += entry
+ count += 1
+ }
+ }
+ mergedEntries.toIndexedSeq
+ }
+
+ private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = {
+ val seenNames = collection.mutable.HashSet[String]()
+ val entriesBuffer = new ArrayBuffer[EntryType](1024)
+ for {
+ cp <- aggregates
+ entry <- getEntries(cp) if !seenNames.contains(entry.name)
+ } {
+ entriesBuffer += entry
+ seenNames += entry.name
+ }
+ entriesBuffer.toIndexedSeq
+ }
+}
+
+object AggregateClassPath {
+ def createAggregate(parts: ClassPath*): ClassPath = {
+ val elems = new ArrayBuffer[ClassPath]()
+ parts foreach {
+ case AggregateClassPath(ps) => elems ++= ps
+ case p => elems += p
+ }
+ if (elems.size == 1) elems.head
+ else AggregateClassPath(elems.toIndexedSeq)
+ }
+}
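
findClass and mergeClassesAndSources above give first-entry-wins semantics across the aggregated parts. A minimal usage sketch; the directory paths are hypothetical, and createAggregate flattens any nested aggregates:

    import java.io.File
    import dotty.tools.dotc.classpath.{AggregateClassPath, DirectoryClassPath}

    object AggregateSketch {
      def main(args: Array[String]): Unit = {
        val classes = DirectoryClassPath(new File("out/classes")) // hypothetical
        val deps    = DirectoryClassPath(new File("out/deps"))    // hypothetical

        // Nested aggregates are flattened into a single level of entries.
        val cp = AggregateClassPath.createAggregate(classes, AggregateClassPath(Seq(deps)))

        println(cp.asClassPathStrings)     // one string per aggregated part
        println(cp.findClassFile("p.Foo")) // the first part containing p/Foo.class wins
      }
    }
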
diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala
new file mode 100644
index 000000000..129c6b9fe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import scala.reflect.io.AbstractFile
+import dotty.tools.io.ClassRepresentation
+
+case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation])
+
+object ClassPathEntries {
+ import scala.language.implicitConversions
+ // to have working unzip method
+ implicit def entry2Tuple(entry: ClassPathEntries): (Seq[PackageEntry], Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources)
+}
+
+trait ClassFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[dotty] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[dotty] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[dotty] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
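
The entry2Tuple conversion exists so that a collection of ClassPathEntries can be unzipped into its packages and its class/source entries, which is exactly how AggregateClassPath.list uses it. A small sketch, placed in the same package so the private[dotty] entry types are visible:

    package dotty.tools.dotc.classpath

    object EntriesUnzipSketch {
      def main(args: Array[String]): Unit = {
        val entries = Seq(
          ClassPathEntries(Seq(PackageEntryImpl("scala")), Nil),
          ClassPathEntries(Seq(PackageEntryImpl("java.lang")), Nil))

        // entry2Tuple is found in the companion object, so unzip applies directly.
        val (packages, _) = entries.unzip
        println(packages.flatten.map(_.name)) // List(scala, java.lang)
      }
    }
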
diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
new file mode 100644
index 000000000..ac8fc633f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import scala.reflect.io.{AbstractFile, VirtualDirectory}
+import scala.reflect.io.Path.string2path
+import dotty.tools.dotc.config.Settings
+import FileUtils.AbstractFileOps
+import dotty.tools.io.ClassPath
+import dotty.tools.dotc.core.Contexts.Context
+
+/**
+ * Provides factory methods for creating classpath instances. For a given path it chooses
+ * the proper type of classpath depending on the kinds of files containing sources or classes.
+ */
+class ClassPathFactory {
+ /**
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile)(implicit ctx: Context): ClassPath = ClassPathFactory.newClassPath(file)
+
+ /**
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String)(implicit ctx: Context): List[ClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
+
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar)
+
+ def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir)
+
+ def contentsOfDirsInPath(path: String)(implicit ctx: Context): List[ClassPath] =
+ for {
+ dir <- expandPath(path, expandStar = false)
+ name <- expandDir(dir)
+ entry <- Option(AbstractFile.getDirectory(name))
+ } yield newClassPath(entry)
+
+ def classesInExpandedPath(path: String)(implicit ctx: Context): IndexedSeq[ClassPath] =
+ classesInPathImpl(path, expand = true).toIndexedSeq
+
+ def classesInPath(path: String)(implicit ctx: Context) = classesInPathImpl(path, expand = false)
+
+ def classesInManifest(useManifestClassPath: Boolean)(implicit ctx: Context) =
+ if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ else Nil
+
+ // Internal
+ protected def classesInPathImpl(path: String, expand: Boolean)(implicit ctx: Context) =
+ for {
+ file <- expandPath(path, expand)
+ dir <- {
+ def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None
+ Option(AbstractFile.getDirectory(file)).orElse(asImage)
+ }
+ } yield newClassPath(dir)
+
+ private def createSourcePath(file: AbstractFile)(implicit ctx: Context): ClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarSourcePathFactory.create(file)
+ else if (file.isDirectory)
+ new DirectorySourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
+
+object ClassPathFactory {
+ def newClassPath(file: AbstractFile)(implicit ctx: Context): ClassPath = file match {
+ case vd: VirtualDirectory => VirtualDirectoryClassPath(vd)
+ case _ =>
+ if (file.isJarOrZip)
+ ZipAndJarClassPathFactory.create(file)
+ else if (file.isDirectory)
+ new DirectoryClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+ }
+}
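
newClassPath picks the implementation from the kind of file it is handed (virtual directory, jar or zip, plain directory), while the path helpers delegate to dotty.tools.io.ClassPath and need no compiler Context. A sketch of the helpers; the classpath entries are hypothetical:

    import java.io.File
    import dotty.tools.io.ClassPath
    import dotty.tools.dotc.classpath.ClassPathFactory

    object PathHelpersSketch {
      def main(args: Array[String]): Unit = {
        // join/split round-trip a platform classpath string.
        val joined = ClassPath.join("out", "lib/foo.jar")
        println(ClassPath.split(joined)) // List(out, lib/foo.jar)

        // expandPath splits a -classpath value; with expandStar = true a wildcard
        // entry such as "lib/*" is expanded to the jars that directory contains.
        val factory = new ClassPathFactory
        println(factory.expandPath("out" + File.pathSeparator + "lib/foo.jar", expandStar = false))
      }
    }
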
diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
new file mode 100644
index 000000000..1ed233ed7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
@@ -0,0 +1,245 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import java.io.File
+import java.net.{URI, URL}
+import java.nio.file.{FileSystems, Files, SimpleFileVisitor}
+import java.util.function.IntFunction
+import java.util
+import java.util.Comparator
+
+import scala.reflect.io.{AbstractFile, PlainFile}
+import dotty.tools.io.{ClassPath, ClassRepresentation, PlainNioFile}
+import FileUtils._
+import scala.collection.JavaConverters._
+
+/**
+ * A trait for looking up classpath entries in directories. It provides common logic for
+ * classes handling class and source files.
+ * It makes use of the fact that, with nested directories, a file is easy to find
+ * once the package name is known.
+ * It abstracts over the file representation to work with both JFile and AbstractFile.
+ */
+trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
+ type F
+
+ val dir: F
+
+ protected def emptyFiles: Array[F] // avoids reifying ClassTag[F]
+ protected def getSubDir(dirName: String): Option[F]
+ protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F]
+ protected def getName(f: F): String
+ protected def toAbstractFile(f: F): AbstractFile
+ protected def isPackage(f: F): Boolean
+
+ protected def createFileEntry(file: AbstractFile): FileEntryType
+ protected def isMatchingFile(f: F): Boolean
+
+ private def getDirectory(forPackage: String): Option[F] = {
+ if (forPackage == ClassPath.RootPackage) {
+ Some(dir)
+ } else {
+ val packageDirName = FileUtils.dirPath(forPackage)
+ getSubDir(packageDirName)
+ }
+ }
+
+ private[dotty] def packages(inPackage: String): Seq[PackageEntry] = {
+ val dirForPackage = getDirectory(inPackage)
+ val nestedDirs: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory, Some(isPackage))
+ }
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ nestedDirs.map(f => PackageEntryImpl(prefix + getName(f)))
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory, Some(isMatchingFile))
+ }
+ files.map(f => createFileEntry(toAbstractFile(f)))
+ }
+
+ private[dotty] def list(inPackage: String): ClassPathEntries = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory)
+ }
+ val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
+ val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ for (file <- files) {
+ if (isPackage(file))
+ packageBuf += PackageEntryImpl(packagePrefix + getName(file))
+ else if (isMatchingFile(file))
+ fileBuf += createFileEntry(toAbstractFile(file))
+ }
+ ClassPathEntries(packageBuf, fileBuf)
+ }
+}
+
+trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] {
+ type F = File
+
+ protected def emptyFiles: Array[File] = Array.empty
+ protected def getSubDir(packageDirName: String): Option[File] = {
+ val packageDir = new File(dir, packageDirName)
+ if (packageDir.exists && packageDir.isDirectory) Some(packageDir)
+ else None
+ }
+ protected def listChildren(dir: File, filter: Option[File => Boolean]): Array[File] = {
+ val listing = filter match {
+ case Some(f) => dir.listFiles(mkFileFilter(f))
+ case None => dir.listFiles()
+ }
+
+ if (listing != null) {
+ // Sort by file name for stable order of directory .class entries in package scope.
+ // This gives stable results ordering of base type sequences for unrelated classes
+ // with the same base type depth.
+ //
+ // Notably, this will stably infer `Product with Serializable`
+ // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order.
+ // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified.
+ //
+ // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only
+ // intended to improve determinism of the compiler for compiler hackers.
+ java.util.Arrays.sort(listing,
+ new java.util.Comparator[File] {
+ def compare(o1: File, o2: File) = o1.getName.compareTo(o2.getName)
+ })
+ listing
+ } else Array()
+ }
+ protected def getName(f: File): String = f.getName
+ protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new scala.reflect.io.File(f))
+ protected def isPackage(f: File): Boolean = f.isPackage
+
+ assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
+
+ def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
+ def asClassPathStrings: Seq[String] = Seq(dir.getPath)
+}
+
+object JrtClassPath {
+ import java.nio.file._, java.net.URI
+ def apply(): Option[ClassPath] = {
+ try {
+ val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
+ Some(new JrtClassPath(fs))
+ } catch {
+ case _: ProviderNotFoundException | _: FileSystemNotFoundException =>
+ None
+ }
+ }
+}
+
+/**
+ * Implementation of `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220)
+ *
+ * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference
+ * for the structure of the jrt:// filesystem.
+ *
+ * The implementation assumes that no classes exist in the empty package.
+ */
+final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
+ import java.nio.file.Path, java.nio.file._
+ type F = Path
+ private val dir: Path = fs.getPath("/packages")
+
+ // e.g. "java.lang" -> Seq("/modules/java.base")
+ private val packageToModuleBases: Map[String, Seq[Path]] = {
+ val ps = Files.newDirectoryStream(dir).iterator().asScala
+ def lookup(pack: Path): Seq[Path] = {
+ Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList
+ }
+ ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap
+ }
+
+ override private[dotty] def packages(inPackage: String): Seq[PackageEntry] = {
+ def matches(packageDottedName: String) =
+ if (packageDottedName.contains("."))
+ packageOf(packageDottedName) == inPackage
+ else inPackage == ""
+ packageToModuleBases.keysIterator.filter(matches).map(PackageEntryImpl(_)).toVector
+ }
+ private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = {
+ if (inPackage == "") Nil
+ else {
+ packageToModuleBases.getOrElse(inPackage, Nil).flatMap(x =>
+ Files.list(x.resolve(inPackage.replace('.', '/'))).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x =>
+ ClassFileEntryImpl(new PlainNioFile(x))).toVector
+ }
+ }
+
+ override private[dotty] def list(inPackage: String): ClassPathEntries =
+ if (inPackage == "") ClassPathEntries(packages(inPackage), Nil)
+ else ClassPathEntries(packages(inPackage), classes(inPackage))
+
+ def asURLs: Seq[URL] = Seq(dir.toUri.toURL)
+ // We don't yet have a scheme to represent the JDK modules in our `-classpath`.
+ // java models them as entries in the new "module path", we'll probably need to follow this.
+ def asClassPathStrings: Seq[String] = Nil
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ if (!className.contains(".")) None
+ else {
+ val inPackage = packageOf(className)
+ packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{x =>
+ val file = x.resolve(className.replace('.', '/') + ".class")
+ if (Files.exists(file)) new PlainNioFile(file) :: Nil else Nil
+ }.take(1).toList.headOption
+ }
+ }
+ private def packageOf(dottedClassName: String): String =
+ dottedClassName.substring(0, dottedClassName.lastIndexOf("."))
+}
+
+case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val classFile = new File(s"$dir/$relativePath.class")
+ if (classFile.exists) {
+ val wrappedClassFile = new scala.reflect.io.File(classFile)
+ val abstractClassFile = new PlainFile(wrappedClassFile)
+ Some(abstractClassFile)
+ } else None
+ }
+
+ protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ protected def isMatchingFile(f: File): Boolean = f.isClass
+
+ private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+}
+
+case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths {
+ def asSourcePathString: String = asClassPathString
+
+ protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ protected def isMatchingFile(f: File): Boolean = endsScalaOrJava(f.getName)
+
+ override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl
+
+ private def findSourceFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val sourceFile = Stream("scala", "java")
+ .map(ext => new File(s"$dir/$relativePath.$ext"))
+ .collectFirst { case file if file.exists() => file }
+
+ sourceFile.map { file =>
+ val wrappedSourceFile = new scala.reflect.io.File(file)
+ val abstractSourceFile = new PlainFile(wrappedSourceFile)
+ abstractSourceFile
+ }
+ }
+
+ private[dotty] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+}
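
DirectoryClassPath resolves a class name by appending dirPath(className) + ".class" under the root directory, DirectorySourcePath does the same for .scala and .java sources, and JrtClassPath serves the JDK 9 runtime image through the jrt:/ filesystem. A minimal lookup sketch against a hypothetical output directory:

    import java.io.File
    import dotty.tools.dotc.classpath.{DirectoryClassPath, JrtClassPath}

    object DirectoryLookupSketch {
      def main(args: Array[String]): Unit = {
        val cp = DirectoryClassPath(new File("out/classes")) // hypothetical
        // "com.example.Foo" is looked up as out/classes/com/example/Foo.class
        println(cp.findClassFile("com.example.Foo").isDefined)

        // On JDK 9 the jrt:/ filesystem is present and java.* classes resolve there.
        for (jrt <- JrtClassPath())
          println(jrt.findClassFile("java.lang.Object").isDefined)
      }
    }
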
diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala
new file mode 100644
index 000000000..823efbb9d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import java.io.{File => JFile, FileFilter}
+import java.net.URL
+import scala.reflect.internal.FatalError
+import scala.reflect.io.AbstractFile
+
+/**
+ * Common methods related to Java files and abstract files used in the context of classpath
+ */
+object FileUtils {
+ implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name)
+
+ def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class")
+ // FIXME: drop last condition when we stop being compatible with Scala 2.11
+
+ def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java"))
+
+ // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip?
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip")
+
+ /**
+ * Safe method returning a sequence containing one URL representing this file when the underlying file exists,
+ * and the given default value otherwise.
+ */
+ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL)
+ }
+
+ implicit class FileOps(val file: JFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName)
+
+ def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class")
+ // FIXME: drop last condition when we stop being compatible with Scala 2.11
+ }
+
+ def stripSourceExtension(fileName: String): String = {
+ if (endsScala(fileName)) stripClassExtension(fileName)
+ else if (endsJava(fileName)) stripJavaExtension(fileName)
+ else throw new FatalError("Unexpected source file ending: " + fileName)
+ }
+
+ def dirPath(forPackage: String) = forPackage.replace('.', '/')
+
+ def endsClass(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class"
+
+ def endsScalaOrJava(fileName: String): Boolean =
+ endsScala(fileName) || endsJava(fileName)
+
+ def endsJava(fileName: String): Boolean =
+ fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java"
+
+ def endsScala(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala"
+
+ def stripClassExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
+
+ def stripJavaExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 5)
+
+ // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed
+ // because then some tests in partest don't pass
+ def mayBeValidPackage(dirName: String): Boolean =
+ (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.')
+
+ def mkFileFilter(f: JFile => Boolean) = new FileFilter {
+ def accept(pathname: JFile): Boolean = f(pathname)
+ }
+}
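
The helpers above are plain string and file-name predicates; a few worked examples:

    import dotty.tools.dotc.classpath.FileUtils

    object FileUtilsSketch {
      def main(args: Array[String]): Unit = {
        println(FileUtils.dirPath("scala.collection.immutable")) // scala/collection/immutable
        println(FileUtils.stripClassExtension("List.class"))     // List
        println(FileUtils.stripSourceExtension("List.scala"))    // List
        println(FileUtils.endsScalaOrJava("Foo.java"))           // true
        println(FileUtils.mayBeValidPackage("META-INF"))         // false
        println(FileUtils.mayBeValidPackage("immutable"))        // true
      }
    }
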
diff --git a/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala b/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala
new file mode 100644
index 000000000..303f142b9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import dotty.tools.io.ClassPath.RootPackage
+
+/**
+ * Common methods related to package names represented as String
+ */
+object PackageNameUtils {
+
+ /**
+ * @param fullClassName full class name with package
+ * @return (package, simple class name)
+ */
+ def separatePkgAndClassNames(fullClassName: String): (String, String) = {
+ val lastDotIndex = fullClassName.lastIndexOf('.')
+ if (lastDotIndex == -1)
+ (RootPackage, fullClassName)
+ else
+ (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1))
+ }
+
+ def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "."
+}
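
A worked example of the split; this assumes RootPackage in dotty.tools.io.ClassPath is the empty string, as in the scala/scala original:

    import dotty.tools.dotc.classpath.PackageNameUtils

    object PackageNamesSketch {
      def main(args: Array[String]): Unit = {
        println(PackageNameUtils.separatePkgAndClassNames("scala.collection.List")) // (scala.collection,List)
        println(PackageNameUtils.separatePkgAndClassNames("Toplevel"))              // (,Toplevel) - no package
        println(PackageNameUtils.packagePrefix("scala.collection"))                 // scala.collection.
      }
    }
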
diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
new file mode 100644
index 000000000..5b0855554
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
@@ -0,0 +1,52 @@
+package dotty.tools.dotc.classpath
+
+import dotty.tools.io.ClassRepresentation
+import scala.reflect.io.{AbstractFile, Path, PlainFile, VirtualDirectory}
+import FileUtils._
+import java.net.URL
+
+import scala.reflect.internal.util.AbstractFileClassLoader
+import dotty.tools.io.ClassPath
+
+case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+ type F = AbstractFile
+
+ // From AbstractFileClassLoader
+ private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = {
+ var file: AbstractFile = base
+ for (dirPart <- pathParts.init) {
+ file = file.lookupName(dirPart, directory = true)
+ if (file == null)
+ return null
+ }
+
+ file.lookupName(pathParts.last, directory = directory)
+ }
+
+ protected def emptyFiles: Array[AbstractFile] = Array.empty
+ protected def getSubDir(packageDirName: String): Option[AbstractFile] =
+ Option(lookupPath(dir)(packageDirName.split('/'), directory = true))
+ protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match {
+ case Some(f) => dir.iterator.filter(f).toArray
+ case _ => dir.toArray
+ }
+ def getName(f: AbstractFile): String = f.name
+ def toAbstractFile(f: AbstractFile): AbstractFile = f
+ def isPackage(f: AbstractFile): Boolean = f.isPackage
+
+ // mimic the behavior of the old nsc.util.DirectoryClassPath
+ def asURLs: Seq[URL] = Seq(new URL(dir.name))
+ def asClassPathStrings: Seq[String] = Seq(dir.path)
+
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className) + ".class"
+ Option(lookupPath(dir)(relativePath split '/', directory = false))
+ }
+
+ private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass
+}
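
A sketch of wrapping an in-memory VirtualDirectory, the case this classpath exists for (for example REPL output that never touches disk); the entry names are made up:

    import scala.reflect.io.VirtualDirectory
    import dotty.tools.dotc.classpath.VirtualDirectoryClassPath

    object VirtualCpSketch {
      def main(args: Array[String]): Unit = {
        val vdir = new VirtualDirectory("(memory)", None)
        vdir.subdirectoryNamed("example").fileNamed("Foo.class")

        val cp = VirtualDirectoryClassPath(vdir)
        // lookupPath walks example/Foo.class inside the virtual tree.
        println(cp.findClassFile("example.Foo").isDefined) // true
        println(cp.findClass("example.Foo").map(_.name))   // Some(Foo)
      }
    }
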
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
new file mode 100644
index 000000000..5210c699e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
@@ -0,0 +1,176 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.annotation.tailrec
+import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources}
+import dotty.tools.io.ClassPath
+import dotty.tools.dotc.config.Settings
+import dotty.tools.dotc.core.Contexts.Context
+import FileUtils._
+
+/**
+ * A trait providing an optional cache for classpath entries obtained from zip and jar files.
+ * Such a cache is possible because entries in those files are assumed not to change (at
+ * least to stay the same each time the classpath is loaded during the lifetime of the JVM process)
+ * - unlike class and source files in directories, which can be modified and recompiled.
+ * It allows us, for example, to significantly reduce the memory used by presentation compilers in Scala IDE
+ * when many projects share many common dependencies.
+ */
+sealed trait ZipAndJarFileLookupFactory {
+ private val cache = collection.mutable.Map.empty[AbstractFile, ClassPath]
+
+ def create(zipFile: AbstractFile)(implicit ctx: Context): ClassPath = {
+ if (ctx.settings.YdisableFlatCpCaching.value) createForZipFile(zipFile)
+ else createUsingCache(zipFile)
+ }
+
+ protected def createForZipFile(zipFile: AbstractFile): ClassPath
+
+ private def createUsingCache(zipFile: AbstractFile)(implicit ctx: Context): ClassPath = cache.synchronized {
+ def newClassPathInstance = {
+ if (ctx.settings.verbose.value || ctx.settings.Ylogcp.value)
+ println(s"$zipFile is not yet in the classpath cache")
+ createForZipFile(zipFile)
+ }
+ cache.getOrElseUpdate(zipFile, newClassPathInstance)
+ }
+}
+
+/**
+ * Manages creation of classpath for class files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveClassPath(zipFile: File)
+ extends ZipArchiveFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass
+ }
+
+ /**
+ * This type of classpath is closely related to the support for JSR-223.
+ * Its usage can be observed e.g. when running:
+ * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala
+ * with a specially prepared scala-library.jar, whose manifest lists every class as an entry such as:
+ * Name: scala/Function2$mcFJD$sp.class
+ */
+ private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths {
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override def asClassPathStrings: Seq[String] = Seq(file.path)
+
+ override def asURLs: Seq[URL] = file.toURLs()
+
+ import ManifestResourcesClassPath.PackageFileInfo
+ import ManifestResourcesClassPath.PackageInfo
+
+ /**
+ * A cache mapping each package name to the abstract file of its package directory and its subpackages.
+ *
+ * ManifestResources can iterate through collections of entries, e.g. from a remote jar file.
+ * We cannot specify the path to a concrete directory, so we cannot simply 'jump' into a
+ * given package when it is needed. On the other hand, we can iterate over entries to get
+ * AbstractFiles, iterate over the entries of those files, and so on.
+ *
+ * Instead of traversing the tree of AbstractFiles once and caching all entries, or traversing it each time
+ * subpackages or classes of a given package are needed, we traverse it once and cache only the packages.
+ * Classes for a given package can then be loaded easily when they are needed.
+ */
+ private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = {
+ val packages = collection.mutable.HashMap[String, PackageFileInfo]()
+
+ def getSubpackages(dir: AbstractFile): List[AbstractFile] =
+ (for (file <- dir if file.isPackage) yield file)(collection.breakOut)
+
+ @tailrec
+ def traverse(packagePrefix: String,
+ filesForPrefix: List[AbstractFile],
+ subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match {
+ case pkgFile :: remainingFiles =>
+ val subpackages = getSubpackages(pkgFile)
+ val fullPkgName = packagePrefix + pkgFile.name
+ packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages))
+ val newPackagePrefix = fullPkgName + "."
+ subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages))
+ traverse(packagePrefix, remainingFiles, subpackagesQueue)
+ case Nil if subpackagesQueue.nonEmpty =>
+ val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue()
+ traverse(packagePrefix, filesForPrefix, subpackagesQueue)
+ case _ =>
+ }
+
+ val subpackages = getSubpackages(file)
+ packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages
+ }
+
+ override private[dotty] def packages(inPackage: String): Seq[PackageEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(_, subpackages)) =>
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ subpackages.map(packageFile => PackageEntryImpl(prefix + packageFile.name))
+ }
+
+ override private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(pkg, _)) =>
+ (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
+ }
+
+ override private[dotty] def list(inPackage: String): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage))
+ }
+
+ private object ManifestResourcesClassPath {
+ case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
+ case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath =
+ if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
+ else ZipArchiveClassPath(zipFile.file)
+
+ private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
+ case manifestRes: ManifestResources =>
+ ManifestResourcesClassPath(manifestRes)
+ case _ =>
+ val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
+ throw new IllegalArgumentException(errorMsg)
+ }
+}
+
+/**
+ * Manages creation of classpath for source files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveSourcePath(zipFile: File)
+ extends ZipArchiveFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override private[dotty] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath = ZipArchiveSourcePath(zipFile.file)
+}
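
The cache above is keyed by the zip or jar AbstractFile and shared across compiler instances in the same JVM; the new -YdisableFlatCpCaching flag opts out of it. A sketch of the observable effect, assuming a Context from ContextBase.initialCtx and an existing jar at the hypothetical path:

    import scala.reflect.io.AbstractFile
    import dotty.tools.dotc.core.Contexts.{Context, ContextBase}
    import dotty.tools.dotc.classpath.ZipAndJarClassPathFactory

    object JarCacheSketch {
      def main(args: Array[String]): Unit = {
        implicit val ctx: Context = (new ContextBase).initialCtx
        val jar = AbstractFile.getFile("lib/foo.jar") // hypothetical; must exist

        val cp1 = ZipAndJarClassPathFactory.create(jar)
        val cp2 = ZipAndJarClassPathFactory.create(jar)
        // With caching on (the default) the same instance is reused, so the
        // zip index is built only once per JVM.
        println(cp1 eq cp2)
      }
    }
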
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
new file mode 100644
index 000000000..8184708ad
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package dotty.tools.dotc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.collection.Seq
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.FileZipArchive
+import FileUtils.AbstractFileOps
+import dotty.tools.io.{ClassPath, ClassRepresentation}
+
+/**
+ * A trait for looking up classpath entries of a given type in zip and jar files.
+ * It provides common logic for classes handling class and source files.
+ * It is aware of special cases such as the META-INF directory, which is correctly skipped.
+ */
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
+ val zipFile: File
+
+ assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath)
+
+ private val archive = new FileZipArchive(zipFile)
+
+ override private[dotty] def packages(inPackage: String): Seq[PackageEntry] = {
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if entry.isPackage
+ } yield PackageEntryImpl(prefix + entry.name)
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] =
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if isRequiredFileType(entry)
+ } yield createFileEntry(entry)
+
+ override private[dotty] def list(inPackage: String): ClassPathEntries = {
+ val foundDirEntry = findDirEntry(inPackage)
+
+ foundDirEntry map { dirEntry =>
+ val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+
+ for (entry <- dirEntry.iterator) {
+ if (entry.isPackage)
+ pkgBuf += PackageEntryImpl(prefix + entry.name)
+ else if (isRequiredFileType(entry))
+ fileBuf += createFileEntry(entry)
+ }
+ ClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse ClassPathEntries(Seq.empty, Seq.empty)
+ }
+
+ private def findDirEntry(pkg: String): Option[archive.DirEntry] = {
+ val dirName = s"${FileUtils.dirPath(pkg)}/"
+ archive.allDirs.get(dirName)
+ }
+
+ protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType
+ protected def isRequiredFileType(file: AbstractFile): Boolean
+}
diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
index 8bc18c387..d2a8e18a2 100644
--- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package config
-import io.{AbstractFile,ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
-import ClassPath.{ JavaContext, DefaultJavaContext }
+import io._
+import classpath.AggregateClassPath
import core._
import Symbols._, Types._, Contexts._, Denotations._, SymDenotations._, StdNames._, Names._
import Flags._, Scopes._, Decorators._, NameOps._, util.Positions._
@@ -11,7 +11,7 @@ import transform.ExplicitOuter, transform.SymUtils._
class JavaPlatform extends Platform {
- private var currentClassPath: Option[MergedClassPath] = None
+ private var currentClassPath: Option[ClassPath] = None
def classPath(implicit ctx: Context): ClassPath = {
if (currentClassPath.isEmpty)
@@ -35,8 +35,12 @@ class JavaPlatform extends Platform {
}
/** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath, ClassPath]) =
- currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match {
+ case AggregateClassPath(entries) =>
+ currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e))))
+ case cp: ClassPath =>
+ currentClassPath = Some(subst.getOrElse(cp, cp))
+ }
def rootLoader(root: TermSymbol)(implicit ctx: Context): SymbolLoader = new ctx.base.loaders.PackageLoader(root, classPath)
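
updateClassPath now substitutes entries inside the AggregateClassPath directly instead of wrapping the old classpath in a DeltaClassPath. A standalone sketch of the same substitution, with hypothetical directory entries:

    import java.io.File
    import dotty.tools.io.ClassPath
    import dotty.tools.dotc.classpath.{AggregateClassPath, DirectoryClassPath}

    object SubstituteSketch {
      // Same shape as updateClassPath: swap aggregated entries, keep the rest.
      def substitute(cp: ClassPath, subst: Map[ClassPath, ClassPath]): ClassPath = cp match {
        case AggregateClassPath(entries) =>
          AggregateClassPath(entries.map(e => subst.getOrElse(e, e)))
        case other =>
          subst.getOrElse(other, other)
      }

      def main(args: Array[String]): Unit = {
        val stale = DirectoryClassPath(new File("out/old")) // hypothetical
        val fresh = DirectoryClassPath(new File("out/new")) // hypothetical
        val cp    = AggregateClassPath(Seq(stale, DirectoryClassPath(new File("lib"))))
        println(substitute(cp, Map(stale -> fresh)).asClassPathStrings)
      }
    }
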
diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
index 159989e6f..f0709f4d3 100644
--- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala
+++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
@@ -4,8 +4,9 @@ package config
import java.net.{ URL, MalformedURLException }
import WrappedProperties.AccessControl
-import io.{ ClassPath, JavaClassPath, File, Directory, Path, AbstractFile }
-import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
+import io.{ ClassPath, File, Directory, Path, AbstractFile }
+import classpath.{AggregateClassPath, ClassPathFactory }
+import ClassPath.{ JavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
import core.Contexts._
@@ -128,7 +129,7 @@ object PathResolver {
)
}
- def fromPathString(path: String)(implicit ctx: Context): JavaClassPath = {
+ def fromPathString(path: String)(implicit ctx: Context): ClassPath = {
val settings = ctx.settings.classpath.update(path)
new PathResolver()(ctx.fresh.setSettings(settings)).result
}
@@ -150,7 +151,11 @@ object PathResolver {
val pr = new PathResolver()(ctx.fresh.setSettings(sstate))
println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show
+
+ pr.result match {
+ case cp: AggregateClassPath =>
+ println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
+ }
}
}
}
@@ -159,7 +164,7 @@ import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
class PathResolver(implicit ctx: Context) {
import ctx.base.settings
- val context = ClassPath.DefaultJavaContext
+ private val classPathFactory = new ClassPathFactory
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -214,7 +219,7 @@ class PathResolver(implicit ctx: Context) {
else sys.env.getOrElse("CLASSPATH", ".")
}
- import context._
+ import classPathFactory._
// Assemble the elements!
// priority class path takes precedence
@@ -254,8 +259,8 @@ class PathResolver(implicit ctx: Context) {
def containers = Calculated.containers
- lazy val result: JavaClassPath = {
- val cp = new JavaClassPath(containers.toIndexedSeq, context)
+ lazy val result: ClassPath = {
+ val cp = AggregateClassPath(containers.toIndexedSeq)
if (settings.Ylogcp.value) {
Console.println("Classpath built from " + settings.toConciseString(ctx.sstate))
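
With this change PathResolver.result, and therefore fromPathString, produces an AggregateClassPath over all configured containers instead of a JavaClassPath. A small sketch, assuming a Context from ContextBase.initialCtx and hypothetical classpath entries:

    import java.io.File
    import dotty.tools.dotc.core.Contexts.{Context, ContextBase}
    import dotty.tools.dotc.config.PathResolver
    import dotty.tools.dotc.classpath.AggregateClassPath

    object PathResolverSketch {
      def main(args: Array[String]): Unit = {
        implicit val ctx: Context = (new ContextBase).initialCtx
        val cp = PathResolver.fromPathString("out" + File.pathSeparator + "lib/foo.jar")
        cp match {
          case agg: AggregateClassPath =>
            println(s"${agg.aggregates.size} entries: ${agg.asClassPathStrings}")
          case other =>
            println(other.asClassPathString)
        }
      }
    }
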
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index 63c3d5f74..941434dd5 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -73,6 +73,8 @@ class ScalaSettings extends Settings.SettingGroup {
val log = PhasesSetting("-Ylog", "Log operations during")
val emitTasty = BooleanSetting("-YemitTasty", "Generate tasty in separate *.tasty file.")
val Ylogcp = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.")
+ val YdisableFlatCpCaching = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
+
val YnoImports = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
val YnoPredef = BooleanSetting("-Yno-predef", "Compile without importing Predef.")
val Yskip = PhasesSetting("-Yskip", "Skip")
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index a97589d73..eee6ba785 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -10,7 +10,6 @@ import scala.collection.{ mutable, immutable }
import PartialFunction._
import collection.mutable
import util.common.alwaysZero
-import typer.Applications
object Definitions {
@@ -846,6 +845,9 @@ class Definitions {
TupleType(elems.size).appliedTo(elems)
}
+ def isProductSubType(tp: Type)(implicit ctx: Context) =
+ tp.derivesFrom(ProductType.symbol)
+
/** Is `tp` (an alias) of either a scala.FunctionN or a scala.ImplicitFunctionN? */
def isFunctionType(tp: Type)(implicit ctx: Context) = {
val arity = functionArity(tp)
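The new isProductSubType is a type-level check that `tp` derives from scala.Product. Outside the compiler, the closest runtime analogue is a plain instance check against Product, sketched below with hypothetical names; case classes and tuples qualify, ordinary classes do not.

object ProductSketch {
  final case class Point(x: Int, y: Int)
  class NotAProduct

  // Runtime stand-in for the type-level derivesFrom(ProductType.symbol) test.
  def isProductValue(x: Any): Boolean = x.isInstanceOf[Product]

  def main(args: Array[String]): Unit = {
    println(isProductValue(Point(1, 2)))     // true: case classes extend Product
    println(isProductValue((1, "a")))        // true: tuples extend Product
    println(isProductValue(new NotAProduct)) // false
  }
}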
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
index e4d2d446f..63c2817a6 100644
--- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
+++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -9,7 +9,8 @@ package core
import java.io.IOException
import scala.compat.Platform.currentTime
-import dotty.tools.io.{ ClassPath, AbstractFile }
+import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile }
+import classpath._
import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, util.Positions._, Names._
import StdNames._, NameOps._
import Decorators.{PreNamedString, StringInterpolators}
@@ -60,8 +61,7 @@ class SymbolLoaders {
/** Enter package with given `name` into scope of `owner`
* and give them `completer` as type.
*/
- def enterPackage(owner: Symbol, pkg: ClassPath)(implicit ctx: Context): Symbol = {
- val pname = pkg.name.toTermName
+ def enterPackage(owner: Symbol, pname: TermName, completer: (TermSymbol, ClassSymbol) => PackageLoader)(implicit ctx: Context): Symbol = {
val preExisting = owner.info.decls lookup pname
if (preExisting != NoSymbol) {
// Some jars (often, obfuscated ones) include a package and
@@ -84,7 +84,7 @@ class SymbolLoaders {
}
}
ctx.newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags,
- (module, modcls) => new PackageLoader(module, pkg)).entered
+ completer).entered
}
/** Enter class and module with given `name` into scope of `owner`
@@ -126,7 +126,7 @@ class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath#ClassRep)(implicit ctx: Context): Unit = {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation)(implicit ctx: Context): Unit = {
((classRep.binary, classRep.source): @unchecked) match {
case (Some(bin), Some(src)) if needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
if (ctx.settings.verbose.value) ctx.inform("[symloader] picked up newer source file for " + src.path)
@@ -144,10 +144,10 @@ class SymbolLoaders {
/** Load contents of a package
*/
- class PackageLoader(_sourceModule: TermSymbol, classpath: ClassPath)
+ class PackageLoader(_sourceModule: TermSymbol, classPath: ClassPath)
extends SymbolLoader {
override def sourceModule(implicit ctx: Context) = _sourceModule
- def description = "package loader " + classpath.name
+ def description(implicit ctx: Context) = "package loader " + sourceModule.fullName
private var enterFlatClasses: Option[Context => Unit] = None
@@ -188,23 +188,25 @@ class SymbolLoaders {
def isFlatName(name: SimpleTermName) = name.lastIndexOf('$', name.length - 2) >= 0
- def isFlatName(classRep: ClassPath#ClassRep) = {
+ def isFlatName(classRep: ClassRepresentation) = {
val idx = classRep.name.indexOf('$')
idx >= 0 && idx < classRep.name.length - 1
}
- def maybeModuleClass(classRep: ClassPath#ClassRep) = classRep.name.last == '$'
+ def maybeModuleClass(classRep: ClassRepresentation) = classRep.name.last == '$'
- private def enterClasses(root: SymDenotation, flat: Boolean)(implicit ctx: Context) = {
- def isAbsent(classRep: ClassPath#ClassRep) =
+ private def enterClasses(root: SymDenotation, packageName: String, flat: Boolean)(implicit ctx: Context) = {
+ def isAbsent(classRep: ClassRepresentation) =
!root.unforcedDecls.lookup(classRep.name.toTypeName).exists
if (!root.isRoot) {
- for (classRep <- classpath.classes)
+ val classReps = classPath.classes(packageName)
+
+ for (classRep <- classReps)
if (!maybeModuleClass(classRep) && isFlatName(classRep) == flat &&
(!flat || isAbsent(classRep))) // on 2nd enter of flat names, check that the name has not been entered before
initializeFromClassPath(root.symbol, classRep)
- for (classRep <- classpath.classes)
+ for (classRep <- classReps)
if (maybeModuleClass(classRep) && isFlatName(classRep) == flat &&
isAbsent(classRep))
initializeFromClassPath(root.symbol, classRep)
@@ -217,14 +219,24 @@ class SymbolLoaders {
root.info = ClassInfo(pre, root.symbol.asClass, Nil, currentDecls, pre select sourceModule)
if (!sourceModule.isCompleted)
sourceModule.completer.complete(sourceModule)
+
+ val packageName = if (root.isEffectiveRoot) "" else root.fullName.toString
+
enterFlatClasses = Some { ctx =>
enterFlatClasses = None
- enterClasses(root, flat = true)(ctx)
+ enterClasses(root, packageName, flat = true)(ctx)
}
- enterClasses(root, flat = false)
+ enterClasses(root, packageName, flat = false)
if (!root.isEmptyPackage)
- for (pkg <- classpath.packages)
- enterPackage(root.symbol, pkg)
+ for (pkg <- classPath.packages(packageName)) {
+ val fullName = pkg.name
+ val name =
+ if (packageName.isEmpty) fullName
+ else fullName.substring(packageName.length + 1)
+
+ enterPackage(root.symbol, name.toTermName,
+ (module, modcls) => new PackageLoader(module, classPath))
+ }
}
}
}
@@ -242,7 +254,7 @@ abstract class SymbolLoader extends LazyType {
/** Description of the resource (ClassPath, AbstractFile)
* being processed by this loader
*/
- def description: String
+ def description(implicit ctx: Context): String
override def complete(root: SymDenotation)(implicit ctx: Context): Unit = {
def signalError(ex: Exception): Unit = {
@@ -283,7 +295,7 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
override def sourceFileOrNull: AbstractFile = classfile
- def description = "class file " + classfile.toString
+ def description(implicit ctx: Context) = "class file " + classfile.toString
def rootDenots(rootDenot: ClassDenotation)(implicit ctx: Context): (ClassDenotation, ClassDenotation) = {
val linkedDenot = rootDenot.scalacLinkedClass.denot match {
@@ -318,7 +330,7 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
}
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
- def description = "source file " + srcfile.toString
+ def description(implicit ctx: Context) = "source file " + srcfile.toString
override def sourceFileOrNull = srcfile
def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = unsupported("doComplete")
}
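The reworked PackageLoader asks the class path for packages(packageName), which returns fully qualified names, and strips the enclosing prefix before entering each sub-package. A minimal sketch of that name arithmetic, with illustrative inputs:

object PackageNameSketch {
  // Mirrors the substring logic in the PackageLoader completion code above.
  def relativeName(fullName: String, enclosing: String): String =
    if (enclosing.isEmpty) fullName
    else fullName.substring(enclosing.length + 1)

  def main(args: Array[String]): Unit = {
    println(relativeName("scala.collection.immutable", "scala.collection")) // immutable
    println(relativeName("scala", ""))                                      // scala
  }
}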
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index 27afa4d09..9415c047f 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -7,7 +7,7 @@ import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._,
import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Positions._
import NameKinds.{ModuleClassName, DefaultGetterName}
import ast.tpd._
-import java.io.{ File, IOException }
+import java.io.{ ByteArrayInputStream, DataInputStream, File, IOException }
import java.lang.Integer.toHexString
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
@@ -194,13 +194,21 @@ class ClassfileParser(
val name = pool.getName(in.nextChar)
val isConstructor = name eq nme.CONSTRUCTOR
- /** Strip leading outer param from constructor.
- * Todo: Also strip trailing access tag for private inner constructors?
+ /** Strip leading outer param from constructor and trailing access tag for
+ * private inner constructors.
*/
- def stripOuterParamFromConstructor() = innerClasses.get(currentClassName) match {
+ def normalizeConstructorParams() = innerClasses.get(currentClassName) match {
case Some(entry) if !isStatic(entry.jflags) =>
val mt @ MethodTpe(paramNames, paramTypes, resultType) = denot.info
- denot.info = mt.derivedLambdaType(paramNames.tail, paramTypes.tail, resultType)
+ var normalizedParamNames = paramNames.tail
+ var normalizedParamTypes = paramTypes.tail
+ if ((jflags & JAVA_ACC_SYNTHETIC) != 0) {
+ // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which
+ // are added when an inner class needs to access a private constructor.
+ normalizedParamNames = paramNames.dropRight(1)
+ normalizedParamTypes = paramTypes.dropRight(1)
+ }
+ denot.info = mt.derivedLambdaType(normalizedParamNames, normalizedParamTypes, resultType)
case _ =>
}
@@ -216,7 +224,7 @@ class ClassfileParser(
denot.info = pool.getType(in.nextChar)
if (isEnum) denot.info = ConstantType(Constant(sym))
- if (isConstructor) stripOuterParamFromConstructor()
+ if (isConstructor) normalizeConstructorParams()
setPrivateWithin(denot, jflags)
denot.info = translateTempPoly(parseAttributes(sym, denot.info))
if (isConstructor) normalizeConstructorInfo()
@@ -227,8 +235,12 @@ class ClassfileParser(
// seal java enums
if (isEnum) {
val enumClass = sym.owner.linkedClass
- if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
- enumClass.addAnnotation(Annotation.makeChild(sym))
+ if (!enumClass.exists)
+ ctx.warning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
+ else {
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
+ }
}
} finally {
in.bp = oldbp
@@ -665,7 +677,7 @@ class ClassfileParser(
for (entry <- innerClasses.values) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClassName) {
- val file = ctx.platform.classPath.findBinaryFile(entry.externalName.toString) getOrElse {
+ val file = ctx.platform.classPath.findClassFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
enterClassAndModule(entry, file, entry.jflags)
@@ -923,12 +935,16 @@ class ClassfileParser(
case null =>
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val name = termName(in.buf, start + 3, in.getChar(start + 1))
+ val len = in.getChar(start + 1).toInt
+ val name = termName(fromMUTF8(in.buf, start + 1, len + 2))
values(index) = name
name
}
}
+ private def fromMUTF8(bytes: Array[Byte], offset: Int, len: Int): String =
+ new DataInputStream(new ByteArrayInputStream(bytes, offset, len)).readUTF
+
/** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
def getExternalName(index: Int): SimpleTermName = {
if (index <= 0 || len <= index)
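The new fromMUTF8 helper decodes constant-pool strings with DataInputStream.readUTF, which consumes a 2-byte length followed by modified UTF-8 bytes; that matches the CONSTANT_Utf8 layout, which is why the call site passes `start + 1` and `len + 2`. A self-contained round trip (the sample string is arbitrary):

object MUTF8Sketch {
  import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

  def main(args: Array[String]): Unit = {
    val out = new ByteArrayOutputStream()
    // writeUTF emits the 2-byte length prefix and encodes U+0000 in its 2-byte modified form.
    new DataOutputStream(out).writeUTF("Foo$bar\u0000baz")
    val bytes = out.toByteArray
    // readUTF is handed the slice including the length prefix, as in fromMUTF8 above.
    val decoded = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length)).readUTF
    println(decoded == "Foo$bar\u0000baz") // true
  }
}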
diff --git a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
index 65c64f708..eed75fe88 100644
--- a/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
+++ b/compiler/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
@@ -138,7 +138,7 @@ class CompilingInterpreter(
private val prevRequests = new ArrayBuffer[Request]()
/** the compiler's classpath, as URL's */
- val compilerClasspath: List[URL] = ictx.platform.classPath(ictx).asURLs
+ val compilerClasspath: Seq[URL] = ictx.platform.classPath(ictx).asURLs
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
index 7578b57f1..a729368d4 100644
--- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
@@ -143,13 +143,17 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform
/** Set `liftedOwner(sym)` to `owner` if `owner` is more deeply nested
* than the previous value of `liftedowner(sym)`.
*/
- def narrowLiftedOwner(sym: Symbol, owner: Symbol)(implicit ctx: Context) =
+ def narrowLiftedOwner(sym: Symbol, owner: Symbol)(implicit ctx: Context): Unit =
if (sym.maybeOwner.isTerm &&
owner.isProperlyContainedIn(liftedOwner(sym)) &&
owner != sym) {
- ctx.log(i"narrow lifted $sym to $owner")
- changedLiftedOwner = true
- liftedOwner(sym) = owner
+ if (sym.is(InSuperCall) && owner.isProperlyContainedIn(sym.enclosingClass))
+ narrowLiftedOwner(sym, sym.enclosingClass)
+ else {
+ ctx.log(i"narrow lifted $sym to $owner")
+ changedLiftedOwner = true
+ liftedOwner(sym) = owner
+ }
}
/** Mark symbol `sym` as being free in `enclosure`, unless `sym` is defined
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 41a1218eb..447a003e7 100644
--- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -1408,7 +1408,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors =
- if (Applications.canProductMatch(binder.info))
+ if (defn.isProductSubType(binder.info))
productSelectors(binder.info)
else binder.caseAccessors
val res =
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
index c4d3e2292..7e17abbcd 100644
--- a/compiler/src/dotty/tools/dotc/typer/Applications.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -48,9 +48,6 @@ object Applications {
ref.info.widenExpr.dealias
}
- def canProductMatch(tp: Type)(implicit ctx: Context) =
- extractorMemberType(tp, nme._1).exists
-
/** Does `tp` fit the "product match" conditions as an unapply result type
* for a pattern with `numArgs` subpatterns?
* This is the case if `tp` has members `_1` to `_N` where `N == numArgs`.
@@ -72,7 +69,7 @@ object Applications {
}
def productArity(tp: Type)(implicit ctx: Context) =
- if (canProductMatch(tp)) productSelectorTypes(tp).size else -1
+ if (defn.isProductSubType(tp)) productSelectorTypes(tp).size else -1
def productSelectors(tp: Type)(implicit ctx: Context): List[Symbol] = {
val sels = for (n <- Iterator.from(0)) yield tp.member(nme.selectorName(n)).symbol
@@ -114,7 +111,7 @@ object Applications {
getUnapplySelectors(getTp, args, pos)
else if (unapplyResult isRef defn.BooleanClass)
Nil
- else if (canProductMatch(unapplyResult))
+ else if (defn.isProductSubType(unapplyResult))
productSelectorTypes(unapplyResult)
// this will cause a "wrong number of arguments in pattern" error later on,
// which is better than the message in `fail`.
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 19b6dfa71..da9f9f6ac 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -843,7 +843,7 @@ class Namer { typer: Typer =>
val targs1 = targs map (typedAheadType(_))
val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes
if (ptype.typeParams.isEmpty) ptype
- else typedAheadExpr(parent).tpe
+ else fullyDefinedType(typedAheadExpr(parent).tpe, "class parent", parent.pos)
}
/* Check parent type tree `parent` for the following well-formedness conditions:
diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
index ba55dfe30..02538671e 100644
--- a/compiler/src/dotty/tools/dotc/typer/Typer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -179,6 +179,20 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
previous
}
+ def selection(imp: ImportInfo, name: Name) =
+ if (imp.sym.isCompleting) {
+ ctx.warning(i"cyclic ${imp.sym}, ignored", tree.pos)
+ NoType
+ } else if (unimported.nonEmpty && unimported.contains(imp.site.termSymbol))
+ NoType
+ else {
+ val pre = imp.site
+ val denot = pre.member(name).accessibleFrom(pre)(refctx)
+ // Pass refctx so that any errors are reported in the context of the
+ // reference instead of the context of the import
+ if (reallyExists(denot)) pre.select(name, denot) else NoType
+ }
+
/** The type representing a named import with enclosing name when imported
* from given `site` and `selectors`.
*/
@@ -194,25 +208,15 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
found
}
- def selection(name: Name) =
- if (imp.sym.isCompleting) {
- ctx.warning(i"cyclic ${imp.sym}, ignored", tree.pos)
- NoType
- }
- else if (unimported.nonEmpty && unimported.contains(imp.site.termSymbol))
- NoType
- else {
- // Pass refctx so that any errors are reported in the context of the
- // reference instead of the
- checkUnambiguous(selectionType(imp.site, name, tree.pos)(refctx))
- }
+ def unambiguousSelection(name: Name) =
+ checkUnambiguous(selection(imp, name))
selector match {
case Thicket(fromId :: Ident(Name) :: _) =>
val Ident(from) = fromId
- selection(if (name.isTypeName) from.toTypeName else from)
+ unambiguousSelection(if (name.isTypeName) from.toTypeName else from)
case Ident(Name) =>
- selection(name)
+ unambiguousSelection(name)
case _ =>
recur(rest)
}
@@ -225,18 +229,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** The type representing a wildcard import with enclosing name when imported
* from given import info
*/
- def wildImportRef(imp: ImportInfo)(implicit ctx: Context): Type = {
- if (imp.isWildcardImport) {
- val pre = imp.site
- if (!unimported.contains(pre.termSymbol) &&
- !imp.excluded.contains(name.toTermName) &&
- name != nme.CONSTRUCTOR) {
- val denot = pre.member(name).accessibleFrom(pre)(refctx)
- if (reallyExists(denot)) return pre.select(name, denot)
- }
- }
- NoType
- }
+ def wildImportRef(imp: ImportInfo)(implicit ctx: Context): Type =
+ if (imp.isWildcardImport && !imp.excluded.contains(name.toTermName) && name != nme.CONSTRUCTOR)
+ selection(imp, name)
+ else NoType
/** Is (some alternative of) the given predenotation `denot`
* defined in current compilation unit?
@@ -763,7 +759,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** Is `formal` a product type which is elementwise compatible with `params`? */
def ptIsCorrectProduct(formal: Type) = {
isFullyDefined(formal, ForceDegree.noBottom) &&
- Applications.canProductMatch(formal) &&
+ defn.isProductSubType(formal) &&
Applications.productSelectorTypes(formal).corresponds(params) {
(argType, param) =>
param.tpt.isEmpty || argType <:< typedAheadType(param.tpt).tpe
diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala
index 5e77c1b61..b4cc426cf 100644
--- a/compiler/src/dotty/tools/io/ClassPath.scala
+++ b/compiler/src/dotty/tools/io/ClassPath.scala
@@ -1,56 +1,89 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2012 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
+
package dotty.tools
package io
+import java.net.MalformedURLException
import java.net.URL
-import scala.collection.{ mutable, immutable }
-import dotc.core.Decorators.StringDecorator
+import java.util.regex.PatternSyntaxException
+
import File.pathSeparator
-import java.net.MalformedURLException
import Jar.isJarOrZip
-import ClassPath._
-import scala.Option.option2Iterable
-import scala.reflect.io.Path.string2path
-import language.postfixOps
-
-/** <p>
- * This module provides star expansion of '-classpath' option arguments, behaves the same as
- * java, see [http://java.sun.com/javase/6/docs/technotes/tools/windows/classpath.html]
- * </p>
- *
- * @author Stepan Koltsov
- */
+
+/**
+ * A representation of the compiler's class- or sourcepath.
+ */
+trait ClassPath {
+ import dotty.tools.dotc.classpath._
+ def asURLs: Seq[URL]
+
+ /** Empty string represents root package */
+ private[dotty] def packages(inPackage: String): Seq[PackageEntry]
+ private[dotty] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[dotty] def sources(inPackage: String): Seq[SourceFileEntry]
+
+ /** Returns entries for packages and classes merged with sources, possibly in one pass. */
+ private[dotty] def list(inPackage: String): ClassPathEntries
+
+ /**
+ * Returns classes found in both class files and source files (as our base ClassRepresentation),
+ * so it is not strictly related to findClassFile.
+ */
+ def findClass(className: String): Option[ClassRepresentation] = {
+ // A default implementation which should be overridden if a more efficient
+ // lookup exists for a given type of ClassPath
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg).find(_.name == simpleClassName)
+ def findClassInSources = sources(pkg).find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+ def findClassFile(className: String): Option[AbstractFile]
+
+ def asClassPathStrings: Seq[String]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ // for compatibility purposes
+ @deprecated("use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
+
object ClassPath {
+ val RootPackage = ""
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
val wildSuffix = File.separator + "*"
- /** Get all subdirectories, jars, zips out of a directory. */
- def lsDir(dir: Directory, filt: String => Boolean = _ => true) = {
- val files = synchronized(dir.list)
- files filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
- }
-
- def basedir(s: String) =
- if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
- else "."
+ /* Get all subdirectories, jars, zips out of a directory. */
+ def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
+ dir.list.filter(x => filt(x.name) && (x.isDirectory || isJarOrZip(x))).map(_.path).toList
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
- val regexp = ("^%s$" format pattern.replaceAll("""\*""", """.*""")).r
- lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+ try {
+ val regexp = ("^" + pattern.replaceAllLiterally("""\*""", """.*""") + "$").r
+ lsDir(Directory(pattern).parent, regexp.findFirstIn(_).isDefined)
+ }
+ catch { case _: PatternSyntaxException => List(pattern) }
}
else List(pattern)
}
/** Split classpath using platform-dependent path separator */
- def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+ def split(path: String): List[String] = (path split pathSeparator).toList.filterNot(_ == "").distinct
/** Join classpath using platform-dependent path separator */
def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
@@ -58,22 +91,6 @@ object ClassPath {
/** Split the classpath, apply a transformation function, and reassemble it. */
def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
- /** Split the classpath, filter according to predicate, and reassemble. */
- def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
-
- /** Split the classpath and map them into Paths */
- def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
-
- /** Make all classpath components absolute. */
- def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
-
- /** Join the paths as a classpath */
- def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
- def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
-
- /** Split the classpath and map them into URLs */
- def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
-
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
if (expandStar) split(path) flatMap expandS
@@ -83,9 +100,10 @@ object ClassPath {
def expandDir(extdir: String): List[String] = {
AbstractFile getDirectory extdir match {
case null => Nil
- case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
+ case dir => dir.filter(_.isClassContainer).map(x => new java.io.File(dir.file, x.name).getPath).toList
}
}
+
/** Expand manifest jar classpath entries: these are either urls, or paths
* relative to the location of the jar.
*/
@@ -99,317 +117,34 @@ object ClassPath {
)
}
- /** A useful name filter. */
- def isTraitImplementation(name: String) = name endsWith "$class.class"
-
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
catch { case _: MalformedURLException => None }
- /** A class modeling aspects of a ClassPath which should be
- * propagated to any classpaths it creates.
- */
- abstract class ClassPathContext {
- /** A filter which can be used to exclude entities from the classpath
- * based on their name.
- */
- def isValidName(name: String): Boolean = true
-
- /** From the representation to its identifier.
- */
- def toBinaryName(rep: AbstractFile): String
-
- /** Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): ClassPath
-
- /** Creators for sub classpaths which preserve this context.
- */
- def sourcesInPath(path: String): List[ClassPath] =
- for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
- new SourcePath(dir, this)
-
- def contentsOfDirsInPath(path: String): List[ClassPath] =
- for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
- newClassPath(entry)
-
- def classesAtAllURLS(path: String): List[ClassPath] =
- (path split " ").toList flatMap classesAtURL
-
- def classesAtURL(spec: String) =
- for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
- newClassPath(location)
-
- def classesInExpandedPath(path: String): IndexedSeq[ClassPath] =
- classesInPathImpl(path, true).toIndexedSeq
-
- def classesInPath(path: String) = classesInPathImpl(path, false)
-
- // Internal
- private def classesInPathImpl(path: String, expand: Boolean) =
- for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
- newClassPath(dir)
- }
-
- class JavaContext extends ClassPathContext {
- def toBinaryName(rep: AbstractFile) = {
- val name = rep.name
- assert(endsClass(name), name)
- name.substring(0, name.length - 6)
- }
- def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
- }
-
- object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !isTraitImplementation(name)
+ def manifests: List[java.net.URL] = {
+ import scala.collection.JavaConverters._
+ val resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF")
+ resources.asScala.filter(_.getProtocol == "jar").toList
}
- private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
-
- /** From the source file to its identifier.
- */
- def toSourceName(f: AbstractFile): String = {
- val name = f.name
+ @deprecated("shim for sbt's compiler interface", since = "2.12.0")
+ sealed abstract class ClassPathContext
- if (endsScala(name)) name.substring(0, name.length - 6)
- else if (endsJava(name)) name.substring(0, name.length - 5)
- else throw new FatalError("Unexpected source file ending: " + name)
- }
+ @deprecated("shim for sbt's compiler interface", since = "2.12.0")
+ sealed abstract class JavaContext
}
-/**
- * Represents a package which contains classes and other packages
- */
-abstract class ClassPath {
- type AnyClassRep = ClassPath#ClassRep
-
- /**
- * The short name of the package (without prefix)
- */
+trait ClassRepresentation {
def name: String
-
- /**
- * A String representing the origin of this classpath element, if known.
- * For example, the path of the directory or jar.
- */
- def origin: Option[String] = None
-
- /** A list of URLs representing this classpath.
- */
- def asURLs: List[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClasspathString: String
-
- /** Info which should be propagated to any sub-classpaths.
- */
- def context: ClassPathContext
-
- /** Lists of entities.
- */
- def classes: IndexedSeq[AnyClassRep]
- def packages: IndexedSeq[ClassPath]
- def sourcepaths: IndexedSeq[AbstractFile]
-
- /**
- * Represents classes which can be loaded with a ClassfileLoader
- * and / or a SourcefileLoader.
- */
- case class ClassRep(binary: Option[AbstractFile], source: Option[AbstractFile]) {
- def name: String = binary match {
- case Some(x) => context.toBinaryName(x)
- case _ =>
- assert(source.isDefined)
- toSourceName(source.get)
- }
- }
-
- /** Filters for assessing validity of various entities.
- */
- def validClassFile(name: String) = endsClass(name) && context.isValidName(name)
- def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScala(name) || endsJava(name)
-
- /**
- * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
- * Does not support nested classes on .NET
- */
- def findClass(name: String): Option[AnyClassRep] =
- name.splitWhere(_ == '.', doDropIndex = true) match {
- case Some((pkg, rest)) =>
- packages find (_.name == pkg) flatMap (_ findClass rest)
- case _ =>
- classes find (_.name == name)
- }
-
- def findBinaryFile(name: String): Option[AbstractFile] =
- findClass(name).flatMap(_.binary)
-
- def sortString = join(split(asClasspathString).sorted: _*)
-
- override def equals(that: Any) = that match {
- case x: ClassPath => this.sortString == x.sortString
- case _ => false
- }
- override def hashCode = sortString.hashCode()
+ def binary: Option[AbstractFile]
+ def source: Option[AbstractFile]
}
-/**
- * A Classpath containing source files
- */
-class SourcePath(dir: AbstractFile, val context: ClassPathContext) extends ClassPath {
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
- def asClasspathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
-
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[SourcePath]
- dir foreach { f =>
- if (!f.isDirectory && validSourceFile(f.name))
- classBuf += ClassRep(None, Some(f))
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new SourcePath(f, context)
- }
- (packageBuf.result, classBuf.result)
- }
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class DirectoryClassPath
- lazy val (packages, classes) = traverse()
- override def toString() = "sourcepath: " + dir.toString()
-}
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class MergedClassPath
-/**
- * A directory (or a .jar file) containing classfiles and packages
- */
-class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext) extends ClassPath {
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
- def asClasspathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- // calculates (packages, classes) in one traversal.
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach { f =>
- if (!f.isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
- }
- (packageBuf.result, classBuf.result)
- }
-
- lazy val (packages, classes) = traverse()
- override def toString() = "directory classpath: " + origin.getOrElse("?")
-}
-
-class DeltaClassPath(original: MergedClassPath, subst: Map[ClassPath, ClassPath])
-extends MergedClassPath(original.entries map (e => subst getOrElse (e, e)), original.context) {
- // not sure we should require that here. Commented out for now.
- // require(subst.keySet subsetOf original.entries.toSet)
- // We might add specialized operations for computing classes packages here. Not sure it's worth it.
-}
-
-/**
- * A classpath unifying multiple class- and sourcepath entries.
- */
-class MergedClassPath(
- val entries: IndexedSeq[ClassPath],
- val context: ClassPathContext)
-extends ClassPath {
- def this(entries: TraversableOnce[ClassPath], context: ClassPathContext) =
- this(entries.toIndexedSeq, context)
-
- def name = entries.head.name
- def asURLs = (entries flatMap (_.asURLs)).toList
- lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
-
- override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
-
- lazy val classes: IndexedSeq[AnyClassRep] = {
- var count = 0
- val indices = mutable.AnyRefMap[String, Int]()
- val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
-
- for (e <- entries; c <- e.classes) {
- val name = c.name
- if (indices contains name) {
- val idx = indices(name)
- val existing = cls(idx)
-
- if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = existing.copy(binary = c.binary)
- if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = existing.copy(source = c.source)
- }
- else {
- indices(name) = count
- cls += c
- count += 1
- }
- }
- cls.toIndexedSeq
- }
-
- lazy val packages: IndexedSeq[ClassPath] = {
- var count = 0
- val indices = mutable.AnyRefMap[String, Int]()
- val pkg = new mutable.ArrayBuffer[ClassPath](256)
-
- for (e <- entries; p <- e.packages) {
- val name = p.name
- if (indices contains name) {
- val idx = indices(name)
- pkg(idx) = addPackage(pkg(idx), p)
- }
- else {
- indices(name) = count
- pkg += p
- count += 1
- }
- }
- pkg.toIndexedSeq
- }
-
- private def addPackage(to: ClassPath, pkg: ClassPath) = {
- val newEntries: IndexedSeq[ClassPath] = to match {
- case cp: MergedClassPath => cp.entries :+ pkg
- case _ => IndexedSeq(to, pkg)
- }
- new MergedClassPath(newEntries, context)
- }
- def show(): Unit = {
- println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClasspathString split ':' foreach (x => println(" " + x))
- }
- override def toString() = "merged classpath " + entries.mkString("(", "\n", ")")
-}
-
-/**
- * The classpath when compiling with target:jvm. Binary files (classfiles) are represented
- * as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
- */
-class JavaClassPath(
- containers: IndexedSeq[ClassPath],
- context: JavaContext)
-extends MergedClassPath(containers, context) { }
-
-object JavaClassPath {
- def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
- val containers = {
- for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
- new DirectoryClassPath(f, context)
- }
- new JavaClassPath(containers.toIndexedSeq, context)
- }
- def fromURLs(urls: Seq[URL]): JavaClassPath =
- fromURLs(urls, ClassPath.DefaultJavaContext)
-}
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class JavaClassPath
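ClassPath.split and ClassPath.join above drop empty segments (and split additionally de-duplicates) using the platform path separator. A standalone sketch of the same behaviour, kept outside the compiler with illustrative names:

object SplitJoinSketch {
  import java.io.File.pathSeparator

  def split(path: String): List[String] =
    path.split(pathSeparator).toList.filterNot(_ == "").distinct

  def join(paths: String*): String =
    paths.filterNot(_ == "").mkString(pathSeparator)

  def main(args: Array[String]): Unit = {
    val raw = List("a.jar", "", "b", "a.jar").mkString(pathSeparator)
    println(split(raw))           // List(a.jar, b)
    println(join(split(raw): _*)) // a.jar:b on Unix, a.jar;b on Windows
  }
}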
diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala
new file mode 100644
index 000000000..53474e778
--- /dev/null
+++ b/compiler/src/dotty/tools/io/PlainFile.scala
@@ -0,0 +1,170 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools
+package io
+
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
+ override def isDirectory = true
+ override def iterator = givenPath.list filter (_.exists) map (x => new PlainFile(x))
+ override def delete(): Unit = givenPath.deleteRecursively()
+}
+
+/** This class implements an abstract file backed by a File.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class PlainFile(val givenPath: Path) extends AbstractFile {
+ assert(path ne null)
+
+ val file = givenPath.jfile
+ override def underlyingSource = Some(this)
+
+ private val fpath = givenPath.toAbsolute
+
+ /** Returns the name of this abstract file. */
+ def name = givenPath.name
+
+ /** Returns the path of this abstract file. */
+ def path = givenPath.path
+
+ /** The absolute file. */
+ def absolute = new PlainFile(givenPath.toAbsolute)
+
+ override def container: AbstractFile = new PlainFile(givenPath.parent)
+ override def input = givenPath.toFile.inputStream()
+ override def output = givenPath.toFile.outputStream()
+ override def sizeOption = Some(givenPath.length.toInt)
+
+ override def hashCode(): Int = fpath.hashCode()
+ override def equals(that: Any): Boolean = that match {
+ case x: PlainFile => fpath == x.fpath
+ case _ => false
+ }
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean = givenPath.isDirectory
+
+ /** Returns the time that this abstract file was last modified. */
+ def lastModified: Long = givenPath.lastModified
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile] = {
+ // Optimization: Assume that the file was not deleted and did not have permissions changed
+ // between the call to `list` and the iteration. This saves a call to `exists`.
+ def existsFast(path: Path) = path match {
+ case (_: Directory | _: io.File) => true
+ case _ => path.exists
+ }
+ if (!isDirectory) Iterator.empty
+ else givenPath.toDirectory.list filter existsFast map (new PlainFile(_))
+ }
+
+ /**
+ * Returns the abstract file in this abstract directory with the
+ * specified name. If there is no such file, returns null. The
+ * argument "directory" tells whether to look for a directory or
+ * a regular file.
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile = {
+ val child = givenPath / name
+ if ((child.isDirectory && directory) || (child.isFile && !directory)) new PlainFile(child)
+ else null
+ }
+
+ /** Create the underlying file if it does not already exist. */
+ def create(): Unit = if (!exists) givenPath.createFile()
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete(): Unit =
+ if (givenPath.isFile) givenPath.delete()
+ else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively()
+
+ /** Returns a plain file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
+ new PlainFile(givenPath / name)
+}
+
+private[dotty] class PlainNioFile(nioPath: java.nio.file.Path) extends AbstractFile {
+ import java.nio.file._
+
+ assert(nioPath ne null)
+
+ /** Returns the underlying File if any and null otherwise. */
+ override def file: java.io.File = try {
+ nioPath.toFile
+ } catch {
+ case _: UnsupportedOperationException => null
+ }
+
+ override def underlyingSource = Some(this)
+
+ private val fpath = nioPath.toAbsolutePath.toString
+
+ /** Returns the name of this abstract file. */
+ def name = nioPath.getFileName.toString
+
+ /** Returns the path of this abstract file. */
+ def path = nioPath.toString
+
+ /** The absolute file. */
+ def absolute = new PlainNioFile(nioPath.toAbsolutePath)
+
+ override def container: AbstractFile = new PlainNioFile(nioPath.getParent)
+ override def input = Files.newInputStream(nioPath)
+ override def output = Files.newOutputStream(nioPath)
+ override def sizeOption = Some(Files.size(nioPath).toInt)
+ override def hashCode(): Int = fpath.hashCode()
+ override def equals(that: Any): Boolean = that match {
+ case x: PlainNioFile => fpath == x.fpath
+ case _ => false
+ }
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean = Files.isDirectory(nioPath)
+
+ /** Returns the time that this abstract file was last modified. */
+ def lastModified: Long = Files.getLastModifiedTime(nioPath).toMillis
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile] = {
+ try {
+ import scala.collection.JavaConverters._
+ val it = Files.newDirectoryStream(nioPath).iterator()
+ it.asScala.map(new PlainNioFile(_))
+ } catch {
+ case _: NotDirectoryException => Iterator.empty
+ }
+ }
+
+ /**
+ * Returns the abstract file in this abstract directory with the
+ * specified name. If there is no such file, returns null. The
+ * argument "directory" tells whether to look for a directory or
+ * a regular file.
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile = {
+ val child = nioPath.resolve(name)
+ if ((Files.isDirectory(child) && directory) || (Files.isRegularFile(child) && !directory)) new PlainNioFile(child)
+ else null
+ }
+
+ /** Create the underlying file if it does not already exist. */
+ def create(): Unit = if (!exists) Files.createFile(nioPath)
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete(): Unit =
+ if (Files.isRegularFile(nioPath)) Files.deleteIfExists(nioPath)
+ else if (Files.isDirectory(nioPath)) new Directory(nioPath.toFile).deleteRecursively()
+
+ /** Returns a plain file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
+ new PlainNioFile(nioPath.resolve(name))
+}
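PlainNioFile above is built on a handful of java.nio.file calls: metadata queries plus a directory stream for iteration. A self-contained sketch of those calls against a temporary directory (file names are arbitrary):

object NioSketch {
  import java.nio.file.Files
  import scala.collection.JavaConverters._

  def main(args: Array[String]): Unit = {
    val dir  = Files.createTempDirectory("nio-sketch")
    val file = Files.createFile(dir.resolve("A.class"))
    println(Files.isDirectory(dir))                        // true
    println(Files.isRegularFile(file))                     // true
    println(Files.getLastModifiedTime(file).toMillis > 0L) // true
    val stream = Files.newDirectoryStream(dir)             // used by PlainNioFile.iterator above
    try stream.iterator().asScala.foreach(p => println(p.getFileName))
    finally stream.close()
  }
}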
diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala
index 1c0e0b5c4..7acb827c9 100644
--- a/compiler/src/dotty/tools/io/package.scala
+++ b/compiler/src/dotty/tools/io/package.scala
@@ -20,8 +20,6 @@ package object io {
val File = scala.reflect.io.File
type Path = scala.reflect.io.Path
val Path = scala.reflect.io.Path
- type PlainFile = scala.reflect.io.PlainFile
- //val PlainFile = scala.reflect.io.PlainFile
val Streamable = scala.reflect.io.Streamable
type VirtualDirectory = scala.reflect.io.VirtualDirectory
type VirtualFile = scala.reflect.io.VirtualFile
diff --git a/compiler/test/dotc/comptest.scala b/compiler/test/dotc/comptest.scala
index dce002c81..8737ef165 100644
--- a/compiler/test/dotc/comptest.scala
+++ b/compiler/test/dotc/comptest.scala
@@ -1,9 +1,14 @@
package dotc
-import dotty.tools.dotc.ParallelTesting
+import dotty.tools.vulpix.ParallelTesting
+
+import scala.concurrent.duration._
object comptest extends ParallelTesting {
+ def maxDuration = 3.seconds
+ def numberOfSlaves = 5
+ def safeMode = false
def isInteractive = true
def testFilter = None
diff --git a/compiler/test/dotc/tests.scala b/compiler/test/dotc/tests.scala
index af2c88e1a..efecc1df3 100644
--- a/compiler/test/dotc/tests.scala
+++ b/compiler/test/dotc/tests.scala
@@ -3,6 +3,7 @@ package dotc
import dotty.Jars
import dotty.tools.dotc.CompilerTest
import dotty.tools.StdLibSources
+import org.junit.experimental.categories.Category
import org.junit.{Before, Test}
import org.junit.Assert._
@@ -10,10 +11,15 @@ import java.io.{ File => JFile }
import scala.reflect.io.Directory
import scala.io.Source
-// tests that match regex '(pos|dotc|run|java|compileStdLib)\.*' would be executed as benchmarks.
+/** WARNING
+ * =======
+ * These tests are legacy; do not add new tests here, see `CompilationTests.scala`
+ */
+@Category(Array(classOf[java.lang.Exception]))
class tests extends CompilerTest {
- def isRunByJenkins: Boolean = sys.props.isDefinedAt("dotty.jenkins.build")
+ // tests that match regex '(pos|dotc|run|java|compileStdLib)\.*' would be
+ // executed as benchmarks.
val defaultOutputDir = "../out/"
@@ -62,7 +68,7 @@ class tests extends CompilerTest {
}
implicit val defaultOptions: List[String] = noCheckOptions ++ {
- if (isRunByJenkins) List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef") // should be Ycheck:all, but #725
+ if (dotty.Properties.isRunByDrone) List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef") // should be Ycheck:all, but #725
else List("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef")
} ++ checkOptions ++ classPath
@@ -204,8 +210,8 @@ class tests extends CompilerTest {
private val stdlibFiles: List[String] = StdLibSources.whitelisted
@Test def compileStdLib =
- if (!generatePartestFiles)
- compileList("compileStdLib", stdlibFiles, "-migration" :: "-Yno-inline" :: scala2mode)
+ compileList("compileStdLib", stdlibFiles, "-migration" :: "-Yno-inline" :: scala2mode)
+
@Test def compileMixed = compileLine(
"""../tests/pos/B.scala
|../scala-scala/src/library/scala/collection/immutable/Seq.scala
@@ -221,7 +227,7 @@ class tests extends CompilerTest {
|../scala-scala/src/library/scala/collection/parallel/mutable/ParSet.scala
|../scala-scala/src/library/scala/collection/mutable/SetLike.scala""".stripMargin)(scala2mode ++ defaultOptions)
- @Test def dotty = {
+ @Test def dottyBooted = {
dottyBootedLib
dottyDependsOnBootedLib
}
diff --git a/compiler/test/dotty/Jars.scala b/compiler/test/dotty/Jars.scala
index f062f8b25..bc000fced 100644
--- a/compiler/test/dotty/Jars.scala
+++ b/compiler/test/dotty/Jars.scala
@@ -2,21 +2,42 @@ package dotty
/** Jars used when compiling tests, normally set from the sbt build */
object Jars {
+ /** Dotty library Jar */
val dottyLib: String = sys.env.get("DOTTY_LIB")
- .getOrElse(sys.props("dotty.tests.classes.library"))
+ .getOrElse(Properties.dottyLib)
+ /** Dotty Compiler Jar */
val dottyCompiler: String = sys.env.get("DOTTY_COMPILER")
- .getOrElse(sys.props("dotty.tests.classes.compiler"))
+ .getOrElse(Properties.dottyCompiler)
+ /** Dotty Interfaces Jar */
val dottyInterfaces: String = sys.env.get("DOTTY_INTERFACE")
- .getOrElse(sys.props("dotty.tests.classes.interfaces"))
+ .getOrElse(Properties.dottyInterfaces)
- val dottyExtras: List[String] = Option(sys.env.get("DOTTY_EXTRAS")
- .getOrElse(sys.props("dotty.tests.extraclasspath")))
- .map(_.split(":").toList).getOrElse(Nil)
+ /** Dotty extras classpath from env or properties */
+ val dottyExtras: List[String] = sys.env.get("DOTTY_EXTRAS")
+ .map(_.split(":").toList).getOrElse(Properties.dottyExtras)
+ /** Dotty REPL dependencies */
val dottyReplDeps: List[String] = dottyLib :: dottyExtras
+ /** Dotty test dependencies */
val dottyTestDeps: List[String] =
dottyLib :: dottyCompiler :: dottyInterfaces :: dottyExtras
+
+ /** Gets the scala 2.* library at runtime. Note that doing this is unsafe
+ * unless you know that the library will be on the classpath of the running
+ * application. It is currently safe to call this function if the tests are
+ * run by sbt.
+ */
+ def scalaLibraryFromRuntime: String = findJarFromRuntime("scala-library-2.")
+
+ private def findJarFromRuntime(partialName: String) = {
+ val urls = ClassLoader.getSystemClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.map(_.getFile.toString)
+ urls.find(_.contains(partialName)).getOrElse {
+ throw new java.io.FileNotFoundException(
+ s"""Unable to locate $partialName on classpath:\n${urls.toList.mkString("\n")}"""
+ )
+ }
+ }
}
diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala
new file mode 100644
index 000000000..70db82092
--- /dev/null
+++ b/compiler/test/dotty/Properties.scala
@@ -0,0 +1,49 @@
+package dotty
+
+/** Runtime properties from defines or environment */
+object Properties {
+
+ /** If property is unset or "TRUE" we consider it `true` */
+ private[this] def propIsNullOrTrue(name: String): Boolean = {
+ val prop = System.getProperty(name)
+ prop == null || prop == "TRUE"
+ }
+
+ /** Are we running on the Drone CI? */
+ val isRunByDrone: Boolean = sys.env.isDefinedAt("DRONE")
+
+ /** Should tests run interactively? */
+ val testsInteractive: Boolean = propIsNullOrTrue("dotty.tests.interactive")
+
+ /** Filter out tests not matching the regex supplied by "dotty.tests.filter"
+ * define
+ */
+ val testsFilter: Option[String] = sys.props.get("dotty.tests.filter")
+
+ /** When set, the run tests are only compiled, not run; a warning will be
+ * issued
+ */
+ val testsNoRun: Boolean = sys.props.get("dotty.tests.norun").isDefined
+
+ /** Should Unit tests run in safe mode?
+ *
+ * For run tests this means that we respawn child JVM processes after each
+ * test, so that they are never reused.
+ */
+ val testsSafeMode: Boolean = sys.props.isDefinedAt("dotty.tests.safemode")
+
+ /** Dotty compiler path provided through define */
+ def dottyCompiler: String = sys.props("dotty.tests.classes.compiler")
+
+ /** Dotty classpath extras provided through define */
+ def dottyExtras: List[String] =
+ Option(sys.props("dotty.tests.extraclasspath"))
+ .map(_.split(":").toList)
+ .getOrElse(Nil)
+
+ /** Dotty interfaces path provided through define */
+ def dottyInterfaces: String = sys.props("dotty.tests.classes.interfaces")
+
+ /** Dotty library path provided through define */
+ def dottyLib: String = sys.props("dotty.tests.classes.library")
+}
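The values above are ordinary JVM system properties, normally passed as -D defines by the sbt build. A minimal sketch of how such a define becomes visible through sys.props; the property value is hypothetical and set programmatically only for illustration:

object PropertySketch {
  def main(args: Array[String]): Unit = {
    // Normally supplied on the command line, e.g. -Ddotty.tests.filter=someTestName
    System.setProperty("dotty.tests.filter", "someTestName")
    println(sys.props.get("dotty.tests.filter"))          // Some(someTestName)
    println(sys.props.get("dotty.tests.norun").isDefined) // false unless the define is passed
  }
}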
diff --git a/compiler/test/dotty/partest/DPConfig.scala b/compiler/test/dotty/partest/DPConfig.scala
deleted file mode 100644
index 5c493f465..000000000
--- a/compiler/test/dotty/partest/DPConfig.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package dotty.partest
-
-import scala.collection.JavaConversions._
-import scala.reflect.io.Path
-import java.io.File
-
-import scala.tools.partest.PartestDefaults
-
-
-/** Dotty Partest runs all tests in the provided testDirs located under
- * testRoot. There can be several directories with pos resp. neg tests, as
- * long as the prefix is pos/neg.
- *
- * Each testDir can also have a __defaultFlags.flags file, which provides
- * compiler flags and is used unless there's a specific flags file (e.g. for
- * test pos/A.scala, if there's a pos/A.flags file those flags are used,
- * otherwise pos/__defaultFlags.flags are used if the file exists).
- */
-object DPConfig {
- /** Options used for _running_ the run tests.
- * Note that this is different from the options used when _compiling_ tests,
- * those are determined by the sbt configuration.
- */
- val runJVMOpts = s"-Xms64M -Xmx1024M ${PartestDefaults.javaOpts}"
-
- val testRoot = (Path("..") / Path("tests") / Path("partest-generated")).toString
- val genLog = Path(testRoot) / Path("gen.log")
-
- lazy val testDirs = {
- val root = new File(testRoot)
- val dirs = if (!root.exists) Array.empty[String] else root.listFiles.filter(_.isDirectory).map(_.getName)
- if (dirs.isEmpty)
- throw new Exception("Partest did not detect any generated sources")
- dirs
- }
-
- // Tests finish faster when running in parallel, but console output is
- // out of order and sometimes the compiler crashes
- val runTestsInParallel = true
-}
diff --git a/compiler/test/dotty/partest/DPConsoleRunner.scala b/compiler/test/dotty/partest/DPConsoleRunner.scala
deleted file mode 100644
index 3362d7a59..000000000
--- a/compiler/test/dotty/partest/DPConsoleRunner.scala
+++ /dev/null
@@ -1,411 +0,0 @@
-/* NOTE: Adapted from ScalaJSPartest.scala in
- * https://github.com/scala-js/scala-js/
- * TODO make partest configurable */
-
-package dotty.partest
-
-import dotty.tools.FatalError
-import scala.reflect.io.AbstractFile
-import scala.tools.partest._
-import scala.tools.partest.nest._
-import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
-import ClassPath.{ join, split }
-import FileManager.{ compareFiles, compareContents, joinPaths, withTempFile }
-import scala.util.matching.Regex
-import tools.nsc.io.{ File => NSCFile }
-import java.io.{ File, PrintStream, FileOutputStream, PrintWriter, FileWriter }
-import java.net.URLClassLoader
-
-/** Runs dotty partest from the Console, discovering test sources in
- * DPConfig.testRoot that have been generated automatically by
- * DPPrepJUnitRunner. Use `sbt partest` to run. If additional jars are
- * required by some run tests, add them to partestDeps in the sbt Build.scala.
- */
-object DPConsoleRunner {
- def main(args: Array[String]): Unit = {
- // unfortunately sbt runTask passes args as single string
- // extra jars for run tests are passed with -dottyJars <count> <jar1> <jar2> ...
- val jarFinder = """-dottyJars (\d*) (.*)""".r
- val (jarList, otherArgs) = args.toList.partition(jarFinder.findFirstIn(_).isDefined)
- val (extraJars, moreArgs) = jarList match {
- case Nil => sys.error("Error: DPConsoleRunner needs \"-dottyJars <jarCount> <jars>*\".")
- case jarFinder(nr, jarString) :: Nil =>
- val jars = jarString.split(" ").toList
- val count = nr.toInt
- if (jars.length < count)
- sys.error("Error: DPConsoleRunner found wrong number of dottyJars: " + jars + ", expected: " + nr)
- else (jars.take(count), jars.drop(count))
- case list => sys.error("Error: DPConsoleRunner found several -dottyJars options: " + list)
- }
- new DPConsoleRunner((otherArgs ::: moreArgs) mkString (" "), extraJars).runPartest
- }
-}
-
-// console runner has a suite runner which creates a test runner for each test
-class DPConsoleRunner(args: String, extraJars: List[String]) extends ConsoleRunner(args) {
- override val suiteRunner = new DPSuiteRunner (
- testSourcePath = optSourcePath getOrElse DPConfig.testRoot,
- fileManager = new DottyFileManager(extraJars),
- updateCheck = optUpdateCheck,
- failed = optFailed,
- consoleArgs = args)
-
- override def run = {}
- def runPartest = super.run
-}
-
-class DottyFileManager(extraJars: List[String]) extends FileManager(Nil) {
- lazy val extraJarList = extraJars.map(NSCFile(_))
- override lazy val libraryUnderTest = Path(extraJars.find(_.contains("scala-library")).getOrElse(""))
- override lazy val reflectUnderTest = Path(extraJars.find(_.contains("scala-reflect")).getOrElse(""))
- override lazy val compilerUnderTest = Path(extraJars.find(_.contains("dotty")).getOrElse(""))
-}
-
-class DPSuiteRunner(testSourcePath: String, // relative path, like "files", or "pending"
- fileManager: DottyFileManager,
- updateCheck: Boolean,
- failed: Boolean,
- consoleArgs: String,
- javaCmdPath: String = PartestDefaults.javaCmd,
- javacCmdPath: String = PartestDefaults.javacCmd,
- scalacExtraArgs: Seq[String] = Seq.empty,
- javaOpts: String = DPConfig.runJVMOpts)
-extends SuiteRunner(testSourcePath, fileManager, updateCheck, failed, javaCmdPath, javacCmdPath, scalacExtraArgs, javaOpts) {
-
- if (!DPConfig.runTestsInParallel)
- sys.props("partest.threads") = "1"
-
- sys.props("partest.root") = "."
-
- // override to provide Dotty banner
- override def banner: String = {
- s"""|Welcome to Partest for Dotty! Partest version: ${Properties.versionNumberString}
- |Compiler under test: dotty.tools.dotc.Bench or dotty.tools.dotc.Main
- |Generated test sources: ${PathSettings.srcDir}${File.separator}
- |Test directories: ${DPConfig.testDirs.toList.mkString(", ")}
- |Debugging: failed tests have compiler output in test-kind.clog, run output in test-kind.log, class files in test-kind.obj
- |Parallel: ${DPConfig.runTestsInParallel}
- |Options: (use partest --help for usage information) ${consoleArgs}
- """.stripMargin
- }
-
- /** Some tests require limited resources: tests which are compiled with one
- * or more of the flags in this list will be run with `limitedThreads`
- * threads. This is necessary because some test flags require a lot of
- * memory when running the compiler and may exhaust the available memory
- * when run in parallel with too many other tests.
- *
- * This number could be increased on the CI, but might fail locally if
- * scaled too aggressively - override with:
- *
- * ```
- * -Ddotty.tests.limitedThreads=X
- * ```
- */
- def limitResourceFlags = List("-Ytest-pickler")
- private val limitedThreads = sys.props.get("dotty.tests.limitedThreads").getOrElse("2")
-
- override def runTestsForFiles(kindFiles: Array[File], kind: String): Array[TestState] = {
- val (limitResourceTests, parallelTests) =
- kindFiles partition { kindFile =>
- val flags = kindFile.changeExtension("flags").fileContents
- limitResourceFlags.exists(seqFlag => flags.contains(seqFlag))
- }
-
- val seqResults =
- if (!limitResourceTests.isEmpty) {
- val savedThreads = sys.props("partest.threads")
- sys.props("partest.threads") = {
- assert(
- savedThreads == null || limitedThreads.toInt <= savedThreads.toInt,
- """|Should not use more threads than the default, when the point
- |is to limit the amount of resources""".stripMargin
- )
- limitedThreads
- }
-
- NestUI.echo(s"## we will run ${limitResourceTests.length} tests using ${PartestDefaults.numThreads} thread(s) in parallel")
- val res = super.runTestsForFiles(limitResourceTests, kind)
-
- if (savedThreads != null)
- sys.props("partest.threads") = savedThreads
- else
- sys.props.remove("partest.threads")
-
- res
- } else Array[TestState]()
-
- val parResults =
- if (!parallelTests.isEmpty) {
- NestUI.echo(s"## we will run ${parallelTests.length} tests in parallel using ${PartestDefaults.numThreads} thread(s)")
- super.runTestsForFiles(parallelTests, kind)
- } else Array[TestState]()
-
- seqResults ++ parResults
- }
-
- // override for DPTestRunner and redirecting compilation output to test.clog
- override def runTest(testFile: File): TestState = {
- val runner = new DPTestRunner(testFile, this)
-
- val state =
- try {
- runner.run match {
- // Append compiler output to transcript if compilation failed,
- // printed with --verbose option
- case TestState.Fail(f, r@"compilation failed", transcript) =>
- TestState.Fail(f, r, transcript ++ runner.cLogFile.fileLines.dropWhile(_ == ""))
- case res => res
- }
- } catch {
- case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
- }
- reportTest(state)
- runner.cleanup()
-
- onFinishTest(testFile, state)
- }
-
- // override NestUI.reportTest because --show-diff doesn't work. The diff used
- // seems to add each line to transcript separately, whereas NestUI assumes
- // that the diff string was added as one entry in the transcript
- def reportTest(state: TestState) = {
- import NestUI._
- import NestUI.color._
-
- if (isTerse && state.isOk) {
- NestUI.reportTest(state)
- } else {
- echo(statusLine(state))
- if (!state.isOk && isDiffy) {
- val differ = bold(red("% ")) + "diff "
- state.transcript.dropWhile(s => !(s startsWith differ)) foreach (echo(_))
- // state.transcript find (_ startsWith differ) foreach (echo(_)) // original
- }
- }
- }
-}
-
-class DPTestRunner(testFile: File, suiteRunner: DPSuiteRunner) extends nest.Runner(testFile, suiteRunner) {
- val cLogFile = SFile(logFile).changeExtension("clog")
-
- // override to provide DottyCompiler
- override def newCompiler = new dotty.partest.DPDirectCompiler(this)
-
- // Adapted from nest.Runner#javac because:
- // - Our classpath handling is different and we need to pass extraClassPath
- // to java to get the scala-library which is required for some java tests
- // - The compiler output should be redirected to cLogFile, like the output of
- // dotty itself
- override def javac(files: List[File]): TestState = {
- import fileManager._
- import suiteRunner._
- import FileManager.joinPaths
- // compile using command-line javac compiler
- val args = Seq(
- suiteRunner.javacCmdPath, // FIXME: Dotty deviation just writing "javacCmdPath" doesn't work
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- joinPaths(outDir :: extraClasspath ++ testClassPath)
- ) ++ files.map(_.getAbsolutePath)
-
- pushTranscript(args mkString " ")
-
- val captured = StreamCapture(runCommand(args, cLogFile))
- if (captured.result) genPass() else {
- cLogFile appendAll captured.stderr
- cLogFile appendAll captured.stdout
- genFail("java compilation failed")
- }
- }
-
- // Overridden in order to recursively get all sources that should be handed to
- // the compiler. Otherwise only sources in the top dir are compiled - this
- // works because the compiler is on the classpath.
- override def sources(file: File): List[File] =
- if (file.isDirectory)
- file.listFiles.toList.flatMap { f =>
- if (f.isDirectory) sources(f)
- else if (f.isJavaOrScala) List(f)
- else Nil
- }
- else List(file)
-
- // Enable me to "fix" the depth issue - remove once completed
- //override def compilationRounds(file: File): List[CompileRound] = {
- // val srcs = sources(file) match {
- // case Nil =>
- // System.err.println {
- // s"""|================================================================================
- // |Warning! You attempted to compile sources from:
- // | $file
- // |but partest was unable to find any sources - uncomment DPConsoleRunner#sources
- // |================================================================================""".stripMargin
- // }
- // List(new File("./tests/pos/HelloWorld.scala")) // "just compile some crap" - Guillaume
- // case xs =>
- // xs
- // }
- // (groupedFiles(srcs) map mixedCompileGroup).flatten
- //}
-
- // FIXME: This is copy-pasted from nest.Runner where it is private
- // Remove this once https://github.com/scala/scala-partest/pull/61 is merged
- /** Runs the command, redirecting standard out and
- * standard error to the output file.
- */
- def runCommand(args: Seq[String], outFile: File): Boolean = {
- import scala.sys.process.{ Process, ProcessLogger }
- //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
- val pl = ProcessLogger(outFile)
- val nonzero = 17 // rounding down from 17.3
- def run: Int = {
- val p = Process(args) run pl
- try p.exitValue
- catch {
- case e: InterruptedException =>
- NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
- p.destroy
- nonzero
- case t: Throwable =>
- NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
- p.destroy
- throw t
- }
- finally pl.close()
- }
- (pl buffer run) == 0
- }
-
- // override to provide default dotty flags from file in directory
- override def flagsForCompilation(sources: List[File]): List[String] = {
- val specificFlags = super.flagsForCompilation(sources)
- if (specificFlags.isEmpty) defaultFlags
- else specificFlags
- }
-
- val defaultFlags = {
- val defaultFile = parentFile.listFiles.toList.find(_.getName == "__defaultFlags.flags")
- defaultFile.map({ file =>
- SFile(file).safeSlurp.map({ content => words(content).filter(_.nonEmpty) }).getOrElse(Nil)
- }).getOrElse(Nil)
- }
-
- // override to add the check for nr of compilation errors if there's a
- // target.nerr file
- override def runNegTest() = runInContext {
- sealed abstract class NegTestState
- // Don't get confused, the neg test passes when compilation fails for at
- // least one round (optionally checking the number of compiler errors and
- // compiler console output)
- case object CompFailed extends NegTestState
- // the neg test fails when all rounds return either of these:
- case class CompFailedButWrongNErr(expected: String, found: String) extends NegTestState
- case object CompFailedButWrongDiff extends NegTestState
- case object CompSucceeded extends NegTestState
-
- def nerrIsOk(reason: String) = {
- val nerrFinder = """compilation failed with (\d+) errors""".r
- reason match {
- case nerrFinder(found) =>
- SFile(FileOps(testFile) changeExtension "nerr").safeSlurp match {
- case Some(exp) if (exp != found) => CompFailedButWrongNErr(exp, found)
- case _ => CompFailed
- }
- case _ => CompFailed
- }
- }
-
- // we keep the partest semantics where only one round needs to fail
- // compilation, not all
- val compFailingRounds =
- compilationRounds(testFile)
- .map { round =>
- val ok = round.isOk
- setLastState(if (ok) genPass else genFail("compilation failed"))
- (round.result, ok)
- }
- .filter { case (_, ok) => !ok }
-
- val failureStates = compFailingRounds.map({ case (result, _) => result match {
- // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
- case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => CompSucceeded
- case Fail(_, reason, _) => if (diffIsOk) nerrIsOk(reason) else CompFailedButWrongDiff
- case _ => if (diffIsOk) CompFailed else CompFailedButWrongDiff
- }})
-
- if (failureStates.exists({ case CompFailed => true; case _ => false })) {
- true
- } else {
- val existsNerr = failureStates.exists({
- case CompFailedButWrongNErr(exp, found) =>
- nextTestActionFailing(s"wrong number of compilation errors, expected: $exp, found: $found")
- true
- case _ =>
- false
- })
-
- if (existsNerr) false
- else {
- val existsDiff = failureStates.exists({
- case CompFailedButWrongDiff =>
- nextTestActionFailing(s"output differs")
- true
- case _ =>
- false
- })
- if (existsDiff) false
- else nextTestActionFailing("expected compilation failure")
- }
- }
- }
-
- // override so that check file updates go to the original file, not the generated one
- override def diffIsOk: Boolean = {
- // always normalize the log first
- normalizeLog()
- val diff = currentDiff
- // if diff is not empty, is update needed?
- val updating: Option[Boolean] = (
- if (diff == "") None
- else Some(suiteRunner.updateCheck)
- )
- pushTranscript(s"diff $logFile $checkFile")
- nextTestAction(updating) {
- case Some(true) =>
- val origCheck = SFile(checkFile.changeExtension("checksrc").fileLines(1))
- NestUI.echo("Updating original checkfile " + origCheck)
- origCheck writeAll file2String(logFile)
- genUpdated()
- case Some(false) =>
- // Get a word-highlighted diff from git if we can find it
- val bestDiff = if (updating.isEmpty) "" else {
- if (checkFile.canRead)
- gitDiff(logFile, checkFile) getOrElse {
- s"diff $logFile $checkFile\n$diff"
- }
- else diff
- }
- pushTranscript(bestDiff)
- genFail("output differs")
- case None => genPass() // redundant default case
- } getOrElse true
- }
-
- // override to add dotty and scala jars to classpath
- override def extraClasspath =
- suiteRunner.fileManager.asInstanceOf[DottyFileManager].extraJarList ::: super.extraClasspath
-
-
- // FIXME: Dotty deviation: error if return type is omitted:
- // overriding method cleanup in class Runner of type ()Unit;
- // method cleanup of type => Boolean | Unit has incompatible type
-
- // override to keep class files if failed and delete clog if ok
- override def cleanup: Unit = if (lastState.isOk) {
- logFile.delete
- cLogFile.delete
- Directory(outDir).deleteRecursively
- }
-}
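
The `-dottyJars <count> <jar>...` convention that the deleted runner above expects is easy to get wrong when invoking it outside sbt, since sbt's runTask delivers it as a single string. Below is a minimal, self-contained sketch of the same parse, using only the regex visible in `DPConsoleRunner.main`; the names `DottyJarsParseSketch` and `parseDottyJars` are illustrative and not part of the codebase.

    import scala.util.matching.Regex

    object DottyJarsParseSketch {
      // Same pattern as in DPConsoleRunner.main: "-dottyJars <count> <jar1> <jar2> ..."
      private val jarFinder: Regex = """-dottyJars (\d*) (.*)""".r

      /** Splits the argument list into (extra jars, remaining partest args). */
      def parseDottyJars(args: List[String]): (List[String], List[String]) = {
        val (jarArgs, otherArgs) = args.partition(jarFinder.findFirstIn(_).isDefined)
        jarArgs match {
          case jarFinder(count, jarString) :: Nil =>
            val jars = jarString.split(" ").toList
            (jars.take(count.toInt), otherArgs ::: jars.drop(count.toInt))
          case other =>
            sys.error(s"expected exactly one -dottyJars option, got: $other")
        }
      }

      def main(args: Array[String]): Unit = {
        val (jars, rest) =
          parseDottyJars(List("-dottyJars 2 dotty.jar scala-library.jar", "--verbose"))
        println(jars) // List(dotty.jar, scala-library.jar)
        println(rest) // List(--verbose)
      }
    }
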
diff --git a/compiler/test/dotty/partest/DPDirectCompiler.scala b/compiler/test/dotty/partest/DPDirectCompiler.scala
deleted file mode 100644
index 410dac338..000000000
--- a/compiler/test/dotty/partest/DPDirectCompiler.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package dotty.partest
-
-import dotty.tools.dotc.reporting.ConsoleReporter
-import scala.tools.partest.{ TestState, nest }
-import java.io.{ File, PrintWriter, FileWriter }
-
-
-/* NOTE: Adapted from partest.DirectCompiler */
-class DPDirectCompiler(runner: DPTestRunner) extends nest.DirectCompiler(runner) {
-
- override def compile(opts0: List[String], sources: List[File]): TestState = {
- val clogFWriter = new FileWriter(runner.cLogFile.jfile, true)
- val clogWriter = new PrintWriter(clogFWriter, true)
- clogWriter.println("\ncompiling " + sources.mkString(" ") + "\noptions: " + opts0.mkString(" "))
-
- try {
- val processor =
- if (opts0.exists(_.startsWith("#"))) dotty.tools.dotc.Bench else dotty.tools.dotc.Main
- val clogger = new ConsoleReporter(writer = clogWriter)
- val reporter = processor.process((sources.map(_.toString) ::: opts0).toArray, clogger)
- if (!reporter.hasErrors) runner.genPass()
- else {
- clogWriter.println(reporter.summary)
- runner.genFail(s"compilation failed with ${reporter.errorCount} errors")
- }
- } catch {
- case t: Throwable =>
- t.printStackTrace
- t.printStackTrace(clogWriter)
- runner.genCrash(t)
- } finally {
- clogFWriter.close
- clogWriter.close
- }
- }
-}
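
For reference, the compile call that `DPDirectCompiler` wrapped is the plain `process(args, reporter)` entry point of the dotty driver, which returns the reporter so callers can inspect `hasErrors` and `errorCount`. A minimal sketch of calling it directly follows; it assumes a writable `out` directory and an existing source file, and both paths are illustrative only.

    import java.io.PrintWriter
    import dotty.tools.dotc.Main
    import dotty.tools.dotc.reporting.ConsoleReporter

    object CompileOnceSketch {
      def main(args: Array[String]): Unit = {
        // Same call shape as the deleted DPDirectCompiler.compile above:
        // flags and source paths in one array, plus the reporter to use.
        val reporter = Main.process(
          Array("-d", "out", "tests/pos/HelloWorld.scala"),
          new ConsoleReporter(writer = new PrintWriter(System.out, true))
        )
        if (reporter.hasErrors)
          println(s"compilation failed with ${reporter.errorCount} errors")
        else
          println("compilation succeeded")
      }
    }
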
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index 742b93fae..ff50d7238 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -2,19 +2,24 @@ package dotty
package tools
package dotc
-import org.junit.Test
-import java.io.{ File => JFile }
-import org.junit.experimental.categories.Category
+import org.junit.{ Test, BeforeClass, AfterClass }
import scala.util.matching.Regex
+import scala.concurrent.duration._
-@Category(Array(classOf[ParallelTesting]))
-class CompilationTests extends ParallelSummaryReport with ParallelTesting {
+import vulpix.{ ParallelTesting, SummaryReport, SummaryReporting, TestConfiguration }
+
+class CompilationTests extends ParallelTesting {
+ import TestConfiguration._
import CompilationTests._
- def isInteractive: Boolean = ParallelSummaryReport.isInteractive
+ // Test suite configuration --------------------------------------------------
- def testFilter: Option[Regex] = sys.props.get("dotty.partest.filter").map(r => new Regex(r))
+ def maxDuration = 180.seconds
+ def numberOfSlaves = 5
+ def safeMode = Properties.testsSafeMode
+ def isInteractive = SummaryReport.isInteractive
+ def testFilter = Properties.testsFilter
// Positive tests ------------------------------------------------------------
@@ -211,9 +216,9 @@ class CompilationTests extends ParallelSummaryReport with ParallelTesting {
val opt = Array(
"-classpath",
// compile with bootstrapped library on cp:
- defaultOutputDir + "lib$1/src/:" +
+ defaultOutputDir + "lib/src/:" +
// as well as bootstrapped compiler:
- defaultOutputDir + "dotty1$1/dotty/:" +
+ defaultOutputDir + "dotty1/dotty/:" +
Jars.dottyInterfaces
)
@@ -248,65 +253,6 @@ class CompilationTests extends ParallelSummaryReport with ParallelTesting {
}
object CompilationTests {
- implicit val defaultOutputDir: String = "../out/"
-
- implicit class RichStringArray(val xs: Array[String]) extends AnyVal {
- def and(args: String*): Array[String] = {
- val argsArr: Array[String] = args.toArray
- xs ++ argsArr
- }
- }
-
- val noCheckOptions = Array(
- "-pagewidth", "120",
- "-color:never"
- )
-
- val checkOptions = Array(
- "-Yno-deep-subtypes",
- "-Yno-double-bindings",
- "-Yforce-sbt-phases"
- )
-
- val classPath = {
- val paths = Jars.dottyTestDeps map { p =>
- val file = new JFile(p)
- assert(
- file.exists,
- s"""|File "$p" couldn't be found. Run `packageAll` from build tool before
- |testing.
- |
- |If running without sbt, test paths need to be set via environment variables:
- |
- | - DOTTY_LIBRARY
- | - DOTTY_COMPILER
- | - DOTTY_INTERFACES
- | - DOTTY_EXTRAS
- |
- |Where these all contain locations, except extras which is a colon
- |separated list of jars.
- |
- |When compiling with eclipse, you need the sbt-interfaces jar, put
- |it in extras."""
- )
- file.getAbsolutePath
- } mkString (":")
-
- Array("-classpath", paths)
- }
-
- private val yCheckOptions = Array("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef")
-
- val defaultOptions = noCheckOptions ++ checkOptions ++ yCheckOptions ++ classPath
- val allowDeepSubtypes = defaultOptions diff Array("-Yno-deep-subtypes")
- val allowDoubleBindings = defaultOptions diff Array("-Yno-double-bindings")
- val picklingOptions = defaultOptions ++ Array(
- "-Xprint-types",
- "-Ytest-pickler",
- "-Ystop-after:pickler",
- "-Yprintpos"
- )
- val scala2Mode = defaultOptions ++ Array("-language:Scala2")
- val explicitUTF8 = defaultOptions ++ Array("-encoding", "UTF8")
- val explicitUTF16 = defaultOptions ++ Array("-encoding", "UTF16")
+ implicit val summaryReport: SummaryReporting = new SummaryReport
+ @AfterClass def cleanup(): Unit = summaryReport.echoSummary()
}
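
After this change a JUnit suite opts into vulpix by extending `ParallelTesting` and supplying the configuration members shown above, plus a per-suite `SummaryReport` that is echoed from an `@AfterClass` hook. The skeleton below is restricted to the members visible in this diff (which trait each member ultimately comes from, e.g. `RunnerOrchestration`, is not shown here, and any further abstract members are omitted); the class name `MyCompilationTests` and the placeholder test body are illustrative.

    package dotty
    package tools
    package dotc

    import org.junit.{ AfterClass, Test }
    import scala.concurrent.duration._

    import vulpix.{ ParallelTesting, SummaryReport, SummaryReporting }

    class MyCompilationTests extends ParallelTesting {
      import MyCompilationTests._

      // Test suite configuration, mirroring CompilationTests above:
      def maxDuration = 180.seconds                    // upper bound for a single run test
      def numberOfSlaves = 5                           // child JVMs used by run tests
      def safeMode = Properties.testsSafeMode
      def isInteractive = SummaryReport.isInteractive  // progress bar only on a real terminal
      def testFilter = Properties.testsFilter          // substring filter over absolute test paths

      @Test def example: Unit = ()                     // placeholder; real suites compile test dirs here
    }

    object MyCompilationTests {
      // One aggregated report per suite, echoed once all tests have finished:
      implicit val summaryReport: SummaryReporting = new SummaryReport
      @AfterClass def cleanup(): Unit = summaryReport.echoSummary()
    }
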
diff --git a/compiler/test/dotty/tools/dotc/CompilerTest.scala b/compiler/test/dotty/tools/dotc/CompilerTest.scala
index f35f9f919..c5234ccca 100644
--- a/compiler/test/dotty/tools/dotc/CompilerTest.scala
+++ b/compiler/test/dotty/tools/dotc/CompilerTest.scala
@@ -2,7 +2,6 @@ package dotty.tools.dotc
import repl.TestREPL
import core.Contexts._
-import dotty.partest.DPConfig
import interfaces.Diagnostic.ERROR
import reporting._
import diagnostic.MessageContainer
@@ -11,33 +10,11 @@ import config.CompilerCommand
import dotty.tools.io.PlainFile
import scala.collection.mutable.ListBuffer
import scala.reflect.io.{ Path, Directory, File => SFile, AbstractFile }
-import scala.tools.partest.nest.{ FileManager, NestUI }
import scala.annotation.tailrec
import java.io.{ RandomAccessFile, File => JFile }
-/** This class has two modes: it can directly run compiler tests, or it can
- * generate the necessary file structure for partest in the directory
- * DPConfig.testRoot. Both modes are regular JUnit tests. Which mode is used
- * depends on the existence of the tests/locks/partest-ppid.lock file which is
- * created by sbt to trigger partest generation. Sbt will then run partest on
- * the generated sources.
- *
- * Through overriding the partestableXX methods, tests can always be run as
- * JUnit compiler tests. Run tests cannot be run by JUnit, only by partest.
- *
- * A test can either be a file or a directory. Partest will generate a
- * <test>-<kind>.log file with output of failed tests. Partest reads compiler
- * flags and the number of errors expected from a neg test from <test>.flags
- * and <test>.nerr files (also generated). The test is in a parent directory
- * that determines the kind of test:
- * - pos: checks that compilation succeeds
- * - neg: checks that compilation fails with the given number of errors
- * - run: compilation succeeds, partest: test run generates the output in
- * <test>.check. Run tests always need to be:
- * object Test { def main(args: Array[String]): Unit = ... }
- * Classpath jars can be added to partestDeps in the sbt Build.scala.
- */
+/** Legacy compiler tests that run single-threaded */
abstract class CompilerTest {
/** Override with output dir of test so it can be patched. Partest expects
@@ -49,32 +26,9 @@ abstract class CompilerTest {
def partestableDir(prefix: String, dirName: String, args: List[String]) = true
def partestableList(testName: String, files: List[String], args: List[String]) = true
- val generatePartestFiles = {
- /* Because we fork in test, the JVM in which this JUnit test runs has a
- * different pid from the one that started the partest. But the forked VM
- * receives the pid of the parent as system property. If the lock file
- * exists, the parent is requesting partest generation. This mechanism
- * allows one sbt instance to run test (JUnit only) and another partest.
- * We cannot run two instances of partest at the same time, because they're
- * writing to the same directories. The sbt lock file generation prevents
- * this.
- */
- val pid = System.getProperty("partestParentID")
- if (pid == null)
- false
- else
- new JFile(".." + JFile.separator + "tests" + JFile.separator + "locks" + JFile.separator + s"partest-$pid.lock").exists
- }
-
- // Delete generated files from previous run and create new log
- val logFile = if (!generatePartestFiles) None else Some(CompilerTest.init)
-
/** Always run with JUnit. */
- def compileLine(cmdLine: String)(implicit defaultOptions: List[String]): Unit = {
- if (generatePartestFiles)
- log("WARNING: compileLine will always run with JUnit, no partest files generated.")
+ def compileLine(cmdLine: String)(implicit defaultOptions: List[String]): Unit =
compileArgs(cmdLine.split("\n"), Nil)
- }
/** Compiles the given code file.
*
@@ -88,36 +42,22 @@ abstract class CompilerTest {
(implicit defaultOptions: List[String]): Unit = {
val filePath = s"$prefix$fileName$extension"
val expErrors = expectedErrors(filePath)
- if (!generatePartestFiles || !partestableFile(prefix, fileName, extension, args ++ defaultOptions)) {
- if (runTest)
- log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$fileName$extension")
- if (args.contains("-rewrite")) {
- val file = new PlainFile(filePath)
- val data = file.toByteArray
- // compile with rewrite
- compileArgs((filePath :: args).toArray, expErrors)
- // compile again, check that file now compiles without -language:Scala2
- val plainArgs = args.filter(arg => arg != "-rewrite" && arg != "-language:Scala2")
- compileFile(prefix, fileName, plainArgs, extension, runTest)
- // restore original test file
- val out = file.output
- out.write(data)
- out.close()
- }
- else compileArgs((filePath :: args).toArray, expErrors)
- } else {
- val kind = testKind(prefix, runTest)
- log(s"generating partest files for test file: $prefix$fileName$extension of kind $kind")
-
- val sourceFile = new JFile(prefix + fileName + extension)
- if (sourceFile.exists) {
- val firstDest = SFile(DPConfig.testRoot + JFile.separator + kind + JFile.separator + fileName + extension)
- val xerrors = expErrors.map(_.totalErrors).sum
- computeDestAndCopyFiles(sourceFile, firstDest, kind, args ++ defaultOptions, xerrors.toString)
- } else {
- throw new java.io.FileNotFoundException(s"Unable to locate test file $prefix$fileName")
- }
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$fileName$extension")
+ if (args.contains("-rewrite")) {
+ val file = new PlainFile(filePath)
+ val data = file.toByteArray
+ // compile with rewrite
+ compileArgs((filePath :: args).toArray, expErrors)
+ // compile again, check that file now compiles without -language:Scala2
+ val plainArgs = args.filter(arg => arg != "-rewrite" && arg != "-language:Scala2")
+ compileFile(prefix, fileName, plainArgs, extension, runTest)
+ // restore original test file
+ val out = file.output
+ out.write(data)
+ out.close()
}
+ else compileArgs((filePath :: args).toArray, expErrors)
}
def runFile(prefix: String, fileName: String, args: List[String] = Nil, extension: String = ".scala")
(implicit defaultOptions: List[String]): Unit = {
@@ -167,33 +107,11 @@ abstract class CompilerTest {
val expErrors = expectedErrors(filePaths.toList)
(filePaths, javaFilePaths, normArgs, expErrors)
}
- if (!generatePartestFiles || !partestableDir(prefix, dirName, args ++ defaultOptions)) {
- if (runTest)
- log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$dirName")
- val (filePaths, javaFilePaths, normArgs, expErrors) = computeFilePathsAndExpErrors
- compileWithJavac(javaFilePaths, Array.empty) // javac needs to run first on dotty-library
- compileArgs(javaFilePaths ++ filePaths ++ normArgs, expErrors)
- } else {
- val (sourceDir, flags, deep) = args match {
- case "-deep" :: args1 => (flattenDir(prefix, dirName), args1 ++ defaultOptions, "deep")
- case _ => (new JFile(prefix + dirName), args ++ defaultOptions, "shallow")
- }
- val kind = testKind(prefix, runTest)
- log(s"generating partest files for test directory ($deep): $prefix$dirName of kind $kind")
-
- if (sourceDir.exists) {
- val firstDest = Directory(DPConfig.testRoot + JFile.separator + kind + JFile.separator + dirName)
- val xerrors = if (isNegTest(prefix)) {
- val (_, _, _, expErrors) = computeFilePathsAndExpErrors
- expErrors.map(_.totalErrors).sum
- } else 0
- computeDestAndCopyFiles(sourceDir, firstDest, kind, flags, xerrors.toString)
- if (deep == "deep")
- Directory(sourceDir).deleteRecursively
- } else {
- throw new java.io.FileNotFoundException(s"Unable to locate test dir $prefix$dirName")
- }
- }
+ if (runTest)
+ log(s"WARNING: run tests can only be run by partest, JUnit just verifies compilation: $prefix$dirName")
+ val (filePaths, javaFilePaths, normArgs, expErrors) = computeFilePathsAndExpErrors
+ compileWithJavac(javaFilePaths, Array.empty) // javac needs to run first on dotty-library
+ compileArgs(javaFilePaths ++ filePaths ++ normArgs, expErrors)
}
def runDir(prefix: String, dirName: String, args: List[String] = Nil)
(implicit defaultOptions: List[String]): Unit =
@@ -222,19 +140,8 @@ abstract class CompilerTest {
/** Compiles the given list of code files. */
def compileList(testName: String, files: List[String], args: List[String] = Nil)
(implicit defaultOptions: List[String]): Unit = {
- if (!generatePartestFiles || !partestableList(testName, files, args ++ defaultOptions)) {
- val expErrors = expectedErrors(files)
- compileArgs((files ++ args).toArray, expErrors)
- } else {
- val destDir = Directory(DPConfig.testRoot + JFile.separator + testName)
- files.foreach({ file =>
- val sourceFile = new JFile(file)
- val destFile = destDir / (if (file.startsWith("../")) file.substring(3) else file)
- recCopyFiles(sourceFile, destFile)
- })
- compileDir(DPConfig.testRoot + JFile.separator, testName, args)
- destDir.deleteRecursively
- }
+ val expErrors = expectedErrors(files)
+ compileArgs((files ++ args).toArray, expErrors)
}
// ========== HELPERS =============
@@ -425,60 +332,6 @@ abstract class CompilerTest {
}
import Difference._
- /** The same source might be used for several partest test cases (e.g. with
- * different flags). Detects existing versions and computes the path to be
- * used for this version, e.g. testname_v1 for the first alternative. */
- private def computeDestAndCopyFiles(source: JFile, dest: Path, kind: String, oldFlags: List[String], nerr: String,
- nr: Int = 0, oldOutput: String = defaultOutputDir): Unit = {
-
- val partestOutput = dest.jfile.getParentFile + JFile.separator + dest.stripExtension + "-" + kind + ".obj"
-
- val altOutput =
- source.getParentFile.getAbsolutePath.map(x => if (x == JFile.separatorChar) '_' else x)
-
- val (beforeCp, remaining) = oldFlags
- .map(f => if (f == oldOutput) partestOutput else f)
- .span(_ != "-classpath")
- val flags = beforeCp ++ List("-classpath", (partestOutput :: remaining.drop(1)).mkString(":"))
-
- val difference = getExisting(dest).isDifferent(source, flags, nerr)
- difference match {
- case NotExists => copyFiles(source, dest, partestOutput, flags, nerr, kind)
- case ExistsSame => // nothing else to do
- case ExistsDifferent =>
- val nextDest = dest.parent / (dest match {
- case d: Directory =>
- val newVersion = replaceVersion(d.name, nr).getOrElse(altOutput)
- Directory(newVersion)
- case f =>
- val newVersion = replaceVersion(f.stripExtension, nr).getOrElse(altOutput)
- SFile(newVersion).addExtension(f.extension)
- })
- computeDestAndCopyFiles(source, nextDest, kind, flags, nerr, nr + 1, partestOutput)
- }
- }
-
- /** Copies the test sources. Creates flags, nerr, check and output files. */
- private def copyFiles(sourceFile: Path, dest: Path, partestOutput: String, flags: List[String], nerr: String, kind: String) = {
- recCopyFiles(sourceFile, dest)
-
- new JFile(partestOutput).mkdirs
-
- if (flags.nonEmpty)
- dest.changeExtension("flags").createFile(true).writeAll(flags.mkString(" "))
- if (nerr != "0")
- dest.changeExtension("nerr").createFile(true).writeAll(nerr)
- sourceFile.changeExtension("check").ifFile({ check =>
- if (kind == "run") {
- FileManager.copyFile(check.jfile, dest.changeExtension("check").jfile)
- dest.changeExtension("checksrc").createFile(true).writeAll("check file generated from source:\n" + check.toString)
- } else {
- log(s"WARNING: ignoring $check for test kind $kind")
- }
- })
-
- }
-
/** Recursively copy over source files and directories, excluding extensions
* that aren't in extensionsToCopy. */
private def recCopyFiles(sourceFile: Path, dest: Path): Unit = {
@@ -576,38 +429,6 @@ abstract class CompilerTest {
}
}
- /** Creates a temporary directory and copies all (deep) files over, thus
- * flattening the directory structure. */
- private def flattenDir(prefix: String, dirName: String): JFile = {
- val destDir = Directory(DPConfig.testRoot + JFile.separator + "_temp")
- Directory(prefix + dirName).deepFiles.foreach(source => recCopyFiles(source, destDir / source.name))
- destDir.jfile
- }
-
- /** Write either to console (JUnit) or log file (partest). */
- private def log(msg: String) = logFile.map(_.appendAll(msg + "\n")).getOrElse(println(msg))
-}
-
-object CompilerTest extends App {
-
- /** Deletes generated partest sources from a previous run, recreates
- * directory and returns the freshly created log file. */
- lazy val init: SFile = {
- scala.reflect.io.Directory(DPConfig.testRoot).deleteRecursively
- new JFile(DPConfig.testRoot).mkdirs
- val log = DPConfig.genLog.createFile(true)
- println(s"CompilerTest is generating tests for partest, log: $log")
- log
- }
-
-// val dotcDir = "/Users/odersky/workspace/dotty/src/dotty/"
-
-// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "CompilationUnit")
-// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Compiler")
-// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Driver")
-// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Main")
-// new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
-
-// new CompilerTest().compileDir(dotcDir + "tools/dotc")
- // new CompilerTest().compileFile(dotcDir + "tools/dotc/", "Run")
+ /** Write to console */
+ private def log(msg: String) = println(msg)
}
diff --git a/compiler/test/dotty/tools/dotc/ParallelSummaryReport.java b/compiler/test/dotty/tools/dotc/ParallelSummaryReport.java
deleted file mode 100644
index 5608b3656..000000000
--- a/compiler/test/dotty/tools/dotc/ParallelSummaryReport.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package dotty.tools.dotc;
-
-import org.junit.BeforeClass;
-import org.junit.AfterClass;
-import java.util.ArrayDeque;
-
-import dotty.tools.dotc.reporting.TestReporter;
-import dotty.tools.dotc.reporting.TestReporter$;
-
-/** Note that while `ParallelTesting` runs in parallel, JUnit tests cannot run
- * in parallel when using this class
- */
-public class ParallelSummaryReport {
- public final static boolean isInteractive = !System.getenv().containsKey("DRONE");
-
- private static TestReporter rep = TestReporter.reporter(System.out, -1);
- private static ArrayDeque<String> failedTests = new ArrayDeque<>();
- private static ArrayDeque<String> reproduceInstructions = new ArrayDeque<>();
- private static int passed;
- private static int failed;
-
- public final static void reportFailed() {
- failed++;
- }
-
- public final static void reportPassed() {
- passed++;
- }
-
- public final static void addFailedTest(String msg) {
- failedTests.offer(msg);
- }
-
- public final static void addReproduceInstruction(String msg) {
- reproduceInstructions.offer(msg);
- }
-
- @BeforeClass public final static void setup() {
- rep = TestReporter.reporter(System.out, -1);
- failedTests = new ArrayDeque<>();
- reproduceInstructions = new ArrayDeque<>();
- }
-
- @AfterClass public final static void teardown() {
- rep.echo(
- "\n================================================================================" +
- "\nTest Report" +
- "\n================================================================================" +
- "\n" +
- passed + " passed, " + failed + " failed, " + (passed + failed) + " total" +
- "\n"
- );
-
- failedTests
- .stream()
- .map(x -> " " + x)
- .forEach(rep::echo);
-
- // If we're compiling locally, we don't need reproduce instructions
- if (isInteractive) rep.flushToStdErr();
-
- rep.echo("");
-
- reproduceInstructions
- .stream()
- .forEach(rep::echo);
-
- // If we're on the CI, we want everything
- if (!isInteractive) rep.flushToStdErr();
-
- if (failed > 0) rep.flushToFile();
- }
-}
diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
index 5641240a7..213181b56 100644
--- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
+++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
@@ -23,6 +23,10 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
final def errors: Iterator[MessageContainer] = _errorBuf.iterator
protected final val _messageBuf = mutable.ArrayBuffer.empty[String]
+ final def messages: Iterator[String] = _messageBuf.iterator
+
+ private[this] var _didCrash = false
+ final def compilerCrashed: Boolean = _didCrash
final def flushToFile(): Unit =
_messageBuf
@@ -33,7 +37,6 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
final def flushToStdErr(): Unit =
_messageBuf
.iterator
- .map(_.replaceAll("\u001b\\[.*?m", ""))
.foreach(System.err.println)
final def inlineInfo(pos: SourcePosition): String =
@@ -44,9 +47,17 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
}
else ""
- def echo(msg: String) =
+ def log(msg: String) =
_messageBuf.append(msg)
+ def logStackTrace(thrown: Throwable): Unit = {
+ _didCrash = true
+ val sw = new java.io.StringWriter
+ val pw = new java.io.PrintWriter(sw)
+ thrown.printStackTrace(pw)
+ log(sw.toString)
+ }
+
/** Prints the message with the given position indication. */
def printMessageAndPos(m: MessageContainer, extra: String)(implicit ctx: Context): Unit = {
val msg = messageAndPos(m.contained, m.pos, diagnosticLevel(m))
@@ -73,42 +84,66 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
_errorBuf.append(m)
printMessageAndPos(m, extra)
}
- case w: Warning =>
- printMessageAndPos(w, extra)
- case _ =>
+ case m =>
+ printMessageAndPos(m, extra)
}
}
}
object TestReporter {
- private[this] lazy val logWriter = {
+ private[this] var outFile: JFile = _
+ private[this] var logWriter: PrintWriter = _
+
+ private[this] def initLog() = if (logWriter eq null) {
val df = new SimpleDateFormat("yyyy-MM-dd-HH:mm")
val timestamp = df.format(new Date)
new JFile("../testlogs").mkdirs()
- new PrintWriter(new FileOutputStream(new JFile(s"../testlogs/tests-$timestamp.log"), true))
+ outFile = new JFile(s"../testlogs/tests-$timestamp.log")
+ logWriter = new PrintWriter(new FileOutputStream(outFile, true))
}
- def writeToLog(str: String) = {
+ def logPrintln(str: String) = {
+ initLog()
logWriter.println(str)
logWriter.flush()
}
+ def logPrint(str: String): Unit = {
+ initLog()
+ logWriter.println(str)
+ }
+
+ def logFlush(): Unit =
+ if (logWriter ne null) logWriter.flush()
+
+ def logPath: String = {
+ initLog()
+ outFile.getCanonicalPath
+ }
+
def reporter(ps: PrintStream, logLevel: Int): TestReporter =
- new TestReporter(new PrintWriter(ps, true), writeToLog, logLevel)
+ new TestReporter(new PrintWriter(ps, true), logPrintln, logLevel)
def simplifiedReporter(writer: PrintWriter): TestReporter = {
- val rep = new TestReporter(writer, writeToLog, WARNING) {
+ val rep = new TestReporter(writer, logPrintln, WARNING) {
/** Prints the message with the given position indication in a simplified manner */
override def printMessageAndPos(m: MessageContainer, extra: String)(implicit ctx: Context): Unit = {
- val msg = s"${m.pos.line + 1}: " + m.contained.kind + extra
- val extraInfo = inlineInfo(m.pos)
+ def report() = {
+ val msg = s"${m.pos.line + 1}: " + m.contained.kind + extra
+ val extraInfo = inlineInfo(m.pos)
- writer.println(msg)
- _messageBuf.append(msg)
+ writer.println(msg)
+ _messageBuf.append(msg)
- if (extraInfo.nonEmpty) {
- writer.println(extraInfo)
- _messageBuf.append(extraInfo)
+ if (extraInfo.nonEmpty) {
+ writer.println(extraInfo)
+ _messageBuf.append(extraInfo)
+ }
+ }
+ m match {
+ case m: Error => report()
+ case m: Warning => report()
+ case _ => ()
}
}
}
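
The `TestReporter` changes above replace the old `echo` with `log`, add a `messages` iterator over everything logged, and record compiler crashes via `logStackTrace` so callers can check `compilerCrashed` instead of letting the exception escape. The following is only a sketch of that flow as vulpix uses it; `compileSomething` is a stand-in for the actual compile call and not a real API, while `TestReporter.reporter` and `TestReporter.logPrintln` come from the companion object shown above.

    import scala.util.control.NonFatal
    import dotty.tools.dotc.reporting.TestReporter

    object CrashTrackingSketch {
      /** Runs a compilation step, recording any crash on the reporter. */
      def compileAndReport(compileSomething: TestReporter => Unit): TestReporter = {
        val reporter = TestReporter.reporter(System.out, logLevel = -1)
        try compileSomething(reporter)
        catch { case NonFatal(ex) => reporter.logStackTrace(ex) } // marks compilerCrashed

        if (reporter.compilerCrashed || reporter.errorCount > 0)
          reporter.messages.foreach(TestReporter.logPrintln) // forward to the shared test log
        reporter
      }
    }
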
diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
index eff86e6e7..1ec4a70a5 100644
--- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
+++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
@@ -9,11 +9,12 @@ import scala.io.Source._
import scala.reflect.io.Directory
import org.junit.Test
import reporting.TestReporter
+import vulpix.TestConfiguration
class PatmatExhaustivityTest {
val testsDir = "../tests/patmat"
// stop-after: patmatexhaust-huge.scala crash compiler
- val options = List("-color:never", "-Ystop-after:splitter", "-Ycheck-all-patmat") ++ CompilationTests.classPath
+ val options = List("-color:never", "-Ystop-after:splitter", "-Ycheck-all-patmat") ++ TestConfiguration.classPath
private def compileFile(file: File) = {
val stringBuffer = new StringWriter()
diff --git a/compiler/test/dotty/tools/vulpix/ChildJVMMain.java b/compiler/test/dotty/tools/vulpix/ChildJVMMain.java
new file mode 100644
index 000000000..90b795898
--- /dev/null
+++ b/compiler/test/dotty/tools/vulpix/ChildJVMMain.java
@@ -0,0 +1,34 @@
+package dotty.tools.vulpix;
+
+import java.io.File;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.lang.reflect.Method;
+
+public class ChildJVMMain {
+ static final String MessageEnd = "##THIS IS THE END FOR ME, GOODBYE##";
+
+ private static void runMain(String dir) throws Exception {
+ ArrayList<URL> cp = new ArrayList<>();
+ for (String path : dir.split(":"))
+ cp.add(new File(path).toURI().toURL());
+
+ URLClassLoader ucl = new URLClassLoader(cp.toArray(new URL[cp.size()]));
+ Class<?> cls = ucl.loadClass("Test");
+ Method meth = cls.getMethod("main", String[].class);
+ Object[] args = new Object[]{ new String[]{ "jvm" } };
+ meth.invoke(null, args);
+ }
+
+ public static void main(String[] args) throws Exception {
+ BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
+
+ while (true) {
+ runMain(stdin.readLine());
+ System.out.println(MessageEnd);
+ }
+ }
+}
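
`ChildJVMMain` is the child half of a very small line protocol: the parent writes one classpath per test on the child's stdin, the child loads `Test` from that classpath, invokes `Test.main(Array("jvm"))`, and prints the `MessageEnd` sentinel when it is done. The parent half lives in `RunnerOrchestration`, which is not part of this excerpt; the sketch below only illustrates that protocol from the parent's side, and `childJVMClasspath` is assumed to contain the compiled vulpix classes.

    import java.io.{ BufferedReader, InputStreamReader, PrintStream }

    object ChildJVMProtocolSketch {
      val MessageEnd = "##THIS IS THE END FOR ME, GOODBYE##"

      /** Spawns a child JVM and runs one `Test.main` on the given test classpath. */
      def runTestMain(childJVMClasspath: String, testClasspath: String): List[String] = {
        val process = new ProcessBuilder(
          "java", "-cp", childJVMClasspath, "dotty.tools.vulpix.ChildJVMMain")
          .redirectErrorStream(true)
          .start()

        val childStdin  = new PrintStream(process.getOutputStream, /* autoFlush = */ true)
        val childStdout = new BufferedReader(new InputStreamReader(process.getInputStream))

        childStdin.println(testClasspath) // ChildJVMMain splits this on ':' and loads `Test`
        Iterator.continually(childStdout.readLine())             // capture the test's output
          .takeWhile(line => line != null && line != MessageEnd) // until the sentinel
          .toList
      }
    }
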
diff --git a/compiler/test/dotty/tools/dotc/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
index 80c56808b..b0312523d 100644
--- a/compiler/test/dotty/tools/dotc/ParallelTesting.scala
+++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
@@ -1,11 +1,10 @@
package dotty
package tools
-package dotc
+package vulpix
import java.io.{ File => JFile }
import java.text.SimpleDateFormat
import java.util.HashMap
-import java.lang.reflect.InvocationTargetException
import java.nio.file.StandardCopyOption.REPLACE_EXISTING
import java.nio.file.{ Files, Path, Paths, NoSuchFileException }
import java.util.concurrent.{ Executors => JExecutors, TimeUnit, TimeoutException }
@@ -17,11 +16,12 @@ import scala.collection.mutable
import scala.util.matching.Regex
import scala.util.Random
-import core.Contexts._
-import reporting.{ Reporter, TestReporter }
-import reporting.diagnostic.MessageContainer
-import interfaces.Diagnostic.ERROR
+import dotc.core.Contexts._
+import dotc.reporting.{ Reporter, TestReporter }
+import dotc.reporting.diagnostic.MessageContainer
+import dotc.interfaces.Diagnostic.ERROR
import dotc.util.DiffUtil
+import dotc.{ Driver, Compiler }
/** A parallel testing suite whose goal is to integrate nicely with JUnit
*
@@ -29,20 +29,20 @@ import dotc.util.DiffUtil
* using this, you should be running your JUnit tests **sequentially**, as the
* test suite itself runs with a high level of concurrency.
*/
-trait ParallelTesting { self =>
+trait ParallelTesting extends RunnerOrchestration { self =>
import ParallelTesting._
- import ParallelSummaryReport._
/** If the running environment supports an interactive terminal, each `Test`
* will be run with a progress bar and real time feedback
*/
def isInteractive: Boolean
- /** A regex which is used to filter which tests to run, if `None` will run
- * all tests
+ /** A string which is used to filter which tests to run; if `None`, all
+ * tests will be run. All test sources whose absolute path contains the
+ * substring `testFilter` will be run
*/
- def testFilter: Option[Regex]
+ def testFilter: Option[String]
/** A test source whose files or directory of files is to be compiled
* in a specific way defined by the `Test`
@@ -52,6 +52,15 @@ trait ParallelTesting { self =>
def outDir: JFile
def flags: Array[String]
+ def classPath: String =
+ outDir.getAbsolutePath +
+ flags
+ .dropWhile(_ != "-classpath")
+ .drop(1)
+ .headOption
+ .map(":" + _)
+ .getOrElse("")
+
def title: String = self match {
case self: JointCompilationSource =>
@@ -174,21 +183,48 @@ trait ParallelTesting { self =>
/** Each `Test` takes the `testSources` and performs the compilation and assertions
* according to the implementing class "neg", "run" or "pos".
*/
- private abstract class Test(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean) {
+ private abstract class Test(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit val summaryReport: SummaryReporting) { test =>
+
+ import summaryReport._
+
protected final val realStdout = System.out
protected final val realStderr = System.err
+ /** A runnable that logs its contents in a buffer */
+ trait LoggedRunnable extends Runnable {
+ /** Instances of `LoggedRunnable` implement this method instead of the
+ * `run` method
+ */
+ def checkTestSource(): Unit
+
+ private[this] val logBuffer = mutable.ArrayBuffer.empty[String]
+ def log(msg: String): Unit = logBuffer.append(msg)
+
+ def logReporterContents(reporter: TestReporter): Unit =
+ reporter.messages.foreach(log)
+
+ def echo(msg: String): Unit = {
+ log(msg)
+ test.echo(msg)
+ }
+
+ final def run(): Unit = {
+ checkTestSource()
+ summaryReport.echoToLog(logBuffer.iterator)
+ }
+ }
+
/** Actual compilation run logic, the test behaviour is defined here */
- protected def compilationRunnable(testSource: TestSource): Runnable
+ protected def encapsulatedCompilation(testSource: TestSource): LoggedRunnable
/** All testSources left after filtering out */
private val filteredSources =
if (!testFilter.isDefined) testSources
else testSources.filter {
case JointCompilationSource(_, files, _, _) =>
- files.exists(file => testFilter.get.findFirstIn(file.getAbsolutePath).isDefined)
+ files.exists(file => file.getAbsolutePath.contains(testFilter.get))
case SeparateCompilationSource(_, dir, _, _) =>
- testFilter.get.findFirstIn(dir.getAbsolutePath).isDefined
+ dir.getAbsolutePath.contains(testFilter.get)
}
/** Total amount of test sources being compiled by this test */
@@ -197,12 +233,12 @@ trait ParallelTesting { self =>
private[this] var _errorCount = 0
def errorCount: Int = _errorCount
- private[this] var _testSourcesCompiled = 0
- private def testSourcesCompiled: Int = _testSourcesCompiled
+ private[this] var _testSourcesCompleted = 0
+ private def testSourcesCompleted: Int = _testSourcesCompleted
/** Complete the current compilation with the amount of errors encountered */
- protected final def registerCompilation(errors: Int) = synchronized {
- _testSourcesCompiled += 1
+ protected final def registerCompletion(errors: Int) = synchronized {
+ _testSourcesCompleted += 1
_errorCount += errors
}
@@ -211,7 +247,7 @@ trait ParallelTesting { self =>
protected[this] final def fail(): Unit = synchronized { _failed = true }
def didFail: Boolean = _failed
- protected def echoBuildInstructions(reporter: TestReporter, testSource: TestSource, err: Int, war: Int) = {
+ protected def logBuildInstructions(reporter: TestReporter, testSource: TestSource, err: Int, war: Int) = {
val errorMsg = testSource.buildInstructions(reporter.errorCount, reporter.warningCount)
addFailureInstruction(errorMsg)
failTestSource(testSource)
@@ -224,20 +260,24 @@ trait ParallelTesting { self =>
/** The test sources that failed according to the implementing subclass */
private[this] val failedTestSources = mutable.ArrayBuffer.empty[String]
- protected final def failTestSource(testSource: TestSource) = synchronized {
- failedTestSources.append(testSource.name + " failed")
+ protected final def failTestSource(testSource: TestSource, reason: Option[String] = None) = synchronized {
+ val extra = reason.map(" with reason: " + _).getOrElse("")
+ failedTestSources.append(testSource.title + s" failed" + extra)
fail()
}
/** Prints to `System.err` if we're not suppressing all output */
- protected def echo(msg: String): Unit =
- if (!suppressAllOutput) realStderr.println(msg)
+ protected def echo(msg: String): Unit = if (!suppressAllOutput) {
+ // pad right so that output is at least as large as progress bar line
+ val paddingRight = " " * math.max(0, 80 - msg.length)
+ realStderr.println(msg + paddingRight)
+ }
/** A single `Runnable` that prints a progress bar for the current `Test` */
private def createProgressMonitor: Runnable = new Runnable {
def run(): Unit = {
val start = System.currentTimeMillis
- var tCompiled = testSourcesCompiled
+ var tCompiled = testSourcesCompleted
while (tCompiled < sourceCount) {
val timestamp = (System.currentTimeMillis - start) / 1000
val progress = (tCompiled.toDouble / sourceCount * 40).toInt
@@ -246,15 +286,15 @@ trait ParallelTesting { self =>
"[" + ("=" * (math.max(progress - 1, 0))) +
(if (progress > 0) ">" else "") +
(" " * (39 - progress)) +
- s"] compiling ($tCompiled/$sourceCount, ${timestamp}s)\r"
+ s"] completed ($tCompiled/$sourceCount, ${timestamp}s)\r"
)
Thread.sleep(100)
- tCompiled = testSourcesCompiled
+ tCompiled = testSourcesCompleted
}
// println, otherwise no newline and cursor at start of line
realStdout.println(
- s"[=======================================] compiled ($sourceCount/$sourceCount, " +
+ s"[=======================================] completed ($sourceCount/$sourceCount, " +
s"${(System.currentTimeMillis - start) / 1000}s) "
)
}
@@ -265,7 +305,9 @@ trait ParallelTesting { self =>
*/
protected def tryCompile(testSource: TestSource)(op: => Unit): Unit =
try {
- if (!isInteractive) realStdout.println(s"Testing ${testSource.title}")
+ val testing = s"Testing ${testSource.title}"
+ summaryReport.echoToLog(testing)
+ if (!isInteractive) realStdout.println(testing)
op
} catch {
case NonFatal(e) => {
@@ -273,7 +315,7 @@ trait ParallelTesting { self =>
// run should fail
failTestSource(testSource)
e.printStackTrace()
- registerCompilation(1)
+ registerCompletion(1)
throw e
}
}
@@ -288,15 +330,6 @@ trait ParallelTesting { self =>
val files: Array[JFile] = files0.flatMap(flattenFiles)
- def findJarFromRuntime(partialName: String) = {
- val urls = ClassLoader.getSystemClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.map(_.getFile.toString)
- urls.find(_.contains(partialName)).getOrElse {
- throw new java.io.FileNotFoundException(
- s"""Unable to locate $partialName on classpath:\n${urls.toList.mkString("\n")}"""
- )
- }
- }
-
def addOutDir(xs: Array[String]): Array[String] = {
val (beforeCp, cpAndAfter) = xs.toList.span(_ != "-classpath")
if (cpAndAfter.nonEmpty) {
@@ -307,11 +340,10 @@ trait ParallelTesting { self =>
}
def compileWithJavac(fs: Array[String]) = if (fs.nonEmpty) {
- val scalaLib = findJarFromRuntime("scala-library-2.")
val fullArgs = Array(
"javac",
"-classpath",
- s".:$scalaLib:${targetDir.getAbsolutePath}"
+ s".:${Jars.scalaLibraryFromRuntime}:${targetDir.getAbsolutePath}"
) ++ flags.takeRight(2) ++ fs
Runtime.getRuntime.exec(fullArgs).waitFor() == 0
@@ -339,16 +371,23 @@ trait ParallelTesting { self =>
}
val allArgs = addOutDir(flags)
- driver.process(allArgs ++ files.map(_.getAbsolutePath), reporter = reporter)
- val javaFiles = files.filter(_.getName.endsWith(".java")).map(_.getAbsolutePath)
- assert(compileWithJavac(javaFiles), s"java compilation failed for ${javaFiles.mkString(", ")}")
+ // Compile inside a try so that any exception thrown by the compiler is logged:
+ try {
+ driver.process(allArgs ++ files.map(_.getAbsolutePath), reporter = reporter)
+
+ val javaFiles = files.filter(_.getName.endsWith(".java")).map(_.getAbsolutePath)
+ assert(compileWithJavac(javaFiles), s"java compilation failed for ${javaFiles.mkString(", ")}")
+ }
+ catch {
+ case NonFatal(ex) => reporter.logStackTrace(ex)
+ }
reporter
}
private[ParallelTesting] def executeTestSuite(): this.type = {
- assert(_testSourcesCompiled == 0, "not allowed to re-use a `CompileRun`")
+ assert(_testSourcesCompleted == 0, "not allowed to re-use a `CompileRun`")
if (filteredSources.nonEmpty) {
val pool = threadLimit match {
@@ -359,7 +398,7 @@ trait ParallelTesting { self =>
if (isInteractive && !suppressAllOutput) pool.submit(createProgressMonitor)
filteredSources.foreach { target =>
- pool.submit(compilationRunnable(target))
+ pool.submit(encapsulatedCompilation(target))
}
pool.shutdown()
@@ -379,7 +418,7 @@ trait ParallelTesting { self =>
}
else echo {
testFilter
- .map(r => s"""No files matched regex "$r" in test""")
+ .map(r => s"""No files matched "$r" in test""")
.getOrElse("No tests available under target - erroneous test?")
}
@@ -387,129 +426,109 @@ trait ParallelTesting { self =>
}
}
- private final class PosTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)
+ private final class PosTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting)
extends Test(testSources, times, threadLimit, suppressAllOutput) {
- protected def compilationRunnable(testSource: TestSource): Runnable = new Runnable {
- def run(): Unit = tryCompile(testSource) {
+ protected def encapsulatedCompilation(testSource: TestSource) = new LoggedRunnable {
+ def checkTestSource(): Unit = tryCompile(testSource) {
testSource match {
case testSource @ JointCompilationSource(_, files, flags, outDir) => {
val reporter = compile(testSource.sourceFiles, flags, false, outDir)
- registerCompilation(reporter.errorCount)
+ registerCompletion(reporter.errorCount)
- if (reporter.errorCount > 0)
- echoBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ if (reporter.compilerCrashed || reporter.errorCount > 0) {
+ logReporterContents(reporter)
+ logBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ }
}
case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => {
val reporters = testSource.compilationGroups.map(files => compile(files, flags, false, outDir))
+ val compilerCrashed = reporters.exists(_.compilerCrashed)
val errorCount = reporters.foldLeft(0) { (acc, reporter) =>
if (reporter.errorCount > 0)
- echoBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ logBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
acc + reporter.errorCount
}
- registerCompilation(errorCount)
+ def warningCount = reporters.foldLeft(0)(_ + _.warningCount)
+
+ registerCompletion(errorCount)
- if (errorCount > 0) failTestSource(testSource)
+ if (compilerCrashed || errorCount > 0) {
+ reporters.foreach(logReporterContents)
+ logBuildInstructions(reporters.head, testSource, errorCount, warningCount)
+ }
}
}
-
}
}
}
- private final class RunTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)
+ private final class RunTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting)
extends Test(testSources, times, threadLimit, suppressAllOutput) {
- private def runMain(dir: JFile, testSource: TestSource): Array[String] = {
- def renderStackTrace(ex: Throwable): String =
- if (ex == null) ""
- else ex.getStackTrace
- .takeWhile(_.getMethodName != "invoke0")
- .mkString(" ", "\n ", "")
-
- import java.io.{ ByteArrayOutputStream, PrintStream }
- import java.net.{ URL, URLClassLoader }
-
- val printStream = new ByteArrayOutputStream
+ private[this] var didAddNoRunWarning = false
+ private[this] def addNoRunWarning() = if (!didAddNoRunWarning) {
+ didAddNoRunWarning = true
+ summaryReport.addStartingMessage {
+ """|WARNING
+ |-------
+ |Run tests were only compiled, not run - this is due to the `dotty.tests.norun`
+ |property being set
+ |""".stripMargin
+ }
+ }
- try {
- // Do classloading magic and running here:
- val ucl = new URLClassLoader(Array(dir.toURI.toURL))
- val cls = ucl.loadClass("Test")
- val meth = cls.getMethod("main", classOf[Array[String]])
-
- synchronized {
- try {
- val ps = new PrintStream(printStream)
- System.setOut(ps)
- System.setErr(ps)
- Console.withOut(printStream) {
- Console.withErr(printStream) {
- meth.invoke(null, Array("jvm")) // partest passes at least "jvm" as an arg
- }
- }
- System.setOut(realStdout)
- System.setErr(realStderr)
- } catch {
- case t: Throwable =>
- System.setOut(realStdout)
- System.setErr(realStderr)
- throw t
+ private def verifyOutput(checkFile: Option[JFile], dir: JFile, testSource: TestSource, warnings: Int) = {
+ if (Properties.testsNoRun) addNoRunWarning()
+ else runMain(testSource.classPath) match {
+ case Success(_) if !checkFile.isDefined || !checkFile.get.exists => // success!
+ case Success(output) => {
+ val outputLines = output.lines.toArray
+ val checkLines: Array[String] = Source.fromFile(checkFile.get).getLines.toArray
+ val sourceTitle = testSource.title
+
+ def linesMatch =
+ outputLines
+ .zip(checkLines)
+ .forall { case (x, y) => x == y }
+
+ if (outputLines.length != checkLines.length || !linesMatch) {
+ // Print diff to files and summary:
+ val diff = outputLines.zip(checkLines).map { case (act, exp) =>
+ DiffUtil.mkColoredLineDiff(exp, act)
+ }.mkString("\n")
+
+ val msg =
+ s"""|Output from '$sourceTitle' did not match check file.
+ |Diff ('e' is expected, 'a' is actual):
+ |""".stripMargin + diff + "\n"
+ echo(msg)
+ addFailureInstruction(msg)
+
+ // Print build instructions to file and summary:
+ val buildInstr = testSource.buildInstructions(0, warnings)
+ addFailureInstruction(buildInstr)
+
+ // Fail target:
+ failTestSource(testSource)
}
}
- }
- catch {
- case ex: NoSuchMethodException =>
- echo(s"test in '$dir' did not contain method: ${ex.getMessage}\n${renderStackTrace(ex.getCause)}")
- failTestSource(testSource)
- case ex: ClassNotFoundException =>
- echo(s"test in '$dir' did not contain class: ${ex.getMessage}\n${renderStackTrace(ex.getCause)}")
+ case Failure(output) =>
+ echo(s"Test '${testSource.title}' failed with output:")
+ echo(output)
failTestSource(testSource)
- case ex: InvocationTargetException =>
- echo(s"An exception occurred when running main: ${ex.getCause}\n${renderStackTrace(ex.getCause)}")
- failTestSource(testSource)
- }
- printStream.toString("utf-8").lines.toArray
- }
-
- private def verifyOutput(checkFile: JFile, dir: JFile, testSource: TestSource, warnings: Int) = {
- val outputLines = runMain(dir, testSource)
- val checkLines = Source.fromFile(checkFile).getLines.toArray
- val sourceTitle = testSource.title
-
- def linesMatch =
- outputLines
- .zip(checkLines)
- .forall { case (x, y) => x == y }
-
- if (outputLines.length != checkLines.length || !linesMatch) {
- // Print diff to files and summary:
- val diff = outputLines.zip(checkLines).map { case (act, exp) =>
- DiffUtil.mkColoredLineDiff(exp, act)
- }.mkString("\n")
-
- val msg =
- s"""|Output from '$sourceTitle' did not match check file.
- |Diff ('e' is expected, 'a' is actual):
- |""".stripMargin + diff + "\n"
- echo(msg)
- addFailureInstruction(msg)
-
- // Print build instructions to file and summary:
- val buildInstr = testSource.buildInstructions(0, warnings)
- addFailureInstruction(buildInstr)
-
- // Fail target:
- failTestSource(testSource)
+ case Timeout =>
+ echo("failed because test " + testSource.title + " timed out")
+ failTestSource(testSource, Some("test timed out"))
}
}
- protected def compilationRunnable(testSource: TestSource): Runnable = new Runnable {
- def run(): Unit = tryCompile(testSource) {
- val (errorCount, warningCount, hasCheckFile, verifier: Function0[Unit]) = testSource match {
+ protected def encapsulatedCompilation(testSource: TestSource) = new LoggedRunnable {
+ def checkTestSource(): Unit = tryCompile(testSource) {
+ val (compilerCrashed, errorCount, warningCount, verifier: Function0[Unit]) = testSource match {
case testSource @ JointCompilationSource(_, files, flags, outDir) => {
val checkFile = files.flatMap { file =>
if (file.isDirectory) Nil
@@ -522,49 +541,51 @@ trait ParallelTesting { self =>
}.headOption
val reporter = compile(testSource.sourceFiles, flags, false, outDir)
- if (reporter.errorCount > 0)
- echoBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ if (reporter.compilerCrashed || reporter.errorCount > 0) {
+ logReporterContents(reporter)
+ logBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ }
- registerCompilation(reporter.errorCount)
- (reporter.errorCount, reporter.warningCount, checkFile.isDefined, () => verifyOutput(checkFile.get, outDir, testSource, reporter.warningCount))
+ (reporter.compilerCrashed, reporter.errorCount, reporter.warningCount, () => verifyOutput(checkFile, outDir, testSource, reporter.warningCount))
}
case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => {
val checkFile = new JFile(dir.getAbsolutePath.reverse.dropWhile(_ == '/').reverse + ".check")
+ val reporters = testSource.compilationGroups.map(compile(_, flags, false, outDir))
+ val compilerCrashed = reporters.exists(_.compilerCrashed)
val (errorCount, warningCount) =
- testSource
- .compilationGroups
- .map(compile(_, flags, false, outDir))
- .foldLeft((0,0)) { case ((errors, warnings), reporter) =>
- if (reporter.errorCount > 0)
- echoBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
+ reporters.foldLeft((0,0)) { case ((errors, warnings), reporter) =>
+ if (reporter.errorCount > 0)
+ logBuildInstructions(reporter, testSource, reporter.errorCount, reporter.warningCount)
- (errors + reporter.errorCount, warnings + reporter.warningCount)
- }
+ (errors + reporter.errorCount, warnings + reporter.warningCount)
+ }
- if (errorCount > 0) fail()
+ if (errorCount > 0) {
+ reporters.foreach(logReporterContents)
+ logBuildInstructions(reporters.head, testSource, errorCount, warningCount)
+ }
- registerCompilation(errorCount)
- (errorCount, warningCount, checkFile.exists, () => verifyOutput(checkFile, outDir, testSource, warningCount))
+ (compilerCrashed, errorCount, warningCount, () => verifyOutput(Some(checkFile), outDir, testSource, warningCount))
}
}
- if (errorCount == 0 && hasCheckFile) verifier()
- else if (errorCount == 0) runMain(testSource.outDir, testSource)
- else if (errorCount > 0) {
- echo(s"\nCompilation failed for: '$testSource'")
+ if (!compilerCrashed && errorCount == 0) verifier()
+ else {
+ echo(s" Compilation failed for: '${testSource.title}' ")
val buildInstr = testSource.buildInstructions(errorCount, warningCount)
addFailureInstruction(buildInstr)
failTestSource(testSource)
}
+ registerCompletion(errorCount)
}
}
}
- private final class NegTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)
+ private final class NegTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting)
extends Test(testSources, times, threadLimit, suppressAllOutput) {
- protected def compilationRunnable(testSource: TestSource): Runnable = new Runnable {
- def run(): Unit = tryCompile(testSource) {
+ protected def encapsulatedCompilation(testSource: TestSource) = new LoggedRunnable {
+ def checkTestSource(): Unit = tryCompile(testSource) {
// In neg-tests we allow two types of error annotations,
// "nopos-error" which doesn't care about position and "error" which
// has to be annotated on the correct line number.
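For reference, `// error` has to sit on the exact line where the compiler reports the error, while `// nopos-error` matches an error reported without a position. A minimal, hypothetical neg-test source as a sketch (the file name and the failing expression are purely illustrative):

```scala
// tests/neg/hypothetical-example.scala -- hypothetical path
object Example {
  val x: Int = "not an int" // error
}
```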
@@ -616,27 +637,39 @@ trait ParallelTesting { self =>
}
}
- val (expectedErrors, actualErrors, hasMissingAnnotations, errorMap) = testSource match {
+ val (compilerCrashed, expectedErrors, actualErrors, hasMissingAnnotations, errorMap) = testSource match {
case testSource @ JointCompilationSource(_, files, flags, outDir) => {
val sourceFiles = testSource.sourceFiles
val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(sourceFiles)
val reporter = compile(sourceFiles, flags, true, outDir)
val actualErrors = reporter.errorCount
- (expectedErrors, actualErrors, () => getMissingExpectedErrors(errorMap, reporter.errors), errorMap)
+ if (reporter.compilerCrashed || actualErrors > 0)
+ logReporterContents(reporter)
+
+ (reporter.compilerCrashed, expectedErrors, actualErrors, () => getMissingExpectedErrors(errorMap, reporter.errors), errorMap)
}
case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => {
val compilationGroups = testSource.compilationGroups
val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(compilationGroups.toArray.flatten)
val reporters = compilationGroups.map(compile(_, flags, true, outDir))
+ val compilerCrashed = reporters.exists(_.compilerCrashed)
val actualErrors = reporters.foldLeft(0)(_ + _.errorCount)
val errors = reporters.iterator.flatMap(_.errors)
- (expectedErrors, actualErrors, () => getMissingExpectedErrors(errorMap, errors), errorMap)
+
+ if (actualErrors > 0)
+ reporters.foreach(logReporterContents)
+
+ (compilerCrashed, expectedErrors, actualErrors, () => getMissingExpectedErrors(errorMap, errors), errorMap)
}
}
- if (expectedErrors != actualErrors) {
+ if (compilerCrashed) {
+ echo(s"Compiler crashed when compiling: ${testSource.title}")
+ failTestSource(testSource)
+ }
+ else if (expectedErrors != actualErrors) {
echo {
s"\nWrong number of errors encountered when compiling $testSource, expected: $expectedErrors, actual: $actualErrors\n"
}
@@ -655,7 +688,7 @@ trait ParallelTesting { self =>
failTestSource(testSource)
}
- registerCompilation(actualErrors)
+ registerCompletion(actualErrors)
}
}
}
@@ -805,7 +838,7 @@ trait ParallelTesting { self =>
* compilation without generating errors and that they do not crash the
* compiler
*/
- def checkCompile(): this.type = {
+ def checkCompile()(implicit summaryReport: SummaryReporting): this.type = {
val test = new PosTest(targets, times, threadLimit, shouldFail).executeTestSuite()
if (!shouldFail && test.didFail) {
@@ -822,7 +855,7 @@ trait ParallelTesting { self =>
* correct amount of errors at the correct positions. It also makes sure
* that none of these tests crash the compiler
*/
- def checkExpectedErrors(): this.type = {
+ def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = {
val test = new NegTest(targets, times, threadLimit, shouldFail).executeTestSuite()
if (!shouldFail && test.didFail) {
@@ -840,7 +873,7 @@ trait ParallelTesting { self =>
* the compiler; it also makes sure that all tests can run with the
* expected output
*/
- def checkRuns(): this.type = {
+ def checkRuns()(implicit summaryReport: SummaryReporting): this.type = {
val test = new RunTest(targets, times, threadLimit, shouldFail).executeTestSuite()
if (!shouldFail && test.didFail) {
@@ -983,6 +1016,7 @@ trait ParallelTesting { self =>
.getOrElse {
throw new IllegalStateException("Unable to reflectively find calling method")
}
+ .takeWhile(_ != '$')
}
/** Compiles a single file from the string path `f` using the supplied flags */
@@ -1037,7 +1071,7 @@ trait ParallelTesting { self =>
val targetDir = new JFile(outDir + "/" + sourceDir.getName + "/")
targetDir.mkdirs()
- val target = JointCompilationSource(callingMethod, randomized, flags, targetDir)
+ val target = JointCompilationSource(s"compiling '$f' in test '$callingMethod'", randomized, flags, targetDir)
new CompilationTest(target)
}
@@ -1054,7 +1088,7 @@ trait ParallelTesting { self =>
targetDir.mkdirs()
assert(targetDir.exists, s"couldn't create target directory: $targetDir")
- val target = JointCompilationSource(callingMethod, files.map(new JFile(_)).toArray, flags, targetDir)
+ val target = JointCompilationSource(s"$testName from $callingMethod", files.map(new JFile(_)).toArray, flags, targetDir)
// Create a CompilationTest and let the user decide whether to execute a pos or a neg test
new CompilationTest(target)
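To tie the builder API above together (`compileFile(...)` produces a `CompilationTest`, and `checkCompile()`/`checkExpectedErrors()`/`checkRuns()` now take an implicit `SummaryReporting`), here is a minimal sketch of a hypothetical suite; the class name, file path, and chosen settings are illustrative, and the abstract members mirror those defined in `VulpixTests` further down.

```scala
import scala.concurrent.duration._
import org.junit.Test
import dotty.tools.vulpix._
import dotty.tools.vulpix.TestConfiguration._

class ExampleCompilationTests extends ParallelTesting {
  // The summary report is passed implicitly to checkCompile/checkExpectedErrors/checkRuns
  implicit val summaryReport: SummaryReporting = new NoSummaryReport

  def maxDuration = 30.seconds
  def numberOfSlaves = 2
  def safeMode = false
  def isInteractive = true
  def testFilter = None

  @Test def compilesHypotheticalFile: Unit =
    // Hypothetical test file; defaultOptions and the implicit output dir come from TestConfiguration
    compileFile("../tests/pos/HypotheticalExample.scala", defaultOptions).checkCompile()
}
```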
diff --git a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala
new file mode 100644
index 000000000..ad068e9ef
--- /dev/null
+++ b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala
@@ -0,0 +1,196 @@
+package dotty
+package tools
+package vulpix
+
+import java.io.{ File => JFile, InputStreamReader, BufferedReader, PrintStream }
+import java.util.concurrent.TimeoutException
+
+import scala.concurrent.duration.Duration
+import scala.concurrent.{ Await, Future }
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.collection.mutable
+
+/** Vulpix spawns JVM subprocesses (`numberOfSlaves`) in order to run tests
+ * without compromising the main JVM
+ *
+ * These need to be orchestrated in a safe manner with a simple protocol. This
+ * interface provides just that.
+ *
+ * The protocol is defined as:
+ *
+ * - master sends the classpath for which to run `Test#main` and waits for
+ *   `maxDuration`
+ * - slave invokes the method and waits until completion
+ * - upon completion it sends back a `RunComplete` message
+ * - the master checks if the child is still alive
+ *   - if the child is still alive, the output was valid
+ *   - if the child is dead, the output is the failure message
+ *
+ * If this whole chain of events is not completed within `maxDuration`, the
+ * child process is destroyed and a new child is spawned.
+ */
+trait RunnerOrchestration {
+
+ /** The maximum number of active runners, each of which holds a child JVM */
+ def numberOfSlaves: Int
+
+ /** The maximum duration a child process is allowed to run before being
+ * destroyed
+ */
+ def maxDuration: Duration
+
+ /** Destroy and respawn process after each test */
+ def safeMode: Boolean
+
+ /** Runs a `Test` class's main method using the specified `classPath` */
+ def runMain(classPath: String)(implicit summaryReport: SummaryReporting): Status =
+ monitor.runMain(classPath)
+
+ private[this] val monitor = new RunnerMonitor
+
+ /** The runner monitor object keeps track of child JVM processes by keeping
+ * them in two structures - one for free, and one for busy children.
+ *
+ * When a user calls `runMain` the monitor takes a free JVM and blocks
+ * until the run is complete - or `maxDuration` has passed. It then performs
+ * cleanup by returning the used JVM to the free list, or by respawning it
+ * if it died.
+ */
+ private class RunnerMonitor {
+
+ def runMain(classPath: String)(implicit summaryReport: SummaryReporting): Status =
+ withRunner(_.runMain(classPath))
+
+ private class Runner(private var process: Process) {
+ private[this] var childStdout: BufferedReader = _
+ private[this] var childStdin: PrintStream = _
+
+ /** Checks if `process` is still alive
+ *
+ * Calling `process.exitValue()` on a process that is still running throws
+ * an `IllegalThreadStateException`; by catching it we can tell whether
+ * the subprocess has exited or not.
+ */
+ def isAlive: Boolean =
+ try { process.exitValue(); false }
+ catch { case _: IllegalThreadStateException => true }
+
+ /** Destroys the underlying process and kills IO streams */
+ def kill(): Unit = {
+ if (process ne null) process.destroy()
+ process = null
+ childStdout = null
+ childStdin = null
+ }
+
+ /** Did we already register the cleanup hook that kills the child VMs? */
+ private[this] var didAddCleanupCallback = false
+
+ /** Blocks for at most `maxDuration` while running `Test.main` from the given `classPath` */
+ def runMain(classPath: String)(implicit summaryReport: SummaryReporting): Status = {
+ if (!didAddCleanupCallback) {
+ didAddCleanupCallback = true
+ // If for some reason the test runner (e.g. sbt) doesn't kill the VM, we
+ // need to clean up ourselves.
+ summaryReport.addCleanup(killAll)
+ }
+ assert(process ne null,
+ "Runner was killed and then reused without setting a new process")
+
+ // Destroys the current process and spawns a fresh child JVM for this runner
+ def respawn(): Unit = {
+ process.destroy()
+ process = createProcess
+ childStdout = null
+ childStdin = null
+ }
+
+ if (childStdin eq null)
+ childStdin = new PrintStream(process.getOutputStream, /* autoFlush = */ true)
+
+ // pass the classpath to the running child process
+ childStdin.println(classPath)
+
+ // Create a future reading the child's output:
+ val readOutput = Future {
+ val sb = new StringBuilder
+
+ if (childStdout eq null)
+ childStdout = new BufferedReader(new InputStreamReader(process.getInputStream))
+
+ var childOutput = childStdout.readLine()
+ while (childOutput != ChildJVMMain.MessageEnd && childOutput != null) {
+ sb.append(childOutput)
+ sb += '\n'
+ childOutput = childStdout.readLine()
+ }
+
+ if (process.isAlive && childOutput != null) Success(sb.toString)
+ else Failure(sb.toString)
+ }
+
+ // Await the result for `maxDuration`, otherwise time out and destroy
+ // the process:
+ val status =
+ try Await.result(readOutput, maxDuration)
+ catch { case _: TimeoutException => Timeout }
+
+ // Handle failure of the VM:
+ status match {
+ case _: Success if safeMode => respawn()
+ case _: Success => // no need to respawn sub process
+ case _: Failure => respawn()
+ case Timeout => respawn()
+ }
+ status
+ }
+ }
+
+ /** Create a process which has the classpath of the `ChildJVMMain` and the
+ * scala library.
+ */
+ private def createProcess: Process = {
+ val sep = sys.props("file.separator")
+ val cp =
+ classOf[ChildJVMMain].getProtectionDomain.getCodeSource.getLocation.getFile + ":" +
+ Jars.scalaLibraryFromRuntime
+ val javaBin = sys.props("java.home") + sep + "bin" + sep + "java"
+ new ProcessBuilder(javaBin, "-cp", cp, "dotty.tools.vulpix.ChildJVMMain")
+ .redirectErrorStream(true)
+ .redirectInput(ProcessBuilder.Redirect.PIPE)
+ .redirectOutput(ProcessBuilder.Redirect.PIPE)
+ .start()
+ }
+
+ private[this] val allRunners = List.fill(numberOfSlaves)(new Runner(createProcess))
+ private[this] val freeRunners = mutable.Queue(allRunners: _*)
+ private[this] val busyRunners = mutable.Set.empty[Runner]
+
+ private def getRunner(): Runner = synchronized {
+ while (freeRunners.isEmpty) wait()
+
+ val runner = freeRunners.dequeue()
+ busyRunners += runner
+
+ notify()
+ runner
+ }
+
+ private def freeRunner(runner: Runner): Unit = synchronized {
+ freeRunners.enqueue(runner)
+ busyRunners -= runner
+ notify()
+ }
+
+ private def withRunner[T](op: Runner => T): T = {
+ val runner = getRunner()
+ val result = op(runner)
+ freeRunner(runner)
+ result
+ }
+
+ private def killAll(): Unit = allRunners.foreach(_.kill())
+
+ // On shutdown, we need to kill all runners:
+ sys.addShutdownHook(killAll())
+ }
+}
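To make the protocol above concrete, here is a minimal sketch of a harness mixing in `RunnerOrchestration`; the object name, durations, and the classpath string are hypothetical, the `Success`/`Failure`/`Timeout` values come from `Status.scala` below, and note that constructing the object eagerly spawns the child JVMs.

```scala
import scala.concurrent.duration._
import dotty.tools.vulpix._

object ExampleRunner extends RunnerOrchestration {
  def numberOfSlaves = 2       // keep two child JVMs around
  def maxDuration = 30.seconds // kill a child that runs longer than this
  def safeMode = false         // reuse child JVMs between runs

  def runOnce(): Unit = {
    implicit val summary: SummaryReporting = new NoSummaryReport
    // "out/example-test" is a hypothetical directory containing a compiled `Test` class
    runMain("out/example-test") match {
      case Success(output) => println(s"child JVM output:\n$output")
      case Failure(output) => println(s"run failed:\n$output")
      case Timeout         => println("run exceeded maxDuration; the child was destroyed")
    }
  }
}
```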
diff --git a/compiler/test/dotty/tools/vulpix/Status.scala b/compiler/test/dotty/tools/vulpix/Status.scala
new file mode 100644
index 000000000..3de7aff2b
--- /dev/null
+++ b/compiler/test/dotty/tools/vulpix/Status.scala
@@ -0,0 +1,7 @@
+package dotty.tools
+package vulpix
+
+sealed trait Status
+final case class Success(output: String) extends Status
+final case class Failure(output: String) extends Status
+final case object Timeout extends Status
diff --git a/compiler/test/dotty/tools/vulpix/SummaryReport.scala b/compiler/test/dotty/tools/vulpix/SummaryReport.scala
new file mode 100644
index 000000000..678d88809
--- /dev/null
+++ b/compiler/test/dotty/tools/vulpix/SummaryReport.scala
@@ -0,0 +1,145 @@
+package dotty
+package tools
+package vulpix
+
+import scala.collection.mutable
+import dotc.reporting.TestReporter
+
+/** `SummaryReporting` can be used by unit tests by utilizing `@AfterClass` to
+ * call `echoSummary`
+ *
+ * This is used in vulpix by passing the companion object's `SummaryReporting`
+ * to each test; the `@AfterClass def` then calls the `SummaryReport`'s
+ * `echoSummary` method in order to dump the summary to both stdout and a log
+ * file.
+ */
+trait SummaryReporting {
+ /** Report a failed test */
+ def reportFailed(): Unit
+
+ /** Report a test as passing */
+ def reportPassed(): Unit
+
+ /** Add the name of the failed test */
+ def addFailedTest(msg: String): Unit
+
+ /** Add instructions to reproduce the error */
+ def addReproduceInstruction(instr: String): Unit
+
+ /** Add a message that will be issued in the beginning of the summary */
+ def addStartingMessage(msg: String): Unit
+
+ /** Add a cleanup hook to be run upon completion */
+ def addCleanup(f: () => Unit): Unit
+
+ /** Echo the summary report to the appropriate locations */
+ def echoSummary(): Unit
+
+ /** Echoes *immediately* to file */
+ def echoToLog(msg: String): Unit
+
+ /** Echoes contents of `it` to file *immediately* then flushes */
+ def echoToLog(it: Iterator[String]): Unit
+}
+
+/** A summary report that doesn't do anything */
+final class NoSummaryReport extends SummaryReporting {
+ def reportFailed(): Unit = ()
+ def reportPassed(): Unit = ()
+ def addFailedTest(msg: String): Unit = ()
+ def addReproduceInstruction(instr: String): Unit = ()
+ def addStartingMessage(msg: String): Unit = ()
+ def addCleanup(f: () => Unit): Unit = ()
+ def echoSummary(): Unit = ()
+ def echoToLog(msg: String): Unit = ()
+ def echoToLog(it: Iterator[String]): Unit = ()
+}
+
+/** A summary report that logs to both stdout and the `TestReporter.logWriter`
+ * which outputs to a log file in `./testlogs/`
+ */
+final class SummaryReport extends SummaryReporting {
+
+ private val startingMessages = mutable.ArrayBuffer.empty[String]
+ private val failedTests = mutable.ArrayBuffer.empty[String]
+ private val reproduceInstructions = mutable.ArrayBuffer.empty[String]
+ private val cleanUps = mutable.ArrayBuffer.empty[() => Unit]
+
+ private[this] var passed = 0
+ private[this] var failed = 0
+
+ def reportFailed(): Unit =
+ failed += 1
+
+ def reportPassed(): Unit =
+ passed += 1
+
+ def addFailedTest(msg: String): Unit =
+ failedTests.append(msg)
+
+ def addReproduceInstruction(instr: String): Unit =
+ reproduceInstructions.append(instr)
+
+ def addStartingMessage(msg: String): Unit =
+ startingMessages.append(msg)
+
+ def addCleanup(f: () => Unit): Unit =
+ cleanUps.append(f)
+
+ /** Both echoes the summary to stdout and prints to file */
+ def echoSummary(): Unit = {
+ import SummaryReport._
+
+ val rep = new StringBuilder
+ rep.append(
+ s"""|
+ |================================================================================
+ |Test Report
+ |================================================================================
+ |
+ |$passed passed, $failed failed, ${passed + failed} total
+ |""".stripMargin
+ )
+
+ startingMessages.foreach(rep.append)
+
+ failedTests.map(x => s" $x\n").foreach(rep.append)
+
+ // If we're running the tests locally, we don't need instructions on how
+ // to reproduce failures
+ if (isInteractive) {
+ println(rep.toString)
+ if (failed > 0) println {
+ s"""|
+ |--------------------------------------------------------------------------------
+ |Note - reproduction instructions have been dumped to log file:
+ | ${TestReporter.logPath}
+ |--------------------------------------------------------------------------------""".stripMargin
+ }
+ }
+
+ rep += '\n'
+
+ reproduceInstructions.foreach(rep.append)
+
+ // If we're on the CI, we want everything
+ if (!isInteractive) println(rep.toString)
+
+ TestReporter.logPrintln(rep.toString)
+
+ // Perform cleanup callback:
+ if (cleanUps.nonEmpty) cleanUps.foreach(_.apply())
+ }
+
+ def echoToLog(msg: String): Unit =
+ TestReporter.logPrintln(msg)
+
+ def echoToLog(it: Iterator[String]): Unit = {
+ it.foreach(TestReporter.logPrint)
+ TestReporter.logFlush()
+ }
+}
+
+object SummaryReport {
+ val isInteractive = Properties.testsInteractive && !Properties.isRunByDrone
+}
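As a usage sketch of the wiring described in the `SummaryReporting` doc comment above: a JUnit suite can share one report through its companion object and flush it once all tests have run. The suite name and test body are hypothetical, and whether `@AfterClass` is honoured on the companion's method depends on how the static forwarder is generated, so treat this as an illustration rather than the canonical setup.

```scala
import org.junit.{ AfterClass, Test }
import dotty.tools.vulpix.{ SummaryReport, SummaryReporting }

class ExampleSuite {
  import ExampleSuite._

  @Test def somethingPasses: Unit = {
    // ... run a compilation or test here, then record the outcome on the shared report
    summaryReport.reportPassed()
  }
}

object ExampleSuite {
  implicit val summaryReport: SummaryReporting = new SummaryReport

  // Dumps the accumulated summary to stdout and the log file after all tests
  @AfterClass def tearDown(): Unit =
    summaryReport.echoSummary()
}
```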
diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala
new file mode 100644
index 000000000..dcf3fbaf0
--- /dev/null
+++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala
@@ -0,0 +1,67 @@
+package dotty
+package tools
+package vulpix
+
+object TestConfiguration {
+ implicit val defaultOutputDir: String = "../out/"
+
+ implicit class RichStringArray(val xs: Array[String]) extends AnyVal {
+ def and(args: String*): Array[String] = {
+ val argsArr: Array[String] = args.toArray
+ xs ++ argsArr
+ }
+ }
+
+ val noCheckOptions = Array(
+ "-pagewidth", "120",
+ "-color:never"
+ )
+
+ val checkOptions = Array(
+ "-Yno-deep-subtypes",
+ "-Yno-double-bindings",
+ "-Yforce-sbt-phases"
+ )
+
+ val classPath = {
+ val paths = Jars.dottyTestDeps map { p =>
+ val file = new java.io.File(p)
+ assert(
+ file.exists,
+ s"""|File "$p" couldn't be found. Run `packageAll` from build tool before
+ |testing.
+ |
+ |If running without sbt, the test paths need to be set up via environment variables:
+ |
+ | - DOTTY_LIBRARY
+ | - DOTTY_COMPILER
+ | - DOTTY_INTERFACES
+ | - DOTTY_EXTRAS
+ |
+ |Each of these contains a single path, except DOTTY_EXTRAS which is a
+ |colon-separated list of jars.
+ |
+ |When compiling with Eclipse, you need the sbt-interfaces jar; put
+ |it in DOTTY_EXTRAS."""
+ )
+ file.getAbsolutePath
+ } mkString (":")
+
+ Array("-classpath", paths)
+ }
+
+ private val yCheckOptions = Array("-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef")
+
+ val defaultOptions = noCheckOptions ++ checkOptions ++ yCheckOptions ++ classPath
+ val allowDeepSubtypes = defaultOptions diff Array("-Yno-deep-subtypes")
+ val allowDoubleBindings = defaultOptions diff Array("-Yno-double-bindings")
+ val picklingOptions = defaultOptions ++ Array(
+ "-Xprint-types",
+ "-Ytest-pickler",
+ "-Ystop-after:pickler",
+ "-Yprintpos"
+ )
+ val scala2Mode = defaultOptions ++ Array("-language:Scala2")
+ val explicitUTF8 = defaultOptions ++ Array("-encoding", "UTF8")
+ val explicitUTF16 = defaultOptions ++ Array("-encoding", "UTF16")
+}
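A small sketch of combining the option sets above with the `RichStringArray.and` helper; the extra compiler flag is illustrative, and touching `defaultOptions` assumes the dotty test dependencies have already been packaged (otherwise the classpath assertion above fires).

```scala
import dotty.tools.vulpix.TestConfiguration._

object OptionsExample {
  // Append ad-hoc flags to a predefined option set without mutating it
  val verboseOptions: Array[String] = defaultOptions.and("-Xprint:frontend")

  def main(args: Array[String]): Unit =
    verboseOptions.foreach(println)
}
```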
diff --git a/compiler/test/dotty/tools/dotc/ParallelTestTests.scala b/compiler/test/dotty/tools/vulpix/VulpixTests.scala
index cfb108ea7..f875e7c13 100644
--- a/compiler/test/dotty/tools/dotc/ParallelTestTests.scala
+++ b/compiler/test/dotty/tools/vulpix/VulpixTests.scala
@@ -1,15 +1,21 @@
-package dotty
-package tools
-package dotc
+package dotty.tools
+package vulpix
import org.junit.Assert._
import org.junit.Test
+import scala.concurrent.duration._
import scala.util.control.NonFatal
-class ParallelTestTests extends ParallelTesting {
- import CompilationTests._
+/** Meta tests for the Vulpix test suite */
+class VulpixTests extends ParallelTesting {
+ import TestConfiguration._
+ implicit val _: SummaryReporting = new NoSummaryReport
+
+ def maxDuration = 3.seconds
+ def numberOfSlaves = 5
+ def safeMode = sys.env.get("SAFEMODE").isDefined
def isInteractive = !sys.env.contains("DRONE")
def testFilter = None
@@ -55,4 +61,16 @@ class ParallelTestTests extends ParallelTesting {
@Test def runOutRedirects: Unit =
compileFile("../tests/partest-test/i2147.scala", defaultOptions).expectFailure.checkRuns()
+
+ @Test def infiniteNonRec: Unit =
+ compileFile("../tests/partest-test/infinite.scala", defaultOptions).expectFailure.checkRuns()
+
+ @Test def infiniteTailRec: Unit =
+ compileFile("../tests/partest-test/infiniteTail.scala", defaultOptions).expectFailure.checkRuns()
+
+ @Test def infiniteAlloc: Unit =
+ compileFile("../tests/partest-test/infiniteAlloc.scala", defaultOptions).expectFailure.checkRuns()
+
+ @Test def deadlock: Unit =
+ compileFile("../tests/partest-test/deadlock.scala", defaultOptions).expectFailure.checkRuns()
}
diff --git a/docs/docs/contributing/testing.md b/docs/docs/contributing/testing.md
index 07aab1918..f786ac233 100644
--- a/docs/docs/contributing/testing.md
+++ b/docs/docs/contributing/testing.md
@@ -80,10 +80,9 @@ This might be aliased in the future. It is also possible to run tests filtered
by using:
```bash
-> filterTest .*i2147.scala
+> vulpix i2147.scala
```
This will run both the test `./tests/pos/i2147.scala` and
-`./tests/partest-test/i2147.scala` since both of these match the given regular
-expression. This also means that you could run `filterTest .*` to run all
-integration tests.
+`./tests/partest-test/i2147.scala` since both of these match the given string.
+This also means that you could run `vulpix` with no arguments to run all integration tests.
diff --git a/docs/docs/contributing/workflow.md b/docs/docs/contributing/workflow.md
index 3c654e8f6..b277cc243 100644
--- a/docs/docs/contributing/workflow.md
+++ b/docs/docs/contributing/workflow.md
@@ -57,20 +57,15 @@ $ sbt
To test a specific test tests/x/y.scala (for example tests/pos/t210.scala):
```bash
-> filterTest .*pos/t210.scala
+> vulpix pos/t210.scala
```
-The filterTest task takes a regular expression as its argument. For example,
-you could run a negative and a positive test with:
+The `vulpix` task treats its argument as a substring filter on test paths. For example, you
+could run both a negative and a positive test with the same name
+(`pos/i2101.scala` & `neg/i2101.scala`):
```bash
-> filterTest (.*pos/t697.scala)|(.*neg/i2101.scala)
-```
-
-or if they have the same name, the equivalent can be achieved with:
-
-```bash
-> filterTest .*/i2101.scala
+> vulpix i2101.scala
```
## Inspecting Trees with Type Stealer ##
diff --git a/project/Build.scala b/project/Build.scala
index bb02416cc..8b1c0e31e 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -47,8 +47,8 @@ object Build {
// Spawns a repl with the correct classpath
lazy val repl = inputKey[Unit]("run the REPL with correct classpath")
- // Run tests with filter
- lazy val filterTest = inputKey[Unit]("runs integration test with the supplied filter")
+ // Run tests with filter through vulpix test suite
+ lazy val vulpix = inputKey[Unit]("runs integration test with the supplied filter")
// Used to compile files similar to ./bin/dotc script
lazy val dotc =
@@ -135,26 +135,7 @@ object Build {
triggeredMessage in ThisBuild := Watched.clearWhenTriggered,
addCommandAlias("run", "dotty-compiler/run") ++
- addCommandAlias(
- "partest",
- ";publishLocal" + // Non-bootstrapped dotty needs to be published first
- ";dotty-compiler-bootstrapped/lockPartestFile" +
- ";dotty-compiler-bootstrapped/test:test" +
- ";dotty-compiler-bootstrapped/runPartestRunner"
- ) ++
- addCommandAlias(
- "partest-only",
- ";publishLocal" + // Non-bootstrapped dotty needs to be published first
- ";dotty-compiler-bootstrapped/lockPartestFile" +
- ";dotty-compiler-bootstrapped/test:test-only dotc.tests" +
- ";dotty-compiler-bootstrapped/runPartestRunner"
- ) ++
- addCommandAlias(
- "partest-only-no-bootstrap",
- ";dotty-compiler/lockPartestFile" +
- ";dotty-compiler/test:test-only dotc.tests" +
- ";dotty-compiler/runPartestRunner"
- )
+ addCommandAlias("legacyTests", "dotty-compiler/testOnly dotc.tests")
).
settings(publishing)
@@ -287,43 +268,6 @@ object Build {
"org.scala-lang" % "scala-reflect" % scalacVersion,
"org.scala-lang" % "scala-library" % scalacVersion % "test"),
- // start partest specific settings:
- libraryDependencies += "org.scala-lang.modules" %% "scala-partest" % "1.0.11" % "test",
- testOptions in Test += Tests.Cleanup({ () => partestLockFile.delete }),
- // this option is needed so that partest doesn't run
- partestDeps := Seq(
- scalaCompiler,
- "org.scala-lang" % "scala-reflect" % scalacVersion,
- "org.scala-lang" % "scala-library" % scalacVersion % "test"
- ),
- lockPartestFile := {
- // When this file is present, running `test` generates the files for
- // partest. Otherwise it just executes the tests directly.
- val lockDir = partestLockFile.getParentFile
- lockDir.mkdirs
- // Cannot have concurrent partests as they write to the same directory.
- if (lockDir.list.size > 0)
- throw new RuntimeException("ERROR: sbt partest: another partest is already running, pid in lock file: " + lockDir.list.toList.mkString(" "))
- partestLockFile.createNewFile
- partestLockFile.deleteOnExit
- },
- runPartestRunner := Def.inputTaskDyn {
- // Magic! This is both an input task and a dynamic task. Apparently
- // command line arguments get passed to the last task in an aliased
- // sequence (see partest alias below), so this works.
- val args = Def.spaceDelimited("<arg>").parsed
- val jars = List(
- (packageBin in Compile).value.getAbsolutePath,
- packageAll.value("dotty-library"),
- packageAll.value("dotty-interfaces")
- ) ++ getJarPaths(partestDeps.value, ivyPaths.value.ivyHome)
- val dottyJars =
- s"""-dottyJars ${jars.length + 2} dotty.jar dotty-lib.jar ${jars.mkString(" ")}"""
- // Provide the jars required on the classpath of run tests
- runTask(Test, "dotty.partest.DPConsoleRunner", dottyJars + " " + args.mkString(" "))
- }.evaluated,
- // end partest specific settings
-
// enable improved incremental compilation algorithm
incOptions := incOptions.value.withNameHashing(true),
@@ -340,12 +284,18 @@ object Build {
)
}.evaluated,
- filterTest := Def.inputTaskDyn {
+ test in Test := {
+ // Exclude legacy tests by default
+ (testOnly in Test).toTask(" -- --exclude-categories=java.lang.Exception").value
+ },
+
+ vulpix := Def.inputTaskDyn {
val args: Seq[String] = spaceDelimited("<arg>").parsed
- testOptions := Seq()
- (testOnly in Test).toTask(
- " dotty.tools.dotc.CompilationTests -- -Ddotty.partest.filter=" + args.head
- )
+ val cmd = " dotty.tools.dotc.CompilationTests" + {
+ if (args.nonEmpty) " -- -Ddotty.tests.filter=" + args.mkString(" ")
+ else ""
+ }
+ (testOnly in Test).toTask(cmd)
}.evaluated,
// Override run to be able to run compiled classfiles
@@ -471,35 +421,10 @@ object Build {
"-Ddotty.tests.classes.compiler=" + pA("dotty-compiler")
)
- ("-DpartestParentID=" + pid) :: jars ::: tuning ::: agentOptions ::: ci_build ::: path.toList
+ jars ::: tuning ::: agentOptions ::: ci_build ::: path.toList
}
)
- // Partest tasks
- lazy val partestDeps =
- SettingKey[Seq[ModuleID]]("partestDeps", "Finds jars for partest dependencies")
- lazy val runPartestRunner =
- InputKey[Unit]("runPartestRunner", "Runs partest")
- lazy val lockPartestFile =
- TaskKey[Unit]("lockPartestFile", "Creates the lock file at ./tests/locks/partest-<pid>.lock")
- lazy val partestLockFile =
- new File("." + File.separator + "tests" + File.separator + "locks" + File.separator + s"partest-$pid.lock")
-
- def pid = java.lang.Long.parseLong(java.lang.management.ManagementFactory.getRuntimeMXBean().getName().split("@")(0))
-
- def getJarPaths(modules: Seq[ModuleID], ivyHome: Option[File]): Seq[String] = ivyHome match {
- case Some(home) =>
- modules.map({ module =>
- val file = Path(home) / Path("cache") /
- Path(module.organization) / Path(module.name) / Path("jars") /
- Path(module.name + "-" + module.revision + ".jar")
- if (!file.isFile) throw new RuntimeException("ERROR: sbt getJarPaths: dependency jar not found: " + file)
- else file.jfile.getAbsolutePath
- })
- case None => throw new RuntimeException("ERROR: sbt getJarPaths: ivyHome not defined")
- }
- // end partest tasks
-
lazy val `dotty-compiler` = project.in(file("compiler")).
dependsOn(`dotty-interfaces`).
dependsOn(`dotty-library`).
diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
index a6b9fa65e..a835421d8 100644
--- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
+++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala
@@ -173,7 +173,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean, includeSynthToNameHashin
private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = ".") = {
val args = Array.empty[String]
- import dotty.tools.dotc._
+ import dotty.tools.dotc.{Compiler, Driver}
import dotty.tools.dotc.core.Contexts._
val driver = new Driver {
diff --git a/tests/run/t5293.scala b/tests/disabled/run/t5293.scala
index 8a99989c5..8a99989c5 100644
--- a/tests/run/t5293.scala
+++ b/tests/disabled/run/t5293.scala
diff --git a/tests/run/WeakHashSetTest.scala b/tests/disabled/scalac-dependent/WeakHashSetTest.scala
index 8bcb95091..8bcb95091 100644
--- a/tests/run/WeakHashSetTest.scala
+++ b/tests/disabled/scalac-dependent/WeakHashSetTest.scala
diff --git a/tests/run/shortClass.scala b/tests/disabled/scalac-dependent/shortClass.scala
index c5c2043f4..c5c2043f4 100644
--- a/tests/run/shortClass.scala
+++ b/tests/disabled/scalac-dependent/shortClass.scala
diff --git a/tests/run/showraw_nosymbol.scala b/tests/disabled/scalac-dependent/showraw_nosymbol.scala
index 191647583..191647583 100644
--- a/tests/run/showraw_nosymbol.scala
+++ b/tests/disabled/scalac-dependent/showraw_nosymbol.scala
diff --git a/tests/run/sm-interpolator.scala b/tests/disabled/scalac-dependent/sm-interpolator.scala
index e4bec7afb..e4bec7afb 100644
--- a/tests/run/sm-interpolator.scala
+++ b/tests/disabled/scalac-dependent/sm-interpolator.scala
diff --git a/tests/run/structural.scala b/tests/disabled/scalac-dependent/structural.scala
index 0f18f4579..0f18f4579 100644
--- a/tests/run/structural.scala
+++ b/tests/disabled/scalac-dependent/structural.scala
diff --git a/tests/run/t6732.scala b/tests/disabled/scalac-dependent/t6732.scala
index ff0f0494d..ff0f0494d 100644
--- a/tests/run/t6732.scala
+++ b/tests/disabled/scalac-dependent/t6732.scala
diff --git a/tests/partest-test/deadlock.scala b/tests/partest-test/deadlock.scala
new file mode 100644
index 000000000..df561aff3
--- /dev/null
+++ b/tests/partest-test/deadlock.scala
@@ -0,0 +1,44 @@
+object Test {
+ class Lock
+ val lock1 = new Lock
+ val lock2 = new Lock
+
+ private[this] var took2: Boolean = false
+ def lock2Taken(): Unit = synchronized {
+ took2 = true
+ notify()
+ }
+ def tookLock2: Boolean = synchronized(took2)
+
+ val thread1 = new Thread {
+ override def run(): Unit = synchronized {
+ lock1.synchronized {
+ while (!tookLock2) wait()
+ lock2.synchronized {
+ println("thread1 in lock2!")
+ }
+ }
+ println("thread1, done!")
+ }
+ }
+
+ val thread2 = new Thread {
+ override def run(): Unit = synchronized {
+ lock2.synchronized {
+ lock2Taken()
+ lock1.synchronized {
+ println("thread2 in lock1!")
+ }
+ }
+ println("thread2, done!")
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ thread1.start() // takes lock1 then sleeps 1s - tries to take lock2
+ thread2.start() // takes lock2 then sleeps 1s - tries to take lock1
+
+ thread1.join() // wait for threads to complete, can't because deadlock!
+ thread2.join()
+ }
+}
diff --git a/tests/partest-test/infinite.scala b/tests/partest-test/infinite.scala
new file mode 100644
index 000000000..961382fea
--- /dev/null
+++ b/tests/partest-test/infinite.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ var sum = 0
+ while(true) {
+ sum += 1
+ }
+ println(sum)
+ }
+}
diff --git a/tests/partest-test/infiniteAlloc.scala b/tests/partest-test/infiniteAlloc.scala
new file mode 100644
index 000000000..89fa5d6ef
--- /dev/null
+++ b/tests/partest-test/infiniteAlloc.scala
@@ -0,0 +1,9 @@
+import scala.collection.mutable
+object Test {
+ val map = mutable.Map.empty[String, String]
+
+ def main(args: Array[String]): Unit = while (true) {
+ val time = System.currentTimeMillis.toString
+ map += (time -> time)
+ }
+}
diff --git a/tests/partest-test/infiniteTail.scala b/tests/partest-test/infiniteTail.scala
new file mode 100644
index 000000000..b3132cc19
--- /dev/null
+++ b/tests/partest-test/infiniteTail.scala
@@ -0,0 +1,7 @@
+object Test {
+ def foo: Int = bar
+ def bar: Int = foo
+
+ def main(args: Array[String]): Unit =
+ println(foo)
+}
diff --git a/tests/pos/i2212.scala b/tests/pos/i2212.scala
new file mode 100644
index 000000000..416c8ca04
--- /dev/null
+++ b/tests/pos/i2212.scala
@@ -0,0 +1,19 @@
+package object squants {
+ type Time = squants.time.Time
+}
+package squants.time {
+ class Time
+ object Time { def x = 2 }
+}
+package squants.velocity {
+ import squants.time._ // <-- imports `Time` value
+ import squants.Time // <-- imports type alias
+ object Velocity { Time.x }
+}
+
+import scala.math.BigDecimal.RoundingMode
+import scala.math.BigDecimal.RoundingMode.RoundingMode
+
+object Money {
+ def foo(round: RoundingMode = RoundingMode.HALF_EVEN): Int = ???
+}
diff --git a/tests/pos/i2218.scala b/tests/pos/i2218.scala
new file mode 100644
index 000000000..fbede8b9b
--- /dev/null
+++ b/tests/pos/i2218.scala
@@ -0,0 +1,9 @@
+trait Rule[In]
+
+class C {
+ def ruleWithName[In](f: In => Int): Rule[In] = {
+ new DefaultRule(f) {}
+ }
+
+ class DefaultRule[In](f: In => Int) extends Rule[In]
+}
diff --git a/tests/run/1938-2.scala b/tests/run/1938-2.scala
new file mode 100644
index 000000000..32e4c4518
--- /dev/null
+++ b/tests/run/1938-2.scala
@@ -0,0 +1,37 @@
+object ProdNonEmpty {
+ def _1: Int = 0
+ def _2: String = "???" // Slight variation with scalac: this test passes
+ // with ??? here. I think dotty behavior is fine
+ // according to the spec given that methods involved
+ // in pattern matching should be pure.
+ def isEmpty = false
+ def unapply(s: String): this.type = this
+ def get = this
+}
+
+object ProdEmpty {
+ def _1: Int = ???
+ def _2: String = ???
+ def isEmpty = true
+ def unapply(s: String): this.type = this
+ def get = this
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ "" match {
+ case ProdNonEmpty(0, _) => ()
+ case _ => ???
+ }
+
+ "" match {
+ case ProdNonEmpty(1, _) => ???
+ case _ => ()
+ }
+
+ "" match {
+ case ProdEmpty(_, _) => ???
+ case _ => ()
+ }
+ }
+}
diff --git a/tests/run/i2163.scala b/tests/run/i2163.scala
new file mode 100644
index 000000000..952f651e3
--- /dev/null
+++ b/tests/run/i2163.scala
@@ -0,0 +1,21 @@
+class Base(f: Int => Int) {
+ def result = f(3)
+}
+
+class Child(x: Int) extends Base(y => x + y)
+
+class Outer(z: Int) {
+ class Base(f: Int => Int) {
+ def result = f(3)
+ }
+
+ class Child(x: Int) extends Base(y => x + y + z)
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(new Child(4).result == 7)
+ val o = new Outer(2)
+ assert(new o.Child(2).result == 7)
+ }
+}
diff --git a/tests/run/t429.scala b/tests/run/t429.scala
index eeed4b080..411d199ee 100644
--- a/tests/run/t429.scala
+++ b/tests/run/t429.scala
@@ -11,5 +11,6 @@ object Test {
}
def main (args: Array[String]): Unit = {
Console.print((new B).y);
+ println()
}
}
diff --git a/tests/run/t5857.scala b/tests/run/t5857.scala
index eabf5dc86..fe67a7546 100644
--- a/tests/run/t5857.scala
+++ b/tests/run/t5857.scala
@@ -36,10 +36,9 @@ object Test {
b
}
- // whatever it is, it should be less than, say, 250ms
+ // whatever it is, it should be less than, say, 1000ms
// if `max` involves traversal, it takes over 5 seconds on a 3.2GHz i7 CPU
//println(exectime)
- assert(exectime < 250, exectime)
+ assert(exectime < 1000, exectime)
}
-
}
diff --git a/tests/run/t9915/C_1.java b/tests/run/t9915/C_1.java
new file mode 100644
index 000000000..4269cf74e
--- /dev/null
+++ b/tests/run/t9915/C_1.java
@@ -0,0 +1,20 @@
+/*
+ * javac: -encoding UTF-8
+ */
+public class C_1 {
+ public static final String NULLED = "X\000ABC";
+ public static final String SUPPED = "π’ˆπ’π’‘π’›π’π’˜π’•π’–";
+
+ public String nulled() {
+ return C_1.NULLED;
+ }
+ public String supped() {
+ return C_1.SUPPED;
+ }
+ public int nulledSize() {
+ return C_1.NULLED.length();
+ }
+ public int suppedSize() {
+ return C_1.SUPPED.length();
+ }
+}
diff --git a/tests/run/t9915/Test_2.scala b/tests/run/t9915/Test_2.scala
new file mode 100644
index 000000000..afed667cc
--- /dev/null
+++ b/tests/run/t9915/Test_2.scala
@@ -0,0 +1,12 @@
+
+object Test extends App {
+ val c = new C_1
+ assert(c.nulled == "X\u0000ABC") // "X\000ABC"
+ assert(c.supped == "π’ˆπ’π’‘π’›π’π’˜π’•π’–")
+
+ assert(C_1.NULLED == "X\u0000ABC") // "X\000ABC"
+ assert(C_1.SUPPED == "π’ˆπ’π’‘π’›π’π’˜π’•π’–")
+
+ assert(C_1.NULLED.size == "XYABC".size)
+ assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8)
+}