From 9ba4cb69331386dfde9bac69dc2d5b22401face3 Mon Sep 17 00:00:00 2001
From: Li Haoyi
Date: Wed, 12 Dec 2018 16:56:02 -0800
Subject: collapse boilerplate folder structure within src/ folders (#505)

* collapse boilerplate folder structure within src/ folders

* .
---
 contrib/buildinfo/src/BuildInfo.scala | 44 +
 contrib/buildinfo/src/mill/contrib/BuildInfo.scala | 44 -
 contrib/buildinfo/test/src/BuildInfoTests.scala | 111 +++
 .../test/src/mill/contrib/BuildInfoTests.scala | 111 ---
 contrib/scalapblib/src/ScalaPBModule.scala | 70 ++
 contrib/scalapblib/src/ScalaPBWorker.scala | 69 ++
 .../mill/contrib/scalapblib/ScalaPBModule.scala | 70 --
 .../mill/contrib/scalapblib/ScalaPBWorker.scala | 69 --
 contrib/scalapblib/test/src/TutorialTests.scala | 112 +++
 .../mill/contrib/scalapblib/TutorialTests.scala | 112 ---
 contrib/testng/src/ResultEvent.java | 45 +
 contrib/testng/src/TestNGFramework.java | 25 +
 contrib/testng/src/TestNGInstance.java | 67 ++
 contrib/testng/src/TestNGRunner.java | 76 ++
 contrib/testng/src/mill/testng/ResultEvent.java | 45 -
 .../testng/src/mill/testng/TestNGFramework.java | 25 -
 contrib/testng/src/mill/testng/TestNGInstance.java | 67 --
 contrib/testng/src/mill/testng/TestNGRunner.java | 76 --
 contrib/tut/src/TutModule.scala | 131 +++
 contrib/tut/src/mill/contrib/tut/TutModule.scala | 131 ---
 contrib/tut/test/src/TutTests.scala | 123 +++
 .../tut/test/src/mill/contrib/tut/TutTests.scala | 123 ---
 contrib/twirllib/src/TwirlModule.scala | 55 ++
 contrib/twirllib/src/TwirlWorker.scala | 153 ++++
 .../twirllib/src/mill/twirllib/TwirlModule.scala | 55 --
 .../twirllib/src/mill/twirllib/TwirlWorker.scala | 153 ----
 contrib/twirllib/test/src/HelloWorldTests.scala | 100 +++
 .../test/src/mill/twirllib/HelloWorldTests.scala | 100 ---
 docs/example-1/foo/src/Example.scala | 6 +
 docs/example-1/foo/src/foo/Example.scala | 6 -
 docs/example-2/foo/src/Example.scala | 6 +
 docs/example-2/foo/src/foo/Example.scala | 6 -
 integration/test/src/AcyclicTests.scala | 31 +
 integration/test/src/AmmoniteTests.scala | 35 +
 integration/test/src/BetterFilesTests.scala | 26 +
 integration/test/src/CaffeineTests.scala | 34 +
 integration/test/src/DocAnnotationsTests.scala | 33 +
 integration/test/src/IntegrationTestSuite.scala | 28 +
 integration/test/src/JawnTests.scala | 31 +
 integration/test/src/PlayJsonTests.scala | 57 ++
 integration/test/src/UpickleTests.scala | 30 +
 integration/test/src/forked/Tests.scala | 10 +
 integration/test/src/local/Tests.scala | 10 +
 .../test/src/mill/integration/AcyclicTests.scala | 31 -
 .../test/src/mill/integration/AmmoniteTests.scala | 35 -
 .../src/mill/integration/BetterFilesTests.scala | 26 -
 .../test/src/mill/integration/CaffeineTests.scala | 34 -
 .../src/mill/integration/DocAnnotationsTests.scala | 33 -
 .../mill/integration/IntegrationTestSuite.scala | 28 -
 .../test/src/mill/integration/JawnTests.scala | 31 -
 .../test/src/mill/integration/PlayJsonTests.scala | 57 --
 .../test/src/mill/integration/UpickleTests.scala | 30 -
 .../test/src/mill/integration/forked/Tests.scala | 10 -
 .../test/src/mill/integration/local/Tests.scala | 10 -
 main/client/src/InputPumper.java | 37 +
 main/client/src/Lock.java | 14 +
 main/client/src/Locked.java | 10 +
 main/client/src/Locks.java | 107 +++
 main/client/src/MillClientMain.java | 162 ++++
 main/client/src/ProxyOutputStream.java | 34 +
 main/client/src/ProxyStreamPumper.java | 60 ++
 main/client/src/Util.java | 95 +++
 main/client/src/mill/main/client/InputPumper.java | 37 -
 main/client/src/mill/main/client/Lock.java | 14 -
main/client/src/mill/main/client/Locked.java | 10 - main/client/src/mill/main/client/Locks.java | 107 --- .../src/mill/main/client/MillClientMain.java | 162 ---- .../src/mill/main/client/ProxyOutputStream.java | 34 - .../src/mill/main/client/ProxyStreamPumper.java | 60 -- main/client/src/mill/main/client/Util.java | 95 --- main/client/test/src/ClientTests.java | 151 ++++ .../test/src/mill/main/client/ClientTests.java | 151 ---- main/core/src/define/Applicative.scala | 108 +++ main/core/src/define/BaseModule.scala | 56 ++ main/core/src/define/Caller.scala | 13 + main/core/src/define/Cross.scala | 90 ++ main/core/src/define/Ctx.scala | 100 +++ main/core/src/define/Discover.scala | 89 ++ main/core/src/define/Graph.scala | 72 ++ main/core/src/define/Module.scala | 96 +++ main/core/src/define/Task.scala | 344 ++++++++ main/core/src/eval/Evaluator.scala | 443 ++++++++++ main/core/src/eval/Tarjans.scala | 51 ++ main/core/src/eval/package.scala | 12 + main/core/src/mill/define/Applicative.scala | 108 --- main/core/src/mill/define/BaseModule.scala | 56 -- main/core/src/mill/define/Caller.scala | 13 - main/core/src/mill/define/Cross.scala | 90 -- main/core/src/mill/define/Ctx.scala | 100 --- main/core/src/mill/define/Discover.scala | 89 -- main/core/src/mill/define/Graph.scala | 72 -- main/core/src/mill/define/Module.scala | 96 --- main/core/src/mill/define/Task.scala | 344 -------- main/core/src/mill/eval/Evaluator.scala | 443 ---------- main/core/src/mill/eval/Tarjans.scala | 51 -- main/core/src/mill/eval/package.scala | 12 - main/core/src/mill/util/AggWrapper.scala | 119 --- main/core/src/mill/util/EitherOps.scala | 18 - main/core/src/mill/util/EnclosingClass.scala | 15 - main/core/src/mill/util/JsonFormatters.scala | 10 - main/core/src/mill/util/Loggers.scala | 190 ----- main/core/src/mill/util/MultiBiMap.scala | 57 -- main/core/src/mill/util/ParseArgs.scala | 137 --- main/core/src/mill/util/Router.scala | 451 ---------- main/core/src/mill/util/Scripts.scala | 330 -------- main/core/src/mill/util/Watched.scala | 8 - main/core/src/mill/util/package.scala | 7 - main/core/src/util/AggWrapper.scala | 119 +++ main/core/src/util/EitherOps.scala | 18 + main/core/src/util/EnclosingClass.scala | 15 + main/core/src/util/JsonFormatters.scala | 10 + main/core/src/util/Loggers.scala | 190 +++++ main/core/src/util/MultiBiMap.scala | 57 ++ main/core/src/util/ParseArgs.scala | 137 +++ main/core/src/util/Router.scala | 451 ++++++++++ main/core/src/util/Scripts.scala | 330 ++++++++ main/core/src/util/Watched.scala | 8 + main/core/src/util/package.scala | 7 + main/graphviz/src/GraphvizTools.scala | 71 ++ .../src/mill/main/graphviz/GraphvizTools.scala | 71 -- main/moduledefs/src/AutoOverridePlugin.scala | 172 ++++ main/moduledefs/src/Cacher.scala | 35 + main/moduledefs/src/Scaladoc.java | 11 + .../src/mill/moduledefs/AutoOverridePlugin.scala | 172 ---- main/moduledefs/src/mill/moduledefs/Cacher.scala | 35 - main/moduledefs/src/mill/moduledefs/Scaladoc.java | 11 - main/src/MillMain.scala | 155 ++++ main/src/main/MainModule.scala | 269 ++++++ main/src/main/MainRunner.scala | 170 ++++ main/src/main/MainScopts.scala | 31 + main/src/main/MillServerMain.scala | 227 +++++ main/src/main/ReplApplyHandler.scala | 163 ++++ main/src/main/Resolve.scala | 443 ++++++++++ main/src/main/RunScript.scala | 262 ++++++ main/src/main/VisualizeModule.scala | 60 ++ main/src/mill/MillMain.scala | 155 ---- main/src/mill/main/MainModule.scala | 269 ------ main/src/mill/main/MainRunner.scala | 170 ---- main/src/mill/main/MainScopts.scala | 31 
- main/src/mill/main/MillServerMain.scala | 227 ----- main/src/mill/main/ReplApplyHandler.scala | 163 ---- main/src/mill/main/Resolve.scala | 443 ---------- main/src/mill/main/RunScript.scala | 262 ------ main/src/mill/main/VisualizeModule.scala | 60 -- main/src/mill/modules/Assembly.scala | 126 --- main/src/mill/modules/Jvm.scala | 481 ----------- main/src/mill/modules/Util.scala | 73 -- main/src/mill/package.scala | 12 - main/src/modules/Assembly.scala | 126 +++ main/src/modules/Jvm.scala | 481 +++++++++++ main/src/modules/Util.scala | 73 ++ main/src/package.scala | 12 + main/test/src/TestMain.scala | 6 + main/test/src/UTestFramework.scala | 11 + main/test/src/define/ApplicativeTests.scala | 125 +++ main/test/src/define/BasePathTests.scala | 73 ++ main/test/src/define/CacherTests.scala | 75 ++ main/test/src/define/DiscoverTests.scala | 63 ++ main/test/src/define/GraphTests.scala | 211 +++++ main/test/src/define/MacroErrorTests.scala | 145 ++++ main/test/src/eval/CrossTests.scala | 56 ++ main/test/src/eval/EvaluationTests.scala | 354 ++++++++ main/test/src/eval/FailureTests.scala | 132 +++ main/test/src/eval/JavaCompileJarTests.scala | 164 ++++ main/test/src/eval/ModuleTests.scala | 45 + main/test/src/eval/TarjanTests.scala | 91 ++ main/test/src/eval/TaskTests.scala | 95 +++ main/test/src/main/ClientServerTests.scala | 214 +++++ main/test/src/main/ForeignBuildsTest.scala | 30 + main/test/src/main/ForeignConflictTest.scala | 25 + main/test/src/main/JavaCompileJarTests.scala | 67 ++ main/test/src/main/MainTests.scala | 272 ++++++ main/test/src/mill/TestMain.scala | 6 - main/test/src/mill/UTestFramework.scala | 11 - main/test/src/mill/define/ApplicativeTests.scala | 125 --- main/test/src/mill/define/BasePathTests.scala | 73 -- main/test/src/mill/define/CacherTests.scala | 75 -- main/test/src/mill/define/DiscoverTests.scala | 63 -- main/test/src/mill/define/GraphTests.scala | 211 ----- main/test/src/mill/define/MacroErrorTests.scala | 145 ---- main/test/src/mill/eval/CrossTests.scala | 56 -- main/test/src/mill/eval/EvaluationTests.scala | 354 -------- main/test/src/mill/eval/FailureTests.scala | 132 --- main/test/src/mill/eval/JavaCompileJarTests.scala | 164 ---- main/test/src/mill/eval/ModuleTests.scala | 45 - main/test/src/mill/eval/TarjanTests.scala | 91 -- main/test/src/mill/eval/TaskTests.scala | 95 --- main/test/src/mill/main/ClientServerTests.scala | 214 ----- main/test/src/mill/main/ForeignBuildsTest.scala | 30 - main/test/src/mill/main/ForeignConflictTest.scala | 25 - main/test/src/mill/main/JavaCompileJarTests.scala | 67 -- main/test/src/mill/main/MainTests.scala | 272 ------ main/test/src/mill/util/ParseArgsTest.scala | 254 ------ main/test/src/mill/util/ScriptTestSuite.scala | 53 -- main/test/src/mill/util/TestEvaluator.scala | 80 -- main/test/src/mill/util/TestGraphs.scala | 271 ------ main/test/src/mill/util/TestUtil.scala | 86 -- main/test/src/util/ParseArgsTest.scala | 254 ++++++ main/test/src/util/ScriptTestSuite.scala | 53 ++ main/test/src/util/TestEvaluator.scala | 80 ++ main/test/src/util/TestGraphs.scala | 271 ++++++ main/test/src/util/TestUtil.scala | 86 ++ scalajslib/api/src/ScalaJSWorkerApi.scala | 41 + .../src/mill/scalajslib/api/ScalaJSWorkerApi.scala | 41 - scalajslib/src/ScalaJSModule.scala | 200 +++++ scalajslib/src/ScalaJSWorkerApi.scala | 72 ++ scalajslib/src/mill/scalajslib/ScalaJSModule.scala | 200 ----- .../src/mill/scalajslib/ScalaJSWorkerApi.scala | 72 -- .../resources/multi-module/shared/src/Utils.scala | 5 + .../multi-module/shared/src/shared/Utils.scala 
| 5 - scalajslib/test/src/HelloJSWorldTests.scala | 247 ++++++ scalajslib/test/src/MultiModuleTests.scala | 93 ++ scalajslib/test/src/NodeJSConfigTests.scala | 103 +++ scalajslib/test/src/ScalaJsUtils.scala | 21 + .../src/mill/scalajslib/HelloJSWorldTests.scala | 247 ------ .../src/mill/scalajslib/MultiModuleTests.scala | 93 -- .../src/mill/scalajslib/NodeJSConfigTests.scala | 103 --- .../test/src/mill/scalajslib/ScalaJsUtils.scala | 21 - scalajslib/worker/0.6/src/ScalaJSWorkerImpl.scala | 87 ++ .../mill/scalajslib/worker/ScalaJSWorkerImpl.scala | 87 -- scalajslib/worker/1.0/src/ScalaJSWorkerImpl.scala | 85 ++ .../mill/scalajslib/worker/ScalaJSWorkerImpl.scala | 85 -- scalalib/api/src/ZincWorkerApi.scala | 76 ++ .../api/src/mill/scalalib/api/ZincWorkerApi.scala | 76 -- .../backgroundwrapper/src/BackgroundWrapper.java | 40 + .../backgroundwrapper/BackgroundWrapper.java | 40 - scalalib/src/Dep.scala | 121 +++ scalalib/src/Dependency.scala | 22 + scalalib/src/GenIdeaImpl.scala | 474 +++++++++++ scalalib/src/JavaModule.scala | 608 ++++++++++++++ scalalib/src/Lib.scala | 133 +++ scalalib/src/MiscModule.scala | 101 +++ scalalib/src/PublishModule.scala | 124 +++ scalalib/src/ScalaModule.scala | 275 ++++++ scalalib/src/TestRunner.scala | 153 ++++ scalalib/src/Versions.scala | 8 + scalalib/src/ZincWorkerModule.scala | 56 ++ .../src/dependency/DependencyUpdatesImpl.scala | 52 ++ .../dependency/metadata/MavenMetadataLoader.scala | 21 + .../src/dependency/metadata/MetadataLoader.scala | 7 + .../metadata/MetadataLoaderFactory.scala | 11 + .../updates/ModuleDependenciesUpdates.scala | 15 + .../src/dependency/updates/UpdatesFinder.scala | 75 ++ .../versions/ModuleDependenciesVersions.scala | 12 + scalalib/src/dependency/versions/Version.scala | 227 +++++ .../src/dependency/versions/VersionParser.scala | 30 + .../src/dependency/versions/VersionsFinder.scala | 73 ++ scalalib/src/mill/scalalib/Dep.scala | 121 --- scalalib/src/mill/scalalib/Dependency.scala | 22 - scalalib/src/mill/scalalib/GenIdeaImpl.scala | 474 ----------- scalalib/src/mill/scalalib/JavaModule.scala | 608 -------------- scalalib/src/mill/scalalib/Lib.scala | 133 --- scalalib/src/mill/scalalib/MiscModule.scala | 101 --- scalalib/src/mill/scalalib/PublishModule.scala | 124 --- scalalib/src/mill/scalalib/ScalaModule.scala | 275 ------ scalalib/src/mill/scalalib/TestRunner.scala | 153 ---- scalalib/src/mill/scalalib/Versions.scala | 8 - scalalib/src/mill/scalalib/ZincWorkerModule.scala | 56 -- .../dependency/DependencyUpdatesImpl.scala | 52 -- .../dependency/metadata/MavenMetadataLoader.scala | 21 - .../dependency/metadata/MetadataLoader.scala | 7 - .../metadata/MetadataLoaderFactory.scala | 11 - .../updates/ModuleDependenciesUpdates.scala | 15 - .../dependency/updates/UpdatesFinder.scala | 75 -- .../versions/ModuleDependenciesVersions.scala | 12 - .../scalalib/dependency/versions/Version.scala | 227 ----- .../dependency/versions/VersionParser.scala | 30 - .../dependency/versions/VersionsFinder.scala | 73 -- scalalib/src/mill/scalalib/package.scala | 12 - scalalib/src/mill/scalalib/publish/Ivy.scala | 59 -- .../src/mill/scalalib/publish/JsonFormatters.scala | 11 - scalalib/src/mill/scalalib/publish/Licence.scala | 479 ----------- .../src/mill/scalalib/publish/LocalPublisher.scala | 32 - scalalib/src/mill/scalalib/publish/Pom.scala | 117 --- .../mill/scalalib/publish/SonatypeHttpApi.scala | 134 --- .../mill/scalalib/publish/SonatypePublisher.scala | 164 ---- .../src/mill/scalalib/publish/VersionControl.scala | 131 --- 
scalalib/src/mill/scalalib/publish/package.scala | 3 - scalalib/src/mill/scalalib/publish/settings.scala | 91 -- .../mill/scalalib/scalafmt/ScalafmtModule.scala | 57 -- .../mill/scalalib/scalafmt/ScalafmtWorker.scala | 57 -- scalalib/src/package.scala | 12 + scalalib/src/publish/Ivy.scala | 59 ++ scalalib/src/publish/JsonFormatters.scala | 11 + scalalib/src/publish/Licence.scala | 479 +++++++++++ scalalib/src/publish/LocalPublisher.scala | 32 + scalalib/src/publish/Pom.scala | 117 +++ scalalib/src/publish/SonatypeHttpApi.scala | 134 +++ scalalib/src/publish/SonatypePublisher.scala | 164 ++++ scalalib/src/publish/VersionControl.scala | 131 +++ scalalib/src/publish/package.scala | 3 + scalalib/src/publish/settings.scala | 91 ++ scalalib/src/scalafmt/ScalafmtModule.scala | 57 ++ scalalib/src/scalafmt/ScalafmtWorker.scala | 57 ++ .../test/resources/hello-java/app/src/Main.java | 10 + .../resources/hello-java/app/src/hello/Main.java | 10 - .../hello-java/app/test/src/MyAppTests.java | 18 + .../hello-java/app/test/src/hello/MyAppTests.java | 18 - .../test/resources/hello-java/core/src/Core.java | 7 + .../resources/hello-java/core/src/hello/Core.java | 7 - .../hello-java/core/test/src/MyCoreTests.java | 15 + .../core/test/src/hello/MyCoreTests.java | 15 - scalalib/test/src/GenIdeaTests.scala | 62 ++ scalalib/test/src/HelloJavaTests.scala | 114 +++ scalalib/test/src/HelloWorldTests.scala | 934 +++++++++++++++++++++ scalalib/test/src/ResolveDepsTests.scala | 77 ++ scalalib/test/src/VersionControlTests.scala | 74 ++ .../metadata/MetadataLoaderFactoryTests.scala | 64 ++ .../dependency/updates/UpdatesFinderTests.scala | 173 ++++ .../src/dependency/versions/VersionTests.scala | 138 +++ scalalib/test/src/mill/scalalib/GenIdeaTests.scala | 62 -- .../test/src/mill/scalalib/HelloJavaTests.scala | 114 --- .../test/src/mill/scalalib/HelloWorldTests.scala | 934 --------------------- .../test/src/mill/scalalib/ResolveDepsTests.scala | 77 -- .../src/mill/scalalib/VersionControlTests.scala | 74 -- .../metadata/MetadataLoaderFactoryTests.scala | 64 -- .../dependency/updates/UpdatesFinderTests.scala | 173 ---- .../dependency/versions/VersionTests.scala | 138 --- .../test/src/mill/scalalib/publish/IvyTests.scala | 60 -- .../test/src/mill/scalalib/publish/PomTests.scala | 205 ----- .../src/mill/scalalib/scalafmt/ScalafmtTests.scala | 104 --- scalalib/test/src/publish/IvyTests.scala | 60 ++ scalalib/test/src/publish/PomTests.scala | 205 +++++ scalalib/test/src/scalafmt/ScalafmtTests.scala | 104 +++ scalalib/worker/src/ZincWorkerImpl.scala | 284 +++++++ .../src/mill/scalalib/worker/ZincWorkerImpl.scala | 284 ------- scalanativelib/api/src/ScalaNativeWorkerApi.scala | 60 ++ .../scalanativelib/api/ScalaNativeWorkerApi.scala | 60 -- scalanativelib/src/ScalaNativeModule.scala | 294 +++++++ scalanativelib/src/ScalaNativeWorkerApi.scala | 44 + .../mill/scalanativelib/ScalaNativeModule.scala | 294 ------- .../mill/scalanativelib/ScalaNativeWorkerApi.scala | 44 - .../hello-native-world/src/ArgsParser.scala | 5 + .../resources/hello-native-world/src/Main.scala | 6 + .../hello-native-world/src/hello/ArgsParser.scala | 5 - .../hello-native-world/src/hello/Main.scala | 6 - .../test/src/HelloNativeWorldTests.scala | 216 +++++ .../scalanativelib/HelloNativeWorldTests.scala | 216 ----- .../worker/0.3/src/ScalaNativeWorkerImpl.scala | 73 ++ .../worker/ScalaNativeWorkerImpl.scala | 73 -- 340 files changed, 18815 insertions(+), 18815 deletions(-) create mode 100644 contrib/buildinfo/src/BuildInfo.scala delete mode 100644 
contrib/buildinfo/src/mill/contrib/BuildInfo.scala create mode 100644 contrib/buildinfo/test/src/BuildInfoTests.scala delete mode 100644 contrib/buildinfo/test/src/mill/contrib/BuildInfoTests.scala create mode 100644 contrib/scalapblib/src/ScalaPBModule.scala create mode 100644 contrib/scalapblib/src/ScalaPBWorker.scala delete mode 100644 contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBModule.scala delete mode 100644 contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBWorker.scala create mode 100644 contrib/scalapblib/test/src/TutorialTests.scala delete mode 100644 contrib/scalapblib/test/src/mill/contrib/scalapblib/TutorialTests.scala create mode 100644 contrib/testng/src/ResultEvent.java create mode 100644 contrib/testng/src/TestNGFramework.java create mode 100644 contrib/testng/src/TestNGInstance.java create mode 100644 contrib/testng/src/TestNGRunner.java delete mode 100644 contrib/testng/src/mill/testng/ResultEvent.java delete mode 100644 contrib/testng/src/mill/testng/TestNGFramework.java delete mode 100644 contrib/testng/src/mill/testng/TestNGInstance.java delete mode 100644 contrib/testng/src/mill/testng/TestNGRunner.java create mode 100644 contrib/tut/src/TutModule.scala delete mode 100644 contrib/tut/src/mill/contrib/tut/TutModule.scala create mode 100644 contrib/tut/test/src/TutTests.scala delete mode 100644 contrib/tut/test/src/mill/contrib/tut/TutTests.scala create mode 100644 contrib/twirllib/src/TwirlModule.scala create mode 100644 contrib/twirllib/src/TwirlWorker.scala delete mode 100644 contrib/twirllib/src/mill/twirllib/TwirlModule.scala delete mode 100644 contrib/twirllib/src/mill/twirllib/TwirlWorker.scala create mode 100644 contrib/twirllib/test/src/HelloWorldTests.scala delete mode 100644 contrib/twirllib/test/src/mill/twirllib/HelloWorldTests.scala create mode 100644 docs/example-1/foo/src/Example.scala delete mode 100644 docs/example-1/foo/src/foo/Example.scala create mode 100644 docs/example-2/foo/src/Example.scala delete mode 100644 docs/example-2/foo/src/foo/Example.scala create mode 100644 integration/test/src/AcyclicTests.scala create mode 100644 integration/test/src/AmmoniteTests.scala create mode 100644 integration/test/src/BetterFilesTests.scala create mode 100644 integration/test/src/CaffeineTests.scala create mode 100644 integration/test/src/DocAnnotationsTests.scala create mode 100644 integration/test/src/IntegrationTestSuite.scala create mode 100644 integration/test/src/JawnTests.scala create mode 100644 integration/test/src/PlayJsonTests.scala create mode 100644 integration/test/src/UpickleTests.scala create mode 100644 integration/test/src/forked/Tests.scala create mode 100644 integration/test/src/local/Tests.scala delete mode 100644 integration/test/src/mill/integration/AcyclicTests.scala delete mode 100644 integration/test/src/mill/integration/AmmoniteTests.scala delete mode 100644 integration/test/src/mill/integration/BetterFilesTests.scala delete mode 100644 integration/test/src/mill/integration/CaffeineTests.scala delete mode 100644 integration/test/src/mill/integration/DocAnnotationsTests.scala delete mode 100644 integration/test/src/mill/integration/IntegrationTestSuite.scala delete mode 100644 integration/test/src/mill/integration/JawnTests.scala delete mode 100644 integration/test/src/mill/integration/PlayJsonTests.scala delete mode 100644 integration/test/src/mill/integration/UpickleTests.scala delete mode 100644 integration/test/src/mill/integration/forked/Tests.scala delete mode 100644 
integration/test/src/mill/integration/local/Tests.scala create mode 100644 main/client/src/InputPumper.java create mode 100644 main/client/src/Lock.java create mode 100644 main/client/src/Locked.java create mode 100644 main/client/src/Locks.java create mode 100644 main/client/src/MillClientMain.java create mode 100644 main/client/src/ProxyOutputStream.java create mode 100644 main/client/src/ProxyStreamPumper.java create mode 100644 main/client/src/Util.java delete mode 100644 main/client/src/mill/main/client/InputPumper.java delete mode 100644 main/client/src/mill/main/client/Lock.java delete mode 100644 main/client/src/mill/main/client/Locked.java delete mode 100644 main/client/src/mill/main/client/Locks.java delete mode 100644 main/client/src/mill/main/client/MillClientMain.java delete mode 100644 main/client/src/mill/main/client/ProxyOutputStream.java delete mode 100644 main/client/src/mill/main/client/ProxyStreamPumper.java delete mode 100644 main/client/src/mill/main/client/Util.java create mode 100644 main/client/test/src/ClientTests.java delete mode 100644 main/client/test/src/mill/main/client/ClientTests.java create mode 100644 main/core/src/define/Applicative.scala create mode 100644 main/core/src/define/BaseModule.scala create mode 100644 main/core/src/define/Caller.scala create mode 100644 main/core/src/define/Cross.scala create mode 100644 main/core/src/define/Ctx.scala create mode 100644 main/core/src/define/Discover.scala create mode 100644 main/core/src/define/Graph.scala create mode 100644 main/core/src/define/Module.scala create mode 100644 main/core/src/define/Task.scala create mode 100644 main/core/src/eval/Evaluator.scala create mode 100644 main/core/src/eval/Tarjans.scala create mode 100644 main/core/src/eval/package.scala delete mode 100644 main/core/src/mill/define/Applicative.scala delete mode 100644 main/core/src/mill/define/BaseModule.scala delete mode 100644 main/core/src/mill/define/Caller.scala delete mode 100644 main/core/src/mill/define/Cross.scala delete mode 100644 main/core/src/mill/define/Ctx.scala delete mode 100644 main/core/src/mill/define/Discover.scala delete mode 100644 main/core/src/mill/define/Graph.scala delete mode 100644 main/core/src/mill/define/Module.scala delete mode 100644 main/core/src/mill/define/Task.scala delete mode 100644 main/core/src/mill/eval/Evaluator.scala delete mode 100644 main/core/src/mill/eval/Tarjans.scala delete mode 100644 main/core/src/mill/eval/package.scala delete mode 100644 main/core/src/mill/util/AggWrapper.scala delete mode 100644 main/core/src/mill/util/EitherOps.scala delete mode 100644 main/core/src/mill/util/EnclosingClass.scala delete mode 100644 main/core/src/mill/util/JsonFormatters.scala delete mode 100644 main/core/src/mill/util/Loggers.scala delete mode 100644 main/core/src/mill/util/MultiBiMap.scala delete mode 100644 main/core/src/mill/util/ParseArgs.scala delete mode 100644 main/core/src/mill/util/Router.scala delete mode 100644 main/core/src/mill/util/Scripts.scala delete mode 100644 main/core/src/mill/util/Watched.scala delete mode 100644 main/core/src/mill/util/package.scala create mode 100644 main/core/src/util/AggWrapper.scala create mode 100644 main/core/src/util/EitherOps.scala create mode 100644 main/core/src/util/EnclosingClass.scala create mode 100644 main/core/src/util/JsonFormatters.scala create mode 100644 main/core/src/util/Loggers.scala create mode 100644 main/core/src/util/MultiBiMap.scala create mode 100644 main/core/src/util/ParseArgs.scala create mode 100644 
main/core/src/util/Router.scala create mode 100644 main/core/src/util/Scripts.scala create mode 100644 main/core/src/util/Watched.scala create mode 100644 main/core/src/util/package.scala create mode 100644 main/graphviz/src/GraphvizTools.scala delete mode 100644 main/graphviz/src/mill/main/graphviz/GraphvizTools.scala create mode 100644 main/moduledefs/src/AutoOverridePlugin.scala create mode 100644 main/moduledefs/src/Cacher.scala create mode 100644 main/moduledefs/src/Scaladoc.java delete mode 100644 main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala delete mode 100644 main/moduledefs/src/mill/moduledefs/Cacher.scala delete mode 100644 main/moduledefs/src/mill/moduledefs/Scaladoc.java create mode 100644 main/src/MillMain.scala create mode 100644 main/src/main/MainModule.scala create mode 100644 main/src/main/MainRunner.scala create mode 100644 main/src/main/MainScopts.scala create mode 100644 main/src/main/MillServerMain.scala create mode 100644 main/src/main/ReplApplyHandler.scala create mode 100644 main/src/main/Resolve.scala create mode 100644 main/src/main/RunScript.scala create mode 100644 main/src/main/VisualizeModule.scala delete mode 100644 main/src/mill/MillMain.scala delete mode 100644 main/src/mill/main/MainModule.scala delete mode 100644 main/src/mill/main/MainRunner.scala delete mode 100644 main/src/mill/main/MainScopts.scala delete mode 100644 main/src/mill/main/MillServerMain.scala delete mode 100644 main/src/mill/main/ReplApplyHandler.scala delete mode 100644 main/src/mill/main/Resolve.scala delete mode 100644 main/src/mill/main/RunScript.scala delete mode 100644 main/src/mill/main/VisualizeModule.scala delete mode 100644 main/src/mill/modules/Assembly.scala delete mode 100644 main/src/mill/modules/Jvm.scala delete mode 100644 main/src/mill/modules/Util.scala delete mode 100644 main/src/mill/package.scala create mode 100644 main/src/modules/Assembly.scala create mode 100644 main/src/modules/Jvm.scala create mode 100644 main/src/modules/Util.scala create mode 100644 main/src/package.scala create mode 100644 main/test/src/TestMain.scala create mode 100644 main/test/src/UTestFramework.scala create mode 100644 main/test/src/define/ApplicativeTests.scala create mode 100644 main/test/src/define/BasePathTests.scala create mode 100644 main/test/src/define/CacherTests.scala create mode 100644 main/test/src/define/DiscoverTests.scala create mode 100644 main/test/src/define/GraphTests.scala create mode 100644 main/test/src/define/MacroErrorTests.scala create mode 100644 main/test/src/eval/CrossTests.scala create mode 100644 main/test/src/eval/EvaluationTests.scala create mode 100644 main/test/src/eval/FailureTests.scala create mode 100644 main/test/src/eval/JavaCompileJarTests.scala create mode 100644 main/test/src/eval/ModuleTests.scala create mode 100644 main/test/src/eval/TarjanTests.scala create mode 100644 main/test/src/eval/TaskTests.scala create mode 100644 main/test/src/main/ClientServerTests.scala create mode 100644 main/test/src/main/ForeignBuildsTest.scala create mode 100644 main/test/src/main/ForeignConflictTest.scala create mode 100644 main/test/src/main/JavaCompileJarTests.scala create mode 100644 main/test/src/main/MainTests.scala delete mode 100644 main/test/src/mill/TestMain.scala delete mode 100644 main/test/src/mill/UTestFramework.scala delete mode 100644 main/test/src/mill/define/ApplicativeTests.scala delete mode 100644 main/test/src/mill/define/BasePathTests.scala delete mode 100644 main/test/src/mill/define/CacherTests.scala delete mode 100644 
main/test/src/mill/define/DiscoverTests.scala delete mode 100644 main/test/src/mill/define/GraphTests.scala delete mode 100644 main/test/src/mill/define/MacroErrorTests.scala delete mode 100644 main/test/src/mill/eval/CrossTests.scala delete mode 100644 main/test/src/mill/eval/EvaluationTests.scala delete mode 100644 main/test/src/mill/eval/FailureTests.scala delete mode 100644 main/test/src/mill/eval/JavaCompileJarTests.scala delete mode 100644 main/test/src/mill/eval/ModuleTests.scala delete mode 100644 main/test/src/mill/eval/TarjanTests.scala delete mode 100644 main/test/src/mill/eval/TaskTests.scala delete mode 100644 main/test/src/mill/main/ClientServerTests.scala delete mode 100644 main/test/src/mill/main/ForeignBuildsTest.scala delete mode 100644 main/test/src/mill/main/ForeignConflictTest.scala delete mode 100644 main/test/src/mill/main/JavaCompileJarTests.scala delete mode 100644 main/test/src/mill/main/MainTests.scala delete mode 100644 main/test/src/mill/util/ParseArgsTest.scala delete mode 100644 main/test/src/mill/util/ScriptTestSuite.scala delete mode 100644 main/test/src/mill/util/TestEvaluator.scala delete mode 100644 main/test/src/mill/util/TestGraphs.scala delete mode 100644 main/test/src/mill/util/TestUtil.scala create mode 100644 main/test/src/util/ParseArgsTest.scala create mode 100644 main/test/src/util/ScriptTestSuite.scala create mode 100644 main/test/src/util/TestEvaluator.scala create mode 100644 main/test/src/util/TestGraphs.scala create mode 100644 main/test/src/util/TestUtil.scala create mode 100644 scalajslib/api/src/ScalaJSWorkerApi.scala delete mode 100644 scalajslib/api/src/mill/scalajslib/api/ScalaJSWorkerApi.scala create mode 100644 scalajslib/src/ScalaJSModule.scala create mode 100644 scalajslib/src/ScalaJSWorkerApi.scala delete mode 100644 scalajslib/src/mill/scalajslib/ScalaJSModule.scala delete mode 100644 scalajslib/src/mill/scalajslib/ScalaJSWorkerApi.scala create mode 100644 scalajslib/test/resources/multi-module/shared/src/Utils.scala delete mode 100644 scalajslib/test/resources/multi-module/shared/src/shared/Utils.scala create mode 100644 scalajslib/test/src/HelloJSWorldTests.scala create mode 100644 scalajslib/test/src/MultiModuleTests.scala create mode 100644 scalajslib/test/src/NodeJSConfigTests.scala create mode 100644 scalajslib/test/src/ScalaJsUtils.scala delete mode 100644 scalajslib/test/src/mill/scalajslib/HelloJSWorldTests.scala delete mode 100644 scalajslib/test/src/mill/scalajslib/MultiModuleTests.scala delete mode 100644 scalajslib/test/src/mill/scalajslib/NodeJSConfigTests.scala delete mode 100644 scalajslib/test/src/mill/scalajslib/ScalaJsUtils.scala create mode 100644 scalajslib/worker/0.6/src/ScalaJSWorkerImpl.scala delete mode 100644 scalajslib/worker/0.6/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala create mode 100644 scalajslib/worker/1.0/src/ScalaJSWorkerImpl.scala delete mode 100644 scalajslib/worker/1.0/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala create mode 100644 scalalib/api/src/ZincWorkerApi.scala delete mode 100644 scalalib/api/src/mill/scalalib/api/ZincWorkerApi.scala create mode 100644 scalalib/backgroundwrapper/src/BackgroundWrapper.java delete mode 100644 scalalib/backgroundwrapper/src/mill/scalalib/backgroundwrapper/BackgroundWrapper.java create mode 100644 scalalib/src/Dep.scala create mode 100644 scalalib/src/Dependency.scala create mode 100644 scalalib/src/GenIdeaImpl.scala create mode 100644 scalalib/src/JavaModule.scala create mode 100644 scalalib/src/Lib.scala create mode 100644 
scalalib/src/MiscModule.scala create mode 100644 scalalib/src/PublishModule.scala create mode 100644 scalalib/src/ScalaModule.scala create mode 100644 scalalib/src/TestRunner.scala create mode 100644 scalalib/src/Versions.scala create mode 100644 scalalib/src/ZincWorkerModule.scala create mode 100644 scalalib/src/dependency/DependencyUpdatesImpl.scala create mode 100644 scalalib/src/dependency/metadata/MavenMetadataLoader.scala create mode 100644 scalalib/src/dependency/metadata/MetadataLoader.scala create mode 100644 scalalib/src/dependency/metadata/MetadataLoaderFactory.scala create mode 100644 scalalib/src/dependency/updates/ModuleDependenciesUpdates.scala create mode 100644 scalalib/src/dependency/updates/UpdatesFinder.scala create mode 100644 scalalib/src/dependency/versions/ModuleDependenciesVersions.scala create mode 100644 scalalib/src/dependency/versions/Version.scala create mode 100644 scalalib/src/dependency/versions/VersionParser.scala create mode 100644 scalalib/src/dependency/versions/VersionsFinder.scala delete mode 100644 scalalib/src/mill/scalalib/Dep.scala delete mode 100644 scalalib/src/mill/scalalib/Dependency.scala delete mode 100644 scalalib/src/mill/scalalib/GenIdeaImpl.scala delete mode 100644 scalalib/src/mill/scalalib/JavaModule.scala delete mode 100644 scalalib/src/mill/scalalib/Lib.scala delete mode 100644 scalalib/src/mill/scalalib/MiscModule.scala delete mode 100644 scalalib/src/mill/scalalib/PublishModule.scala delete mode 100644 scalalib/src/mill/scalalib/ScalaModule.scala delete mode 100644 scalalib/src/mill/scalalib/TestRunner.scala delete mode 100644 scalalib/src/mill/scalalib/Versions.scala delete mode 100644 scalalib/src/mill/scalalib/ZincWorkerModule.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/DependencyUpdatesImpl.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/metadata/MavenMetadataLoader.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/metadata/MetadataLoader.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/metadata/MetadataLoaderFactory.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/updates/ModuleDependenciesUpdates.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/updates/UpdatesFinder.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/versions/ModuleDependenciesVersions.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/versions/Version.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/versions/VersionParser.scala delete mode 100644 scalalib/src/mill/scalalib/dependency/versions/VersionsFinder.scala delete mode 100644 scalalib/src/mill/scalalib/package.scala delete mode 100644 scalalib/src/mill/scalalib/publish/Ivy.scala delete mode 100644 scalalib/src/mill/scalalib/publish/JsonFormatters.scala delete mode 100644 scalalib/src/mill/scalalib/publish/Licence.scala delete mode 100644 scalalib/src/mill/scalalib/publish/LocalPublisher.scala delete mode 100644 scalalib/src/mill/scalalib/publish/Pom.scala delete mode 100644 scalalib/src/mill/scalalib/publish/SonatypeHttpApi.scala delete mode 100644 scalalib/src/mill/scalalib/publish/SonatypePublisher.scala delete mode 100644 scalalib/src/mill/scalalib/publish/VersionControl.scala delete mode 100644 scalalib/src/mill/scalalib/publish/package.scala delete mode 100644 scalalib/src/mill/scalalib/publish/settings.scala delete mode 100644 scalalib/src/mill/scalalib/scalafmt/ScalafmtModule.scala delete mode 100644 scalalib/src/mill/scalalib/scalafmt/ScalafmtWorker.scala create mode 
100644 scalalib/src/package.scala create mode 100644 scalalib/src/publish/Ivy.scala create mode 100644 scalalib/src/publish/JsonFormatters.scala create mode 100644 scalalib/src/publish/Licence.scala create mode 100644 scalalib/src/publish/LocalPublisher.scala create mode 100644 scalalib/src/publish/Pom.scala create mode 100644 scalalib/src/publish/SonatypeHttpApi.scala create mode 100644 scalalib/src/publish/SonatypePublisher.scala create mode 100644 scalalib/src/publish/VersionControl.scala create mode 100644 scalalib/src/publish/package.scala create mode 100644 scalalib/src/publish/settings.scala create mode 100644 scalalib/src/scalafmt/ScalafmtModule.scala create mode 100644 scalalib/src/scalafmt/ScalafmtWorker.scala create mode 100644 scalalib/test/resources/hello-java/app/src/Main.java delete mode 100644 scalalib/test/resources/hello-java/app/src/hello/Main.java create mode 100644 scalalib/test/resources/hello-java/app/test/src/MyAppTests.java delete mode 100644 scalalib/test/resources/hello-java/app/test/src/hello/MyAppTests.java create mode 100644 scalalib/test/resources/hello-java/core/src/Core.java delete mode 100644 scalalib/test/resources/hello-java/core/src/hello/Core.java create mode 100644 scalalib/test/resources/hello-java/core/test/src/MyCoreTests.java delete mode 100644 scalalib/test/resources/hello-java/core/test/src/hello/MyCoreTests.java create mode 100644 scalalib/test/src/GenIdeaTests.scala create mode 100644 scalalib/test/src/HelloJavaTests.scala create mode 100644 scalalib/test/src/HelloWorldTests.scala create mode 100644 scalalib/test/src/ResolveDepsTests.scala create mode 100644 scalalib/test/src/VersionControlTests.scala create mode 100644 scalalib/test/src/dependency/metadata/MetadataLoaderFactoryTests.scala create mode 100644 scalalib/test/src/dependency/updates/UpdatesFinderTests.scala create mode 100644 scalalib/test/src/dependency/versions/VersionTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/GenIdeaTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/HelloJavaTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/HelloWorldTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/ResolveDepsTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/VersionControlTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/dependency/metadata/MetadataLoaderFactoryTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/dependency/updates/UpdatesFinderTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/dependency/versions/VersionTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/publish/IvyTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/publish/PomTests.scala delete mode 100644 scalalib/test/src/mill/scalalib/scalafmt/ScalafmtTests.scala create mode 100644 scalalib/test/src/publish/IvyTests.scala create mode 100644 scalalib/test/src/publish/PomTests.scala create mode 100644 scalalib/test/src/scalafmt/ScalafmtTests.scala create mode 100644 scalalib/worker/src/ZincWorkerImpl.scala delete mode 100644 scalalib/worker/src/mill/scalalib/worker/ZincWorkerImpl.scala create mode 100644 scalanativelib/api/src/ScalaNativeWorkerApi.scala delete mode 100644 scalanativelib/api/src/mill/scalanativelib/api/ScalaNativeWorkerApi.scala create mode 100644 scalanativelib/src/ScalaNativeModule.scala create mode 100644 scalanativelib/src/ScalaNativeWorkerApi.scala delete mode 100644 scalanativelib/src/mill/scalanativelib/ScalaNativeModule.scala delete mode 100644 
scalanativelib/src/mill/scalanativelib/ScalaNativeWorkerApi.scala create mode 100644 scalanativelib/test/resources/hello-native-world/src/ArgsParser.scala create mode 100644 scalanativelib/test/resources/hello-native-world/src/Main.scala delete mode 100644 scalanativelib/test/resources/hello-native-world/src/hello/ArgsParser.scala delete mode 100644 scalanativelib/test/resources/hello-native-world/src/hello/Main.scala create mode 100644 scalanativelib/test/src/HelloNativeWorldTests.scala delete mode 100644 scalanativelib/test/src/mill/scalanativelib/HelloNativeWorldTests.scala create mode 100644 scalanativelib/worker/0.3/src/ScalaNativeWorkerImpl.scala delete mode 100644 scalanativelib/worker/0.3/src/mill/scalanativelib/worker/ScalaNativeWorkerImpl.scala diff --git a/contrib/buildinfo/src/BuildInfo.scala b/contrib/buildinfo/src/BuildInfo.scala new file mode 100644 index 00000000..1c8d9d9e --- /dev/null +++ b/contrib/buildinfo/src/BuildInfo.scala @@ -0,0 +1,44 @@ +package mill.contrib + +import mill.T +import mill.define.Target +import mill.api.PathRef +import mill.scalalib.ScalaModule +import mill.api.Ctx + +trait BuildInfo extends ScalaModule { + + def buildInfoPackageName: Option[String] = None + + def buildInfoObjectName: String = "BuildInfo" + + def buildInfoMembers: T[Map[String, String]] = T { + Map.empty[String, String] + } + + private def generateBuildInfo(members: Map[String, Any])(implicit dest: Ctx.Dest): Seq[PathRef] = + if(!members.isEmpty){ + val outputFile = dest.dest / "BuildInfo.scala" + val internalMembers = + members + .map { + case (name, value) => s""" def ${name} = "${value}"""" + } + .mkString("\n") + os.write(outputFile, + s"""|${buildInfoPackageName.map(p => s"package ${p}").getOrElse("")} + |object ${buildInfoObjectName} { + |$internalMembers + |}""".stripMargin) + Seq(PathRef(outputFile)) + } else { + Seq.empty[PathRef] + } + + def buildInfo = T { + generateBuildInfo(buildInfoMembers()) + } + + override def generatedSources: Target[Seq[PathRef]] = super.generatedSources() ++ buildInfo() + +} diff --git a/contrib/buildinfo/src/mill/contrib/BuildInfo.scala b/contrib/buildinfo/src/mill/contrib/BuildInfo.scala deleted file mode 100644 index 1c8d9d9e..00000000 --- a/contrib/buildinfo/src/mill/contrib/BuildInfo.scala +++ /dev/null @@ -1,44 +0,0 @@ -package mill.contrib - -import mill.T -import mill.define.Target -import mill.api.PathRef -import mill.scalalib.ScalaModule -import mill.api.Ctx - -trait BuildInfo extends ScalaModule { - - def buildInfoPackageName: Option[String] = None - - def buildInfoObjectName: String = "BuildInfo" - - def buildInfoMembers: T[Map[String, String]] = T { - Map.empty[String, String] - } - - private def generateBuildInfo(members: Map[String, Any])(implicit dest: Ctx.Dest): Seq[PathRef] = - if(!members.isEmpty){ - val outputFile = dest.dest / "BuildInfo.scala" - val internalMembers = - members - .map { - case (name, value) => s""" def ${name} = "${value}"""" - } - .mkString("\n") - os.write(outputFile, - s"""|${buildInfoPackageName.map(p => s"package ${p}").getOrElse("")} - |object ${buildInfoObjectName} { - |$internalMembers - |}""".stripMargin) - Seq(PathRef(outputFile)) - } else { - Seq.empty[PathRef] - } - - def buildInfo = T { - generateBuildInfo(buildInfoMembers()) - } - - override def generatedSources: Target[Seq[PathRef]] = super.generatedSources() ++ buildInfo() - -} diff --git a/contrib/buildinfo/test/src/BuildInfoTests.scala b/contrib/buildinfo/test/src/BuildInfoTests.scala new file mode 100644 index 00000000..4293bba7 --- 
/dev/null +++ b/contrib/buildinfo/test/src/BuildInfoTests.scala @@ -0,0 +1,111 @@ +package mill.contrib + +import java.util.jar.JarFile +import mill._ +import mill.define.Target +import mill.api.Result._ +import mill.eval.{Evaluator, Result} +import mill.modules.Assembly +import mill.scalalib.publish.VersionControl +import mill.scalalib.publish._ +import mill.util.{TestEvaluator, TestUtil} +import scala.collection.JavaConverters._ +import utest._ +import utest.framework.TestPath + + +object BuildInfoTests extends TestSuite { + + val scalaVersionString = "2.12.4" + trait BuildInfoModule extends TestUtil.BaseModule with scalalib.ScalaModule with BuildInfo { + def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + def scalaVersion = scalaVersionString + } + + object EmptyBuildInfo extends BuildInfoModule + + object BuildInfo extends BuildInfoModule { + def buildInfoMembers=T{ + Map( + "scalaVersion" -> scalaVersion(), + ) + } + } + + object BuildInfoSettings extends BuildInfoModule { + def buildInfoPackageName = Some("foo") + def buildInfoObjectName = "bar" + def buildInfoMembers=T{ + Map( + "scalaVersion" -> scalaVersion() + ) + } + } + + val resourcePath = os.pwd / 'contrib / 'buildinfo / 'test / 'resources / "buildinfo" + + def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) + (t: TestEvaluator => T) + (implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(m.millSourcePath / os.up) + os.copy(resourcePath, m.millSourcePath) + t(eval) + } + + def tests: Tests = Tests { + + 'buildinfo - { + 'createSourcefile - workspaceTest(BuildInfo){ eval => + val expected = + s"""| + |object BuildInfo { + | def scalaVersion = "2.12.4" + |}""".stripMargin + val Right((result, evalCount)) = eval.apply(BuildInfo.buildInfo) + assert( + result.head.path == eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala" && + os.exists(result.head.path) && + os.read(result.head.path) == expected + ) + } + + 'notCreateEmptySourcefile - workspaceTest(EmptyBuildInfo){ eval => + val Right((result, evalCount)) = eval.apply(EmptyBuildInfo.buildInfo) + assert( + result.isEmpty && + !os.exists(eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala") + ) + } + + 'supportCustomSettings - workspaceTest(BuildInfoSettings){ eval => + val expected = + s"""|package foo + |object bar { + | def scalaVersion = "2.12.4" + |}""".stripMargin + val Right((result, evalCount)) = eval.apply(BuildInfoSettings.buildInfo) + assert( + result.head.path == eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala" && + os.exists(result.head.path) && + os.read(result.head.path) == expected + ) + } + + 'compile - workspaceTest(BuildInfo){ eval => + val Right((result, evalCount)) = eval.apply(BuildInfo.compile) + assert(true) + } + + 'run - workspaceTest(BuildInfo){ eval => + val runResult = eval.outPath / "hello-mill" + val Right((result, evalCount)) = eval.apply(BuildInfo.run(runResult.toString)) + assert( + os.exists(runResult), + os.read(runResult) == scalaVersionString) + } + } + } +} diff --git a/contrib/buildinfo/test/src/mill/contrib/BuildInfoTests.scala b/contrib/buildinfo/test/src/mill/contrib/BuildInfoTests.scala deleted file mode 100644 index 4293bba7..00000000 --- a/contrib/buildinfo/test/src/mill/contrib/BuildInfoTests.scala +++ /dev/null @@ -1,111 +0,0 @@ -package mill.contrib - -import java.util.jar.JarFile -import mill._ -import mill.define.Target -import mill.api.Result._ -import 
mill.eval.{Evaluator, Result} -import mill.modules.Assembly -import mill.scalalib.publish.VersionControl -import mill.scalalib.publish._ -import mill.util.{TestEvaluator, TestUtil} -import scala.collection.JavaConverters._ -import utest._ -import utest.framework.TestPath - - -object BuildInfoTests extends TestSuite { - - val scalaVersionString = "2.12.4" - trait BuildInfoModule extends TestUtil.BaseModule with scalalib.ScalaModule with BuildInfo { - def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - def scalaVersion = scalaVersionString - } - - object EmptyBuildInfo extends BuildInfoModule - - object BuildInfo extends BuildInfoModule { - def buildInfoMembers=T{ - Map( - "scalaVersion" -> scalaVersion(), - ) - } - } - - object BuildInfoSettings extends BuildInfoModule { - def buildInfoPackageName = Some("foo") - def buildInfoObjectName = "bar" - def buildInfoMembers=T{ - Map( - "scalaVersion" -> scalaVersion() - ) - } - } - - val resourcePath = os.pwd / 'contrib / 'buildinfo / 'test / 'resources / "buildinfo" - - def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) - (t: TestEvaluator => T) - (implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(m.millSourcePath / os.up) - os.copy(resourcePath, m.millSourcePath) - t(eval) - } - - def tests: Tests = Tests { - - 'buildinfo - { - 'createSourcefile - workspaceTest(BuildInfo){ eval => - val expected = - s"""| - |object BuildInfo { - | def scalaVersion = "2.12.4" - |}""".stripMargin - val Right((result, evalCount)) = eval.apply(BuildInfo.buildInfo) - assert( - result.head.path == eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala" && - os.exists(result.head.path) && - os.read(result.head.path) == expected - ) - } - - 'notCreateEmptySourcefile - workspaceTest(EmptyBuildInfo){ eval => - val Right((result, evalCount)) = eval.apply(EmptyBuildInfo.buildInfo) - assert( - result.isEmpty && - !os.exists(eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala") - ) - } - - 'supportCustomSettings - workspaceTest(BuildInfoSettings){ eval => - val expected = - s"""|package foo - |object bar { - | def scalaVersion = "2.12.4" - |}""".stripMargin - val Right((result, evalCount)) = eval.apply(BuildInfoSettings.buildInfo) - assert( - result.head.path == eval.outPath / 'buildInfo / 'dest / "BuildInfo.scala" && - os.exists(result.head.path) && - os.read(result.head.path) == expected - ) - } - - 'compile - workspaceTest(BuildInfo){ eval => - val Right((result, evalCount)) = eval.apply(BuildInfo.compile) - assert(true) - } - - 'run - workspaceTest(BuildInfo){ eval => - val runResult = eval.outPath / "hello-mill" - val Right((result, evalCount)) = eval.apply(BuildInfo.run(runResult.toString)) - assert( - os.exists(runResult), - os.read(runResult) == scalaVersionString) - } - } - } -} diff --git a/contrib/scalapblib/src/ScalaPBModule.scala b/contrib/scalapblib/src/ScalaPBModule.scala new file mode 100644 index 00000000..db5c5c8b --- /dev/null +++ b/contrib/scalapblib/src/ScalaPBModule.scala @@ -0,0 +1,70 @@ +package mill +package contrib.scalapblib + +import coursier.{Cache, MavenRepository} +import coursier.core.Version +import mill.define.Sources +import mill.api.PathRef +import mill.scalalib.Lib.resolveDependencies +import mill.scalalib._ +import mill.util.Loose + +trait ScalaPBModule extends ScalaModule { + + override def generatedSources = T { super.generatedSources() :+ compileScalaPB() } + + override def 
ivyDeps = T { + super.ivyDeps() ++ + Agg(ivy"com.thesamet.scalapb::scalapb-runtime:${scalaPBVersion()}") ++ + (if (!scalaPBGrpc()) Agg() else Agg(ivy"com.thesamet.scalapb::scalapb-runtime-grpc:${scalaPBVersion()}")) + } + + def scalaPBVersion: T[String] + + def scalaPBFlatPackage: T[Boolean] = T { false } + + def scalaPBJavaConversions: T[Boolean] = T { false } + + def scalaPBGrpc: T[Boolean] = T { true } + + def scalaPBSingleLineToProtoString: T[Boolean] = T { false } + + def scalaPBSources: Sources = T.sources { + millSourcePath / 'protobuf + } + + def scalaPBOptions: T[String] = T { + ( + (if (scalaPBFlatPackage()) Seq("flat_package") else Seq.empty) ++ + (if (scalaPBJavaConversions()) Seq("java_conversions") else Seq.empty) ++ + (if (scalaPBGrpc()) Seq("grpc") else Seq.empty) ++ ( + if (!scalaPBSingleLineToProtoString()) Seq.empty else { + if (Version(scalaPBVersion()) >= Version("0.7.0")) + Seq("single_line_to_proto_string") + else + Seq("single_line_to_string") + } + ) + ).mkString(",") + } + + def scalaPBClasspath: T[Loose.Agg[PathRef]] = T { + resolveDependencies( + Seq( + Cache.ivy2Local, + MavenRepository("https://repo1.maven.org/maven2") + ), + Lib.depToDependency(_, "2.12.4"), + Seq(ivy"com.thesamet.scalapb::scalapbc:${scalaPBVersion()}") + ) + } + + def compileScalaPB: T[PathRef] = T.persistent { + ScalaPBWorkerApi.scalaPBWorker + .compile( + scalaPBClasspath().map(_.path), + scalaPBSources().map(_.path), + scalaPBOptions(), + T.ctx().dest) + } +} diff --git a/contrib/scalapblib/src/ScalaPBWorker.scala b/contrib/scalapblib/src/ScalaPBWorker.scala new file mode 100644 index 00000000..125cd3fd --- /dev/null +++ b/contrib/scalapblib/src/ScalaPBWorker.scala @@ -0,0 +1,69 @@ +package mill +package contrib.scalapblib + +import java.io.File +import java.lang.reflect.Method +import java.net.URLClassLoader + +import mill.api.PathRef + +class ScalaPBWorker { + + private var scalaPBInstanceCache = Option.empty[(Long, ScalaPBWorkerApi)] + + private def scalaPB(scalaPBClasspath: Agg[os.Path]) = { + val classloaderSig = scalaPBClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum + scalaPBInstanceCache match { + case Some((sig, instance)) if sig == classloaderSig => instance + case _ => + val cl = new URLClassLoader(scalaPBClasspath.map(_.toIO.toURI.toURL).toArray) + val scalaPBCompilerClass = cl.loadClass("scalapb.ScalaPBC") + val mainMethod = scalaPBCompilerClass.getMethod("main", classOf[Array[java.lang.String]]) + + val instance = new ScalaPBWorkerApi { + override def compileScalaPB(source: File, scalaPBOptions: String, generatedDirectory: File) { + val opts = if (scalaPBOptions.isEmpty) "" else scalaPBOptions + ":" + mainMethod.invoke( + null, + Array( + "--throw", + s"--scala_out=${opts}${generatedDirectory.getCanonicalPath}", + s"--proto_path=${source.getParentFile.getCanonicalPath}", + source.getCanonicalPath + ) + ) + } + } + scalaPBInstanceCache = Some((classloaderSig, instance)) + instance + } + } + + def compile(scalaPBClasspath: Agg[os.Path], scalaPBSources: Seq[os.Path], scalaPBOptions: String, dest: os.Path) + (implicit ctx: mill.api.Ctx): mill.api.Result[PathRef] = { + val compiler = scalaPB(scalaPBClasspath) + + def compileScalaPBDir(inputDir: os.Path) { + // ls throws if the path doesn't exist + if (inputDir.toIO.exists) { + os.walk(inputDir).filter(_.last.matches(".*.proto")) + .foreach { proto => + compiler.compileScalaPB(proto.toIO, scalaPBOptions, dest.toIO) + } + } + } + + scalaPBSources.foreach(compileScalaPBDir) + + mill.api.Result.Success(PathRef(dest)) + } 
+} + +trait ScalaPBWorkerApi { + def compileScalaPB(source: File, scalaPBOptions: String, generatedDirectory: File) +} + +object ScalaPBWorkerApi { + + def scalaPBWorker = new ScalaPBWorker() +} diff --git a/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBModule.scala b/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBModule.scala deleted file mode 100644 index db5c5c8b..00000000 --- a/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBModule.scala +++ /dev/null @@ -1,70 +0,0 @@ -package mill -package contrib.scalapblib - -import coursier.{Cache, MavenRepository} -import coursier.core.Version -import mill.define.Sources -import mill.api.PathRef -import mill.scalalib.Lib.resolveDependencies -import mill.scalalib._ -import mill.util.Loose - -trait ScalaPBModule extends ScalaModule { - - override def generatedSources = T { super.generatedSources() :+ compileScalaPB() } - - override def ivyDeps = T { - super.ivyDeps() ++ - Agg(ivy"com.thesamet.scalapb::scalapb-runtime:${scalaPBVersion()}") ++ - (if (!scalaPBGrpc()) Agg() else Agg(ivy"com.thesamet.scalapb::scalapb-runtime-grpc:${scalaPBVersion()}")) - } - - def scalaPBVersion: T[String] - - def scalaPBFlatPackage: T[Boolean] = T { false } - - def scalaPBJavaConversions: T[Boolean] = T { false } - - def scalaPBGrpc: T[Boolean] = T { true } - - def scalaPBSingleLineToProtoString: T[Boolean] = T { false } - - def scalaPBSources: Sources = T.sources { - millSourcePath / 'protobuf - } - - def scalaPBOptions: T[String] = T { - ( - (if (scalaPBFlatPackage()) Seq("flat_package") else Seq.empty) ++ - (if (scalaPBJavaConversions()) Seq("java_conversions") else Seq.empty) ++ - (if (scalaPBGrpc()) Seq("grpc") else Seq.empty) ++ ( - if (!scalaPBSingleLineToProtoString()) Seq.empty else { - if (Version(scalaPBVersion()) >= Version("0.7.0")) - Seq("single_line_to_proto_string") - else - Seq("single_line_to_string") - } - ) - ).mkString(",") - } - - def scalaPBClasspath: T[Loose.Agg[PathRef]] = T { - resolveDependencies( - Seq( - Cache.ivy2Local, - MavenRepository("https://repo1.maven.org/maven2") - ), - Lib.depToDependency(_, "2.12.4"), - Seq(ivy"com.thesamet.scalapb::scalapbc:${scalaPBVersion()}") - ) - } - - def compileScalaPB: T[PathRef] = T.persistent { - ScalaPBWorkerApi.scalaPBWorker - .compile( - scalaPBClasspath().map(_.path), - scalaPBSources().map(_.path), - scalaPBOptions(), - T.ctx().dest) - } -} diff --git a/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBWorker.scala b/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBWorker.scala deleted file mode 100644 index 125cd3fd..00000000 --- a/contrib/scalapblib/src/mill/contrib/scalapblib/ScalaPBWorker.scala +++ /dev/null @@ -1,69 +0,0 @@ -package mill -package contrib.scalapblib - -import java.io.File -import java.lang.reflect.Method -import java.net.URLClassLoader - -import mill.api.PathRef - -class ScalaPBWorker { - - private var scalaPBInstanceCache = Option.empty[(Long, ScalaPBWorkerApi)] - - private def scalaPB(scalaPBClasspath: Agg[os.Path]) = { - val classloaderSig = scalaPBClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum - scalaPBInstanceCache match { - case Some((sig, instance)) if sig == classloaderSig => instance - case _ => - val cl = new URLClassLoader(scalaPBClasspath.map(_.toIO.toURI.toURL).toArray) - val scalaPBCompilerClass = cl.loadClass("scalapb.ScalaPBC") - val mainMethod = scalaPBCompilerClass.getMethod("main", classOf[Array[java.lang.String]]) - - val instance = new ScalaPBWorkerApi { - override def compileScalaPB(source: File, scalaPBOptions: 
String, generatedDirectory: File) { - val opts = if (scalaPBOptions.isEmpty) "" else scalaPBOptions + ":" - mainMethod.invoke( - null, - Array( - "--throw", - s"--scala_out=${opts}${generatedDirectory.getCanonicalPath}", - s"--proto_path=${source.getParentFile.getCanonicalPath}", - source.getCanonicalPath - ) - ) - } - } - scalaPBInstanceCache = Some((classloaderSig, instance)) - instance - } - } - - def compile(scalaPBClasspath: Agg[os.Path], scalaPBSources: Seq[os.Path], scalaPBOptions: String, dest: os.Path) - (implicit ctx: mill.api.Ctx): mill.api.Result[PathRef] = { - val compiler = scalaPB(scalaPBClasspath) - - def compileScalaPBDir(inputDir: os.Path) { - // ls throws if the path doesn't exist - if (inputDir.toIO.exists) { - os.walk(inputDir).filter(_.last.matches(".*.proto")) - .foreach { proto => - compiler.compileScalaPB(proto.toIO, scalaPBOptions, dest.toIO) - } - } - } - - scalaPBSources.foreach(compileScalaPBDir) - - mill.api.Result.Success(PathRef(dest)) - } -} - -trait ScalaPBWorkerApi { - def compileScalaPB(source: File, scalaPBOptions: String, generatedDirectory: File) -} - -object ScalaPBWorkerApi { - - def scalaPBWorker = new ScalaPBWorker() -} diff --git a/contrib/scalapblib/test/src/TutorialTests.scala b/contrib/scalapblib/test/src/TutorialTests.scala new file mode 100644 index 00000000..fe0ce8d5 --- /dev/null +++ b/contrib/scalapblib/test/src/TutorialTests.scala @@ -0,0 +1,112 @@ +package mill.contrib.scalapblib + +import mill.api.Result +import mill.util.{TestEvaluator, TestUtil} +import utest.framework.TestPath +import utest.{TestSuite, Tests, assert, _} + +object TutorialTests extends TestSuite { + + trait TutorialBase extends TestUtil.BaseModule { + override def millSourcePath: os.Path = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + } + + trait TutorialModule extends ScalaPBModule { + def scalaVersion = "2.12.4" + def scalaPBVersion = "0.7.4" + def scalaPBFlatPackage = true + } + + object Tutorial extends TutorialBase { + + object core extends TutorialModule { + override def scalaPBVersion = "0.7.4" + } + } + + val resourcePath: os.Path = os.pwd / 'contrib / 'scalapblib / 'test / 'protobuf / 'tutorial + + def protobufOutPath(eval: TestEvaluator): os.Path = + eval.outPath / 'core / 'compileScalaPB / 'dest / 'com / 'example / 'tutorial + + def workspaceTest[T](m: TestUtil.BaseModule)(t: TestEvaluator => T) + (implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + println(m.millSourcePath) + os.remove.all(eval.outPath) + println(eval.outPath) + os.makeDir.all(m.millSourcePath / 'core / 'protobuf) + os.copy(resourcePath, m.millSourcePath / 'core / 'protobuf / 'tutorial) + t(eval) + } + + def compiledSourcefiles: Seq[os.RelPath] = Seq[os.RelPath]( + "AddressBook.scala", + "Person.scala", + "TutorialProto.scala" + ) + + def tests: Tests = Tests { + 'scalapbVersion - { + + 'fromBuild - workspaceTest(Tutorial) { eval => + val Right((result, evalCount)) = eval.apply(Tutorial.core.scalaPBVersion) + + assert( + result == "0.7.4", + evalCount > 0 + ) + } + } + + 'compileScalaPB - { + 'calledDirectly - workspaceTest(Tutorial) { eval => + val Right((result, evalCount)) = eval.apply(Tutorial.core.compileScalaPB) + + val outPath = protobufOutPath(eval) + + val outputFiles = os.walk(result.path).filter(os.isFile) + + val expectedSourcefiles = compiledSourcefiles.map(outPath / _) + + assert( + result.path == eval.outPath / 'core / 'compileScalaPB / 'dest, + outputFiles.nonEmpty, + 
outputFiles.forall(expectedSourcefiles.contains), + outputFiles.size == 3, + evalCount > 0 + ) + + // don't recompile if nothing changed + val Right((_, unchangedEvalCount)) = eval.apply(Tutorial.core.compileScalaPB) + + assert(unchangedEvalCount == 0) + } + + // This throws a NullPointerException in coursier somewhere + // + // 'triggeredByScalaCompile - workspaceTest(Tutorial) { eval => + // val Right((_, evalCount)) = eval.apply(Tutorial.core.compile) + + // val outPath = protobufOutPath(eval) + + // val outputFiles = os.walk(outPath).filter(_.isFile) + + // val expectedSourcefiles = compiledSourcefiles.map(outPath / _) + + // assert( + // outputFiles.nonEmpty, + // outputFiles.forall(expectedSourcefiles.contains), + // outputFiles.size == 3, + // evalCount > 0 + // ) + + // // don't recompile if nothing changed + // val Right((_, unchangedEvalCount)) = eval.apply(Tutorial.core.compile) + + // assert(unchangedEvalCount == 0) + // } + } + } +} diff --git a/contrib/scalapblib/test/src/mill/contrib/scalapblib/TutorialTests.scala b/contrib/scalapblib/test/src/mill/contrib/scalapblib/TutorialTests.scala deleted file mode 100644 index fe0ce8d5..00000000 --- a/contrib/scalapblib/test/src/mill/contrib/scalapblib/TutorialTests.scala +++ /dev/null @@ -1,112 +0,0 @@ -package mill.contrib.scalapblib - -import mill.api.Result -import mill.util.{TestEvaluator, TestUtil} -import utest.framework.TestPath -import utest.{TestSuite, Tests, assert, _} - -object TutorialTests extends TestSuite { - - trait TutorialBase extends TestUtil.BaseModule { - override def millSourcePath: os.Path = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - } - - trait TutorialModule extends ScalaPBModule { - def scalaVersion = "2.12.4" - def scalaPBVersion = "0.7.4" - def scalaPBFlatPackage = true - } - - object Tutorial extends TutorialBase { - - object core extends TutorialModule { - override def scalaPBVersion = "0.7.4" - } - } - - val resourcePath: os.Path = os.pwd / 'contrib / 'scalapblib / 'test / 'protobuf / 'tutorial - - def protobufOutPath(eval: TestEvaluator): os.Path = - eval.outPath / 'core / 'compileScalaPB / 'dest / 'com / 'example / 'tutorial - - def workspaceTest[T](m: TestUtil.BaseModule)(t: TestEvaluator => T) - (implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - println(m.millSourcePath) - os.remove.all(eval.outPath) - println(eval.outPath) - os.makeDir.all(m.millSourcePath / 'core / 'protobuf) - os.copy(resourcePath, m.millSourcePath / 'core / 'protobuf / 'tutorial) - t(eval) - } - - def compiledSourcefiles: Seq[os.RelPath] = Seq[os.RelPath]( - "AddressBook.scala", - "Person.scala", - "TutorialProto.scala" - ) - - def tests: Tests = Tests { - 'scalapbVersion - { - - 'fromBuild - workspaceTest(Tutorial) { eval => - val Right((result, evalCount)) = eval.apply(Tutorial.core.scalaPBVersion) - - assert( - result == "0.7.4", - evalCount > 0 - ) - } - } - - 'compileScalaPB - { - 'calledDirectly - workspaceTest(Tutorial) { eval => - val Right((result, evalCount)) = eval.apply(Tutorial.core.compileScalaPB) - - val outPath = protobufOutPath(eval) - - val outputFiles = os.walk(result.path).filter(os.isFile) - - val expectedSourcefiles = compiledSourcefiles.map(outPath / _) - - assert( - result.path == eval.outPath / 'core / 'compileScalaPB / 'dest, - outputFiles.nonEmpty, - outputFiles.forall(expectedSourcefiles.contains), - outputFiles.size == 3, - evalCount > 0 - ) - - // don't recompile if nothing changed - val Right((_, unchangedEvalCount)) = 
eval.apply(Tutorial.core.compileScalaPB) - - assert(unchangedEvalCount == 0) - } - - // This throws a NullPointerException in coursier somewhere - // - // 'triggeredByScalaCompile - workspaceTest(Tutorial) { eval => - // val Right((_, evalCount)) = eval.apply(Tutorial.core.compile) - - // val outPath = protobufOutPath(eval) - - // val outputFiles = os.walk(outPath).filter(_.isFile) - - // val expectedSourcefiles = compiledSourcefiles.map(outPath / _) - - // assert( - // outputFiles.nonEmpty, - // outputFiles.forall(expectedSourcefiles.contains), - // outputFiles.size == 3, - // evalCount > 0 - // ) - - // // don't recompile if nothing changed - // val Right((_, unchangedEvalCount)) = eval.apply(Tutorial.core.compile) - - // assert(unchangedEvalCount == 0) - // } - } - } -} diff --git a/contrib/testng/src/ResultEvent.java b/contrib/testng/src/ResultEvent.java new file mode 100644 index 00000000..6e2a50d6 --- /dev/null +++ b/contrib/testng/src/ResultEvent.java @@ -0,0 +1,45 @@ + +package mill.testng; + +import sbt.testing.*; +import org.testng.ITestResult; + +public class ResultEvent { + static Event failure(ITestResult result){ return event(Status.Failure, result); } + static Event skipped(ITestResult result){ return event(Status.Skipped, result); } + static Event success(ITestResult result){ return event(Status.Success, result); } + + static Event event(Status result, ITestResult testNGResult) { + return new Event() { + public String fullyQualifiedName() { + return testNGResult.getTestClass().getName(); + } + + public Fingerprint fingerprint() { + return TestNGFingerprint.instance; + } + + public Selector selector() { + return new SuiteSelector(); + } + + public Status status() { + return result; + } + + public OptionalThrowable throwable() { + if (result != Status.Success){ + return new OptionalThrowable(testNGResult.getThrowable()); + }else { + return new OptionalThrowable(); + } + } + + @Override + public long duration() { + return testNGResult.getEndMillis() - testNGResult.getStartMillis(); + } + }; + } + static String classNameOf(ITestResult result){ return result.getTestClass().getName(); } +} \ No newline at end of file diff --git a/contrib/testng/src/TestNGFramework.java b/contrib/testng/src/TestNGFramework.java new file mode 100644 index 00000000..6e993fcc --- /dev/null +++ b/contrib/testng/src/TestNGFramework.java @@ -0,0 +1,25 @@ +package mill.testng; + + + +import sbt.testing.*; + + +public class TestNGFramework implements Framework { + public String name(){ return "TestNG"; } + + public Fingerprint[] fingerprints() { + return new Fingerprint[]{TestNGFingerprint.instance}; + } + + @Override + public Runner runner(String[] args, String[] remoteArgs, ClassLoader classLoader) { + return new TestNGRunner(args, remoteArgs, classLoader); + } +} + +class TestNGFingerprint implements AnnotatedFingerprint{ + final public static TestNGFingerprint instance = new TestNGFingerprint(); + public String annotationName(){return "org.testng.annotations.Test";} + public boolean isModule(){return false;} +} diff --git a/contrib/testng/src/TestNGInstance.java b/contrib/testng/src/TestNGInstance.java new file mode 100644 index 00000000..4cf274d3 --- /dev/null +++ b/contrib/testng/src/TestNGInstance.java @@ -0,0 +1,67 @@ +package mill.testng; + + +import org.testng.*; +import sbt.testing.EventHandler; +import sbt.testing.Logger; + +import com.beust.jcommander.JCommander; + +import java.net.URLClassLoader; +import java.util.Arrays; + +class TestNGListener implements ITestListener{ + EventHandler 
basket; + String lastName = ""; + public TestNGListener(EventHandler basket){ + this.basket = basket; + } + public void onTestStart(ITestResult iTestResult) { + String newName = iTestResult.getTestClass().getName() + " " + iTestResult.getName() + " "; + if(!newName.equals(lastName)){ + if (!lastName.equals("")){ + System.out.println(); + } + lastName = newName; + System.out.print(lastName); + } + } + + public void onTestSuccess(ITestResult iTestResult) { + System.out.print('+'); + basket.handle(ResultEvent.success(iTestResult)); + } + + public void onTestFailure(ITestResult iTestResult) { + System.out.print('X'); + basket.handle(ResultEvent.failure(iTestResult)); + } + + public void onTestSkipped(ITestResult iTestResult) { + System.out.print('-'); + basket.handle(ResultEvent.skipped(iTestResult)); + } + + public void onTestFailedButWithinSuccessPercentage(ITestResult iTestResult) { + basket.handle(ResultEvent.failure(iTestResult)); + } + + public void onStart(ITestContext iTestContext) {} + + public void onFinish(ITestContext iTestContext) {} +} + +public class TestNGInstance extends TestNG{ + public TestNGInstance(Logger[] loggers, + ClassLoader testClassLoader, + CommandLineArgs args, + EventHandler eventHandler) { + addClassLoader(testClassLoader); + + this.addListener(new TestNGListener(eventHandler)); + + configure(args); + } +} + + diff --git a/contrib/testng/src/TestNGRunner.java b/contrib/testng/src/TestNGRunner.java new file mode 100644 index 00000000..0ad05f76 --- /dev/null +++ b/contrib/testng/src/TestNGRunner.java @@ -0,0 +1,76 @@ +package mill.testng; + +import com.beust.jcommander.JCommander; +import org.testng.CommandLineArgs; +import sbt.testing.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +class TestNGTask implements Task { + + TaskDef taskDef; + TestNGRunner runner; + CommandLineArgs cliArgs; + public TestNGTask(TaskDef taskDef, + TestNGRunner runner, + CommandLineArgs cliArgs){ + this.taskDef = taskDef; + this.runner = runner; + this.cliArgs = cliArgs; + } + + @Override + public String[] tags() { + return new String[0]; + } + + @Override + public Task[] execute(EventHandler eventHandler, Logger[] loggers) { + new TestNGInstance( + loggers, + runner.testClassLoader, + cliArgs, + eventHandler + ).run(); + return new Task[0]; + } + + @Override + public TaskDef taskDef() { + return taskDef; + } +} + +public class TestNGRunner implements Runner { + ClassLoader testClassLoader; + String[] args; + String[] remoteArgs; + public TestNGRunner(String[] args, String[] remoteArgs, ClassLoader testClassLoader) { + this.testClassLoader = testClassLoader; + this.args = args; + this.remoteArgs = remoteArgs; + } + + public Task[] tasks(TaskDef[] taskDefs) { + CommandLineArgs cliArgs = new CommandLineArgs(); + new JCommander(cliArgs, args); // args is an output parameter of the constructor! 
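+        // If no test class was specified on the command line, fall back to running every
+        // class sbt discovered: join the task definitions' fully-qualified names into the
+        // comma-separated -testclass value that TestNG's CommandLineArgs expects.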
+ if(cliArgs.testClass == null){ + String[] names = new String[taskDefs.length]; + for(int i = 0; i < taskDefs.length; i += 1){ + names[i] = taskDefs[i].fullyQualifiedName(); + } + cliArgs.testClass = String.join(",", names); + } + if (taskDefs.length == 0) return new Task[]{}; + else return new Task[]{new TestNGTask(taskDefs[0], this, cliArgs)}; + } + + public String done() { return null; } + + public String[] remoteArgs() { return remoteArgs; } + + public String[] args() { return args; } +} diff --git a/contrib/testng/src/mill/testng/ResultEvent.java b/contrib/testng/src/mill/testng/ResultEvent.java deleted file mode 100644 index 6e2a50d6..00000000 --- a/contrib/testng/src/mill/testng/ResultEvent.java +++ /dev/null @@ -1,45 +0,0 @@ - -package mill.testng; - -import sbt.testing.*; -import org.testng.ITestResult; - -public class ResultEvent { - static Event failure(ITestResult result){ return event(Status.Failure, result); } - static Event skipped(ITestResult result){ return event(Status.Skipped, result); } - static Event success(ITestResult result){ return event(Status.Success, result); } - - static Event event(Status result, ITestResult testNGResult) { - return new Event() { - public String fullyQualifiedName() { - return testNGResult.getTestClass().getName(); - } - - public Fingerprint fingerprint() { - return TestNGFingerprint.instance; - } - - public Selector selector() { - return new SuiteSelector(); - } - - public Status status() { - return result; - } - - public OptionalThrowable throwable() { - if (result != Status.Success){ - return new OptionalThrowable(testNGResult.getThrowable()); - }else { - return new OptionalThrowable(); - } - } - - @Override - public long duration() { - return testNGResult.getEndMillis() - testNGResult.getStartMillis(); - } - }; - } - static String classNameOf(ITestResult result){ return result.getTestClass().getName(); } -} \ No newline at end of file diff --git a/contrib/testng/src/mill/testng/TestNGFramework.java b/contrib/testng/src/mill/testng/TestNGFramework.java deleted file mode 100644 index 6e993fcc..00000000 --- a/contrib/testng/src/mill/testng/TestNGFramework.java +++ /dev/null @@ -1,25 +0,0 @@ -package mill.testng; - - - -import sbt.testing.*; - - -public class TestNGFramework implements Framework { - public String name(){ return "TestNG"; } - - public Fingerprint[] fingerprints() { - return new Fingerprint[]{TestNGFingerprint.instance}; - } - - @Override - public Runner runner(String[] args, String[] remoteArgs, ClassLoader classLoader) { - return new TestNGRunner(args, remoteArgs, classLoader); - } -} - -class TestNGFingerprint implements AnnotatedFingerprint{ - final public static TestNGFingerprint instance = new TestNGFingerprint(); - public String annotationName(){return "org.testng.annotations.Test";} - public boolean isModule(){return false;} -} diff --git a/contrib/testng/src/mill/testng/TestNGInstance.java b/contrib/testng/src/mill/testng/TestNGInstance.java deleted file mode 100644 index 4cf274d3..00000000 --- a/contrib/testng/src/mill/testng/TestNGInstance.java +++ /dev/null @@ -1,67 +0,0 @@ -package mill.testng; - - -import org.testng.*; -import sbt.testing.EventHandler; -import sbt.testing.Logger; - -import com.beust.jcommander.JCommander; - -import java.net.URLClassLoader; -import java.util.Arrays; - -class TestNGListener implements ITestListener{ - EventHandler basket; - String lastName = ""; - public TestNGListener(EventHandler basket){ - this.basket = basket; - } - public void onTestStart(ITestResult iTestResult) { - String 
newName = iTestResult.getTestClass().getName() + " " + iTestResult.getName() + " "; - if(!newName.equals(lastName)){ - if (!lastName.equals("")){ - System.out.println(); - } - lastName = newName; - System.out.print(lastName); - } - } - - public void onTestSuccess(ITestResult iTestResult) { - System.out.print('+'); - basket.handle(ResultEvent.success(iTestResult)); - } - - public void onTestFailure(ITestResult iTestResult) { - System.out.print('X'); - basket.handle(ResultEvent.failure(iTestResult)); - } - - public void onTestSkipped(ITestResult iTestResult) { - System.out.print('-'); - basket.handle(ResultEvent.skipped(iTestResult)); - } - - public void onTestFailedButWithinSuccessPercentage(ITestResult iTestResult) { - basket.handle(ResultEvent.failure(iTestResult)); - } - - public void onStart(ITestContext iTestContext) {} - - public void onFinish(ITestContext iTestContext) {} -} - -public class TestNGInstance extends TestNG{ - public TestNGInstance(Logger[] loggers, - ClassLoader testClassLoader, - CommandLineArgs args, - EventHandler eventHandler) { - addClassLoader(testClassLoader); - - this.addListener(new TestNGListener(eventHandler)); - - configure(args); - } -} - - diff --git a/contrib/testng/src/mill/testng/TestNGRunner.java b/contrib/testng/src/mill/testng/TestNGRunner.java deleted file mode 100644 index 0ad05f76..00000000 --- a/contrib/testng/src/mill/testng/TestNGRunner.java +++ /dev/null @@ -1,76 +0,0 @@ -package mill.testng; - -import com.beust.jcommander.JCommander; -import org.testng.CommandLineArgs; -import sbt.testing.*; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -class TestNGTask implements Task { - - TaskDef taskDef; - TestNGRunner runner; - CommandLineArgs cliArgs; - public TestNGTask(TaskDef taskDef, - TestNGRunner runner, - CommandLineArgs cliArgs){ - this.taskDef = taskDef; - this.runner = runner; - this.cliArgs = cliArgs; - } - - @Override - public String[] tags() { - return new String[0]; - } - - @Override - public Task[] execute(EventHandler eventHandler, Logger[] loggers) { - new TestNGInstance( - loggers, - runner.testClassLoader, - cliArgs, - eventHandler - ).run(); - return new Task[0]; - } - - @Override - public TaskDef taskDef() { - return taskDef; - } -} - -public class TestNGRunner implements Runner { - ClassLoader testClassLoader; - String[] args; - String[] remoteArgs; - public TestNGRunner(String[] args, String[] remoteArgs, ClassLoader testClassLoader) { - this.testClassLoader = testClassLoader; - this.args = args; - this.remoteArgs = remoteArgs; - } - - public Task[] tasks(TaskDef[] taskDefs) { - CommandLineArgs cliArgs = new CommandLineArgs(); - new JCommander(cliArgs, args); // args is an output parameter of the constructor! 
- if(cliArgs.testClass == null){ - String[] names = new String[taskDefs.length]; - for(int i = 0; i < taskDefs.length; i += 1){ - names[i] = taskDefs[i].fullyQualifiedName(); - } - cliArgs.testClass = String.join(",", names); - } - if (taskDefs.length == 0) return new Task[]{}; - else return new Task[]{new TestNGTask(taskDefs[0], this, cliArgs)}; - } - - public String done() { return null; } - - public String[] remoteArgs() { return remoteArgs; } - - public String[] args() { return args; } -} diff --git a/contrib/tut/src/TutModule.scala b/contrib/tut/src/TutModule.scala new file mode 100644 index 00000000..e51a8d7b --- /dev/null +++ b/contrib/tut/src/TutModule.scala @@ -0,0 +1,131 @@ +package mill +package contrib.tut + +import coursier.MavenRepository +import mill.scalalib._ +import scala.util.matching.Regex + +/** + * Tut is a documentation tool which compiles and evaluates Scala code in documentation files and provides various options for configuring how the results will be displayed in the compiled documentation. + * + * Extending this trait declares a Scala module which compiles markdown, HTML and `.txt` files in the `tut` folder of the module with Tut. + * + * By default the resulting documents are simply placed in the Mill build output folder but they can be placed elsewhere by overriding the [[mill.contrib.tut.TutModule#tutTargetDirectory]] task. + * + * For example: + * + * {{{ + * // build.sc + * import mill._, scalalib._, contrib.tut.__ + * + * object example extends TutModule { + * def scalaVersion = "2.12.6" + * def tutVersion = "0.6.7" + * } + * }}} + * + * This defines a project with the following layout: + * + * {{{ + * build.sc + * example/ + * src/ + * tut/ + * resources/ + * }}} + * + * In order to compile documentation we can execute the `tut` task in the module: + * + * {{{ + * sh> mill example.tut + * }}} + */ +trait TutModule extends ScalaModule { + /** + * This task determines where documentation files must be placed in order to be compiled with Tut. By default this is the `tut` folder at the root of the module. + */ + def tutSourceDirectory = T.sources { millSourcePath / 'tut } + + /** + * A task which determines where the compiled documentation files will be placed. By default this is simply the Mill build's output folder for this task, + * but this can be reconfigured so that documentation goes to the root of the module (e.g. `millSourcePath`) or to a dedicated folder (e.g. `millSourcePath / 'docs`) + */ + def tutTargetDirectory: T[os.Path] = T { T.ctx().dest } + + /** + * A task which determines what classpath is used when compiling documentation. By default this is configured to use the same inputs as the [[mill.contrib.tut.TutModule#runClasspath]], + * except for using [[mill.contrib.tut.TutModule#tutIvyDeps]] rather than the module's [[mill.contrib.tut.TutModule#runIvyDeps]]. + */ + def tutClasspath: T[Agg[PathRef]] = T { + // Same as runClasspath but with tut added to ivyDeps from the start + // This prevents duplicate, differently versioned copies of scala-library ending up on the classpath which can happen when resolving separately + transitiveLocalClasspath() ++ + resources() ++ + localClasspath() ++ + unmanagedClasspath() ++ + tutIvyDeps() + } + + /** + * A task which determines the scalac plugins which will be used when compiling code examples with Tut. The default is to use the [[mill.contrib.tut.TutModule#scalacPluginIvyDeps]] for the module. 
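+   * Override this (or the module's [[mill.contrib.tut.TutModule#scalacPluginIvyDeps]]) if the
+   * documented code examples rely on additional compiler plugins such as kind-projector.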
+ */ + def tutScalacPluginIvyDeps: T[Agg[Dep]] = scalacPluginIvyDeps() + + /** + * A [[scala.util.matching.Regex]] task which will be used to determine which files should be compiled with tut. The default pattern is as follows: `.*\.(md|markdown|txt|htm|html)`. + */ + def tutNameFilter: T[Regex] = T { """.*\.(md|markdown|txt|htm|html)""".r } + + /** + * The scalac options which will be used when compiling code examples with Tut. The default is to use the [[mill.contrib.tut.TutModule#scalacOptions]] for the module, + * but filtering out options which are problematic in the REPL, e.g. `-Xfatal-warnings`, `-Ywarn-unused-imports`. + */ + def tutScalacOptions: T[Seq[String]] = + scalacOptions().filterNot(Set( + "-Ywarn-unused:imports", + "-Ywarn-unused-import", + "-Ywarn-dead-code", + "-Xfatal-warnings" + )) + + /** + * The version of Tut to use. + */ + def tutVersion: T[String] + + /** + * A task which determines how to fetch the Tut jar file and all of the dependencies required to compile documentation for the module and returns the resulting files. + */ + def tutIvyDeps: T[Agg[PathRef]] = T { + Lib.resolveDependencies( + repositories :+ MavenRepository(s"https://dl.bintray.com/tpolecat/maven"), + Lib.depToDependency(_, scalaVersion()), + compileIvyDeps() ++ transitiveIvyDeps() ++ Seq( + ivy"org.tpolecat::tut-core:${tutVersion()}" + ) + ) + } + + /** + * A task which performs the dependency resolution for the scalac plugins to be used with Tut. + */ + def tutPluginJars: T[Agg[PathRef]] = resolveDeps(tutScalacPluginIvyDeps)() + + /** + * Run Tut using the configuration specified in this module. The working directory used is the [[mill.contrib.tut.TutModule#millSourcePath]]. + */ + def tut: T[os.CommandResult] = T { + val in = tutSourceDirectory().head.path.toIO.getAbsolutePath + val out = tutTargetDirectory().toIO.getAbsolutePath + val re = tutNameFilter() + val opts = tutScalacOptions() + val pOpts = tutPluginJars().map(pathRef => "-Xplugin:" + pathRef.path.toIO.getAbsolutePath) + val tutArgs = List(in, out, re.pattern.toString) ++ opts ++ pOpts + os.proc( + 'java, + "-cp", tutClasspath().map(_.path.toIO.getAbsolutePath).mkString(java.io.File.pathSeparator), + "tut.TutMain", + tutArgs + ).call(millSourcePath) + } +} diff --git a/contrib/tut/src/mill/contrib/tut/TutModule.scala b/contrib/tut/src/mill/contrib/tut/TutModule.scala deleted file mode 100644 index e51a8d7b..00000000 --- a/contrib/tut/src/mill/contrib/tut/TutModule.scala +++ /dev/null @@ -1,131 +0,0 @@ -package mill -package contrib.tut - -import coursier.MavenRepository -import mill.scalalib._ -import scala.util.matching.Regex - -/** - * Tut is a documentation tool which compiles and evaluates Scala code in documentation files and provides various options for configuring how the results will be displayed in the compiled documentation. - * - * Extending this trait declares a Scala module which compiles markdown, HTML and `.txt` files in the `tut` folder of the module with Tut. - * - * By default the resulting documents are simply placed in the Mill build output folder but they can be placed elsewhere by overriding the [[mill.contrib.tut.TutModule#tutTargetDirectory]] task. 
- * - * For example: - * - * {{{ - * // build.sc - * import mill._, scalalib._, contrib.tut.__ - * - * object example extends TutModule { - * def scalaVersion = "2.12.6" - * def tutVersion = "0.6.7" - * } - * }}} - * - * This defines a project with the following layout: - * - * {{{ - * build.sc - * example/ - * src/ - * tut/ - * resources/ - * }}} - * - * In order to compile documentation we can execute the `tut` task in the module: - * - * {{{ - * sh> mill example.tut - * }}} - */ -trait TutModule extends ScalaModule { - /** - * This task determines where documentation files must be placed in order to be compiled with Tut. By default this is the `tut` folder at the root of the module. - */ - def tutSourceDirectory = T.sources { millSourcePath / 'tut } - - /** - * A task which determines where the compiled documentation files will be placed. By default this is simply the Mill build's output folder for this task, - * but this can be reconfigured so that documentation goes to the root of the module (e.g. `millSourcePath`) or to a dedicated folder (e.g. `millSourcePath / 'docs`) - */ - def tutTargetDirectory: T[os.Path] = T { T.ctx().dest } - - /** - * A task which determines what classpath is used when compiling documentation. By default this is configured to use the same inputs as the [[mill.contrib.tut.TutModule#runClasspath]], - * except for using [[mill.contrib.tut.TutModule#tutIvyDeps]] rather than the module's [[mill.contrib.tut.TutModule#runIvyDeps]]. - */ - def tutClasspath: T[Agg[PathRef]] = T { - // Same as runClasspath but with tut added to ivyDeps from the start - // This prevents duplicate, differently versioned copies of scala-library ending up on the classpath which can happen when resolving separately - transitiveLocalClasspath() ++ - resources() ++ - localClasspath() ++ - unmanagedClasspath() ++ - tutIvyDeps() - } - - /** - * A task which determines the scalac plugins which will be used when compiling code examples with Tut. The default is to use the [[mill.contrib.tut.TutModule#scalacPluginIvyDeps]] for the module. - */ - def tutScalacPluginIvyDeps: T[Agg[Dep]] = scalacPluginIvyDeps() - - /** - * A [[scala.util.matching.Regex]] task which will be used to determine which files should be compiled with tut. The default pattern is as follows: `.*\.(md|markdown|txt|htm|html)`. - */ - def tutNameFilter: T[Regex] = T { """.*\.(md|markdown|txt|htm|html)""".r } - - /** - * The scalac options which will be used when compiling code examples with Tut. The default is to use the [[mill.contrib.tut.TutModule#scalacOptions]] for the module, - * but filtering out options which are problematic in the REPL, e.g. `-Xfatal-warnings`, `-Ywarn-unused-imports`. - */ - def tutScalacOptions: T[Seq[String]] = - scalacOptions().filterNot(Set( - "-Ywarn-unused:imports", - "-Ywarn-unused-import", - "-Ywarn-dead-code", - "-Xfatal-warnings" - )) - - /** - * The version of Tut to use. - */ - def tutVersion: T[String] - - /** - * A task which determines how to fetch the Tut jar file and all of the dependencies required to compile documentation for the module and returns the resulting files. - */ - def tutIvyDeps: T[Agg[PathRef]] = T { - Lib.resolveDependencies( - repositories :+ MavenRepository(s"https://dl.bintray.com/tpolecat/maven"), - Lib.depToDependency(_, scalaVersion()), - compileIvyDeps() ++ transitiveIvyDeps() ++ Seq( - ivy"org.tpolecat::tut-core:${tutVersion()}" - ) - ) - } - - /** - * A task which performs the dependency resolution for the scalac plugins to be used with Tut. 
- */ - def tutPluginJars: T[Agg[PathRef]] = resolveDeps(tutScalacPluginIvyDeps)() - - /** - * Run Tut using the configuration specified in this module. The working directory used is the [[mill.contrib.tut.TutModule#millSourcePath]]. - */ - def tut: T[os.CommandResult] = T { - val in = tutSourceDirectory().head.path.toIO.getAbsolutePath - val out = tutTargetDirectory().toIO.getAbsolutePath - val re = tutNameFilter() - val opts = tutScalacOptions() - val pOpts = tutPluginJars().map(pathRef => "-Xplugin:" + pathRef.path.toIO.getAbsolutePath) - val tutArgs = List(in, out, re.pattern.toString) ++ opts ++ pOpts - os.proc( - 'java, - "-cp", tutClasspath().map(_.path.toIO.getAbsolutePath).mkString(java.io.File.pathSeparator), - "tut.TutMain", - tutArgs - ).call(millSourcePath) - } -} diff --git a/contrib/tut/test/src/TutTests.scala b/contrib/tut/test/src/TutTests.scala new file mode 100644 index 00000000..468654bb --- /dev/null +++ b/contrib/tut/test/src/TutTests.scala @@ -0,0 +1,123 @@ +package mill.contrib +package tut + +import mill._ +import mill.api.Result._ +import mill.scalalib._ +import mill.util.{TestEvaluator, TestUtil} +import utest._ +import utest.framework.TestPath + +object TutTests extends TestSuite { + + trait TutTestModule extends TestUtil.BaseModule with TutModule { + def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + def scalaVersion = "2.12.4" + def tutVersion = "0.6.7" + } + + object TutTest extends TutTestModule + + object TutCustomTest extends TutTestModule { + def tutTargetDirectory = millSourcePath + } + + object TutLibrariesTest extends TutTestModule { + def ivyDeps = Agg(ivy"org.typelevel::cats-core:1.4.0") + def tutSourceDirectory = T.sources { resourcePathWithLibraries } + def scalacPluginIvyDeps = Agg(ivy"org.spire-math::kind-projector:0.9.8") + } + + val resourcePath = os.pwd / 'contrib / 'tut / 'test / 'tut + val resourcePathWithLibraries = os.pwd / 'contrib / 'tut / 'test / "tut-with-libraries" + + def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) + (t: TestEvaluator => T) + (implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(m.millSourcePath) + os.copy(resourcePath, m.millSourcePath / 'tut) + t(eval) + } + + def tests: Tests = Tests { + 'tut - { + 'createOutputFile - workspaceTest(TutTest) { eval => + val expectedPath = + eval.outPath / 'tutTargetDirectory / 'dest / "TutExample.md" + + val expected = + """ + |```scala + |scala> 1 + 1 + |res0: Int = 2 + |``` + | + """.trim.stripMargin + + val Right((result, evalCount)) = eval.apply(TutTest.tut) + + assert( + os.exists(expectedPath) && + os.read(expectedPath) == expected + ) + } + + 'supportCustomSettings - workspaceTest(TutCustomTest) { eval => + val defaultPath = + eval.outPath / 'tutTargetDirectory / 'dest / "TutExample.md" + val expectedPath = + TutCustomTest.millSourcePath / "TutExample.md" + + val expected = + """ + |```scala + |scala> 1 + 1 + |res0: Int = 2 + |``` + | + """.trim.stripMargin + + val Right((result, evalCount)) = eval.apply(TutCustomTest.tut) + + assert( + !os.exists(defaultPath) && + os.exists(expectedPath) && + os.read(expectedPath) == expected + ) + } + + 'supportUsingLibraries - workspaceTest(TutLibrariesTest, resourcePath = resourcePathWithLibraries) { eval => + val expectedPath = + eval.outPath / 'tutTargetDirectory / 'dest / "TutWithLibraries.md" + + val expected = + """ + |```scala + |import cats._ + |import cats.arrow.FunctionK 
+ |import cats.implicits._ + |``` + | + |```scala + |scala> List(1, 2, 3).combineAll + |res0: Int = 6 + | + |scala> λ[FunctionK[List, Option]](_.headOption)(List(1, 2 ,3)) + |res1: Option[Int] = Some(1) + |``` + | + """.trim.stripMargin + + val Right(_) = eval.apply(TutLibrariesTest.tut) + + assert( + os.exists(expectedPath) && + os.read(expectedPath) == expected + ) + } + } + } +} diff --git a/contrib/tut/test/src/mill/contrib/tut/TutTests.scala b/contrib/tut/test/src/mill/contrib/tut/TutTests.scala deleted file mode 100644 index 468654bb..00000000 --- a/contrib/tut/test/src/mill/contrib/tut/TutTests.scala +++ /dev/null @@ -1,123 +0,0 @@ -package mill.contrib -package tut - -import mill._ -import mill.api.Result._ -import mill.scalalib._ -import mill.util.{TestEvaluator, TestUtil} -import utest._ -import utest.framework.TestPath - -object TutTests extends TestSuite { - - trait TutTestModule extends TestUtil.BaseModule with TutModule { - def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - def scalaVersion = "2.12.4" - def tutVersion = "0.6.7" - } - - object TutTest extends TutTestModule - - object TutCustomTest extends TutTestModule { - def tutTargetDirectory = millSourcePath - } - - object TutLibrariesTest extends TutTestModule { - def ivyDeps = Agg(ivy"org.typelevel::cats-core:1.4.0") - def tutSourceDirectory = T.sources { resourcePathWithLibraries } - def scalacPluginIvyDeps = Agg(ivy"org.spire-math::kind-projector:0.9.8") - } - - val resourcePath = os.pwd / 'contrib / 'tut / 'test / 'tut - val resourcePathWithLibraries = os.pwd / 'contrib / 'tut / 'test / "tut-with-libraries" - - def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) - (t: TestEvaluator => T) - (implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(m.millSourcePath) - os.copy(resourcePath, m.millSourcePath / 'tut) - t(eval) - } - - def tests: Tests = Tests { - 'tut - { - 'createOutputFile - workspaceTest(TutTest) { eval => - val expectedPath = - eval.outPath / 'tutTargetDirectory / 'dest / "TutExample.md" - - val expected = - """ - |```scala - |scala> 1 + 1 - |res0: Int = 2 - |``` - | - """.trim.stripMargin - - val Right((result, evalCount)) = eval.apply(TutTest.tut) - - assert( - os.exists(expectedPath) && - os.read(expectedPath) == expected - ) - } - - 'supportCustomSettings - workspaceTest(TutCustomTest) { eval => - val defaultPath = - eval.outPath / 'tutTargetDirectory / 'dest / "TutExample.md" - val expectedPath = - TutCustomTest.millSourcePath / "TutExample.md" - - val expected = - """ - |```scala - |scala> 1 + 1 - |res0: Int = 2 - |``` - | - """.trim.stripMargin - - val Right((result, evalCount)) = eval.apply(TutCustomTest.tut) - - assert( - !os.exists(defaultPath) && - os.exists(expectedPath) && - os.read(expectedPath) == expected - ) - } - - 'supportUsingLibraries - workspaceTest(TutLibrariesTest, resourcePath = resourcePathWithLibraries) { eval => - val expectedPath = - eval.outPath / 'tutTargetDirectory / 'dest / "TutWithLibraries.md" - - val expected = - """ - |```scala - |import cats._ - |import cats.arrow.FunctionK - |import cats.implicits._ - |``` - | - |```scala - |scala> List(1, 2, 3).combineAll - |res0: Int = 6 - | - |scala> λ[FunctionK[List, Option]](_.headOption)(List(1, 2 ,3)) - |res1: Option[Int] = Some(1) - |``` - | - """.trim.stripMargin - - val Right(_) = eval.apply(TutLibrariesTest.tut) - - assert( - os.exists(expectedPath) && - 
os.read(expectedPath) == expected - ) - } - } - } -} diff --git a/contrib/twirllib/src/TwirlModule.scala b/contrib/twirllib/src/TwirlModule.scala new file mode 100644 index 00000000..328afc47 --- /dev/null +++ b/contrib/twirllib/src/TwirlModule.scala @@ -0,0 +1,55 @@ +package mill +package twirllib + +import coursier.{Cache, MavenRepository} +import mill.define.Sources +import mill.api.PathRef +import mill.scalalib.Lib.resolveDependencies +import mill.scalalib._ +import mill.util.Loose + +import scala.io.Codec +import scala.util.Properties + +trait TwirlModule extends mill.Module { + + def twirlVersion: T[String] + + def twirlSources: Sources = T.sources { + millSourcePath / 'views + } + + def twirlClasspath: T[Loose.Agg[PathRef]] = T { + resolveDependencies( + Seq( + Cache.ivy2Local, + MavenRepository("https://repo1.maven.org/maven2") + ), + Lib.depToDependency(_, "2.12.4"), + Seq( + ivy"com.typesafe.play::twirl-compiler:${twirlVersion()}", + ivy"org.scala-lang.modules::scala-parser-combinators:1.1.0" + ) + ) + } + + def twirlAdditionalImports: Seq[String] = Nil + + private def twirlConstructorAnnotations: Seq[String] = Nil + + private def twirlCodec: Codec = Codec(Properties.sourceEncoding) + + private def twirlInclusiveDot: Boolean = false + + def compileTwirl: T[mill.scalalib.api.CompilationResult] = T.persistent { + TwirlWorkerApi.twirlWorker + .compile( + twirlClasspath().map(_.path), + twirlSources().map(_.path), + T.ctx().dest, + twirlAdditionalImports, + twirlConstructorAnnotations, + twirlCodec, + twirlInclusiveDot) + } +} diff --git a/contrib/twirllib/src/TwirlWorker.scala b/contrib/twirllib/src/TwirlWorker.scala new file mode 100644 index 00000000..09376a6f --- /dev/null +++ b/contrib/twirllib/src/TwirlWorker.scala @@ -0,0 +1,153 @@ +package mill +package twirllib + +import java.io.File +import java.lang.reflect.Method +import java.net.URLClassLoader + +import mill.api.PathRef +import mill.scalalib.api.CompilationResult + +import scala.io.Codec +class TwirlWorker { + + private var twirlInstanceCache = Option.empty[(Long, TwirlWorkerApi)] + + private def twirl(twirlClasspath: Agg[os.Path]) = { + val classloaderSig = twirlClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum + twirlInstanceCache match { + case Some((sig, instance)) if sig == classloaderSig => instance + case _ => + val cl = new URLClassLoader(twirlClasspath.map(_.toIO.toURI.toURL).toArray, null) + + // Switched to using the java api because of the hack-ish thing going on later. + // + // * we'll need to construct a collection of additional imports + // * it will need to consider the defaults + // * and add the user-provided additional imports + // * the default collection in scala api is a Seq[String] + // * but it is defined in a different classloader (namely in cl) + // * so we can not construct our own Seq and pass it to the method - it will be from our classloader, and not compatible + // * the java api has a Collection as the type for this param, for which it is much more doable to append things to it using reflection + // + // NOTE: I tried creating the cl classloader passing the current classloader as the parent: + // val cl = new URLClassLoader(twirlClasspath.map(_.toIO.toURI.toURL).toArray, getClass.getClassLoader) + // in that case it was possible to cast the default to a Seq[String], construct our own Seq[String], and pass it to the method invoke- it was compatible. + // And the tests passed. 
But when run in a different mill project, I was getting exceptions like this: + // scala.reflect.internal.MissingRequirementError: object scala in compiler mirror not found. + + val twirlCompilerClass = cl.loadClass("play.japi.twirl.compiler.TwirlCompiler") + + // this one is only to get the codec: Codec parameter default value + val twirlScalaCompilerClass = cl.loadClass("play.twirl.compiler.TwirlCompiler") + + val compileMethod = twirlCompilerClass.getMethod("compile", + classOf[java.io.File], + classOf[java.io.File], + classOf[java.io.File], + classOf[java.lang.String], + cl.loadClass("java.util.Collection"), + cl.loadClass("java.util.List"), + cl.loadClass("scala.io.Codec"), + classOf[Boolean]) + + val arrayListClass = cl.loadClass("java.util.ArrayList") + val hashSetClass = cl.loadClass("java.util.HashSet") + + val defaultAdditionalImportsMethod = twirlCompilerClass.getField("DEFAULT_IMPORTS") + val defaultCodecMethod = twirlScalaCompilerClass.getMethod("compile$default$7") + + val instance = new TwirlWorkerApi { + override def compileTwirl(source: File, + sourceDirectory: File, + generatedDirectory: File, + formatterType: String, + additionalImports: Seq[String], + constructorAnnotations: Seq[String], + codec: Codec, + inclusiveDot: Boolean) { + val defaultAdditionalImports = defaultAdditionalImportsMethod.get(null) // unmodifiable collection + // copying it into a modifiable hash set and adding all additional imports + val allAdditionalImports = + hashSetClass + .getConstructor(cl.loadClass("java.util.Collection")) + .newInstance(defaultAdditionalImports) + .asInstanceOf[Object] + val hashSetAddMethod = + allAdditionalImports + .getClass + .getMethod("add", classOf[Object]) + additionalImports.foreach(hashSetAddMethod.invoke(allAdditionalImports, _)) + + val o = compileMethod.invoke(null, source, + sourceDirectory, + generatedDirectory, + formatterType, + allAdditionalImports, + arrayListClass.newInstance().asInstanceOf[Object], // empty list seems to be the default + defaultCodecMethod.invoke(null), + Boolean.box(false) + ) + } + } + twirlInstanceCache = Some((classloaderSig, instance)) + instance + } + } + + def compile(twirlClasspath: Agg[os.Path], + sourceDirectories: Seq[os.Path], + dest: os.Path, + additionalImports: Seq[String], + constructorAnnotations: Seq[String], + codec: Codec, + inclusiveDot: Boolean) + (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { + val compiler = twirl(twirlClasspath) + + def compileTwirlDir(inputDir: os.Path) { + os.walk(inputDir).filter(_.last.matches(".*.scala.(html|xml|js|txt)")) + .foreach { template => + val extFormat = twirlExtensionFormat(template.last) + compiler.compileTwirl(template.toIO, + inputDir.toIO, + dest.toIO, + s"play.twirl.api.$extFormat", + additionalImports, + constructorAnnotations, + codec, + inclusiveDot + ) + } + } + + sourceDirectories.foreach(compileTwirlDir) + + val zincFile = ctx.dest / 'zinc + val classesDir = ctx.dest + + mill.api.Result.Success(CompilationResult(zincFile, PathRef(classesDir))) + } + + private def twirlExtensionFormat(name: String) = + if (name.endsWith("html")) "HtmlFormat" + else if (name.endsWith("xml")) "XmlFormat" + else if (name.endsWith("js")) "JavaScriptFormat" + else "TxtFormat" +} + +trait TwirlWorkerApi { + def compileTwirl(source: File, + sourceDirectory: File, + generatedDirectory: File, + formatterType: String, + additionalImports: Seq[String], + constructorAnnotations: Seq[String], + codec: Codec, + inclusiveDot: Boolean) +} + +object TwirlWorkerApi { + + def 
twirlWorker = new TwirlWorker() +} diff --git a/contrib/twirllib/src/mill/twirllib/TwirlModule.scala b/contrib/twirllib/src/mill/twirllib/TwirlModule.scala deleted file mode 100644 index 328afc47..00000000 --- a/contrib/twirllib/src/mill/twirllib/TwirlModule.scala +++ /dev/null @@ -1,55 +0,0 @@ -package mill -package twirllib - -import coursier.{Cache, MavenRepository} -import mill.define.Sources -import mill.api.PathRef -import mill.scalalib.Lib.resolveDependencies -import mill.scalalib._ -import mill.util.Loose - -import scala.io.Codec -import scala.util.Properties - -trait TwirlModule extends mill.Module { - - def twirlVersion: T[String] - - def twirlSources: Sources = T.sources { - millSourcePath / 'views - } - - def twirlClasspath: T[Loose.Agg[PathRef]] = T { - resolveDependencies( - Seq( - Cache.ivy2Local, - MavenRepository("https://repo1.maven.org/maven2") - ), - Lib.depToDependency(_, "2.12.4"), - Seq( - ivy"com.typesafe.play::twirl-compiler:${twirlVersion()}", - ivy"org.scala-lang.modules::scala-parser-combinators:1.1.0" - ) - ) - } - - def twirlAdditionalImports: Seq[String] = Nil - - private def twirlConstructorAnnotations: Seq[String] = Nil - - private def twirlCodec: Codec = Codec(Properties.sourceEncoding) - - private def twirlInclusiveDot: Boolean = false - - def compileTwirl: T[mill.scalalib.api.CompilationResult] = T.persistent { - TwirlWorkerApi.twirlWorker - .compile( - twirlClasspath().map(_.path), - twirlSources().map(_.path), - T.ctx().dest, - twirlAdditionalImports, - twirlConstructorAnnotations, - twirlCodec, - twirlInclusiveDot) - } -} diff --git a/contrib/twirllib/src/mill/twirllib/TwirlWorker.scala b/contrib/twirllib/src/mill/twirllib/TwirlWorker.scala deleted file mode 100644 index 09376a6f..00000000 --- a/contrib/twirllib/src/mill/twirllib/TwirlWorker.scala +++ /dev/null @@ -1,153 +0,0 @@ -package mill -package twirllib - -import java.io.File -import java.lang.reflect.Method -import java.net.URLClassLoader - -import mill.api.PathRef -import mill.scalalib.api.CompilationResult - -import scala.io.Codec -class TwirlWorker { - - private var twirlInstanceCache = Option.empty[(Long, TwirlWorkerApi)] - - private def twirl(twirlClasspath: Agg[os.Path]) = { - val classloaderSig = twirlClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum - twirlInstanceCache match { - case Some((sig, instance)) if sig == classloaderSig => instance - case _ => - val cl = new URLClassLoader(twirlClasspath.map(_.toIO.toURI.toURL).toArray, null) - - // Switched to using the java api because of the hack-ish thing going on later. - // - // * we'll need to construct a collection of additional imports - // * it will need to consider the defaults - // * and add the user-provided additional imports - // * the default collection in scala api is a Seq[String] - // * but it is defined in a different classloader (namely in cl) - // * so we can not construct our own Seq and pass it to the method - it will be from our classloader, and not compatible - // * the java api has a Collection as the type for this param, for which it is much more doable to append things to it using reflection - // - // NOTE: I tried creating the cl classloader passing the current classloader as the parent: - // val cl = new URLClassLoader(twirlClasspath.map(_.toIO.toURI.toURL).toArray, getClass.getClassLoader) - // in that case it was possible to cast the default to a Seq[String], construct our own Seq[String], and pass it to the method invoke- it was compatible. - // And the tests passed. 
But when run in a different mill project, I was getting exceptions like this: - // scala.reflect.internal.MissingRequirementError: object scala in compiler mirror not found. - - val twirlCompilerClass = cl.loadClass("play.japi.twirl.compiler.TwirlCompiler") - - // this one is only to get the codec: Codec parameter default value - val twirlScalaCompilerClass = cl.loadClass("play.twirl.compiler.TwirlCompiler") - - val compileMethod = twirlCompilerClass.getMethod("compile", - classOf[java.io.File], - classOf[java.io.File], - classOf[java.io.File], - classOf[java.lang.String], - cl.loadClass("java.util.Collection"), - cl.loadClass("java.util.List"), - cl.loadClass("scala.io.Codec"), - classOf[Boolean]) - - val arrayListClass = cl.loadClass("java.util.ArrayList") - val hashSetClass = cl.loadClass("java.util.HashSet") - - val defaultAdditionalImportsMethod = twirlCompilerClass.getField("DEFAULT_IMPORTS") - val defaultCodecMethod = twirlScalaCompilerClass.getMethod("compile$default$7") - - val instance = new TwirlWorkerApi { - override def compileTwirl(source: File, - sourceDirectory: File, - generatedDirectory: File, - formatterType: String, - additionalImports: Seq[String], - constructorAnnotations: Seq[String], - codec: Codec, - inclusiveDot: Boolean) { - val defaultAdditionalImports = defaultAdditionalImportsMethod.get(null) // unmodifiable collection - // copying it into a modifiable hash set and adding all additional imports - val allAdditionalImports = - hashSetClass - .getConstructor(cl.loadClass("java.util.Collection")) - .newInstance(defaultAdditionalImports) - .asInstanceOf[Object] - val hashSetAddMethod = - allAdditionalImports - .getClass - .getMethod("add", classOf[Object]) - additionalImports.foreach(hashSetAddMethod.invoke(allAdditionalImports, _)) - - val o = compileMethod.invoke(null, source, - sourceDirectory, - generatedDirectory, - formatterType, - allAdditionalImports, - arrayListClass.newInstance().asInstanceOf[Object], // empty list seems to be the default - defaultCodecMethod.invoke(null), - Boolean.box(false) - ) - } - } - twirlInstanceCache = Some((classloaderSig, instance)) - instance - } - } - - def compile(twirlClasspath: Agg[os.Path], - sourceDirectories: Seq[os.Path], - dest: os.Path, - additionalImports: Seq[String], - constructorAnnotations: Seq[String], - codec: Codec, - inclusiveDot: Boolean) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { - val compiler = twirl(twirlClasspath) - - def compileTwirlDir(inputDir: os.Path) { - os.walk(inputDir).filter(_.last.matches(".*.scala.(html|xml|js|txt)")) - .foreach { template => - val extFormat = twirlExtensionFormat(template.last) - compiler.compileTwirl(template.toIO, - inputDir.toIO, - dest.toIO, - s"play.twirl.api.$extFormat", - additionalImports, - constructorAnnotations, - codec, - inclusiveDot - ) - } - } - - sourceDirectories.foreach(compileTwirlDir) - - val zincFile = ctx.dest / 'zinc - val classesDir = ctx.dest - - mill.api.Result.Success(CompilationResult(zincFile, PathRef(classesDir))) - } - - private def twirlExtensionFormat(name: String) = - if (name.endsWith("html")) "HtmlFormat" - else if (name.endsWith("xml")) "XmlFormat" - else if (name.endsWith("js")) "JavaScriptFormat" - else "TxtFormat" -} - -trait TwirlWorkerApi { - def compileTwirl(source: File, - sourceDirectory: File, - generatedDirectory: File, - formatterType: String, - additionalImports: Seq[String], - constructorAnnotations: Seq[String], - codec: Codec, - inclusiveDot: Boolean) -} - -object TwirlWorkerApi { - - def 
twirlWorker = new TwirlWorker() -} diff --git a/contrib/twirllib/test/src/HelloWorldTests.scala b/contrib/twirllib/test/src/HelloWorldTests.scala new file mode 100644 index 00000000..71f25ff4 --- /dev/null +++ b/contrib/twirllib/test/src/HelloWorldTests.scala @@ -0,0 +1,100 @@ +package mill.twirllib + +import mill.util.{TestEvaluator, TestUtil} +import utest.framework.TestPath +import utest.{TestSuite, Tests, assert, _} + +import scala.io.Codec + +object HelloWorldTests extends TestSuite { + + trait HelloBase extends TestUtil.BaseModule { + override def millSourcePath: os.Path = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + } + + trait HelloWorldModule extends mill.twirllib.TwirlModule { + def twirlVersion = "1.0.0" + override def twirlAdditionalImports: Seq[String] = additionalImports + } + + object HelloWorld extends HelloBase { + + object core extends HelloWorldModule { + override def twirlVersion = "1.3.15" + } + } + + val resourcePath: os.Path = os.pwd / 'contrib / 'twirllib / 'test / 'resources / "hello-world" + + def workspaceTest[T]( + m: TestUtil.BaseModule, + resourcePath: os.Path = resourcePath + )(t: TestEvaluator => T)(implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(m.millSourcePath / os.up) + os.copy(resourcePath, m.millSourcePath) + t(eval) + } + + def compileClassfiles: Seq[os.RelPath] = Seq[os.RelPath]( + "hello.template.scala", + "wrapper.template.scala" + ) + + def expectedDefaultImports: Seq[String] = Seq( + "import _root_.play.twirl.api.TwirlFeatureImports._", + "import _root_.play.twirl.api.TwirlHelperImports._", + "import _root_.play.twirl.api.Html", + "import _root_.play.twirl.api.JavaScript", + "import _root_.play.twirl.api.Txt", + "import _root_.play.twirl.api.Xml" + ) + + def additionalImports: Seq[String] = Seq( + "mill.twirl.test.AdditionalImport1._", + "mill.twirl.test.AdditionalImport2._" + ) + + def tests: Tests = Tests { + 'twirlVersion - { + + 'fromBuild - workspaceTest(HelloWorld) { eval => + val Right((result, evalCount)) = + eval.apply(HelloWorld.core.twirlVersion) + + assert( + result == "1.3.15", + evalCount > 0 + ) + } + } + 'compileTwirl - workspaceTest(HelloWorld) { eval => + val Right((result, evalCount)) = eval.apply(HelloWorld.core.compileTwirl) + + val outputFiles = os.walk(result.classes.path).filter(_.last.endsWith(".scala")) + val expectedClassfiles = compileClassfiles.map( + eval.outPath / 'core / 'compileTwirl / 'dest / 'html / _ + ) + + assert( + result.classes.path == eval.outPath / 'core / 'compileTwirl / 'dest, + outputFiles.nonEmpty, + outputFiles.forall(expectedClassfiles.contains), + outputFiles.size == 2, + evalCount > 0, + outputFiles.forall { p => + val lines = os.read.lines(p).map(_.trim) + (expectedDefaultImports ++ additionalImports.map(s => s"import $s")).forall(lines.contains) + } + ) + + // don't recompile if nothing changed + val Right((_, unchangedEvalCount)) = + eval.apply(HelloWorld.core.compileTwirl) + + assert(unchangedEvalCount == 0) + } + } +} diff --git a/contrib/twirllib/test/src/mill/twirllib/HelloWorldTests.scala b/contrib/twirllib/test/src/mill/twirllib/HelloWorldTests.scala deleted file mode 100644 index 71f25ff4..00000000 --- a/contrib/twirllib/test/src/mill/twirllib/HelloWorldTests.scala +++ /dev/null @@ -1,100 +0,0 @@ -package mill.twirllib - -import mill.util.{TestEvaluator, TestUtil} -import utest.framework.TestPath -import utest.{TestSuite, Tests, assert, _} - -import scala.io.Codec - 
-object HelloWorldTests extends TestSuite { - - trait HelloBase extends TestUtil.BaseModule { - override def millSourcePath: os.Path = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - } - - trait HelloWorldModule extends mill.twirllib.TwirlModule { - def twirlVersion = "1.0.0" - override def twirlAdditionalImports: Seq[String] = additionalImports - } - - object HelloWorld extends HelloBase { - - object core extends HelloWorldModule { - override def twirlVersion = "1.3.15" - } - } - - val resourcePath: os.Path = os.pwd / 'contrib / 'twirllib / 'test / 'resources / "hello-world" - - def workspaceTest[T]( - m: TestUtil.BaseModule, - resourcePath: os.Path = resourcePath - )(t: TestEvaluator => T)(implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(m.millSourcePath / os.up) - os.copy(resourcePath, m.millSourcePath) - t(eval) - } - - def compileClassfiles: Seq[os.RelPath] = Seq[os.RelPath]( - "hello.template.scala", - "wrapper.template.scala" - ) - - def expectedDefaultImports: Seq[String] = Seq( - "import _root_.play.twirl.api.TwirlFeatureImports._", - "import _root_.play.twirl.api.TwirlHelperImports._", - "import _root_.play.twirl.api.Html", - "import _root_.play.twirl.api.JavaScript", - "import _root_.play.twirl.api.Txt", - "import _root_.play.twirl.api.Xml" - ) - - def additionalImports: Seq[String] = Seq( - "mill.twirl.test.AdditionalImport1._", - "mill.twirl.test.AdditionalImport2._" - ) - - def tests: Tests = Tests { - 'twirlVersion - { - - 'fromBuild - workspaceTest(HelloWorld) { eval => - val Right((result, evalCount)) = - eval.apply(HelloWorld.core.twirlVersion) - - assert( - result == "1.3.15", - evalCount > 0 - ) - } - } - 'compileTwirl - workspaceTest(HelloWorld) { eval => - val Right((result, evalCount)) = eval.apply(HelloWorld.core.compileTwirl) - - val outputFiles = os.walk(result.classes.path).filter(_.last.endsWith(".scala")) - val expectedClassfiles = compileClassfiles.map( - eval.outPath / 'core / 'compileTwirl / 'dest / 'html / _ - ) - - assert( - result.classes.path == eval.outPath / 'core / 'compileTwirl / 'dest, - outputFiles.nonEmpty, - outputFiles.forall(expectedClassfiles.contains), - outputFiles.size == 2, - evalCount > 0, - outputFiles.forall { p => - val lines = os.read.lines(p).map(_.trim) - (expectedDefaultImports ++ additionalImports.map(s => s"import $s")).forall(lines.contains) - } - ) - - // don't recompile if nothing changed - val Right((_, unchangedEvalCount)) = - eval.apply(HelloWorld.core.compileTwirl) - - assert(unchangedEvalCount == 0) - } - } -} diff --git a/docs/example-1/foo/src/Example.scala b/docs/example-1/foo/src/Example.scala new file mode 100644 index 00000000..f84f91f9 --- /dev/null +++ b/docs/example-1/foo/src/Example.scala @@ -0,0 +1,6 @@ +package foo +object Example{ + def main(args: Array[String]): Unit = { + println("Hello World") + } +} \ No newline at end of file diff --git a/docs/example-1/foo/src/foo/Example.scala b/docs/example-1/foo/src/foo/Example.scala deleted file mode 100644 index f84f91f9..00000000 --- a/docs/example-1/foo/src/foo/Example.scala +++ /dev/null @@ -1,6 +0,0 @@ -package foo -object Example{ - def main(args: Array[String]): Unit = { - println("Hello World") - } -} \ No newline at end of file diff --git a/docs/example-2/foo/src/Example.scala b/docs/example-2/foo/src/Example.scala new file mode 100644 index 00000000..f84f91f9 --- /dev/null +++ b/docs/example-2/foo/src/Example.scala @@ -0,0 +1,6 @@ +package foo 
+object Example{ + def main(args: Array[String]): Unit = { + println("Hello World") + } +} \ No newline at end of file diff --git a/docs/example-2/foo/src/foo/Example.scala b/docs/example-2/foo/src/foo/Example.scala deleted file mode 100644 index f84f91f9..00000000 --- a/docs/example-2/foo/src/foo/Example.scala +++ /dev/null @@ -1,6 +0,0 @@ -package foo -object Example{ - def main(args: Array[String]): Unit = { - println("Hello World") - } -} \ No newline at end of file diff --git a/integration/test/src/AcyclicTests.scala b/integration/test/src/AcyclicTests.scala new file mode 100644 index 00000000..145c106d --- /dev/null +++ b/integration/test/src/AcyclicTests.scala @@ -0,0 +1,31 @@ +package mill.integration + +import utest._ + +class AcyclicTests(fork: Boolean) + extends IntegrationTestSuite("MILL_ACYCLIC_REPO", "acyclic", fork) { + val tests = Tests{ + initWorkspace() + + def check(scalaVersion: String) = { + val firstCompile = eval(s"acyclic[$scalaVersion].compile") + + assert( + firstCompile, + os.walk(workspacePath).exists(_.last == "GraphAnalysis.class"), + os.walk(workspacePath).exists(_.last == "PluginPhase.class") + ) + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ + os.write.append(scalaFile, "\n}") + } + + val brokenCompile = eval(s"acyclic[$scalaVersion].compile") + + assert(!brokenCompile) + } + + 'scala2118 - mill.util.TestUtil.disableInJava9OrAbove(check("2.11.8")) + 'scala2124 - check("2.12.4") + + } +} diff --git a/integration/test/src/AmmoniteTests.scala b/integration/test/src/AmmoniteTests.scala new file mode 100644 index 00000000..5851a393 --- /dev/null +++ b/integration/test/src/AmmoniteTests.scala @@ -0,0 +1,35 @@ +package mill.integration + +import utest._ + +class AmmoniteTests(fork: Boolean) + extends IntegrationTestSuite("MILL_AMMONITE_REPO", "ammonite", fork) { + val tests = Tests{ + initWorkspace() + + def check(scalaVersion: String) = { + val replTests = eval( + s"amm.repl[$scalaVersion].test", "{ammonite.unit,ammonite.session.ProjectTests.guava}" + ) + val replTestMeta = meta(s"amm.repl[$scalaVersion].test.test") + assert( + replTests, + replTestMeta.contains("ammonite.session.ProjectTests.guava"), + replTestMeta.contains("ammonite.unit.SourceTests.objectInfo.thirdPartyJava") + ) + + val compileResult = eval( + "all", s"{shell,sshd,amm,integration}[$scalaVersion].test.compile" + ) + + assert( + compileResult, + os.walk(workspacePath / 'out / 'integration / scalaVersion / 'test / 'compile) + .exists(_.last == "ErrorTruncationTests.class") + ) + } + + 'scala2124 - check("2.12.4") + + } +} diff --git a/integration/test/src/BetterFilesTests.scala b/integration/test/src/BetterFilesTests.scala new file mode 100644 index 00000000..bdbaa2e3 --- /dev/null +++ b/integration/test/src/BetterFilesTests.scala @@ -0,0 +1,26 @@ +package mill.integration + +import utest._ + +class BetterFilesTests(fork: Boolean) + extends IntegrationTestSuite("MILL_BETTERFILES_REPO", "better-files", fork) { + val tests = Tests{ + initWorkspace() + 'test - { + + assert(eval("core.test")) + assert(eval("akka.test")) + assert(eval("benchmarks.test.compile")) + + val coreTestMeta = meta("core.test.test") + assert(coreTestMeta.contains("better.files.FileSpec")) + assert(coreTestMeta.contains("files should handle BOM")) + + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ + os.write.append(scalaFile, "\n}") + } + assert(!eval("akka.test")) + } + + } +} diff --git a/integration/test/src/CaffeineTests.scala b/integration/test/src/CaffeineTests.scala new file 
mode 100644 index 00000000..56c66b21 --- /dev/null +++ b/integration/test/src/CaffeineTests.scala @@ -0,0 +1,34 @@ +package mill.integration + +import utest._ + +class CaffeineTests(fork: Boolean) extends IntegrationTestSuite("MILL_CAFFEINE_REPO", "caffeine", fork) { + val tests = Tests{ + initWorkspace() + 'test - { + // Caffeine only can build using Java 9 or up. Java 8 results in weird + // type inference issues during the compile + if (mill.main.client.Util.isJava9OrAbove){ + assert(eval("caffeine.test.compile")) + + val suites = Seq( + "com.github.benmanes.caffeine.SingleConsumerQueueTest", + "com.github.benmanes.caffeine.cache.AsyncTest", + "com.github.benmanes.caffeine.cache.CaffeineTest", + "com.github.benmanes.caffeine.cache.TimerWheelTest" + ) + assert(eval( + "caffeine.test", + "-testclass", suites.mkString(",") + )) + assert(eval("guava.test.compile")) + assert(eval("guava.test")) + + assert(eval("jcache.test.compile")) + assert(eval("simulator.test.compile")) + + } + } + + } +} diff --git a/integration/test/src/DocAnnotationsTests.scala b/integration/test/src/DocAnnotationsTests.scala new file mode 100644 index 00000000..4d7ef11b --- /dev/null +++ b/integration/test/src/DocAnnotationsTests.scala @@ -0,0 +1,33 @@ +package mill.integration + +import mill.util.ScriptTestSuite +import utest._ + +class DocAnnotationsTests(fork: Boolean) extends ScriptTestSuite(fork) { + def workspaceSlug: String = "docannotations" + def scriptSourcePath: os.Path = os.pwd / 'integration / 'test / 'resources / workspaceSlug + val tests = Tests{ + initWorkspace() + 'test - { + assert(eval("inspect", "core.test.ivyDeps")) + val inheritedIvyDeps = ujson.read(meta("inspect"))("value").str + assert( + inheritedIvyDeps.contains("core.test.ivyDeps"), + inheritedIvyDeps.contains("Overriden ivyDeps Docs!!!"), + inheritedIvyDeps.contains("Any ivy dependencies you want to add to this Module"), + ) + + assert(eval("inspect", "core.task")) + val task = ujson.read(meta("inspect"))("value").str + assert( + task.contains("Core Task Docz!") + ) + + assert(eval("inspect", "inspect")) + val doc = ujson.read(meta("inspect"))("value").str + assert( + doc.contains("Displays metadata about the given task without actually running it.") + ) + } + } +} diff --git a/integration/test/src/IntegrationTestSuite.scala b/integration/test/src/IntegrationTestSuite.scala new file mode 100644 index 00000000..ca041eea --- /dev/null +++ b/integration/test/src/IntegrationTestSuite.scala @@ -0,0 +1,28 @@ +package mill.integration + +import mill.util.ScriptTestSuite +import utest._ + +abstract class IntegrationTestSuite(repoKey: String, val workspaceSlug: String, fork: Boolean) + extends ScriptTestSuite(fork){ + val buildFilePath = os.pwd / 'integration / 'test / 'resources / workspaceSlug + def scriptSourcePath = { + // The unzipped git repo snapshots we get from github come with a + // wrapper-folder inside the zip file, so copy the wrapper folder to the + // destination instead of the folder containing the wrapper. 
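DocAnnotationsTests above leans on the `inspect` command's cached output: `eval("inspect", task)` runs the command, and `meta("inspect")` reads back the JSON it wrote, so the suite can assert on the rendered documentation text. A minimal sketch of the same idiom follows; the "fooproject" workspace and `foo.compile` task are placeholders, not part of this patch.

package mill.integration

import mill.util.ScriptTestSuite
import utest._

// Sketch only: "fooproject" and foo.compile are illustrative names, not part
// of this patch. The shape mirrors DocAnnotationsTests.
class FooDocsTests(fork: Boolean) extends ScriptTestSuite(fork) {
  def workspaceSlug: String = "fooproject"
  def scriptSourcePath: os.Path = os.pwd / 'integration / 'test / 'resources / workspaceSlug
  val tests = Tests{
    initWorkspace()
    'docs - {
      // `inspect` renders a task's metadata; `meta` reads the JSON it cached,
      // so the assertion can check the documentation text itself.
      assert(eval("inspect", "foo.compile"))
      val doc = ujson.read(meta("inspect"))("value").str
      assert(doc.contains("foo.compile"))
    }
  }
}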
+ + val path = sys.props(repoKey) + val Seq(wrapper) = os.list(os.Path(path)) + wrapper + } + + def buildFiles: Seq[os.Path] = os.walk(buildFilePath) + + override def initWorkspace() = { + super.initWorkspace() + buildFiles.foreach { file => + os.copy.over(file, workspacePath / file.last) + } + assert(!os.walk(workspacePath).exists(_.ext == "class")) + } +} diff --git a/integration/test/src/JawnTests.scala b/integration/test/src/JawnTests.scala new file mode 100644 index 00000000..eafd1009 --- /dev/null +++ b/integration/test/src/JawnTests.scala @@ -0,0 +1,31 @@ +package mill.integration + +import utest._ + +class JawnTests(fork: Boolean) + extends IntegrationTestSuite("MILL_JAWN_REPO", "jawn", fork) { + val tests = Tests{ + initWorkspace() + + def check(scalaVersion: String) = { + val firstCompile = eval(s"jawn[$scalaVersion].parser.test") + + assert( + firstCompile, + os.walk(workspacePath).exists(_.last == "AsyncParser.class"), + os.walk(workspacePath).exists(_.last == "CharBuilderSpec.class") + ) + + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ + os.write.append(scalaFile, "\n}") + } + + val brokenCompile = eval(s"jawn[$scalaVersion].parser.test") + + assert(!brokenCompile) + } + + 'scala21111 - check("2.11.11") + 'scala2123 - check("2.12.3") + } +} diff --git a/integration/test/src/PlayJsonTests.scala b/integration/test/src/PlayJsonTests.scala new file mode 100644 index 00000000..262dcb36 --- /dev/null +++ b/integration/test/src/PlayJsonTests.scala @@ -0,0 +1,57 @@ +package mill.integration + + +import utest._ + +class PlayJsonTests(fork: Boolean) extends IntegrationTestSuite("MILL_PLAY_JSON_REPO", "play-json", fork) { + + override def buildFiles: Seq[os.Path] = { + os.list(buildFilePath).filter(_.ext == "sc") + } + + val tests = Tests{ + initWorkspace() + + 'jvm - { + assert(eval("playJsonJvm[2.12.4].test")) + val jvmMeta = meta("playJsonJvm[2.12.4].test.test") + + assert( + jvmMeta.contains("play.api.libs.json.JsonSharedSpec"), + jvmMeta.contains("JSON should support basic array operations") + ) + + assert( + jvmMeta.contains("play.api.libs.json.JsonValidSpec"), + jvmMeta.contains("JSON reads should::validate Dates") + ) + } + 'js - { + assert(eval("playJsonJs[2.12.4].test")) + val jsMeta = meta("playJsonJs[2.12.4].test.test") + + assert( + jsMeta.contains("play.api.libs.json.JsonSharedSpec"), + jsMeta.contains("JSON should support basic array operations") + ) + + assert( + jsMeta.contains("play.api.libs.json.JsonSpec"), + jsMeta.contains("Complete JSON should create full object when lose precision when parsing BigDecimals") + ) + } + 'playJoda - { + assert(eval("playJoda[2.12.4].test")) + val metaFile = meta("playJoda[2.12.4].test.test") + + assert( + metaFile.contains("play.api.libs.json.JsonJodaValidSpec"), + metaFile.contains("JSON reads should::validate Dates") + ) + } + + 'benchmarks - { +// "benchmarks[2.12.4].runJmh" -i 1 -wi 1 -f1 -t1 + } + } +} diff --git a/integration/test/src/UpickleTests.scala b/integration/test/src/UpickleTests.scala new file mode 100644 index 00000000..133188d3 --- /dev/null +++ b/integration/test/src/UpickleTests.scala @@ -0,0 +1,30 @@ +package mill.integration + +import utest._ + +class UpickleTests(fork: Boolean) extends IntegrationTestSuite("MILL_UPICKLE_REPO", "upickle", fork) { + val tests = Tests{ + initWorkspace() + 'jvm21111 - { + mill.util.TestUtil.disableInJava9OrAbove({ + assert(eval("upickleJvm[2.11.11].test")) + val jvmMeta = meta("upickleJvm[2.11.11].test.test") + 
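IntegrationTestSuite, completed just above, factors the repo-snapshot plumbing out of the individual suites: a subclass only names the system property pointing at its unzipped snapshot plus a workspace slug, and initWorkspace() copies the checked-in build files over before anything is evaluated. A sketch of what an additional suite would look like; the MILL_FOO_REPO property and the foo[...] module are assumptions, not something this patch defines.

package mill.integration

import utest._

// Hypothetical suite: MILL_FOO_REPO and the foo[...] module are placeholders.
class FooTests(fork: Boolean)
  extends IntegrationTestSuite("MILL_FOO_REPO", "foo", fork) {
  val tests = Tests{
    initWorkspace()
    'compile - {
      assert(eval("foo[2.12.4].compile"))
      // Break every source file, then check that re-evaluation now fails,
      // the same trick AcyclicTests and JawnTests use.
      for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){
        os.write.append(scalaFile, "\n}")
      }
      assert(!eval("foo[2.12.4].compile"))
    }
  }
}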
assert(jvmMeta.contains("example.ExampleTests.simple")) + assert(jvmMeta.contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) + }) + } + 'jvm2124 - { + assert(eval("upickleJvm[2.12.4].test")) + val jvmMeta = meta("upickleJvm[2.12.4].test.test") + assert(jvmMeta.contains("example.ExampleTests.simple")) + assert(jvmMeta.contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) + } + 'js - { + assert(eval("upickleJs[2.12.4].test")) + val jsMeta = meta("upickleJs[2.12.4].test.test") + assert(jsMeta .contains("example.ExampleTests.simple")) + assert(jsMeta .contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) + } + + } +} diff --git a/integration/test/src/forked/Tests.scala b/integration/test/src/forked/Tests.scala new file mode 100644 index 00000000..41844b58 --- /dev/null +++ b/integration/test/src/forked/Tests.scala @@ -0,0 +1,10 @@ +package mill.integration.forked + +object AcyclicTests extends mill.integration.AcyclicTests(fork = true) +object AmmoniteTests extends mill.integration.AmmoniteTests(fork = true) +object BetterFilesTests extends mill.integration.BetterFilesTests(fork = true) +object JawnTests extends mill.integration.JawnTests(fork = true) +object UpickleTests extends mill.integration.UpickleTests(fork = true) +object PlayJsonTests extends mill.integration.PlayJsonTests(fork = true) +object CaffeineTests extends mill.integration.CaffeineTests(fork = true) +object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = true) diff --git a/integration/test/src/local/Tests.scala b/integration/test/src/local/Tests.scala new file mode 100644 index 00000000..e95aac54 --- /dev/null +++ b/integration/test/src/local/Tests.scala @@ -0,0 +1,10 @@ +package mill.integration.local + +object AcyclicTests extends mill.integration.AcyclicTests(fork = false) +object AmmoniteTests extends mill.integration.AmmoniteTests(fork = false) +object BetterFilesTests extends mill.integration.BetterFilesTests(fork = false) +object JawnTests extends mill.integration.JawnTests(fork = false) +object UpickleTests extends mill.integration.UpickleTests(fork = false) +object PlayJsonTests extends mill.integration.PlayJsonTests(fork = false) +object CaffeineTests extends mill.integration.CaffeineTests(fork = false) +object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = false) \ No newline at end of file diff --git a/integration/test/src/mill/integration/AcyclicTests.scala b/integration/test/src/mill/integration/AcyclicTests.scala deleted file mode 100644 index 145c106d..00000000 --- a/integration/test/src/mill/integration/AcyclicTests.scala +++ /dev/null @@ -1,31 +0,0 @@ -package mill.integration - -import utest._ - -class AcyclicTests(fork: Boolean) - extends IntegrationTestSuite("MILL_ACYCLIC_REPO", "acyclic", fork) { - val tests = Tests{ - initWorkspace() - - def check(scalaVersion: String) = { - val firstCompile = eval(s"acyclic[$scalaVersion].compile") - - assert( - firstCompile, - os.walk(workspacePath).exists(_.last == "GraphAnalysis.class"), - os.walk(workspacePath).exists(_.last == "PluginPhase.class") - ) - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ - os.write.append(scalaFile, "\n}") - } - - val brokenCompile = eval(s"acyclic[$scalaVersion].compile") - - assert(!brokenCompile) - } - - 'scala2118 - mill.util.TestUtil.disableInJava9OrAbove(check("2.11.8")) - 'scala2124 - check("2.12.4") - - } -} diff --git a/integration/test/src/mill/integration/AmmoniteTests.scala 
b/integration/test/src/mill/integration/AmmoniteTests.scala deleted file mode 100644 index 5851a393..00000000 --- a/integration/test/src/mill/integration/AmmoniteTests.scala +++ /dev/null @@ -1,35 +0,0 @@ -package mill.integration - -import utest._ - -class AmmoniteTests(fork: Boolean) - extends IntegrationTestSuite("MILL_AMMONITE_REPO", "ammonite", fork) { - val tests = Tests{ - initWorkspace() - - def check(scalaVersion: String) = { - val replTests = eval( - s"amm.repl[$scalaVersion].test", "{ammonite.unit,ammonite.session.ProjectTests.guava}" - ) - val replTestMeta = meta(s"amm.repl[$scalaVersion].test.test") - assert( - replTests, - replTestMeta.contains("ammonite.session.ProjectTests.guava"), - replTestMeta.contains("ammonite.unit.SourceTests.objectInfo.thirdPartyJava") - ) - - val compileResult = eval( - "all", s"{shell,sshd,amm,integration}[$scalaVersion].test.compile" - ) - - assert( - compileResult, - os.walk(workspacePath / 'out / 'integration / scalaVersion / 'test / 'compile) - .exists(_.last == "ErrorTruncationTests.class") - ) - } - - 'scala2124 - check("2.12.4") - - } -} diff --git a/integration/test/src/mill/integration/BetterFilesTests.scala b/integration/test/src/mill/integration/BetterFilesTests.scala deleted file mode 100644 index bdbaa2e3..00000000 --- a/integration/test/src/mill/integration/BetterFilesTests.scala +++ /dev/null @@ -1,26 +0,0 @@ -package mill.integration - -import utest._ - -class BetterFilesTests(fork: Boolean) - extends IntegrationTestSuite("MILL_BETTERFILES_REPO", "better-files", fork) { - val tests = Tests{ - initWorkspace() - 'test - { - - assert(eval("core.test")) - assert(eval("akka.test")) - assert(eval("benchmarks.test.compile")) - - val coreTestMeta = meta("core.test.test") - assert(coreTestMeta.contains("better.files.FileSpec")) - assert(coreTestMeta.contains("files should handle BOM")) - - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ - os.write.append(scalaFile, "\n}") - } - assert(!eval("akka.test")) - } - - } -} diff --git a/integration/test/src/mill/integration/CaffeineTests.scala b/integration/test/src/mill/integration/CaffeineTests.scala deleted file mode 100644 index 56c66b21..00000000 --- a/integration/test/src/mill/integration/CaffeineTests.scala +++ /dev/null @@ -1,34 +0,0 @@ -package mill.integration - -import utest._ - -class CaffeineTests(fork: Boolean) extends IntegrationTestSuite("MILL_CAFFEINE_REPO", "caffeine", fork) { - val tests = Tests{ - initWorkspace() - 'test - { - // Caffeine only can build using Java 9 or up. 
Java 8 results in weird - // type inference issues during the compile - if (mill.main.client.Util.isJava9OrAbove){ - assert(eval("caffeine.test.compile")) - - val suites = Seq( - "com.github.benmanes.caffeine.SingleConsumerQueueTest", - "com.github.benmanes.caffeine.cache.AsyncTest", - "com.github.benmanes.caffeine.cache.CaffeineTest", - "com.github.benmanes.caffeine.cache.TimerWheelTest" - ) - assert(eval( - "caffeine.test", - "-testclass", suites.mkString(",") - )) - assert(eval("guava.test.compile")) - assert(eval("guava.test")) - - assert(eval("jcache.test.compile")) - assert(eval("simulator.test.compile")) - - } - } - - } -} diff --git a/integration/test/src/mill/integration/DocAnnotationsTests.scala b/integration/test/src/mill/integration/DocAnnotationsTests.scala deleted file mode 100644 index 4d7ef11b..00000000 --- a/integration/test/src/mill/integration/DocAnnotationsTests.scala +++ /dev/null @@ -1,33 +0,0 @@ -package mill.integration - -import mill.util.ScriptTestSuite -import utest._ - -class DocAnnotationsTests(fork: Boolean) extends ScriptTestSuite(fork) { - def workspaceSlug: String = "docannotations" - def scriptSourcePath: os.Path = os.pwd / 'integration / 'test / 'resources / workspaceSlug - val tests = Tests{ - initWorkspace() - 'test - { - assert(eval("inspect", "core.test.ivyDeps")) - val inheritedIvyDeps = ujson.read(meta("inspect"))("value").str - assert( - inheritedIvyDeps.contains("core.test.ivyDeps"), - inheritedIvyDeps.contains("Overriden ivyDeps Docs!!!"), - inheritedIvyDeps.contains("Any ivy dependencies you want to add to this Module"), - ) - - assert(eval("inspect", "core.task")) - val task = ujson.read(meta("inspect"))("value").str - assert( - task.contains("Core Task Docz!") - ) - - assert(eval("inspect", "inspect")) - val doc = ujson.read(meta("inspect"))("value").str - assert( - doc.contains("Displays metadata about the given task without actually running it.") - ) - } - } -} diff --git a/integration/test/src/mill/integration/IntegrationTestSuite.scala b/integration/test/src/mill/integration/IntegrationTestSuite.scala deleted file mode 100644 index ca041eea..00000000 --- a/integration/test/src/mill/integration/IntegrationTestSuite.scala +++ /dev/null @@ -1,28 +0,0 @@ -package mill.integration - -import mill.util.ScriptTestSuite -import utest._ - -abstract class IntegrationTestSuite(repoKey: String, val workspaceSlug: String, fork: Boolean) - extends ScriptTestSuite(fork){ - val buildFilePath = os.pwd / 'integration / 'test / 'resources / workspaceSlug - def scriptSourcePath = { - // The unzipped git repo snapshots we get from github come with a - // wrapper-folder inside the zip file, so copy the wrapper folder to the - // destination instead of the folder containing the wrapper. 
- - val path = sys.props(repoKey) - val Seq(wrapper) = os.list(os.Path(path)) - wrapper - } - - def buildFiles: Seq[os.Path] = os.walk(buildFilePath) - - override def initWorkspace() = { - super.initWorkspace() - buildFiles.foreach { file => - os.copy.over(file, workspacePath / file.last) - } - assert(!os.walk(workspacePath).exists(_.ext == "class")) - } -} diff --git a/integration/test/src/mill/integration/JawnTests.scala b/integration/test/src/mill/integration/JawnTests.scala deleted file mode 100644 index eafd1009..00000000 --- a/integration/test/src/mill/integration/JawnTests.scala +++ /dev/null @@ -1,31 +0,0 @@ -package mill.integration - -import utest._ - -class JawnTests(fork: Boolean) - extends IntegrationTestSuite("MILL_JAWN_REPO", "jawn", fork) { - val tests = Tests{ - initWorkspace() - - def check(scalaVersion: String) = { - val firstCompile = eval(s"jawn[$scalaVersion].parser.test") - - assert( - firstCompile, - os.walk(workspacePath).exists(_.last == "AsyncParser.class"), - os.walk(workspacePath).exists(_.last == "CharBuilderSpec.class") - ) - - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "scala")){ - os.write.append(scalaFile, "\n}") - } - - val brokenCompile = eval(s"jawn[$scalaVersion].parser.test") - - assert(!brokenCompile) - } - - 'scala21111 - check("2.11.11") - 'scala2123 - check("2.12.3") - } -} diff --git a/integration/test/src/mill/integration/PlayJsonTests.scala b/integration/test/src/mill/integration/PlayJsonTests.scala deleted file mode 100644 index 262dcb36..00000000 --- a/integration/test/src/mill/integration/PlayJsonTests.scala +++ /dev/null @@ -1,57 +0,0 @@ -package mill.integration - - -import utest._ - -class PlayJsonTests(fork: Boolean) extends IntegrationTestSuite("MILL_PLAY_JSON_REPO", "play-json", fork) { - - override def buildFiles: Seq[os.Path] = { - os.list(buildFilePath).filter(_.ext == "sc") - } - - val tests = Tests{ - initWorkspace() - - 'jvm - { - assert(eval("playJsonJvm[2.12.4].test")) - val jvmMeta = meta("playJsonJvm[2.12.4].test.test") - - assert( - jvmMeta.contains("play.api.libs.json.JsonSharedSpec"), - jvmMeta.contains("JSON should support basic array operations") - ) - - assert( - jvmMeta.contains("play.api.libs.json.JsonValidSpec"), - jvmMeta.contains("JSON reads should::validate Dates") - ) - } - 'js - { - assert(eval("playJsonJs[2.12.4].test")) - val jsMeta = meta("playJsonJs[2.12.4].test.test") - - assert( - jsMeta.contains("play.api.libs.json.JsonSharedSpec"), - jsMeta.contains("JSON should support basic array operations") - ) - - assert( - jsMeta.contains("play.api.libs.json.JsonSpec"), - jsMeta.contains("Complete JSON should create full object when lose precision when parsing BigDecimals") - ) - } - 'playJoda - { - assert(eval("playJoda[2.12.4].test")) - val metaFile = meta("playJoda[2.12.4].test.test") - - assert( - metaFile.contains("play.api.libs.json.JsonJodaValidSpec"), - metaFile.contains("JSON reads should::validate Dates") - ) - } - - 'benchmarks - { -// "benchmarks[2.12.4].runJmh" -i 1 -wi 1 -f1 -t1 - } - } -} diff --git a/integration/test/src/mill/integration/UpickleTests.scala b/integration/test/src/mill/integration/UpickleTests.scala deleted file mode 100644 index 133188d3..00000000 --- a/integration/test/src/mill/integration/UpickleTests.scala +++ /dev/null @@ -1,30 +0,0 @@ -package mill.integration - -import utest._ - -class UpickleTests(fork: Boolean) extends IntegrationTestSuite("MILL_UPICKLE_REPO", "upickle", fork) { - val tests = Tests{ - initWorkspace() - 'jvm21111 - { - 
mill.util.TestUtil.disableInJava9OrAbove({ - assert(eval("upickleJvm[2.11.11].test")) - val jvmMeta = meta("upickleJvm[2.11.11].test.test") - assert(jvmMeta.contains("example.ExampleTests.simple")) - assert(jvmMeta.contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) - }) - } - 'jvm2124 - { - assert(eval("upickleJvm[2.12.4].test")) - val jvmMeta = meta("upickleJvm[2.12.4].test.test") - assert(jvmMeta.contains("example.ExampleTests.simple")) - assert(jvmMeta.contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) - } - 'js - { - assert(eval("upickleJs[2.12.4].test")) - val jsMeta = meta("upickleJs[2.12.4].test.test") - assert(jsMeta .contains("example.ExampleTests.simple")) - assert(jsMeta .contains("upickle.MacroTests.commonCustomStructures.simpleAdt")) - } - - } -} diff --git a/integration/test/src/mill/integration/forked/Tests.scala b/integration/test/src/mill/integration/forked/Tests.scala deleted file mode 100644 index 41844b58..00000000 --- a/integration/test/src/mill/integration/forked/Tests.scala +++ /dev/null @@ -1,10 +0,0 @@ -package mill.integration.forked - -object AcyclicTests extends mill.integration.AcyclicTests(fork = true) -object AmmoniteTests extends mill.integration.AmmoniteTests(fork = true) -object BetterFilesTests extends mill.integration.BetterFilesTests(fork = true) -object JawnTests extends mill.integration.JawnTests(fork = true) -object UpickleTests extends mill.integration.UpickleTests(fork = true) -object PlayJsonTests extends mill.integration.PlayJsonTests(fork = true) -object CaffeineTests extends mill.integration.CaffeineTests(fork = true) -object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = true) diff --git a/integration/test/src/mill/integration/local/Tests.scala b/integration/test/src/mill/integration/local/Tests.scala deleted file mode 100644 index e95aac54..00000000 --- a/integration/test/src/mill/integration/local/Tests.scala +++ /dev/null @@ -1,10 +0,0 @@ -package mill.integration.local - -object AcyclicTests extends mill.integration.AcyclicTests(fork = false) -object AmmoniteTests extends mill.integration.AmmoniteTests(fork = false) -object BetterFilesTests extends mill.integration.BetterFilesTests(fork = false) -object JawnTests extends mill.integration.JawnTests(fork = false) -object UpickleTests extends mill.integration.UpickleTests(fork = false) -object PlayJsonTests extends mill.integration.PlayJsonTests(fork = false) -object CaffeineTests extends mill.integration.CaffeineTests(fork = false) -object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = false) \ No newline at end of file diff --git a/main/client/src/InputPumper.java b/main/client/src/InputPumper.java new file mode 100644 index 00000000..5205be0b --- /dev/null +++ b/main/client/src/InputPumper.java @@ -0,0 +1,37 @@ +package mill.main.client; + +import java.io.InputStream; +import java.io.OutputStream; + +public class InputPumper implements Runnable{ + private InputStream src; + private OutputStream dest; + private Boolean checkAvailable; + public InputPumper(InputStream src, + OutputStream dest, + Boolean checkAvailable){ + this.src = src; + this.dest = dest; + this.checkAvailable = checkAvailable; + } + + boolean running = true; + public void run() { + byte[] buffer = new byte[1024]; + try{ + while(running){ + if (checkAvailable && src.available() == 0) Thread.sleep(2); + else { + int n = src.read(buffer); + if (n == -1) running = false; + else { + dest.write(buffer, 0, n); + dest.flush(); + } + } + } + 
}catch(Exception e){ + throw new RuntimeException(e); + } + } +} \ No newline at end of file diff --git a/main/client/src/Lock.java b/main/client/src/Lock.java new file mode 100644 index 00000000..6e5f18b0 --- /dev/null +++ b/main/client/src/Lock.java @@ -0,0 +1,14 @@ +package mill.main.client; +public abstract class Lock implements AutoCloseable{ + abstract public Locked lock() throws Exception; + abstract public Locked tryLock() throws Exception; + + public void await() throws Exception{ + lock().release(); + } + + /** + * Returns `true` if the lock is *available for taking* + */ + abstract public boolean probe() throws Exception; +} \ No newline at end of file diff --git a/main/client/src/Locked.java b/main/client/src/Locked.java new file mode 100644 index 00000000..e6ad3d63 --- /dev/null +++ b/main/client/src/Locked.java @@ -0,0 +1,10 @@ +package mill.main.client; + +import java.io.RandomAccessFile; +import java.nio.channels.FileChannel; +import java.util.concurrent.locks.ReentrantLock; + + +public interface Locked{ + public void release() throws Exception; +} \ No newline at end of file diff --git a/main/client/src/Locks.java b/main/client/src/Locks.java new file mode 100644 index 00000000..64259293 --- /dev/null +++ b/main/client/src/Locks.java @@ -0,0 +1,107 @@ +package mill.main.client; + +import java.io.RandomAccessFile; +import java.nio.channels.FileChannel; +import java.util.concurrent.locks.ReentrantLock; + +public class Locks implements AutoCloseable{ + public Lock processLock; + public Lock serverLock; + public Lock clientLock; + public static Locks files(String lockBase) throws Exception{ + return new Locks(){{ + processLock = new FileLock(lockBase + "/pid"); + + serverLock = new FileLock(lockBase + "/serverLock"); + + clientLock = new FileLock(lockBase + "/clientLock"); + }}; + } + public static Locks memory(){ + return new Locks(){{ + this.processLock = new MemoryLock(); + this.serverLock = new MemoryLock(); + this.clientLock = new MemoryLock(); + }}; + } + + @Override + public void close() throws Exception { + processLock.close(); + serverLock.close(); + clientLock.close(); + } +} +class FileLocked implements Locked{ + private java.nio.channels.FileLock lock; + public FileLocked(java.nio.channels.FileLock lock){ + this.lock = lock; + } + public void release() throws Exception{ + this.lock.release(); + } +} + +class FileLock extends Lock{ + String path; + RandomAccessFile raf; + FileChannel chan; + public FileLock(String path) throws Exception{ + this.path = path; + raf = new RandomAccessFile(path, "rw"); + chan = raf.getChannel(); + } + + public Locked lock() throws Exception{ + return new FileLocked(chan.lock()); + } + public Locked tryLock() throws Exception{ + java.nio.channels.FileLock l = chan.tryLock(); + if (l == null) return null; + else return new FileLocked(l); + } + public boolean probe()throws Exception{ + java.nio.channels.FileLock l = chan.tryLock(); + if (l == null) return false; + else { + l.release(); + return true; + } + } + + @Override + public void close() throws Exception { + raf.close(); + chan.close(); + } +} +class MemoryLocked implements Locked{ + java.util.concurrent.locks.Lock l; + public MemoryLocked(java.util.concurrent.locks.Lock l){ + this.l = l; + } + public void release() throws Exception{ + l.unlock(); + } +} + +class MemoryLock extends Lock{ + ReentrantLock innerLock = new ReentrantLock(true); + + public boolean probe(){ + return !innerLock.isLocked(); + } + public Locked lock() { + innerLock.lock(); + return new MemoryLocked(innerLock); 
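The Lock, Locked and Locks classes added above give the client and server one coordination primitive with two interchangeable backends: JVM file locks under the worker directory for real runs, and fair ReentrantLocks for in-memory testing. probe() reports whether a lock is currently free, and await() simply takes the lock and releases it again, which amounts to blocking until whoever holds it lets go. A small sketch against the in-memory backend:

import mill.main.client.Locks

// Illustrative use of Locks.memory(); the file-backed variant from
// Locks.files(lockBase) behaves the same way but coordinates across processes.
val locks = Locks.memory()
assert(locks.serverLock.probe())      // free: nothing has taken it yet
val held = locks.serverLock.lock()    // take it, as a running server would
assert(!locks.serverLock.probe())
held.release()
locks.serverLock.await()              // returns immediately once the lock is free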
+ } + public Locked tryLock() { + if (innerLock.tryLock()) return new MemoryLocked(innerLock); + else return null; + } + + @Override + public void close() throws Exception { + innerLock.unlock(); + } +} diff --git a/main/client/src/MillClientMain.java b/main/client/src/MillClientMain.java new file mode 100644 index 00000000..3857caff --- /dev/null +++ b/main/client/src/MillClientMain.java @@ -0,0 +1,162 @@ +package mill.main.client; + +import org.scalasbt.ipcsocket.*; + +import java.io.*; +import java.net.Socket; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.channels.FileChannel; +import java.util.*; + +public class MillClientMain { + static void initServer(String lockBase, boolean setJnaNoSys) throws IOException,URISyntaxException{ + String[] selfJars = System.getProperty("MILL_CLASSPATH").split(","); + + List l = new ArrayList<>(); + List vmOptions = new ArrayList<>(); + l.add("java"); + final Properties props = System.getProperties(); + for(final String k: props.stringPropertyNames()){ + if (k.startsWith("MILL_") && !"MILL_CLASSPATH".equals(k)) { + vmOptions.add("-D" + k + "=" + props.getProperty(k)); + } + } + if (setJnaNoSys) { + vmOptions.add("-Djna.nosys=true"); + } + if(!Util.isWindows){ + l.addAll(vmOptions); + } else { + final File vmOptionsFile = new File(lockBase, "vmoptions"); + try (PrintWriter out = new PrintWriter(vmOptionsFile)) { + for(String opt: vmOptions) + out.println(opt); + } + l.add("-XX:VMOptionsFile=" + vmOptionsFile.getCanonicalPath()); + } + l.add("-cp"); + l.add(String.join(File.pathSeparator, selfJars)); + l.add("mill.main.MillServerMain"); + l.add(lockBase); + + new ProcessBuilder() + .command(l) + .redirectOutput(new java.io.File(lockBase + "/logs")) + .redirectError(new java.io.File(lockBase + "/logs")) + .start(); + } + public static void main(String[] args) throws Exception{ + System.exit(main0(args)); + } + public static int main0(String[] args) throws Exception{ + boolean setJnaNoSys = System.getProperty("jna.nosys") == null; + Map env = System.getenv(); + if (setJnaNoSys) { + System.setProperty("jna.nosys", "true"); + } + int index = 0; + while (index < 5) { + index += 1; + String lockBase = "out/mill-worker-" + index; + new java.io.File(lockBase).mkdirs(); + + try(RandomAccessFile lockFile = new RandomAccessFile(lockBase + "/clientLock", "rw"); + FileChannel channel = lockFile.getChannel(); + java.nio.channels.FileLock tryLock = channel.tryLock(); + Locks locks = Locks.files(lockBase)){ + if (tryLock != null) { + int exitCode = MillClientMain.run( + lockBase, + new Runnable() { + @Override + public void run() { + try{ + initServer(lockBase, setJnaNoSys); + }catch(Exception e){ + throw new RuntimeException(e); + } + } + }, + locks, + System.in, + System.out, + System.err, + args, + env + ); + return exitCode; + } + } finally{ + + } + } + throw new Exception("Reached max process limit: " + 5); + } + + public static int run(String lockBase, + Runnable initServer, + Locks locks, + InputStream stdin, + OutputStream stdout, + OutputStream stderr, + String[] args, + Map env) throws Exception{ + + try(FileOutputStream f = new FileOutputStream(lockBase + "/run")){ + f.write(System.console() != null ? 
1 : 0); + Util.writeString(f, System.getProperty("MILL_VERSION")); + Util.writeArgs(args, f); + Util.writeMap(env, f); + } + + boolean serverInit = false; + if (locks.processLock.probe()) { + serverInit = true; + initServer.run(); + } + while(locks.processLock.probe()) Thread.sleep(3); + + // Need to give sometime for Win32NamedPipeSocket to work + // if the server is just initialized + if (serverInit && Util.isWindows) Thread.sleep(1000); + + Socket ioSocket = null; + + long retryStart = System.currentTimeMillis(); + + while(ioSocket == null && System.currentTimeMillis() - retryStart < 1000){ + try{ + ioSocket = Util.isWindows? + new Win32NamedPipeSocket(Util.WIN32_PIPE_PREFIX + new File(lockBase).getName()) + : new UnixDomainSocket(lockBase + "/io"); + }catch(Throwable e){ + Thread.sleep(1); + } + } + if (ioSocket == null){ + throw new Exception("Failed to connect to server"); + } + + InputStream outErr = ioSocket.getInputStream(); + OutputStream in = ioSocket.getOutputStream(); + ProxyStreamPumper outPump = new ProxyStreamPumper(outErr, stdout, stderr); + InputPumper inPump = new InputPumper(stdin, in, true); + Thread outThread = new Thread(outPump); + outThread.setDaemon(true); + Thread inThread = new Thread(inPump); + inThread.setDaemon(true); + outThread.start(); + inThread.start(); + + locks.serverLock.await(); + + try(FileInputStream fos = new FileInputStream(lockBase + "/exitCode")){ + return Integer.parseInt(new BufferedReader(new InputStreamReader(fos)).readLine()); + } catch(Throwable e){ + return 1; + } finally{ + ioSocket.close(); + } + } +} diff --git a/main/client/src/ProxyOutputStream.java b/main/client/src/ProxyOutputStream.java new file mode 100644 index 00000000..339e0150 --- /dev/null +++ b/main/client/src/ProxyOutputStream.java @@ -0,0 +1,34 @@ +package mill.main.client; + +import java.io.IOException; + +public class ProxyOutputStream extends java.io.OutputStream { + private java.io.OutputStream out; + private int key; + public ProxyOutputStream(java.io.OutputStream out, int key){ + this.out = out; + this.key = key; + } + @Override synchronized public void write(int b) throws IOException { + out.write(key); + out.write(b); + } + @Override synchronized public void write(byte[] b) throws IOException { + write(b, 0, b.length); + } + @Override synchronized public void write(byte[] b, int off, int len) throws IOException { + int i = 0; + while(i < len && i + off < b.length){ + int chunkLength = Math.min(len - i, 127); + out.write(chunkLength * key); + out.write(b, off + i, Math.min(b.length - off - i, chunkLength)); + i += chunkLength; + } + } + @Override public void flush() throws IOException { + out.flush(); + } + @Override public void close() throws IOException { + out.close(); + } +} diff --git a/main/client/src/ProxyStreamPumper.java b/main/client/src/ProxyStreamPumper.java new file mode 100644 index 00000000..977323f3 --- /dev/null +++ b/main/client/src/ProxyStreamPumper.java @@ -0,0 +1,60 @@ +package mill.main.client; + + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +public class ProxyStreamPumper implements Runnable{ + private InputStream src; + private OutputStream dest1; + private OutputStream dest2; + public ProxyStreamPumper(InputStream src, OutputStream dest1, OutputStream dest2){ + this.src = src; + this.dest1 = dest1; + this.dest2 = dest2; + } + + public void run() { + byte[] buffer = new byte[1024]; + boolean running = true; + boolean first = true; + while (running) { + try { + int quantity0 = (byte)src.read(); + 
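ProxyOutputStream and ProxyStreamPumper together multiplex two logical streams over the single socket: every chunk is at most 127 bytes and is preceded by one header byte equal to the chunk length multiplied by the stream's key (+1 or -1), so the reader recovers the payload length from the header's absolute value and the destination from its sign. A worked example of the framing:

import java.io.ByteArrayOutputStream
import mill.main.client.ProxyOutputStream

// Two writers sharing one pipe; the keys 1 and -1 mark, for example,
// stdout frames and stderr frames.
val pipe = new ByteArrayOutputStream()
val out = new ProxyOutputStream(pipe, 1)
val err = new ProxyOutputStream(pipe, -1)
out.write("hi".getBytes)   // emits header 2, then 'h', 'i'
err.write("!".getBytes)    // emits header -1, then '!'
println(pipe.toByteArray.toList)   // List(2, 104, 105, -1, 33)

On the receiving side, continued below, ProxyStreamPumper reads the header byte, takes its absolute value as the chunk length, and routes the payload to dest1 for positive headers and dest2 for negative ones.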
int quantity = Math.abs(quantity0); + int offset = 0; + int delta = -1; + while(offset < quantity){ + delta = src.read(buffer, offset, quantity - offset); + if (delta == -1) { + running = false; + break; + }else{ + offset += delta; + } + } + + if (delta != -1){ + if (quantity0 > 0) dest1.write(buffer, 0, offset); + else dest2.write(buffer, 0, offset); + } + } catch (IOException e) { + // Win32NamedPipeSocket input stream somehow doesn't return -1, + // instead it throws an IOException whose message contains "ReadFile()". + // However, if it throws an IOException before ever reading some bytes, + // it could not connect to the server, so exit. + if (Util.isWindows && e.getMessage().contains("ReadFile()")) { + if (first) { + System.err.println("Failed to connect to server"); + System.exit(1); + } else running = false; + } else { + e.printStackTrace(); + System.exit(1); + } + } + } + } + +} diff --git a/main/client/src/Util.java b/main/client/src/Util.java new file mode 100644 index 00000000..54361734 --- /dev/null +++ b/main/client/src/Util.java @@ -0,0 +1,95 @@ +package mill.main.client; + + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.HashMap; +import java.util.Map; + +public class Util { + public static boolean isWindows = System.getProperty("os.name").toLowerCase().startsWith("windows"); + public static boolean isJava9OrAbove = !System.getProperty("java.specification.version").startsWith("1."); + + // Windows named pipe prefix (see https://github.com/sbt/ipcsocket/blob/v1.0.0/README.md) + // Win32NamedPipeServerSocket automatically adds this as a prefix (if it is not already is prefixed), + // but Win32NamedPipeSocket does not + // https://github.com/sbt/ipcsocket/blob/v1.0.0/src/main/java/org/scalasbt/ipcsocket/Win32NamedPipeServerSocket.java#L36 + public static String WIN32_PIPE_PREFIX = "\\\\.\\pipe\\"; + + public static String[] parseArgs(InputStream argStream) throws IOException { + + int argsLength = readInt(argStream); + String[] args = new String[argsLength]; + for (int i = 0; i < args.length; i++) { + args[i] = readString(argStream); + } + return args; + } + public static void writeArgs(String[] args, + OutputStream argStream) throws IOException { + writeInt(argStream, args.length); + for(String arg: args){ + writeString(argStream, arg); + } + } + + /** + * This allows the mill client to pass the environment as he sees it to the + * server (as the server remains alive over the course of several runs and + * does not see the environment changes the client would) + */ + public static void writeMap(Map map, OutputStream argStream) throws IOException { + writeInt(argStream, map.size()); + for (Map.Entry kv : map.entrySet()) { + writeString(argStream, kv.getKey()); + writeString(argStream, kv.getValue()); + } + } + + public static Map parseMap(InputStream argStream) throws IOException { + Map env = new HashMap<>(); + int mapLength = readInt(argStream); + for (int i = 0; i < mapLength; i++) { + String key = readString(argStream); + String value = readString(argStream); + env.put(key, value); + } + return env; + } + + public static String readString(InputStream inputStream) throws IOException { + // Result is between 0 and 255, hence the loop. 
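Util pins down the wire format shared by client and server: 4-byte big-endian integers, strings written as a length prefix followed by the raw bytes, and argument arrays and environment maps built out of those two primitives. These are the same helpers MillClientMain uses above to write the run file that the server later parses. A round-trip sketch:

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import mill.main.client.Util

// Serialize an arbitrary version string and an argument list, then read them back.
val buf = new ByteArrayOutputStream()
Util.writeString(buf, "1.2.3")
Util.writeArgs(Array("clean", "foo.compile"), buf)
val in = new ByteArrayInputStream(buf.toByteArray)
assert(Util.readString(in) == "1.2.3")
assert(Util.parseArgs(in).toList == List("clean", "foo.compile"))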
+ int length = readInt(inputStream); + byte[] arr = new byte[length]; + int total = 0; + while(total < length){ + int res = inputStream.read(arr, total, length-total); + if (res == -1) throw new IOException("Incomplete String"); + else{ + total += res; + } + } + return new String(arr); + } + + public static void writeString(OutputStream outputStream, String string) throws IOException { + byte[] bytes = string.getBytes(); + writeInt(outputStream, bytes.length); + outputStream.write(bytes); + } + + public static void writeInt(OutputStream out, int i) throws IOException{ + out.write((byte)(i >>> 24)); + out.write((byte)(i >>> 16)); + out.write((byte)(i >>> 8)); + out.write((byte)i); + } + public static int readInt(InputStream in) throws IOException{ + return ((in.read() & 0xFF) << 24) + + ((in.read() & 0xFF) << 16) + + ((in.read() & 0xFF) << 8) + + (in.read() & 0xFF); + } + +} diff --git a/main/client/src/mill/main/client/InputPumper.java b/main/client/src/mill/main/client/InputPumper.java deleted file mode 100644 index 5205be0b..00000000 --- a/main/client/src/mill/main/client/InputPumper.java +++ /dev/null @@ -1,37 +0,0 @@ -package mill.main.client; - -import java.io.InputStream; -import java.io.OutputStream; - -public class InputPumper implements Runnable{ - private InputStream src; - private OutputStream dest; - private Boolean checkAvailable; - public InputPumper(InputStream src, - OutputStream dest, - Boolean checkAvailable){ - this.src = src; - this.dest = dest; - this.checkAvailable = checkAvailable; - } - - boolean running = true; - public void run() { - byte[] buffer = new byte[1024]; - try{ - while(running){ - if (checkAvailable && src.available() == 0) Thread.sleep(2); - else { - int n = src.read(buffer); - if (n == -1) running = false; - else { - dest.write(buffer, 0, n); - dest.flush(); - } - } - } - }catch(Exception e){ - throw new RuntimeException(e); - } - } -} \ No newline at end of file diff --git a/main/client/src/mill/main/client/Lock.java b/main/client/src/mill/main/client/Lock.java deleted file mode 100644 index 6e5f18b0..00000000 --- a/main/client/src/mill/main/client/Lock.java +++ /dev/null @@ -1,14 +0,0 @@ -package mill.main.client; -public abstract class Lock implements AutoCloseable{ - abstract public Locked lock() throws Exception; - abstract public Locked tryLock() throws Exception; - - public void await() throws Exception{ - lock().release(); - } - - /** - * Returns `true` if the lock is *available for taking* - */ - abstract public boolean probe() throws Exception; -} \ No newline at end of file diff --git a/main/client/src/mill/main/client/Locked.java b/main/client/src/mill/main/client/Locked.java deleted file mode 100644 index e6ad3d63..00000000 --- a/main/client/src/mill/main/client/Locked.java +++ /dev/null @@ -1,10 +0,0 @@ -package mill.main.client; - -import java.io.RandomAccessFile; -import java.nio.channels.FileChannel; -import java.util.concurrent.locks.ReentrantLock; - - -public interface Locked{ - public void release() throws Exception; -} \ No newline at end of file diff --git a/main/client/src/mill/main/client/Locks.java b/main/client/src/mill/main/client/Locks.java deleted file mode 100644 index 64259293..00000000 --- a/main/client/src/mill/main/client/Locks.java +++ /dev/null @@ -1,107 +0,0 @@ -package mill.main.client; - -import java.io.RandomAccessFile; -import java.nio.channels.FileChannel; -import java.util.concurrent.locks.ReentrantLock; - -public class Locks implements AutoCloseable{ - public Lock processLock; - public Lock serverLock; - 
public Lock clientLock; - public static Locks files(String lockBase) throws Exception{ - return new Locks(){{ - processLock = new FileLock(lockBase + "/pid"); - - serverLock = new FileLock(lockBase + "/serverLock"); - - clientLock = new FileLock(lockBase + "/clientLock"); - }}; - } - public static Locks memory(){ - return new Locks(){{ - this.processLock = new MemoryLock(); - this.serverLock = new MemoryLock(); - this.clientLock = new MemoryLock(); - }}; - } - - @Override - public void close() throws Exception { - processLock.close(); - serverLock.close(); - clientLock.close(); - } -} -class FileLocked implements Locked{ - private java.nio.channels.FileLock lock; - public FileLocked(java.nio.channels.FileLock lock){ - this.lock = lock; - } - public void release() throws Exception{ - this.lock.release(); - } -} - -class FileLock extends Lock{ - String path; - RandomAccessFile raf; - FileChannel chan; - public FileLock(String path) throws Exception{ - this.path = path; - raf = new RandomAccessFile(path, "rw"); - chan = raf.getChannel(); - } - - public Locked lock() throws Exception{ - return new FileLocked(chan.lock()); - } - public Locked tryLock() throws Exception{ - java.nio.channels.FileLock l = chan.tryLock(); - if (l == null) return null; - else return new FileLocked(l); - } - public boolean probe()throws Exception{ - java.nio.channels.FileLock l = chan.tryLock(); - if (l == null) return false; - else { - l.release(); - return true; - } - } - - @Override - public void close() throws Exception { - raf.close(); - chan.close(); - } -} -class MemoryLocked implements Locked{ - java.util.concurrent.locks.Lock l; - public MemoryLocked(java.util.concurrent.locks.Lock l){ - this.l = l; - } - public void release() throws Exception{ - l.unlock(); - } -} - -class MemoryLock extends Lock{ - ReentrantLock innerLock = new ReentrantLock(true); - - public boolean probe(){ - return !innerLock.isLocked(); - } - public Locked lock() { - innerLock.lock(); - return new MemoryLocked(innerLock); - } - public Locked tryLock() { - if (innerLock.tryLock()) return new MemoryLocked(innerLock); - else return null; - } - - @Override - public void close() throws Exception { - innerLock.unlock(); - } -} diff --git a/main/client/src/mill/main/client/MillClientMain.java b/main/client/src/mill/main/client/MillClientMain.java deleted file mode 100644 index 3857caff..00000000 --- a/main/client/src/mill/main/client/MillClientMain.java +++ /dev/null @@ -1,162 +0,0 @@ -package mill.main.client; - -import org.scalasbt.ipcsocket.*; - -import java.io.*; -import java.net.Socket; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.channels.FileChannel; -import java.util.*; - -public class MillClientMain { - static void initServer(String lockBase, boolean setJnaNoSys) throws IOException,URISyntaxException{ - String[] selfJars = System.getProperty("MILL_CLASSPATH").split(","); - - List l = new ArrayList<>(); - List vmOptions = new ArrayList<>(); - l.add("java"); - final Properties props = System.getProperties(); - for(final String k: props.stringPropertyNames()){ - if (k.startsWith("MILL_") && !"MILL_CLASSPATH".equals(k)) { - vmOptions.add("-D" + k + "=" + props.getProperty(k)); - } - } - if (setJnaNoSys) { - vmOptions.add("-Djna.nosys=true"); - } - if(!Util.isWindows){ - l.addAll(vmOptions); - } else { - final File vmOptionsFile = new File(lockBase, "vmoptions"); - try (PrintWriter out = new PrintWriter(vmOptionsFile)) { - for(String opt: vmOptions) - out.println(opt); - } - l.add("-XX:VMOptionsFile=" + 
vmOptionsFile.getCanonicalPath()); - } - l.add("-cp"); - l.add(String.join(File.pathSeparator, selfJars)); - l.add("mill.main.MillServerMain"); - l.add(lockBase); - - new ProcessBuilder() - .command(l) - .redirectOutput(new java.io.File(lockBase + "/logs")) - .redirectError(new java.io.File(lockBase + "/logs")) - .start(); - } - public static void main(String[] args) throws Exception{ - System.exit(main0(args)); - } - public static int main0(String[] args) throws Exception{ - boolean setJnaNoSys = System.getProperty("jna.nosys") == null; - Map env = System.getenv(); - if (setJnaNoSys) { - System.setProperty("jna.nosys", "true"); - } - int index = 0; - while (index < 5) { - index += 1; - String lockBase = "out/mill-worker-" + index; - new java.io.File(lockBase).mkdirs(); - - try(RandomAccessFile lockFile = new RandomAccessFile(lockBase + "/clientLock", "rw"); - FileChannel channel = lockFile.getChannel(); - java.nio.channels.FileLock tryLock = channel.tryLock(); - Locks locks = Locks.files(lockBase)){ - if (tryLock != null) { - int exitCode = MillClientMain.run( - lockBase, - new Runnable() { - @Override - public void run() { - try{ - initServer(lockBase, setJnaNoSys); - }catch(Exception e){ - throw new RuntimeException(e); - } - } - }, - locks, - System.in, - System.out, - System.err, - args, - env - ); - return exitCode; - } - } finally{ - - } - } - throw new Exception("Reached max process limit: " + 5); - } - - public static int run(String lockBase, - Runnable initServer, - Locks locks, - InputStream stdin, - OutputStream stdout, - OutputStream stderr, - String[] args, - Map env) throws Exception{ - - try(FileOutputStream f = new FileOutputStream(lockBase + "/run")){ - f.write(System.console() != null ? 1 : 0); - Util.writeString(f, System.getProperty("MILL_VERSION")); - Util.writeArgs(args, f); - Util.writeMap(env, f); - } - - boolean serverInit = false; - if (locks.processLock.probe()) { - serverInit = true; - initServer.run(); - } - while(locks.processLock.probe()) Thread.sleep(3); - - // Need to give sometime for Win32NamedPipeSocket to work - // if the server is just initialized - if (serverInit && Util.isWindows) Thread.sleep(1000); - - Socket ioSocket = null; - - long retryStart = System.currentTimeMillis(); - - while(ioSocket == null && System.currentTimeMillis() - retryStart < 1000){ - try{ - ioSocket = Util.isWindows? 
- new Win32NamedPipeSocket(Util.WIN32_PIPE_PREFIX + new File(lockBase).getName()) - : new UnixDomainSocket(lockBase + "/io"); - }catch(Throwable e){ - Thread.sleep(1); - } - } - if (ioSocket == null){ - throw new Exception("Failed to connect to server"); - } - - InputStream outErr = ioSocket.getInputStream(); - OutputStream in = ioSocket.getOutputStream(); - ProxyStreamPumper outPump = new ProxyStreamPumper(outErr, stdout, stderr); - InputPumper inPump = new InputPumper(stdin, in, true); - Thread outThread = new Thread(outPump); - outThread.setDaemon(true); - Thread inThread = new Thread(inPump); - inThread.setDaemon(true); - outThread.start(); - inThread.start(); - - locks.serverLock.await(); - - try(FileInputStream fos = new FileInputStream(lockBase + "/exitCode")){ - return Integer.parseInt(new BufferedReader(new InputStreamReader(fos)).readLine()); - } catch(Throwable e){ - return 1; - } finally{ - ioSocket.close(); - } - } -} diff --git a/main/client/src/mill/main/client/ProxyOutputStream.java b/main/client/src/mill/main/client/ProxyOutputStream.java deleted file mode 100644 index 339e0150..00000000 --- a/main/client/src/mill/main/client/ProxyOutputStream.java +++ /dev/null @@ -1,34 +0,0 @@ -package mill.main.client; - -import java.io.IOException; - -public class ProxyOutputStream extends java.io.OutputStream { - private java.io.OutputStream out; - private int key; - public ProxyOutputStream(java.io.OutputStream out, int key){ - this.out = out; - this.key = key; - } - @Override synchronized public void write(int b) throws IOException { - out.write(key); - out.write(b); - } - @Override synchronized public void write(byte[] b) throws IOException { - write(b, 0, b.length); - } - @Override synchronized public void write(byte[] b, int off, int len) throws IOException { - int i = 0; - while(i < len && i + off < b.length){ - int chunkLength = Math.min(len - i, 127); - out.write(chunkLength * key); - out.write(b, off + i, Math.min(b.length - off - i, chunkLength)); - i += chunkLength; - } - } - @Override public void flush() throws IOException { - out.flush(); - } - @Override public void close() throws IOException { - out.close(); - } -} diff --git a/main/client/src/mill/main/client/ProxyStreamPumper.java b/main/client/src/mill/main/client/ProxyStreamPumper.java deleted file mode 100644 index 977323f3..00000000 --- a/main/client/src/mill/main/client/ProxyStreamPumper.java +++ /dev/null @@ -1,60 +0,0 @@ -package mill.main.client; - - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -public class ProxyStreamPumper implements Runnable{ - private InputStream src; - private OutputStream dest1; - private OutputStream dest2; - public ProxyStreamPumper(InputStream src, OutputStream dest1, OutputStream dest2){ - this.src = src; - this.dest1 = dest1; - this.dest2 = dest2; - } - - public void run() { - byte[] buffer = new byte[1024]; - boolean running = true; - boolean first = true; - while (running) { - try { - int quantity0 = (byte)src.read(); - int quantity = Math.abs(quantity0); - int offset = 0; - int delta = -1; - while(offset < quantity){ - delta = src.read(buffer, offset, quantity - offset); - if (delta == -1) { - running = false; - break; - }else{ - offset += delta; - } - } - - if (delta != -1){ - if (quantity0 > 0) dest1.write(buffer, 0, offset); - else dest2.write(buffer, 0, offset); - } - } catch (IOException e) { - // Win32NamedPipeSocket input stream somehow doesn't return -1, - // instead it throws an IOException whose message contains "ReadFile()". 
- // However, if it throws an IOException before ever reading some bytes, - // it could not connect to the server, so exit. - if (Util.isWindows && e.getMessage().contains("ReadFile()")) { - if (first) { - System.err.println("Failed to connect to server"); - System.exit(1); - } else running = false; - } else { - e.printStackTrace(); - System.exit(1); - } - } - } - } - -} diff --git a/main/client/src/mill/main/client/Util.java b/main/client/src/mill/main/client/Util.java deleted file mode 100644 index 54361734..00000000 --- a/main/client/src/mill/main/client/Util.java +++ /dev/null @@ -1,95 +0,0 @@ -package mill.main.client; - - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.HashMap; -import java.util.Map; - -public class Util { - public static boolean isWindows = System.getProperty("os.name").toLowerCase().startsWith("windows"); - public static boolean isJava9OrAbove = !System.getProperty("java.specification.version").startsWith("1."); - - // Windows named pipe prefix (see https://github.com/sbt/ipcsocket/blob/v1.0.0/README.md) - // Win32NamedPipeServerSocket automatically adds this as a prefix (if it is not already is prefixed), - // but Win32NamedPipeSocket does not - // https://github.com/sbt/ipcsocket/blob/v1.0.0/src/main/java/org/scalasbt/ipcsocket/Win32NamedPipeServerSocket.java#L36 - public static String WIN32_PIPE_PREFIX = "\\\\.\\pipe\\"; - - public static String[] parseArgs(InputStream argStream) throws IOException { - - int argsLength = readInt(argStream); - String[] args = new String[argsLength]; - for (int i = 0; i < args.length; i++) { - args[i] = readString(argStream); - } - return args; - } - public static void writeArgs(String[] args, - OutputStream argStream) throws IOException { - writeInt(argStream, args.length); - for(String arg: args){ - writeString(argStream, arg); - } - } - - /** - * This allows the mill client to pass the environment as he sees it to the - * server (as the server remains alive over the course of several runs and - * does not see the environment changes the client would) - */ - public static void writeMap(Map map, OutputStream argStream) throws IOException { - writeInt(argStream, map.size()); - for (Map.Entry kv : map.entrySet()) { - writeString(argStream, kv.getKey()); - writeString(argStream, kv.getValue()); - } - } - - public static Map parseMap(InputStream argStream) throws IOException { - Map env = new HashMap<>(); - int mapLength = readInt(argStream); - for (int i = 0; i < mapLength; i++) { - String key = readString(argStream); - String value = readString(argStream); - env.put(key, value); - } - return env; - } - - public static String readString(InputStream inputStream) throws IOException { - // Result is between 0 and 255, hence the loop. 
- int length = readInt(inputStream); - byte[] arr = new byte[length]; - int total = 0; - while(total < length){ - int res = inputStream.read(arr, total, length-total); - if (res == -1) throw new IOException("Incomplete String"); - else{ - total += res; - } - } - return new String(arr); - } - - public static void writeString(OutputStream outputStream, String string) throws IOException { - byte[] bytes = string.getBytes(); - writeInt(outputStream, bytes.length); - outputStream.write(bytes); - } - - public static void writeInt(OutputStream out, int i) throws IOException{ - out.write((byte)(i >>> 24)); - out.write((byte)(i >>> 16)); - out.write((byte)(i >>> 8)); - out.write((byte)i); - } - public static int readInt(InputStream in) throws IOException{ - return ((in.read() & 0xFF) << 24) + - ((in.read() & 0xFF) << 16) + - ((in.read() & 0xFF) << 8) + - (in.read() & 0xFF); - } - -} diff --git a/main/client/test/src/ClientTests.java b/main/client/test/src/ClientTests.java new file mode 100644 index 00000000..f3fcf154 --- /dev/null +++ b/main/client/test/src/ClientTests.java @@ -0,0 +1,151 @@ +package mill.main.client; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; +import java.util.*; + +public class ClientTests { + @Test + public void readWriteInt() throws Exception{ + int[] examples = { + 0, 1, 126, 127, 128, 254, 255, 256, 1024, 99999, 1234567, + Integer.MAX_VALUE, Integer.MAX_VALUE / 2, Integer.MIN_VALUE + }; + for(int example0: examples){ + for(int example: new int[]{-example0, example0}){ + ByteArrayOutputStream o = new ByteArrayOutputStream(); + Util.writeInt(o, example); + ByteArrayInputStream i = new ByteArrayInputStream(o.toByteArray()); + int s = Util.readInt(i); + assertEquals(example, s); + assertEquals(i.available(), 0); + } + } + } + @Test + public void readWriteString() throws Exception{ + String[] examples = { + "", + "hello", + "i am cow", + "i am cow\nhear me moo\ni weight twice as much as you", + "我是一个叉烧包", + }; + for(String example: examples){ + checkStringRoundTrip(example); + } + } + + @Test + public void readWriteBigString() throws Exception{ + int[] lengths = {0, 1, 126, 127, 128, 254, 255, 256, 1024, 99999, 1234567}; + for(int i = 0; i < lengths.length; i++){ + final char[] bigChars = new char[lengths[i]]; + java.util.Arrays.fill(bigChars, 'X'); + checkStringRoundTrip(new String(bigChars)); + } + } + + public void checkStringRoundTrip(String example) throws Exception{ + ByteArrayOutputStream o = new ByteArrayOutputStream(); + Util.writeString(o, example); + ByteArrayInputStream i = new ByteArrayInputStream(o.toByteArray()); + String s = Util.readString(i); + assertEquals(example, s); + assertEquals(i.available(), 0); + } + + public byte[] readSamples(String ...samples) throws Exception{ + ByteArrayOutputStream out = new ByteArrayOutputStream(); + for(String sample: samples) { + byte[] bytes = java.nio.file.Files.readAllBytes( + java.nio.file.Paths.get(getClass().getResource(sample).getFile()) + ); + out.write(bytes); + } + return out.toByteArray(); + } + @Test + public void tinyProxyInputOutputStream() throws Exception{ + proxyInputOutputStreams( + Arrays.copyOf(readSamples("/bandung.jpg"), 30), + readSamples(), + 10 + ); + } + @Test + public void leftProxyInputOutputStream() throws Exception{ + proxyInputOutputStreams( + readSamples("/bandung.jpg", "/akanon.mid", "/gettysburg.txt", 
"/pip.tar.gz"), + readSamples(), + 2950 + ); + } + @Test + public void rightProxyInputOutputStream() throws Exception{ + proxyInputOutputStreams( + readSamples(), + readSamples("/bandung.jpg", "/akanon.mid", "/gettysburg.txt", "/pip.tar.gz"), + 3000 + ); + } + @Test + public void mixedProxyInputOutputStream() throws Exception{ + proxyInputOutputStreams( + readSamples("/bandung.jpg", "/gettysburg.txt"), + readSamples("/akanon.mid", "/pip.tar.gz"), + 3050 + ); + } + + /** + * Make sure that when we shove data through both ProxyOutputStreams in + * variously sized chunks, we get the exact same bytes back out from the + * ProxyStreamPumper. + */ + public void proxyInputOutputStreams(byte[] samples1, + byte[] samples2, + int chunkMax) throws Exception{ + + ByteArrayOutputStream pipe = new ByteArrayOutputStream(); + OutputStream src1 = new ProxyOutputStream(pipe, 1); + OutputStream src2 = new ProxyOutputStream(pipe, -1); + + Random random = new Random(31337); + + int i1 = 0; + int i2 = 0; + while(i1 < samples1.length || i2 < samples2.length){ + int chunk = random.nextInt(chunkMax); + if (random.nextBoolean() && i1 < samples1.length){ + src1.write(samples1, i1, Math.min(samples1.length-i1, chunk)); + src1.flush(); + i1 += chunk; + }else if (i2 < samples2.length){ + src2.write(samples2, i2, Math.min(samples2.length-i2, chunk)); + src2.flush(); + i2 += chunk; + } + } + + byte[] bytes = pipe.toByteArray(); + + + ByteArrayOutputStream dest1 = new ByteArrayOutputStream(); + ByteArrayOutputStream dest2 = new ByteArrayOutputStream(); + ProxyStreamPumper pumper = new ProxyStreamPumper( + new ByteArrayInputStream(bytes), + dest1, dest2 + ); + pumper.run(); + assertTrue(Arrays.equals(samples1, dest1.toByteArray())); + assertTrue(Arrays.equals(samples2, dest2.toByteArray())); + } + +} \ No newline at end of file diff --git a/main/client/test/src/mill/main/client/ClientTests.java b/main/client/test/src/mill/main/client/ClientTests.java deleted file mode 100644 index f3fcf154..00000000 --- a/main/client/test/src/mill/main/client/ClientTests.java +++ /dev/null @@ -1,151 +0,0 @@ -package mill.main.client; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.OutputStream; -import java.util.*; - -public class ClientTests { - @Test - public void readWriteInt() throws Exception{ - int[] examples = { - 0, 1, 126, 127, 128, 254, 255, 256, 1024, 99999, 1234567, - Integer.MAX_VALUE, Integer.MAX_VALUE / 2, Integer.MIN_VALUE - }; - for(int example0: examples){ - for(int example: new int[]{-example0, example0}){ - ByteArrayOutputStream o = new ByteArrayOutputStream(); - Util.writeInt(o, example); - ByteArrayInputStream i = new ByteArrayInputStream(o.toByteArray()); - int s = Util.readInt(i); - assertEquals(example, s); - assertEquals(i.available(), 0); - } - } - } - @Test - public void readWriteString() throws Exception{ - String[] examples = { - "", - "hello", - "i am cow", - "i am cow\nhear me moo\ni weight twice as much as you", - "我是一个叉烧包", - }; - for(String example: examples){ - checkStringRoundTrip(example); - } - } - - @Test - public void readWriteBigString() throws Exception{ - int[] lengths = {0, 1, 126, 127, 128, 254, 255, 256, 1024, 99999, 1234567}; - for(int i = 0; i < lengths.length; i++){ - final char[] bigChars = new char[lengths[i]]; - java.util.Arrays.fill(bigChars, 'X'); - checkStringRoundTrip(new String(bigChars)); - } - } - - public void 
checkStringRoundTrip(String example) throws Exception{ - ByteArrayOutputStream o = new ByteArrayOutputStream(); - Util.writeString(o, example); - ByteArrayInputStream i = new ByteArrayInputStream(o.toByteArray()); - String s = Util.readString(i); - assertEquals(example, s); - assertEquals(i.available(), 0); - } - - public byte[] readSamples(String ...samples) throws Exception{ - ByteArrayOutputStream out = new ByteArrayOutputStream(); - for(String sample: samples) { - byte[] bytes = java.nio.file.Files.readAllBytes( - java.nio.file.Paths.get(getClass().getResource(sample).getFile()) - ); - out.write(bytes); - } - return out.toByteArray(); - } - @Test - public void tinyProxyInputOutputStream() throws Exception{ - proxyInputOutputStreams( - Arrays.copyOf(readSamples("/bandung.jpg"), 30), - readSamples(), - 10 - ); - } - @Test - public void leftProxyInputOutputStream() throws Exception{ - proxyInputOutputStreams( - readSamples("/bandung.jpg", "/akanon.mid", "/gettysburg.txt", "/pip.tar.gz"), - readSamples(), - 2950 - ); - } - @Test - public void rightProxyInputOutputStream() throws Exception{ - proxyInputOutputStreams( - readSamples(), - readSamples("/bandung.jpg", "/akanon.mid", "/gettysburg.txt", "/pip.tar.gz"), - 3000 - ); - } - @Test - public void mixedProxyInputOutputStream() throws Exception{ - proxyInputOutputStreams( - readSamples("/bandung.jpg", "/gettysburg.txt"), - readSamples("/akanon.mid", "/pip.tar.gz"), - 3050 - ); - } - - /** - * Make sure that when we shove data through both ProxyOutputStreams in - * variously sized chunks, we get the exact same bytes back out from the - * ProxyStreamPumper. - */ - public void proxyInputOutputStreams(byte[] samples1, - byte[] samples2, - int chunkMax) throws Exception{ - - ByteArrayOutputStream pipe = new ByteArrayOutputStream(); - OutputStream src1 = new ProxyOutputStream(pipe, 1); - OutputStream src2 = new ProxyOutputStream(pipe, -1); - - Random random = new Random(31337); - - int i1 = 0; - int i2 = 0; - while(i1 < samples1.length || i2 < samples2.length){ - int chunk = random.nextInt(chunkMax); - if (random.nextBoolean() && i1 < samples1.length){ - src1.write(samples1, i1, Math.min(samples1.length-i1, chunk)); - src1.flush(); - i1 += chunk; - }else if (i2 < samples2.length){ - src2.write(samples2, i2, Math.min(samples2.length-i2, chunk)); - src2.flush(); - i2 += chunk; - } - } - - byte[] bytes = pipe.toByteArray(); - - - ByteArrayOutputStream dest1 = new ByteArrayOutputStream(); - ByteArrayOutputStream dest2 = new ByteArrayOutputStream(); - ProxyStreamPumper pumper = new ProxyStreamPumper( - new ByteArrayInputStream(bytes), - dest1, dest2 - ); - pumper.run(); - assertTrue(Arrays.equals(samples1, dest1.toByteArray())); - assertTrue(Arrays.equals(samples2, dest2.toByteArray())); - } - -} \ No newline at end of file diff --git a/main/core/src/define/Applicative.scala b/main/core/src/define/Applicative.scala new file mode 100644 index 00000000..5e63b1cc --- /dev/null +++ b/main/core/src/define/Applicative.scala @@ -0,0 +1,108 @@ +package mill.define + +import scala.annotation.{StaticAnnotation, compileTimeOnly} +import scala.language.higherKinds +import scala.reflect.macros.blackbox.Context + +/** + * A generic Applicative-functor macro: translates calls to + * + * Applier.apply{ ... applyable1.apply() ... applyable2.apply() ... } + * + * into + * + * Applier.zipMap(applyable1, applyable2){ (a1, a2, ctx) => ... a1 ... a2 ... 
} + */ +object Applicative { + trait ApplyHandler[M[+_]]{ + def apply[T](t: M[T]): T + } + object ApplyHandler{ + @compileTimeOnly("Target#apply() can only be used with a T{...} block") + implicit def defaultApplyHandler[M[+_]]: ApplyHandler[M] = ??? + } + trait Applyable[M[+_], +T]{ + def self: M[T] + def apply()(implicit handler: ApplyHandler[M]): T = handler(self) + } + + type Id[+T] = T + + trait Applyer[W[_], T[_], Z[_], Ctx] extends ApplyerGenerated[T, Z, Ctx] { + def ctx()(implicit c: Ctx) = c + def underlying[A](v: W[A]): T[_] + + def zipMap[R]()(cb: Ctx => Z[R]) = mapCtx(zip()){ (_, ctx) => cb(ctx)} + def zipMap[A, R](a: T[A])(f: (A, Ctx) => Z[R]) = mapCtx(a)(f) + def zip(): T[Unit] + def zip[A](a: T[A]): T[Tuple1[A]] + } + + def impl[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context) + (t: c.Expr[T]): c.Expr[M[T]] = { + impl0(c)(t.tree)(implicitly[c.WeakTypeTag[T]], implicitly[c.WeakTypeTag[Ctx]]) + } + def impl0[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context) + (t: c.Tree): c.Expr[M[T]] = { + import c.universe._ + def rec(t: Tree): Iterator[c.Tree] = Iterator(t) ++ t.children.flatMap(rec(_)) + + val bound = collection.mutable.Buffer.empty[(c.Tree, ValDef)] + val targetApplySym = typeOf[Applyable[Nothing, _]].member(TermName("apply")) + + // Derived from @olafurpg's + // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608 + val defs = rec(t).filter(_.isDef).map(_.symbol).toSet + + val ctxName = TermName(c.freshName("ctx")) + val ctxSym = c.internal.newTermSymbol(c.internal.enclosingOwner, ctxName) + c.internal.setInfo(ctxSym, weakTypeOf[Ctx]) + + val transformed = c.internal.typingTransform(t) { + case (t @ q"$fun.apply()($handler)", api) if t.symbol == targetApplySym => + + val localDefs = rec(fun).filter(_.isDef).map(_.symbol).toSet + val banned = rec(t).filter(x => defs(x.symbol) && !localDefs(x.symbol)) + + if (banned.hasNext){ + val banned0 = banned.next() + c.abort( + banned0.pos, + "Target#apply() call cannot use `" + banned0.symbol + "` defined within the T{...} block" + ) + } + val tempName = c.freshName(TermName("tmp")) + val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName) + c.internal.setInfo(tempSym, t.tpe) + val tempIdent = Ident(tempSym) + c.internal.setType(tempIdent, t.tpe) + c.internal.setFlag(tempSym, (1L << 44).asInstanceOf[c.universe.FlagSet]) + bound.append((q"${c.prefix}.underlying($fun)", c.internal.valDef(tempSym))) + tempIdent + case (t, api) + if t.symbol != null + && t.symbol.annotations.exists(_.tree.tpe =:= typeOf[mill.api.Ctx.ImplicitStub]) => + + val tempIdent = Ident(ctxSym) + c.internal.setType(tempIdent, t.tpe) + c.internal.setFlag(ctxSym, (1L << 44).asInstanceOf[c.universe.FlagSet]) + tempIdent + + case (t, api) => api.default(t) + } + + val (exprs, bindings) = bound.unzip + + + val ctxBinding = c.internal.valDef(ctxSym) + + val callback = c.typecheck(q"(..$bindings, $ctxBinding) => $transformed ") + + val res = q"${c.prefix}.zipMap(..$exprs){ $callback }" + + c.internal.changeOwner(transformed, c.internal.enclosingOwner, callback.symbol) + + c.Expr[M[T]](res) + } + +} diff --git a/main/core/src/define/BaseModule.scala b/main/core/src/define/BaseModule.scala new file mode 100644 index 00000000..cd79f73e --- /dev/null +++ b/main/core/src/define/BaseModule.scala @@ -0,0 +1,56 @@ +package mill.define + + +object BaseModule{ + case class Implicit(value: BaseModule) +} + +abstract class BaseModule(millSourcePath0: os.Path, + external0: Boolean = false, + foreign0 : Boolean = false) + (implicit 
millModuleEnclosing0: sourcecode.Enclosing, + millModuleLine0: sourcecode.Line, + millName0: sourcecode.Name, + millFile0: sourcecode.File, + caller: Caller) + extends Module()( + mill.define.Ctx.make( + implicitly, + implicitly, + implicitly, + BasePath(millSourcePath0), + Segments(), + mill.util.Router.Overrides(0), + Ctx.External(external0), + Ctx.Foreign(foreign0), + millFile0, + caller + ) + ){ + // A BaseModule should provide an empty Segments list to it's children, since + // it is the root of the module tree, and thus must not include it's own + // sourcecode.Name as part of the list, + override implicit def millModuleSegments: Segments = Segments() + override def millSourcePath = millOuterCtx.millSourcePath + override implicit def millModuleBasePath: BasePath = BasePath(millSourcePath) + implicit def millImplicitBaseModule: BaseModule.Implicit = BaseModule.Implicit(this) + def millDiscover: Discover[this.type] +} + + +abstract class ExternalModule(implicit millModuleEnclosing0: sourcecode.Enclosing, + millModuleLine0: sourcecode.Line, + millName0: sourcecode.Name) + extends BaseModule(ammonite.ops.pwd, external0 = true, foreign0 = false)( + implicitly, implicitly, implicitly, implicitly, Caller(()) + ){ + + implicit def millDiscoverImplicit: Discover[_] = millDiscover + assert( + !" #".exists(millModuleEnclosing0.value.contains(_)), + "External modules must be at a top-level static path, not " + millModuleEnclosing0.value + ) + override implicit def millModuleSegments = { + Segments(millModuleEnclosing0.value.split('.').map(Segment.Label):_*) + } +} diff --git a/main/core/src/define/Caller.scala b/main/core/src/define/Caller.scala new file mode 100644 index 00000000..6d2d4d1d --- /dev/null +++ b/main/core/src/define/Caller.scala @@ -0,0 +1,13 @@ +package mill.define + +import sourcecode.Compat.Context +import language.experimental.macros +case class Caller(value: Any) +object Caller { + def apply()(implicit c: Caller) = c.value + implicit def generate: Caller = macro impl + def impl(c: Context): c.Tree = { + import c.universe._ + q"new _root_.mill.define.Caller(this)" + } +} \ No newline at end of file diff --git a/main/core/src/define/Cross.scala b/main/core/src/define/Cross.scala new file mode 100644 index 00000000..aa730e0d --- /dev/null +++ b/main/core/src/define/Cross.scala @@ -0,0 +1,90 @@ +package mill.define +import language.experimental.macros +import scala.reflect.macros.blackbox + + +object Cross{ + case class Factory[T](make: (Product, mill.define.Ctx) => T) + + object Factory{ + implicit def make[T]: Factory[T] = macro makeImpl[T] + def makeImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Factory[T]] = { + import c.universe._ + val tpe = weakTypeOf[T] + + val primaryConstructorArgs = + tpe.typeSymbol.asClass.primaryConstructor.typeSignature.paramLists.head + + val argTupleValues = + for((a, n) <- primaryConstructorArgs.zipWithIndex) + yield q"v.productElement($n).asInstanceOf[${a.info}]" + + val instance = c.Expr[(Product, mill.define.Ctx) => T]( + q"{ (v, ctx0) => new $tpe(..$argTupleValues){ override def millOuterCtx = ctx0 } }" + ) + + reify { mill.define.Cross.Factory[T](instance.splice) } + } + } + + trait Resolver[-T]{ + def resolve[V <: T](c: Cross[V]): V + } +} + +/** + * Models "cross-builds": sets of duplicate builds which differ only in the + * value of one or more "case" variables whose values are determined at runtime. + * Used via: + * + * object foo extends Cross[FooModule]("bar", "baz", "qux") + * class FooModule(v: String) extends Module{ + * ... 
+ * } + */ +class Cross[T](cases: Any*) + (implicit ci: Cross.Factory[T], + ctx: mill.define.Ctx) extends mill.define.Module()(ctx) { + + override lazy val millModuleDirectChildren = + this.millInternal.reflectNestedObjects[Module] ++ + items.collect{case (k, v: mill.define.Module) => v} + + val items = for(c0 <- cases.toList) yield{ + val c = c0 match{ + case p: Product => p + case v => Tuple1(v) + } + val crossValues = c.productIterator.toList + val relPath = ctx.segment.pathSegments + val sub = ci.make( + c, + ctx.copy( + segments = ctx.segments ++ Seq(ctx.segment), + millSourcePath = ctx.millSourcePath / relPath, + segment = Segment.Cross(crossValues) + ) + ) + (crossValues, sub) + } + val itemMap = items.toMap + + /** + * Fetch the cross module corresponding to the given cross values + */ + def get(args: Seq[Any]) = itemMap(args.toList) + + /** + * Fetch the cross module corresponding to the given cross values + */ + def apply(arg0: Any, args: Any*) = itemMap(arg0 :: args.toList) + + /** + * Fetch the relevant cross module given the implicit resolver you have in + * scope. This is often the first cross module whose cross-version is + * compatible with the current module. + */ + def apply[V >: T]()(implicit resolver: Cross.Resolver[V]): T = { + resolver.resolve(this.asInstanceOf[Cross[V]]).asInstanceOf[T] + } +} \ No newline at end of file diff --git a/main/core/src/define/Ctx.scala b/main/core/src/define/Ctx.scala new file mode 100644 index 00000000..c21e53b4 --- /dev/null +++ b/main/core/src/define/Ctx.scala @@ -0,0 +1,100 @@ +package mill.define + + +import scala.annotation.implicitNotFound + +sealed trait Segment{ + def pathSegments: Seq[String] = this match{ + case Segment.Label(s) => List(s) + case Segment.Cross(vs) => vs.map(_.toString) + } +} +object Segment{ + case class Label(value: String) extends Segment{ + assert(!value.contains('.')) + } + case class Cross(value: Seq[Any]) extends Segment +} + +case class BasePath(value: os.Path) + + +/** + * Models a path with the Mill build hierarchy, e.g. + * + * amm.util[2.11].test.compile + * + * .-separated segments are [[Segment.Label]]s, while []-delimited + * segments are [[Segment.Cross]]s + */ +case class Segments(value: Segment*){ + def ++(other: Seq[Segment]): Segments = Segments(value ++ other:_*) + def ++(other: Segments): Segments = Segments(value ++ other.value:_*) + def parts = value.toList match { + case Nil => Nil + case Segment.Label(head) :: rest => + val stringSegments = rest.flatMap{ + case Segment.Label(s) => Seq(s) + case Segment.Cross(vs) => vs.map(_.toString) + } + head +: stringSegments + } + def last : Segments = Segments(value.last) + def render = value.toList match { + case Nil => "" + case Segment.Label(head) :: rest => + val stringSegments = rest.map{ + case Segment.Label(s) => "." 
+ s + case Segment.Cross(vs) => "[" + vs.mkString(",") + "]" + } + head + stringSegments.mkString + } +} + +object Segments { + + def labels(values : String*) : Segments = + Segments(values.map(Segment.Label):_*) + +} + +@implicitNotFound("Modules, Targets and Commands can only be defined within a mill Module") +case class Ctx(enclosing: String, + lineNum: Int, + segment: Segment, + millSourcePath: os.Path, + segments: Segments, + overrides: Int, + external: Boolean, + foreign: Boolean, + fileName: String, + enclosingCls: Class[_]){ +} + +object Ctx{ + case class External(value: Boolean) + case class Foreign(value : Boolean) + implicit def make(implicit millModuleEnclosing0: sourcecode.Enclosing, + millModuleLine0: sourcecode.Line, + millName0: sourcecode.Name, + millModuleBasePath0: BasePath, + segments0: Segments, + overrides0: mill.util.Router.Overrides, + external0: External, + foreign0: Foreign, + fileName: sourcecode.File, + enclosing: Caller): Ctx = { + Ctx( + millModuleEnclosing0.value, + millModuleLine0.value, + Segment.Label(millName0.value), + millModuleBasePath0.value, + segments0, + overrides0.value, + external0.value, + foreign0.value, + fileName.value, + enclosing.value.getClass + ) + } +} diff --git a/main/core/src/define/Discover.scala b/main/core/src/define/Discover.scala new file mode 100644 index 00000000..f0c668e6 --- /dev/null +++ b/main/core/src/define/Discover.scala @@ -0,0 +1,89 @@ +package mill.define +import mill.util.Router.EntryPoint + +import language.experimental.macros +import sourcecode.Compat.Context + +import scala.collection.mutable +import scala.reflect.macros.blackbox + + + +case class Discover[T](value: Map[Class[_], Seq[(Int, EntryPoint[_])]]) +object Discover { + def apply[T]: Discover[T] = macro applyImpl[T] + + def applyImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Discover[T]] = { + import c.universe._ + import compat._ + val seen = mutable.Set.empty[Type] + def rec(tpe: Type): Unit = { + if (!seen(tpe)){ + seen.add(tpe) + for{ + m <- tpe.members + memberTpe = m.typeSignature + if memberTpe.resultType <:< typeOf[mill.define.Module] && memberTpe.paramLists.isEmpty + } rec(memberTpe.resultType) + + if (tpe <:< typeOf[mill.define.Cross[_]]){ + val inner = typeOf[Cross[_]] + .typeSymbol + .asClass + .typeParams + .head + .asType + .toType + .asSeenFrom(tpe, typeOf[Cross[_]].typeSymbol) + + rec(inner) + } + } + } + rec(weakTypeOf[T]) + + def assertParamListCounts(methods: Iterable[router.c.universe.MethodSymbol], + cases: (c.Type, Int, String)*) = { + for (m <- methods.toList){ + for ((tt, n, label) <- cases){ + if (m.returnType <:< tt.asInstanceOf[router.c.Type] && + m.paramLists.length != n){ + c.abort( + m.pos.asInstanceOf[c.Position], + s"$label definitions must have $n parameter list" + (if (n == 1) "" else "s") + ) + } + } + } + } + val router = new mill.util.Router(c) + val mapping = for{ + discoveredModuleType <- seen + val curCls = discoveredModuleType.asInstanceOf[router.c.Type] + val methods = router.getValsOrMeths(curCls) + val overridesRoutes = { + assertParamListCounts( + methods, + (weakTypeOf[mill.define.Sources], 0, "`T.sources`"), + (weakTypeOf[mill.define.Input[_]], 0, "`T.input`"), + (weakTypeOf[mill.define.Persistent[_]], 0, "`T.persistent`"), + (weakTypeOf[mill.define.Target[_]], 0, "`T{...}`"), + (weakTypeOf[mill.define.Command[_]], 1, "`T.command`") + ) + + for{ + m <- methods.toList + if m.returnType <:< weakTypeOf[mill.define.Command[_]].asInstanceOf[router.c.Type] + } yield (m.overrides.length, router.extractMethod(m, 
curCls).asInstanceOf[c.Tree]) + + } + if overridesRoutes.nonEmpty + } yield { + val lhs = q"classOf[${discoveredModuleType.typeSymbol.asClass}]" + val rhs = q"scala.Seq[(Int, mill.util.Router.EntryPoint[_])](..$overridesRoutes)" + q"$lhs -> $rhs" + } + + c.Expr[Discover[T]](q"mill.define.Discover(scala.collection.immutable.Map(..$mapping))") + } +} diff --git a/main/core/src/define/Graph.scala b/main/core/src/define/Graph.scala new file mode 100644 index 00000000..3119f2fb --- /dev/null +++ b/main/core/src/define/Graph.scala @@ -0,0 +1,72 @@ +package mill.define + +import mill.eval.Tarjans +import mill.util.MultiBiMap +import mill.util.Strict.Agg + +object Graph { + + /** + * The `values` [[Agg]] is guaranteed to be topological sorted and cycle free. + * That's why the constructor is package private. + * @see [[Graph.topoSorted]] + */ + class TopoSorted private[Graph] (val values: Agg[Task[_]]) + + def groupAroundImportantTargets[T](topoSortedTargets: TopoSorted) + (important: PartialFunction[Task[_], T]): MultiBiMap[T, Task[_]] = { + + val output = new MultiBiMap.Mutable[T, Task[_]]() + for ((target, t) <- topoSortedTargets.values.flatMap(t => important.lift(t).map((t, _)))) { + + val transitiveTargets = new Agg.Mutable[Task[_]] + def rec(t: Task[_]): Unit = { + if (transitiveTargets.contains(t)) () // do nothing + else if (important.isDefinedAt(t) && t != target) () // do nothing + else { + transitiveTargets.append(t) + t.inputs.foreach(rec) + } + } + rec(target) + output.addAll(t, topoSorted(transitiveTargets).values) + } + output + } + + /** + * Collects all transitive dependencies (targets) of the given targets, + * including the given targets. + */ + def transitiveTargets(sourceTargets: Agg[Task[_]]): Agg[Task[_]] = { + val transitiveTargets = new Agg.Mutable[Task[_]] + def rec(t: Task[_]): Unit = { + if (transitiveTargets.contains(t)) () // do nothing + else { + transitiveTargets.append(t) + t.inputs.foreach(rec) + } + } + + sourceTargets.items.foreach(rec) + transitiveTargets + } + /** + * Takes the given targets, finds all the targets they transitively depend + * on, and sort them topologically. Fails if there are dependency cycles + */ + def topoSorted(transitiveTargets: Agg[Task[_]]): TopoSorted = { + + val indexed = transitiveTargets.indexed + val targetIndices = indexed.zipWithIndex.toMap + + val numberedEdges = + for(t <- transitiveTargets.items) + yield t.inputs.collect(targetIndices) + + val sortedClusters = Tarjans(numberedEdges) + val nonTrivialClusters = sortedClusters.filter(_.length > 1) + assert(nonTrivialClusters.isEmpty, nonTrivialClusters) + new TopoSorted(Agg.from(sortedClusters.flatten.map(indexed))) + } +} diff --git a/main/core/src/define/Module.scala b/main/core/src/define/Module.scala new file mode 100644 index 00000000..a8fc5be7 --- /dev/null +++ b/main/core/src/define/Module.scala @@ -0,0 +1,96 @@ +package mill.define + +import java.lang.reflect.Modifier + +import mill.util.ParseArgs + +import scala.language.experimental.macros +import scala.reflect.ClassTag +import scala.reflect.NameTransformer.decode + + +/** + * `Module` is a class meant to be extended by `trait`s *only*, in order to + * propagate the implicit parameters forward to the final concrete + * instantiation site so they can capture the enclosing/line information of + * the concrete instance. 
+ */ +class Module(implicit outerCtx0: mill.define.Ctx) + extends mill.moduledefs.Cacher{ outer => + + /** + * Miscellaneous machinery around traversing & querying the build hierarchy, + * that should not be needed by normal users of Mill + */ + object millInternal extends Module.Internal(this) + + lazy val millModuleDirectChildren = millInternal.reflectNestedObjects[Module].toSeq + def millOuterCtx = outerCtx0 + def millSourcePath: os.Path = millOuterCtx.millSourcePath / millOuterCtx.segment.pathSegments + implicit def millModuleExternal: Ctx.External = Ctx.External(millOuterCtx.external) + implicit def millModuleShared: Ctx.Foreign = Ctx.Foreign(millOuterCtx.foreign) + implicit def millModuleBasePath: BasePath = BasePath(millSourcePath) + implicit def millModuleSegments: Segments = { + millOuterCtx.segments ++ Seq(millOuterCtx.segment) + } + override def toString = millModuleSegments.render +} + +object Module{ + class Internal(outer: Module){ + def traverse[T](f: Module => Seq[T]): Seq[T] = { + def rec(m: Module): Seq[T] = f(m) ++ m.millModuleDirectChildren.flatMap(rec) + rec(outer) + } + + lazy val modules = traverse(Seq(_)) + lazy val segmentsToModules = modules.map(m => (m.millModuleSegments, m)).toMap + + lazy val targets = traverse{_.millInternal.reflectAll[Target[_]]}.toSet + + lazy val segmentsToTargets = targets + .map(t => (t.ctx.segments, t)) + .toMap + + // Ensure we do not propagate the implicit parameters as implicits within + // the body of any inheriting class/trait/objects, as it would screw up any + // one else trying to use sourcecode.{Enclosing,Line} to capture debug info + lazy val millModuleEnclosing = outer.millOuterCtx.enclosing + lazy val millModuleLine = outer.millOuterCtx.lineNum + + private def reflect[T: ClassTag](filter: (String) => Boolean): Array[T] = { + val runtimeCls = implicitly[ClassTag[T]].runtimeClass + for{ + m <- outer.getClass.getMethods.sortBy(_.getName) + n = decode(m.getName) + if + filter(n) && + ParseArgs.isLegalIdentifier(n) && + m.getParameterCount == 0 && + (m.getModifiers & Modifier.STATIC) == 0 && + (m.getModifiers & Modifier.ABSTRACT) == 0 && + runtimeCls.isAssignableFrom(m.getReturnType) + } yield m.invoke(outer).asInstanceOf[T] + } + + def reflectAll[T: ClassTag]: Array[T] = reflect(Function.const(true)) + + def reflectSingle[T: ClassTag](label: String): Option[T] = reflect(_ == label).headOption + + // For some reason, this fails to pick up concrete `object`s nested directly within + // another top-level concrete `object`. 
This is fine for now, since Mill's Ammonite + // script/REPL runner always wraps user code in a wrapper object/trait + def reflectNestedObjects[T: ClassTag] = { + (reflectAll[T] ++ + outer + .getClass + .getClasses + .filter(implicitly[ClassTag[T]].runtimeClass isAssignableFrom _) + .flatMap(c => c.getFields.find(_.getName == "MODULE$").map(_.get(c).asInstanceOf[T])) + ).distinct + } + } +} +trait TaskModule extends Module { + def defaultCommandName(): String +} diff --git a/main/core/src/define/Task.scala b/main/core/src/define/Task.scala new file mode 100644 index 00000000..a464bf18 --- /dev/null +++ b/main/core/src/define/Task.scala @@ -0,0 +1,344 @@ +package mill.define + +import ammonite.main.Router.Overrides +import mill.define.Applicative.Applyable +import mill.eval.{PathRef, Result} +import mill.util.EnclosingClass +import sourcecode.Compat.Context +import upickle.default.{ReadWriter => RW, Reader => R, Writer => W} + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + + +/** + * Models a single node in the Mill build graph, with a list of inputs and a + * single output of type [[T]]. + * + * Generally not instantiated manually, but instead constructed via the + * [[Target.apply]] & similar macros. + */ +abstract class Task[+T] extends Task.Ops[T] with Applyable[Task, T]{ + /** + * What other Targets does this Target depend on? + */ + val inputs: Seq[Task[_]] + + /** + * Evaluate this target + */ + def evaluate(args: mill.api.Ctx): Result[T] + + /** + * Even if this target's inputs did not change, does it need to re-evaluate + * anyway? + */ + def sideHash: Int = 0 + + def flushDest: Boolean = true + + def asTarget: Option[Target[T]] = None + def asCommand: Option[Command[T]] = None + def asWorker: Option[Worker[T]] = None + def self = this +} + +trait NamedTask[+T] extends Task[T]{ + def ctx: mill.define.Ctx + def label = ctx.segment match{case Segment.Label(v) => v} + override def toString = ctx.segments.render +} +trait Target[+T] extends NamedTask[T]{ + override def asTarget = Some(this) + def readWrite: RW[_] +} + +object Target extends TargetGenerated with Applicative.Applyer[Task, Task, Result, mill.api.Ctx] { + + implicit def apply[T](t: T) + (implicit rw: RW[T], + ctx: mill.define.Ctx): Target[T] = macro targetImpl[T] + + def targetImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[T]) + (rw: c.Expr[RW[T]], + ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { + import c.universe._ + val lhs = Applicative.impl0[Task, T, mill.api.Ctx](c)(reify(Result.Success(t.splice)).tree) + + mill.moduledefs.Cacher.impl0[TargetImpl[T]](c)( + reify( + new TargetImpl[T](lhs.splice, ctx.splice, rw.splice) + ) + ) + } + + implicit def apply[T](t: Result[T]) + (implicit rw: RW[T], + ctx: mill.define.Ctx): Target[T] = macro targetResultImpl[T] + + def targetResultImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[Result[T]]) + (rw: c.Expr[RW[T]], + ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { + import c.universe._ + mill.moduledefs.Cacher.impl0[Target[T]](c)( + reify( + new TargetImpl[T]( + Applicative.impl0[Task, T, mill.api.Ctx](c)(t.tree).splice, + ctx.splice, + rw.splice + ) + ) + ) + } + + def apply[T](t: Task[T]) + (implicit rw: RW[T], + ctx: mill.define.Ctx): Target[T] = macro targetTaskImpl[T] + + def targetTaskImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[Task[T]]) + (rw: c.Expr[RW[T]], + ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { + import c.universe._ + mill.moduledefs.Cacher.impl0[Target[T]](c)( + reify( + new 
TargetImpl[T](t.splice, ctx.splice, rw.splice) + ) + ) + } + + def sources(values: Result[os.Path]*) + (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl1 + + def sourcesImpl1(c: Context) + (values: c.Expr[Result[os.Path]]*) + (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = { + import c.universe._ + val wrapped = + for (value <- values.toList) + yield Applicative.impl0[Task, PathRef, mill.api.Ctx](c)( + reify(value.splice.map(PathRef(_))).tree + ).tree + + mill.moduledefs.Cacher.impl0[Sources](c)( + reify( + new Sources( + Task.sequence(c.Expr[List[Task[PathRef]]](q"scala.List(..$wrapped)").splice), + ctx.splice + ) + ) + ) + } + + def sources(values: Result[Seq[PathRef]]) + (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl2 + + def sourcesImpl2(c: Context) + (values: c.Expr[Result[Seq[PathRef]]]) + (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = { + import c.universe._ + + + mill.moduledefs.Cacher.impl0[Sources](c)( + reify( + new Sources( + Applicative.impl0[Task, Seq[PathRef], mill.api.Ctx](c)(values.tree).splice, + ctx.splice + ) + ) + ) + } + def input[T](value: Result[T]) + (implicit rw: RW[T], + ctx: mill.define.Ctx): Input[T] = macro inputImpl[T] + + def inputImpl[T: c.WeakTypeTag](c: Context) + (value: c.Expr[T]) + (rw: c.Expr[RW[T]], + ctx: c.Expr[mill.define.Ctx]): c.Expr[Input[T]] = { + import c.universe._ + + mill.moduledefs.Cacher.impl0[Input[T]](c)( + reify( + new Input[T]( + Applicative.impl[Task, T, mill.api.Ctx](c)(value).splice, + ctx.splice, + rw.splice + ) + ) + ) + } + + def command[T](t: Task[T]) + (implicit ctx: mill.define.Ctx, + w: W[T], + cls: EnclosingClass, + overrides: Overrides): Command[T] = { + new Command(t, ctx, w, cls.value, overrides.value) + } + + def command[T](t: Result[T]) + (implicit w: W[T], + ctx: mill.define.Ctx, + cls: EnclosingClass, + overrides: Overrides): Command[T] = macro commandImpl[T] + + def commandImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[T]) + (w: c.Expr[W[T]], + ctx: c.Expr[mill.define.Ctx], + cls: c.Expr[EnclosingClass], + overrides: c.Expr[Overrides]): c.Expr[Command[T]] = { + import c.universe._ + reify( + new Command[T]( + Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, + ctx.splice, + w.splice, + cls.splice.value, + overrides.splice.value + ) + ) + } + + def worker[T](t: Task[T]) + (implicit ctx: mill.define.Ctx): Worker[T] = new Worker(t, ctx) + + def worker[T](t: Result[T]) + (implicit ctx: mill.define.Ctx): Worker[T] = macro workerImpl[T] + + def workerImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[T]) + (ctx: c.Expr[mill.define.Ctx]): c.Expr[Worker[T]] = { + import c.universe._ + reify( + new Worker[T](Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, ctx.splice) + ) + } + + def task[T](t: Result[T]): Task[T] = macro Applicative.impl[Task, T, mill.api.Ctx] + + def persistent[T](t: Result[T])(implicit rw: RW[T], + ctx: mill.define.Ctx): Persistent[T] = macro persistentImpl[T] + + def persistentImpl[T: c.WeakTypeTag](c: Context) + (t: c.Expr[T]) + (rw: c.Expr[RW[T]], + ctx: c.Expr[mill.define.Ctx]): c.Expr[Persistent[T]] = { + import c.universe._ + + + mill.moduledefs.Cacher.impl0[Persistent[T]](c)( + reify( + new Persistent[T]( + Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, + ctx.splice, + rw.splice + ) + ) + ) + } + + type TT[+X] = Task[X] + def makeT[X](inputs0: Seq[TT[_]], evaluate0: mill.api.Ctx => Result[X]) = new Task[X] { + val inputs = inputs0 + def evaluate(x: mill.api.Ctx) = evaluate0(x) + } + + def underlying[A](v: Task[A]) = v + def mapCtx[A, B](t: Task[A])(f: 
(A, mill.api.Ctx) => Result[B]) = t.mapDest(f) + def zip() = new Task.Task0(()) + def zip[A](a: Task[A]) = a.map(Tuple1(_)) + def zip[A, B](a: Task[A], b: Task[B]) = a.zip(b) +} + +abstract class NamedTaskImpl[+T](ctx0: mill.define.Ctx, t: Task[T]) extends NamedTask[T]{ + def evaluate(args: mill.api.Ctx) = args[T](0) + val ctx = ctx0.copy(segments = ctx0.segments ++ Seq(ctx0.segment)) + val inputs = Seq(t) +} + +class TargetImpl[+T](t: Task[T], + ctx0: mill.define.Ctx, + val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T] { +} + +class Command[+T](t: Task[T], + ctx0: mill.define.Ctx, + val writer: W[_], + val cls: Class[_], + val overrides: Int) extends NamedTaskImpl[T](ctx0, t) { + override def asCommand = Some(this) +} + +class Worker[+T](t: Task[T], ctx0: mill.define.Ctx) extends NamedTaskImpl[T](ctx0, t) { + override def flushDest = false + override def asWorker = Some(this) +} +class Persistent[+T](t: Task[T], + ctx0: mill.define.Ctx, + readWrite: RW[_]) + extends TargetImpl[T](t, ctx0, readWrite) { + + override def flushDest = false +} +class Input[T](t: Task[T], + ctx0: mill.define.Ctx, + val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T]{ + override def sideHash = util.Random.nextInt() +} +class Sources(t: Task[Seq[PathRef]], + ctx0: mill.define.Ctx) extends Input[Seq[PathRef]]( + t, + ctx0, + RW.join( + upickle.default.SeqLikeReader[Seq, PathRef], + upickle.default.SeqLikeWriter[Seq, PathRef] + ) +) +object Task { + + class Task0[T](t: T) extends Task[T]{ + lazy val t0 = t + val inputs = Nil + def evaluate(args: mill.api.Ctx) = t0 + } + + abstract class Ops[+T]{ this: Task[T] => + def map[V](f: T => V) = new Task.Mapped(this, f) + def mapDest[V](f: (T, mill.api.Ctx) => Result[V]) = new Task.MappedDest(this, f) + + def filter(f: T => Boolean) = this + def withFilter(f: T => Boolean) = this + def zip[V](other: Task[V]) = new Task.Zipped(this, other) + + } + + def traverse[T, V](source: Seq[T])(f: T => Task[V]) = { + new Sequence[V](source.map(f)) + } + def sequence[T](source: Seq[Task[T]]) = new Sequence[T](source) + + class Sequence[+T](inputs0: Seq[Task[T]]) extends Task[Seq[T]]{ + val inputs = inputs0 + def evaluate(args: mill.api.Ctx) = { + for (i <- 0 until args.length) + yield args(i).asInstanceOf[T] + } + + } + class Mapped[+T, +V](source: Task[T], f: T => V) extends Task[V]{ + def evaluate(args: mill.api.Ctx) = f(args(0)) + val inputs = List(source) + } + class MappedDest[+T, +V](source: Task[T], f: (T, mill.api.Ctx) => Result[V]) extends Task[V]{ + def evaluate(args: mill.api.Ctx) = f(args(0), args) + val inputs = List(source) + } + class Zipped[+T, +V](source1: Task[T], source2: Task[V]) extends Task[(T, V)]{ + def evaluate(args: mill.api.Ctx) = (args(0), args(1)) + val inputs = List(source1, source2) + } +} diff --git a/main/core/src/eval/Evaluator.scala b/main/core/src/eval/Evaluator.scala new file mode 100644 index 00000000..8709064e --- /dev/null +++ b/main/core/src/eval/Evaluator.scala @@ -0,0 +1,443 @@ +package mill.eval + +import java.net.URLClassLoader + +import scala.collection.JavaConverters._ + +import mill.util.Router.EntryPoint +import ammonite.runtime.SpecialClassLoader +import mill.define.{Ctx => _, _} +import mill.api.Result.OuterStack +import mill.util +import mill.util._ +import mill.util.Strict.Agg + +import scala.collection.mutable +import scala.util.control.NonFatal +case class Labelled[T](task: NamedTask[T], + segments: Segments){ + def format = task match{ + case t: Target[T] => 
Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]]) + case _ => None + } + def writer = task match{ + case t: mill.define.Command[T] => Some(t.writer.asInstanceOf[upickle.default.Writer[T]]) + case t: Target[T] => Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]]) + case _ => None + } +} + +case class Evaluator(home: os.Path, + outPath: os.Path, + externalOutPath: os.Path, + rootModule: mill.define.BaseModule, + log: Logger, + classLoaderSig: Seq[(Either[String, os.Path], Long)] = Evaluator.classLoaderSig, + workerCache: mutable.Map[Segments, (Int, Any)] = mutable.Map.empty, + env : Map[String, String] = Evaluator.defaultEnv){ + val classLoaderSignHash = classLoaderSig.hashCode() + def evaluate(goals: Agg[Task[_]]): Evaluator.Results = { + os.makeDir.all(outPath) + + val (sortedGroups, transitive) = Evaluator.plan(rootModule, goals) + + val evaluated = new Agg.Mutable[Task[_]] + val results = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] + + val timings = mutable.ArrayBuffer.empty[(Either[Task[_], Labelled[_]], Int, Boolean)] + for (((terminal, group), i) <- sortedGroups.items().zipWithIndex){ + val startTime = System.currentTimeMillis() + // Increment the counter message by 1 to go from 1/10 to 10/10 instead of 0/10 to 9/10 + val counterMsg = (i+1) + "/" + sortedGroups.keyCount + val (newResults, newEvaluated, cached) = evaluateGroupCached( + terminal, + group, + results, + counterMsg + ) + + for(ev <- newEvaluated){ + evaluated.append(ev) + } + for((k, v) <- newResults) { + results.put(k, v) + } + val endTime = System.currentTimeMillis() + + timings.append((terminal, (endTime - startTime).toInt, cached)) + } + + val failing = new util.MultiBiMap.Mutable[Either[Task[_], Labelled[_]], Result.Failing[_]] + for((k, vs) <- sortedGroups.items()){ + failing.addAll( + k, + vs.items.flatMap(results.get).collect{case f: Result.Failing[_] => f.map(_._1)} + ) + } + os.write.over( + outPath / "mill-profile.json", + upickle.default.write( + timings .map{case (k, v, b) => + Evaluator.Timing(k.fold(_ => null, s => s.segments.render), v, b) + }, + indent = 4 + ) + ) + Evaluator.Results( + goals.indexed.map(results(_).map(_._1)), + evaluated, + transitive, + failing, + timings, + results.map{case (k, v) => (k, v.map(_._1))} + ) + } + + + def evaluateGroupCached(terminal: Either[Task[_], Labelled[_]], + group: Agg[Task[_]], + results: collection.Map[Task[_], Result[(Any, Int)]], + counterMsg: String): (collection.Map[Task[_], Result[(Any, Int)]], Seq[Task[_]], Boolean) = { + + val externalInputsHash = scala.util.hashing.MurmurHash3.orderedHash( + group.items.flatMap(_.inputs).filter(!group.contains(_)) + .flatMap(results(_).asSuccess.map(_.value._2)) + ) + + val sideHashes = scala.util.hashing.MurmurHash3.orderedHash( + group.toIterator.map(_.sideHash) + ) + + val inputsHash = externalInputsHash + sideHashes + classLoaderSignHash + + terminal match{ + case Left(task) => + val (newResults, newEvaluated) = evaluateGroup( + group, + results, + inputsHash, + paths = None, + maybeTargetLabel = None, + counterMsg = counterMsg + ) + (newResults, newEvaluated, false) + case Right(labelledNamedTask) => + + val out = if (!labelledNamedTask.task.ctx.external) outPath + else externalOutPath + + val paths = Evaluator.resolveDestPaths( + out, + destSegments(labelledNamedTask) + ) + + if (!os.exists(paths.out)) os.makeDir.all(paths.out) + val cached = for{ + cached <- + try Some(upickle.default.read[Evaluator.Cached](paths.meta.toIO)) + catch {case e: Throwable => None} + + if 
cached.inputsHash == inputsHash + reader <- labelledNamedTask.format + parsed <- + try Some(upickle.default.read(cached.value)(reader)) + catch {case e: Throwable => None} + } yield (parsed, cached.valueHash) + + val workerCached = labelledNamedTask.task.asWorker + .flatMap{w => workerCache.get(w.ctx.segments)} + .collect{case (`inputsHash`, v) => v} + + workerCached.map((_, inputsHash)) orElse cached match{ + case Some((v, hashCode)) => + val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] + newResults(labelledNamedTask.task) = Result.Success((v, hashCode)) + + (newResults, Nil, true) + + case _ => + val Seq(first, rest @_*) = labelledNamedTask.segments.value + val msgParts = Seq(first.asInstanceOf[Segment.Label].value) ++ rest.map{ + case Segment.Label(s) => "." + s + case Segment.Cross(s) => "[" + s.mkString(",") + "]" + } + + if (labelledNamedTask.task.flushDest) os.remove.all(paths.dest) + + val (newResults, newEvaluated) = evaluateGroup( + group, + results, + inputsHash, + paths = Some(paths), + maybeTargetLabel = Some(msgParts.mkString), + counterMsg = counterMsg + ) + + newResults(labelledNamedTask.task) match{ + case Result.Failure(_, Some((v, hashCode))) => + handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask) + + case Result.Success((v, hashCode)) => + handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask) + + case _ => + // Wipe out any cached meta.json file that exists, so + // a following run won't look at the cached metadata file and + // assume it's associated with the possibly-borked state of the + // destPath after an evaluation failure. + os.remove.all(paths.meta) + } + + (newResults, newEvaluated, false) + } + } + } + + def destSegments(labelledTask : Labelled[_]) : Segments = { + import labelledTask.task.ctx + if (ctx.foreign) { + val prefix = "foreign-modules" + // Computing a path in "out" that uniquely reflects the location + // of the foreign module relatively to the current build. 
+ val relative = labelledTask.task + .ctx.millSourcePath + .relativeTo(rootModule.millSourcePath) + // Encoding the number of `/..` + val ups = if (relative.ups > 0) Segments.labels(s"up-${relative.ups}") + else Segments() + Segments.labels(prefix) + .++(ups) + .++(Segments.labels(relative.segments: _*)) + .++(labelledTask.segments.last) + } else labelledTask.segments + } + + + def handleTaskResult(v: Any, + hashCode: Int, + metaPath: os.Path, + inputsHash: Int, + labelledNamedTask: Labelled[_]) = { + labelledNamedTask.task.asWorker match{ + case Some(w) => workerCache(w.ctx.segments) = (inputsHash, v) + case None => + val terminalResult = labelledNamedTask + .writer + .asInstanceOf[Option[upickle.default.Writer[Any]]] + .map(w => upickle.default.writeJs(v)(w) -> v) + + for((json, v) <- terminalResult){ + os.write.over( + metaPath, + upickle.default.write( + Evaluator.Cached(json, hashCode, inputsHash), + indent = 4 + ) + ) + } + } + } + + def evaluateGroup(group: Agg[Task[_]], + results: collection.Map[Task[_], Result[(Any, Int)]], + inputsHash: Int, + paths: Option[Evaluator.Paths], + maybeTargetLabel: Option[String], + counterMsg: String) = { + + + val newEvaluated = mutable.Buffer.empty[Task[_]] + val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] + + val nonEvaluatedTargets = group.indexed.filterNot(results.contains) + + maybeTargetLabel.foreach { targetLabel => + val inputResults = for { + target <- nonEvaluatedTargets + item <- target.inputs.filterNot(group.contains) + } yield results(item).map(_._1) + + val logRun = inputResults.forall(_.isInstanceOf[Result.Success[_]]) + + if(logRun) { log.ticker(s"[$counterMsg] $targetLabel ") } + } + + val multiLogger = resolveLogger(paths.map(_.log)) + var usedDest = Option.empty[(Task[_], Array[StackTraceElement])] + for (task <- nonEvaluatedTargets) { + newEvaluated.append(task) + val targetInputValues = task.inputs + .map{x => newResults.getOrElse(x, results(x))} + .collect{ case Result.Success((v, hashCode)) => v } + + val res = + if (targetInputValues.length != task.inputs.length) Result.Skipped + else { + val args = new Ctx( + targetInputValues.toArray[Any], + () => usedDest match{ + case Some((earlierTask, earlierStack)) if earlierTask != task => + val inner = new Exception("Earlier usage of `dest`") + inner.setStackTrace(earlierStack) + throw new Exception( + "`dest` can only be used in one place within each Target[T]", + inner + ) + case _ => + + + paths match{ + case Some(dest) => + if (usedDest.isEmpty) os.makeDir.all(dest.dest) + usedDest = Some((task, new Exception().getStackTrace)) + dest.dest + case None => + throw new Exception("No `dest` folder available here") + } + }, + multiLogger, + home, + env + ) + + val out = System.out + val in = System.in + val err = System.err + try{ + System.setIn(multiLogger.inStream) + System.setErr(multiLogger.errorStream) + System.setOut(multiLogger.outputStream) + Console.withIn(multiLogger.inStream){ + Console.withOut(multiLogger.outputStream){ + Console.withErr(multiLogger.errorStream){ + try task.evaluate(args) + catch { case NonFatal(e) => + Result.Exception(e, new OuterStack(new Exception().getStackTrace)) + } + } + } + } + }finally{ + System.setErr(err) + System.setOut(out) + System.setIn(in) + } + } + + newResults(task) = for(v <- res) yield { + (v, + if (task.isInstanceOf[Worker[_]]) inputsHash + else v.## + ) + } + } + + multiLogger.close() + + (newResults, newEvaluated) + } + + def resolveLogger(logPath: Option[os.Path]): Logger = logPath match{ + case None => 
log + case Some(path) => MultiLogger(log.colored, log, FileLogger(log.colored, path, debugEnabled = true)) + } +} + + +object Evaluator{ + case class Cached(value: ujson.Value, + valueHash: Int, + inputsHash: Int) + object Cached{ + implicit val rw: upickle.default.ReadWriter[Cached] = upickle.default.macroRW + } + case class State(rootModule: mill.define.BaseModule, + classLoaderSig: Seq[(Either[String, os.Path], Long)], + workerCache: mutable.Map[Segments, (Int, Any)], + watched: Seq[(os.Path, Long)]) + // This needs to be a ThreadLocal because we need to pass it into the body of + // the TargetScopt#read call, which does not accept additional parameters. + // Until we migrate our CLI parsing off of Scopt (so we can pass the BaseModule + // in directly) we are forced to pass it in via a ThreadLocal + val currentEvaluator = new ThreadLocal[mill.eval.Evaluator] + + val defaultEnv: Map[String, String] = System.getenv().asScala.toMap + + case class Paths(out: os.Path, + dest: os.Path, + meta: os.Path, + log: os.Path) + def makeSegmentStrings(segments: Segments) = segments.value.flatMap{ + case Segment.Label(s) => Seq(s) + case Segment.Cross(values) => values.map(_.toString) + } + def resolveDestPaths(workspacePath: os.Path, segments: Segments): Paths = { + val segmentStrings = makeSegmentStrings(segments) + val targetPath = workspacePath / segmentStrings + Paths(targetPath, targetPath / 'dest, targetPath / "meta.json", targetPath / 'log) + } + + // check if the build itself has changed + def classLoaderSig = Thread.currentThread().getContextClassLoader match { + case scl: SpecialClassLoader => scl.classpathSignature + case ucl: URLClassLoader => + SpecialClassLoader.initialClasspathSignature(ucl) + case _ => Nil + } + case class Timing(label: String, + millis: Int, + cached: Boolean) + object Timing{ + implicit val readWrite: upickle.default.ReadWriter[Timing] = upickle.default.macroRW + } + case class Results(rawValues: Seq[Result[Any]], + evaluated: Agg[Task[_]], + transitive: Agg[Task[_]], + failing: MultiBiMap[Either[Task[_], Labelled[_]], Result.Failing[_]], + timings: IndexedSeq[(Either[Task[_], Labelled[_]], Int, Boolean)], + results: collection.Map[Task[_], Result[Any]]){ + def values = rawValues.collect{case Result.Success(v) => v} + } + def plan(rootModule: BaseModule, goals: Agg[Task[_]]) = { + val transitive = Graph.transitiveTargets(goals) + val topoSorted = Graph.topoSorted(transitive) + val sortedGroups = Graph.groupAroundImportantTargets(topoSorted){ + case t: NamedTask[Any] => + val segments = t.ctx.segments + val finalTaskOverrides = t match{ + case t: Target[_] => + rootModule.millInternal.segmentsToTargets.get(segments).fold(0)(_.ctx.overrides) + + case c: mill.define.Command[_] => + def findMatching(cls: Class[_]): Option[Seq[(Int, EntryPoint[_])]] = { + rootModule.millDiscover.value.get(cls) match{ + case Some(v) => Some(v) + case None => + cls.getSuperclass match{ + case null => None + case superCls => findMatching(superCls) + } + } + } + + findMatching(c.cls) match{ + case Some(v) => + v.find(_._2.name == c.ctx.segment.pathSegments.head).get._1 + // For now we don't properly support overrides for external modules + // that do not appear in the Evaluator's main Discovered listing + case None => 0 + } + + case c: mill.define.Worker[_] => 0 + } + + val additional = + if (finalTaskOverrides == t.ctx.overrides) Nil + else Seq(Segment.Label("overriden")) ++ t.ctx.enclosing.split("\\.|#| ").map(Segment.Label) + + Right(Labelled(t, segments ++ additional)) + case t if 
goals.contains(t) => Left(t) + } + (sortedGroups, transitive) + } +} diff --git a/main/core/src/eval/Tarjans.scala b/main/core/src/eval/Tarjans.scala new file mode 100644 index 00000000..ade335a9 --- /dev/null +++ b/main/core/src/eval/Tarjans.scala @@ -0,0 +1,51 @@ +package mill.eval + +import scala.collection.mutable + +// Adapted from +// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java +object Tarjans { + def apply(graph0: TraversableOnce[TraversableOnce[Int]]): Seq[Seq[Int]] = { + val graph = graph0.map(_.toArray).toArray + val n = graph.length + val visited = new Array[Boolean](n) + val stack = mutable.ArrayBuffer.empty[Integer] + var time = 0 + val lowlink = new Array[Int](n) + val components = mutable.ArrayBuffer.empty[Seq[Int]] + + + for (u <- 0 until n) { + if (!visited(u)) dfs(u) + } + + def dfs(u: Int): Unit = { + lowlink(u) = time + time += 1 + visited(u) = true + stack.append(u) + var isComponentRoot = true + for (v <- graph(u)) { + if (!visited(v)) dfs(v) + if (lowlink(u) > lowlink(v)) { + lowlink(u) = lowlink(v) + isComponentRoot = false + } + } + if (isComponentRoot) { + val component = mutable.Buffer.empty[Int] + + var done = false + while (!done) { + val x = stack.last + stack.remove(stack.length - 1) + component.append(x) + lowlink(x) = Integer.MAX_VALUE + if (x == u) done = true + } + components.append(component) + } + } + components + } +} diff --git a/main/core/src/eval/package.scala b/main/core/src/eval/package.scala new file mode 100644 index 00000000..433f9074 --- /dev/null +++ b/main/core/src/eval/package.scala @@ -0,0 +1,12 @@ +package mill + +package object eval { + // Backwards compatibility forwarders + val Result = mill.api.Result + type Result[+T] = mill.api.Result[T] + + val PathRef = mill.api.PathRef + type PathRef = mill.api.PathRef + + type Logger = mill.api.Logger +} diff --git a/main/core/src/mill/define/Applicative.scala b/main/core/src/mill/define/Applicative.scala deleted file mode 100644 index 5e63b1cc..00000000 --- a/main/core/src/mill/define/Applicative.scala +++ /dev/null @@ -1,108 +0,0 @@ -package mill.define - -import scala.annotation.{StaticAnnotation, compileTimeOnly} -import scala.language.higherKinds -import scala.reflect.macros.blackbox.Context - -/** - * A generic Applicative-functor macro: translates calls to - * - * Applier.apply{ ... applyable1.apply() ... applyable2.apply() ... } - * - * into - * - * Applier.zipMap(applyable1, applyable2){ (a1, a2, ctx) => ... a1 ... a2 ... } - */ -object Applicative { - trait ApplyHandler[M[+_]]{ - def apply[T](t: M[T]): T - } - object ApplyHandler{ - @compileTimeOnly("Target#apply() can only be used with a T{...} block") - implicit def defaultApplyHandler[M[+_]]: ApplyHandler[M] = ??? 
- } - trait Applyable[M[+_], +T]{ - def self: M[T] - def apply()(implicit handler: ApplyHandler[M]): T = handler(self) - } - - type Id[+T] = T - - trait Applyer[W[_], T[_], Z[_], Ctx] extends ApplyerGenerated[T, Z, Ctx] { - def ctx()(implicit c: Ctx) = c - def underlying[A](v: W[A]): T[_] - - def zipMap[R]()(cb: Ctx => Z[R]) = mapCtx(zip()){ (_, ctx) => cb(ctx)} - def zipMap[A, R](a: T[A])(f: (A, Ctx) => Z[R]) = mapCtx(a)(f) - def zip(): T[Unit] - def zip[A](a: T[A]): T[Tuple1[A]] - } - - def impl[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context) - (t: c.Expr[T]): c.Expr[M[T]] = { - impl0(c)(t.tree)(implicitly[c.WeakTypeTag[T]], implicitly[c.WeakTypeTag[Ctx]]) - } - def impl0[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context) - (t: c.Tree): c.Expr[M[T]] = { - import c.universe._ - def rec(t: Tree): Iterator[c.Tree] = Iterator(t) ++ t.children.flatMap(rec(_)) - - val bound = collection.mutable.Buffer.empty[(c.Tree, ValDef)] - val targetApplySym = typeOf[Applyable[Nothing, _]].member(TermName("apply")) - - // Derived from @olafurpg's - // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608 - val defs = rec(t).filter(_.isDef).map(_.symbol).toSet - - val ctxName = TermName(c.freshName("ctx")) - val ctxSym = c.internal.newTermSymbol(c.internal.enclosingOwner, ctxName) - c.internal.setInfo(ctxSym, weakTypeOf[Ctx]) - - val transformed = c.internal.typingTransform(t) { - case (t @ q"$fun.apply()($handler)", api) if t.symbol == targetApplySym => - - val localDefs = rec(fun).filter(_.isDef).map(_.symbol).toSet - val banned = rec(t).filter(x => defs(x.symbol) && !localDefs(x.symbol)) - - if (banned.hasNext){ - val banned0 = banned.next() - c.abort( - banned0.pos, - "Target#apply() call cannot use `" + banned0.symbol + "` defined within the T{...} block" - ) - } - val tempName = c.freshName(TermName("tmp")) - val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName) - c.internal.setInfo(tempSym, t.tpe) - val tempIdent = Ident(tempSym) - c.internal.setType(tempIdent, t.tpe) - c.internal.setFlag(tempSym, (1L << 44).asInstanceOf[c.universe.FlagSet]) - bound.append((q"${c.prefix}.underlying($fun)", c.internal.valDef(tempSym))) - tempIdent - case (t, api) - if t.symbol != null - && t.symbol.annotations.exists(_.tree.tpe =:= typeOf[mill.api.Ctx.ImplicitStub]) => - - val tempIdent = Ident(ctxSym) - c.internal.setType(tempIdent, t.tpe) - c.internal.setFlag(ctxSym, (1L << 44).asInstanceOf[c.universe.FlagSet]) - tempIdent - - case (t, api) => api.default(t) - } - - val (exprs, bindings) = bound.unzip - - - val ctxBinding = c.internal.valDef(ctxSym) - - val callback = c.typecheck(q"(..$bindings, $ctxBinding) => $transformed ") - - val res = q"${c.prefix}.zipMap(..$exprs){ $callback }" - - c.internal.changeOwner(transformed, c.internal.enclosingOwner, callback.symbol) - - c.Expr[M[T]](res) - } - -} diff --git a/main/core/src/mill/define/BaseModule.scala b/main/core/src/mill/define/BaseModule.scala deleted file mode 100644 index cd79f73e..00000000 --- a/main/core/src/mill/define/BaseModule.scala +++ /dev/null @@ -1,56 +0,0 @@ -package mill.define - - -object BaseModule{ - case class Implicit(value: BaseModule) -} - -abstract class BaseModule(millSourcePath0: os.Path, - external0: Boolean = false, - foreign0 : Boolean = false) - (implicit millModuleEnclosing0: sourcecode.Enclosing, - millModuleLine0: sourcecode.Line, - millName0: sourcecode.Name, - millFile0: sourcecode.File, - caller: Caller) - extends Module()( - mill.define.Ctx.make( - implicitly, - implicitly, - 
implicitly, - BasePath(millSourcePath0), - Segments(), - mill.util.Router.Overrides(0), - Ctx.External(external0), - Ctx.Foreign(foreign0), - millFile0, - caller - ) - ){ - // A BaseModule should provide an empty Segments list to it's children, since - // it is the root of the module tree, and thus must not include it's own - // sourcecode.Name as part of the list, - override implicit def millModuleSegments: Segments = Segments() - override def millSourcePath = millOuterCtx.millSourcePath - override implicit def millModuleBasePath: BasePath = BasePath(millSourcePath) - implicit def millImplicitBaseModule: BaseModule.Implicit = BaseModule.Implicit(this) - def millDiscover: Discover[this.type] -} - - -abstract class ExternalModule(implicit millModuleEnclosing0: sourcecode.Enclosing, - millModuleLine0: sourcecode.Line, - millName0: sourcecode.Name) - extends BaseModule(ammonite.ops.pwd, external0 = true, foreign0 = false)( - implicitly, implicitly, implicitly, implicitly, Caller(()) - ){ - - implicit def millDiscoverImplicit: Discover[_] = millDiscover - assert( - !" #".exists(millModuleEnclosing0.value.contains(_)), - "External modules must be at a top-level static path, not " + millModuleEnclosing0.value - ) - override implicit def millModuleSegments = { - Segments(millModuleEnclosing0.value.split('.').map(Segment.Label):_*) - } -} diff --git a/main/core/src/mill/define/Caller.scala b/main/core/src/mill/define/Caller.scala deleted file mode 100644 index 6d2d4d1d..00000000 --- a/main/core/src/mill/define/Caller.scala +++ /dev/null @@ -1,13 +0,0 @@ -package mill.define - -import sourcecode.Compat.Context -import language.experimental.macros -case class Caller(value: Any) -object Caller { - def apply()(implicit c: Caller) = c.value - implicit def generate: Caller = macro impl - def impl(c: Context): c.Tree = { - import c.universe._ - q"new _root_.mill.define.Caller(this)" - } -} \ No newline at end of file diff --git a/main/core/src/mill/define/Cross.scala b/main/core/src/mill/define/Cross.scala deleted file mode 100644 index aa730e0d..00000000 --- a/main/core/src/mill/define/Cross.scala +++ /dev/null @@ -1,90 +0,0 @@ -package mill.define -import language.experimental.macros -import scala.reflect.macros.blackbox - - -object Cross{ - case class Factory[T](make: (Product, mill.define.Ctx) => T) - - object Factory{ - implicit def make[T]: Factory[T] = macro makeImpl[T] - def makeImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Factory[T]] = { - import c.universe._ - val tpe = weakTypeOf[T] - - val primaryConstructorArgs = - tpe.typeSymbol.asClass.primaryConstructor.typeSignature.paramLists.head - - val argTupleValues = - for((a, n) <- primaryConstructorArgs.zipWithIndex) - yield q"v.productElement($n).asInstanceOf[${a.info}]" - - val instance = c.Expr[(Product, mill.define.Ctx) => T]( - q"{ (v, ctx0) => new $tpe(..$argTupleValues){ override def millOuterCtx = ctx0 } }" - ) - - reify { mill.define.Cross.Factory[T](instance.splice) } - } - } - - trait Resolver[-T]{ - def resolve[V <: T](c: Cross[V]): V - } -} - -/** - * Models "cross-builds": sets of duplicate builds which differ only in the - * value of one or more "case" variables whose values are determined at runtime. - * Used via: - * - * object foo extends Cross[FooModule]("bar", "baz", "qux") - * class FooModule(v: String) extends Module{ - * ... 
- * } - */ -class Cross[T](cases: Any*) - (implicit ci: Cross.Factory[T], - ctx: mill.define.Ctx) extends mill.define.Module()(ctx) { - - override lazy val millModuleDirectChildren = - this.millInternal.reflectNestedObjects[Module] ++ - items.collect{case (k, v: mill.define.Module) => v} - - val items = for(c0 <- cases.toList) yield{ - val c = c0 match{ - case p: Product => p - case v => Tuple1(v) - } - val crossValues = c.productIterator.toList - val relPath = ctx.segment.pathSegments - val sub = ci.make( - c, - ctx.copy( - segments = ctx.segments ++ Seq(ctx.segment), - millSourcePath = ctx.millSourcePath / relPath, - segment = Segment.Cross(crossValues) - ) - ) - (crossValues, sub) - } - val itemMap = items.toMap - - /** - * Fetch the cross module corresponding to the given cross values - */ - def get(args: Seq[Any]) = itemMap(args.toList) - - /** - * Fetch the cross module corresponding to the given cross values - */ - def apply(arg0: Any, args: Any*) = itemMap(arg0 :: args.toList) - - /** - * Fetch the relevant cross module given the implicit resolver you have in - * scope. This is often the first cross module whose cross-version is - * compatible with the current module. - */ - def apply[V >: T]()(implicit resolver: Cross.Resolver[V]): T = { - resolver.resolve(this.asInstanceOf[Cross[V]]).asInstanceOf[T] - } -} \ No newline at end of file diff --git a/main/core/src/mill/define/Ctx.scala b/main/core/src/mill/define/Ctx.scala deleted file mode 100644 index c21e53b4..00000000 --- a/main/core/src/mill/define/Ctx.scala +++ /dev/null @@ -1,100 +0,0 @@ -package mill.define - - -import scala.annotation.implicitNotFound - -sealed trait Segment{ - def pathSegments: Seq[String] = this match{ - case Segment.Label(s) => List(s) - case Segment.Cross(vs) => vs.map(_.toString) - } -} -object Segment{ - case class Label(value: String) extends Segment{ - assert(!value.contains('.')) - } - case class Cross(value: Seq[Any]) extends Segment -} - -case class BasePath(value: os.Path) - - -/** - * Models a path with the Mill build hierarchy, e.g. - * - * amm.util[2.11].test.compile - * - * .-separated segments are [[Segment.Label]]s, while []-delimited - * segments are [[Segment.Cross]]s - */ -case class Segments(value: Segment*){ - def ++(other: Seq[Segment]): Segments = Segments(value ++ other:_*) - def ++(other: Segments): Segments = Segments(value ++ other.value:_*) - def parts = value.toList match { - case Nil => Nil - case Segment.Label(head) :: rest => - val stringSegments = rest.flatMap{ - case Segment.Label(s) => Seq(s) - case Segment.Cross(vs) => vs.map(_.toString) - } - head +: stringSegments - } - def last : Segments = Segments(value.last) - def render = value.toList match { - case Nil => "" - case Segment.Label(head) :: rest => - val stringSegments = rest.map{ - case Segment.Label(s) => "." 
+ s - case Segment.Cross(vs) => "[" + vs.mkString(",") + "]" - } - head + stringSegments.mkString - } -} - -object Segments { - - def labels(values : String*) : Segments = - Segments(values.map(Segment.Label):_*) - -} - -@implicitNotFound("Modules, Targets and Commands can only be defined within a mill Module") -case class Ctx(enclosing: String, - lineNum: Int, - segment: Segment, - millSourcePath: os.Path, - segments: Segments, - overrides: Int, - external: Boolean, - foreign: Boolean, - fileName: String, - enclosingCls: Class[_]){ -} - -object Ctx{ - case class External(value: Boolean) - case class Foreign(value : Boolean) - implicit def make(implicit millModuleEnclosing0: sourcecode.Enclosing, - millModuleLine0: sourcecode.Line, - millName0: sourcecode.Name, - millModuleBasePath0: BasePath, - segments0: Segments, - overrides0: mill.util.Router.Overrides, - external0: External, - foreign0: Foreign, - fileName: sourcecode.File, - enclosing: Caller): Ctx = { - Ctx( - millModuleEnclosing0.value, - millModuleLine0.value, - Segment.Label(millName0.value), - millModuleBasePath0.value, - segments0, - overrides0.value, - external0.value, - foreign0.value, - fileName.value, - enclosing.value.getClass - ) - } -} diff --git a/main/core/src/mill/define/Discover.scala b/main/core/src/mill/define/Discover.scala deleted file mode 100644 index f0c668e6..00000000 --- a/main/core/src/mill/define/Discover.scala +++ /dev/null @@ -1,89 +0,0 @@ -package mill.define -import mill.util.Router.EntryPoint - -import language.experimental.macros -import sourcecode.Compat.Context - -import scala.collection.mutable -import scala.reflect.macros.blackbox - - - -case class Discover[T](value: Map[Class[_], Seq[(Int, EntryPoint[_])]]) -object Discover { - def apply[T]: Discover[T] = macro applyImpl[T] - - def applyImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Discover[T]] = { - import c.universe._ - import compat._ - val seen = mutable.Set.empty[Type] - def rec(tpe: Type): Unit = { - if (!seen(tpe)){ - seen.add(tpe) - for{ - m <- tpe.members - memberTpe = m.typeSignature - if memberTpe.resultType <:< typeOf[mill.define.Module] && memberTpe.paramLists.isEmpty - } rec(memberTpe.resultType) - - if (tpe <:< typeOf[mill.define.Cross[_]]){ - val inner = typeOf[Cross[_]] - .typeSymbol - .asClass - .typeParams - .head - .asType - .toType - .asSeenFrom(tpe, typeOf[Cross[_]].typeSymbol) - - rec(inner) - } - } - } - rec(weakTypeOf[T]) - - def assertParamListCounts(methods: Iterable[router.c.universe.MethodSymbol], - cases: (c.Type, Int, String)*) = { - for (m <- methods.toList){ - for ((tt, n, label) <- cases){ - if (m.returnType <:< tt.asInstanceOf[router.c.Type] && - m.paramLists.length != n){ - c.abort( - m.pos.asInstanceOf[c.Position], - s"$label definitions must have $n parameter list" + (if (n == 1) "" else "s") - ) - } - } - } - } - val router = new mill.util.Router(c) - val mapping = for{ - discoveredModuleType <- seen - val curCls = discoveredModuleType.asInstanceOf[router.c.Type] - val methods = router.getValsOrMeths(curCls) - val overridesRoutes = { - assertParamListCounts( - methods, - (weakTypeOf[mill.define.Sources], 0, "`T.sources`"), - (weakTypeOf[mill.define.Input[_]], 0, "`T.input`"), - (weakTypeOf[mill.define.Persistent[_]], 0, "`T.persistent`"), - (weakTypeOf[mill.define.Target[_]], 0, "`T{...}`"), - (weakTypeOf[mill.define.Command[_]], 1, "`T.command`") - ) - - for{ - m <- methods.toList - if m.returnType <:< weakTypeOf[mill.define.Command[_]].asInstanceOf[router.c.Type] - } yield (m.overrides.length, 
router.extractMethod(m, curCls).asInstanceOf[c.Tree]) - - } - if overridesRoutes.nonEmpty - } yield { - val lhs = q"classOf[${discoveredModuleType.typeSymbol.asClass}]" - val rhs = q"scala.Seq[(Int, mill.util.Router.EntryPoint[_])](..$overridesRoutes)" - q"$lhs -> $rhs" - } - - c.Expr[Discover[T]](q"mill.define.Discover(scala.collection.immutable.Map(..$mapping))") - } -} diff --git a/main/core/src/mill/define/Graph.scala b/main/core/src/mill/define/Graph.scala deleted file mode 100644 index 3119f2fb..00000000 --- a/main/core/src/mill/define/Graph.scala +++ /dev/null @@ -1,72 +0,0 @@ -package mill.define - -import mill.eval.Tarjans -import mill.util.MultiBiMap -import mill.util.Strict.Agg - -object Graph { - - /** - * The `values` [[Agg]] is guaranteed to be topological sorted and cycle free. - * That's why the constructor is package private. - * @see [[Graph.topoSorted]] - */ - class TopoSorted private[Graph] (val values: Agg[Task[_]]) - - def groupAroundImportantTargets[T](topoSortedTargets: TopoSorted) - (important: PartialFunction[Task[_], T]): MultiBiMap[T, Task[_]] = { - - val output = new MultiBiMap.Mutable[T, Task[_]]() - for ((target, t) <- topoSortedTargets.values.flatMap(t => important.lift(t).map((t, _)))) { - - val transitiveTargets = new Agg.Mutable[Task[_]] - def rec(t: Task[_]): Unit = { - if (transitiveTargets.contains(t)) () // do nothing - else if (important.isDefinedAt(t) && t != target) () // do nothing - else { - transitiveTargets.append(t) - t.inputs.foreach(rec) - } - } - rec(target) - output.addAll(t, topoSorted(transitiveTargets).values) - } - output - } - - /** - * Collects all transitive dependencies (targets) of the given targets, - * including the given targets. - */ - def transitiveTargets(sourceTargets: Agg[Task[_]]): Agg[Task[_]] = { - val transitiveTargets = new Agg.Mutable[Task[_]] - def rec(t: Task[_]): Unit = { - if (transitiveTargets.contains(t)) () // do nothing - else { - transitiveTargets.append(t) - t.inputs.foreach(rec) - } - } - - sourceTargets.items.foreach(rec) - transitiveTargets - } - /** - * Takes the given targets, finds all the targets they transitively depend - * on, and sort them topologically. Fails if there are dependency cycles - */ - def topoSorted(transitiveTargets: Agg[Task[_]]): TopoSorted = { - - val indexed = transitiveTargets.indexed - val targetIndices = indexed.zipWithIndex.toMap - - val numberedEdges = - for(t <- transitiveTargets.items) - yield t.inputs.collect(targetIndices) - - val sortedClusters = Tarjans(numberedEdges) - val nonTrivialClusters = sortedClusters.filter(_.length > 1) - assert(nonTrivialClusters.isEmpty, nonTrivialClusters) - new TopoSorted(Agg.from(sortedClusters.flatten.map(indexed))) - } -} diff --git a/main/core/src/mill/define/Module.scala b/main/core/src/mill/define/Module.scala deleted file mode 100644 index a8fc5be7..00000000 --- a/main/core/src/mill/define/Module.scala +++ /dev/null @@ -1,96 +0,0 @@ -package mill.define - -import java.lang.reflect.Modifier - -import mill.util.ParseArgs - -import scala.language.experimental.macros -import scala.reflect.ClassTag -import scala.reflect.NameTransformer.decode - - -/** - * `Module` is a class meant to be extended by `trait`s *only*, in order to - * propagate the implicit parameters forward to the final concrete - * instantiation site so they can capture the enclosing/line information of - * the concrete instance. 
- */ -class Module(implicit outerCtx0: mill.define.Ctx) - extends mill.moduledefs.Cacher{ outer => - - /** - * Miscellaneous machinery around traversing & querying the build hierarchy, - * that should not be needed by normal users of Mill - */ - object millInternal extends Module.Internal(this) - - lazy val millModuleDirectChildren = millInternal.reflectNestedObjects[Module].toSeq - def millOuterCtx = outerCtx0 - def millSourcePath: os.Path = millOuterCtx.millSourcePath / millOuterCtx.segment.pathSegments - implicit def millModuleExternal: Ctx.External = Ctx.External(millOuterCtx.external) - implicit def millModuleShared: Ctx.Foreign = Ctx.Foreign(millOuterCtx.foreign) - implicit def millModuleBasePath: BasePath = BasePath(millSourcePath) - implicit def millModuleSegments: Segments = { - millOuterCtx.segments ++ Seq(millOuterCtx.segment) - } - override def toString = millModuleSegments.render -} - -object Module{ - class Internal(outer: Module){ - def traverse[T](f: Module => Seq[T]): Seq[T] = { - def rec(m: Module): Seq[T] = f(m) ++ m.millModuleDirectChildren.flatMap(rec) - rec(outer) - } - - lazy val modules = traverse(Seq(_)) - lazy val segmentsToModules = modules.map(m => (m.millModuleSegments, m)).toMap - - lazy val targets = traverse{_.millInternal.reflectAll[Target[_]]}.toSet - - lazy val segmentsToTargets = targets - .map(t => (t.ctx.segments, t)) - .toMap - - // Ensure we do not propagate the implicit parameters as implicits within - // the body of any inheriting class/trait/objects, as it would screw up any - // one else trying to use sourcecode.{Enclosing,Line} to capture debug info - lazy val millModuleEnclosing = outer.millOuterCtx.enclosing - lazy val millModuleLine = outer.millOuterCtx.lineNum - - private def reflect[T: ClassTag](filter: (String) => Boolean): Array[T] = { - val runtimeCls = implicitly[ClassTag[T]].runtimeClass - for{ - m <- outer.getClass.getMethods.sortBy(_.getName) - n = decode(m.getName) - if - filter(n) && - ParseArgs.isLegalIdentifier(n) && - m.getParameterCount == 0 && - (m.getModifiers & Modifier.STATIC) == 0 && - (m.getModifiers & Modifier.ABSTRACT) == 0 && - runtimeCls.isAssignableFrom(m.getReturnType) - } yield m.invoke(outer).asInstanceOf[T] - } - - def reflectAll[T: ClassTag]: Array[T] = reflect(Function.const(true)) - - def reflectSingle[T: ClassTag](label: String): Option[T] = reflect(_ == label).headOption - - // For some reason, this fails to pick up concrete `object`s nested directly within - // another top-level concrete `object`. 
This is fine for now, since Mill's Ammonite - // script/REPL runner always wraps user code in a wrapper object/trait - def reflectNestedObjects[T: ClassTag] = { - (reflectAll[T] ++ - outer - .getClass - .getClasses - .filter(implicitly[ClassTag[T]].runtimeClass isAssignableFrom _) - .flatMap(c => c.getFields.find(_.getName == "MODULE$").map(_.get(c).asInstanceOf[T])) - ).distinct - } - } -} -trait TaskModule extends Module { - def defaultCommandName(): String -} diff --git a/main/core/src/mill/define/Task.scala b/main/core/src/mill/define/Task.scala deleted file mode 100644 index a464bf18..00000000 --- a/main/core/src/mill/define/Task.scala +++ /dev/null @@ -1,344 +0,0 @@ -package mill.define - -import ammonite.main.Router.Overrides -import mill.define.Applicative.Applyable -import mill.eval.{PathRef, Result} -import mill.util.EnclosingClass -import sourcecode.Compat.Context -import upickle.default.{ReadWriter => RW, Reader => R, Writer => W} - -import scala.language.experimental.macros -import scala.reflect.macros.blackbox.Context - - -/** - * Models a single node in the Mill build graph, with a list of inputs and a - * single output of type [[T]]. - * - * Generally not instantiated manually, but instead constructed via the - * [[Target.apply]] & similar macros. - */ -abstract class Task[+T] extends Task.Ops[T] with Applyable[Task, T]{ - /** - * What other Targets does this Target depend on? - */ - val inputs: Seq[Task[_]] - - /** - * Evaluate this target - */ - def evaluate(args: mill.api.Ctx): Result[T] - - /** - * Even if this target's inputs did not change, does it need to re-evaluate - * anyway? - */ - def sideHash: Int = 0 - - def flushDest: Boolean = true - - def asTarget: Option[Target[T]] = None - def asCommand: Option[Command[T]] = None - def asWorker: Option[Worker[T]] = None - def self = this -} - -trait NamedTask[+T] extends Task[T]{ - def ctx: mill.define.Ctx - def label = ctx.segment match{case Segment.Label(v) => v} - override def toString = ctx.segments.render -} -trait Target[+T] extends NamedTask[T]{ - override def asTarget = Some(this) - def readWrite: RW[_] -} - -object Target extends TargetGenerated with Applicative.Applyer[Task, Task, Result, mill.api.Ctx] { - - implicit def apply[T](t: T) - (implicit rw: RW[T], - ctx: mill.define.Ctx): Target[T] = macro targetImpl[T] - - def targetImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[T]) - (rw: c.Expr[RW[T]], - ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { - import c.universe._ - val lhs = Applicative.impl0[Task, T, mill.api.Ctx](c)(reify(Result.Success(t.splice)).tree) - - mill.moduledefs.Cacher.impl0[TargetImpl[T]](c)( - reify( - new TargetImpl[T](lhs.splice, ctx.splice, rw.splice) - ) - ) - } - - implicit def apply[T](t: Result[T]) - (implicit rw: RW[T], - ctx: mill.define.Ctx): Target[T] = macro targetResultImpl[T] - - def targetResultImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[Result[T]]) - (rw: c.Expr[RW[T]], - ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { - import c.universe._ - mill.moduledefs.Cacher.impl0[Target[T]](c)( - reify( - new TargetImpl[T]( - Applicative.impl0[Task, T, mill.api.Ctx](c)(t.tree).splice, - ctx.splice, - rw.splice - ) - ) - ) - } - - def apply[T](t: Task[T]) - (implicit rw: RW[T], - ctx: mill.define.Ctx): Target[T] = macro targetTaskImpl[T] - - def targetTaskImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[Task[T]]) - (rw: c.Expr[RW[T]], - ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = { - import c.universe._ - mill.moduledefs.Cacher.impl0[Target[T]](c)( - reify( - new 
TargetImpl[T](t.splice, ctx.splice, rw.splice) - ) - ) - } - - def sources(values: Result[os.Path]*) - (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl1 - - def sourcesImpl1(c: Context) - (values: c.Expr[Result[os.Path]]*) - (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = { - import c.universe._ - val wrapped = - for (value <- values.toList) - yield Applicative.impl0[Task, PathRef, mill.api.Ctx](c)( - reify(value.splice.map(PathRef(_))).tree - ).tree - - mill.moduledefs.Cacher.impl0[Sources](c)( - reify( - new Sources( - Task.sequence(c.Expr[List[Task[PathRef]]](q"scala.List(..$wrapped)").splice), - ctx.splice - ) - ) - ) - } - - def sources(values: Result[Seq[PathRef]]) - (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl2 - - def sourcesImpl2(c: Context) - (values: c.Expr[Result[Seq[PathRef]]]) - (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = { - import c.universe._ - - - mill.moduledefs.Cacher.impl0[Sources](c)( - reify( - new Sources( - Applicative.impl0[Task, Seq[PathRef], mill.api.Ctx](c)(values.tree).splice, - ctx.splice - ) - ) - ) - } - def input[T](value: Result[T]) - (implicit rw: RW[T], - ctx: mill.define.Ctx): Input[T] = macro inputImpl[T] - - def inputImpl[T: c.WeakTypeTag](c: Context) - (value: c.Expr[T]) - (rw: c.Expr[RW[T]], - ctx: c.Expr[mill.define.Ctx]): c.Expr[Input[T]] = { - import c.universe._ - - mill.moduledefs.Cacher.impl0[Input[T]](c)( - reify( - new Input[T]( - Applicative.impl[Task, T, mill.api.Ctx](c)(value).splice, - ctx.splice, - rw.splice - ) - ) - ) - } - - def command[T](t: Task[T]) - (implicit ctx: mill.define.Ctx, - w: W[T], - cls: EnclosingClass, - overrides: Overrides): Command[T] = { - new Command(t, ctx, w, cls.value, overrides.value) - } - - def command[T](t: Result[T]) - (implicit w: W[T], - ctx: mill.define.Ctx, - cls: EnclosingClass, - overrides: Overrides): Command[T] = macro commandImpl[T] - - def commandImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[T]) - (w: c.Expr[W[T]], - ctx: c.Expr[mill.define.Ctx], - cls: c.Expr[EnclosingClass], - overrides: c.Expr[Overrides]): c.Expr[Command[T]] = { - import c.universe._ - reify( - new Command[T]( - Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, - ctx.splice, - w.splice, - cls.splice.value, - overrides.splice.value - ) - ) - } - - def worker[T](t: Task[T]) - (implicit ctx: mill.define.Ctx): Worker[T] = new Worker(t, ctx) - - def worker[T](t: Result[T]) - (implicit ctx: mill.define.Ctx): Worker[T] = macro workerImpl[T] - - def workerImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[T]) - (ctx: c.Expr[mill.define.Ctx]): c.Expr[Worker[T]] = { - import c.universe._ - reify( - new Worker[T](Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, ctx.splice) - ) - } - - def task[T](t: Result[T]): Task[T] = macro Applicative.impl[Task, T, mill.api.Ctx] - - def persistent[T](t: Result[T])(implicit rw: RW[T], - ctx: mill.define.Ctx): Persistent[T] = macro persistentImpl[T] - - def persistentImpl[T: c.WeakTypeTag](c: Context) - (t: c.Expr[T]) - (rw: c.Expr[RW[T]], - ctx: c.Expr[mill.define.Ctx]): c.Expr[Persistent[T]] = { - import c.universe._ - - - mill.moduledefs.Cacher.impl0[Persistent[T]](c)( - reify( - new Persistent[T]( - Applicative.impl[Task, T, mill.api.Ctx](c)(t).splice, - ctx.splice, - rw.splice - ) - ) - ) - } - - type TT[+X] = Task[X] - def makeT[X](inputs0: Seq[TT[_]], evaluate0: mill.api.Ctx => Result[X]) = new Task[X] { - val inputs = inputs0 - def evaluate(x: mill.api.Ctx) = evaluate0(x) - } - - def underlying[A](v: Task[A]) = v - def mapCtx[A, B](t: Task[A])(f: 
(A, mill.api.Ctx) => Result[B]) = t.mapDest(f) - def zip() = new Task.Task0(()) - def zip[A](a: Task[A]) = a.map(Tuple1(_)) - def zip[A, B](a: Task[A], b: Task[B]) = a.zip(b) -} - -abstract class NamedTaskImpl[+T](ctx0: mill.define.Ctx, t: Task[T]) extends NamedTask[T]{ - def evaluate(args: mill.api.Ctx) = args[T](0) - val ctx = ctx0.copy(segments = ctx0.segments ++ Seq(ctx0.segment)) - val inputs = Seq(t) -} - -class TargetImpl[+T](t: Task[T], - ctx0: mill.define.Ctx, - val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T] { -} - -class Command[+T](t: Task[T], - ctx0: mill.define.Ctx, - val writer: W[_], - val cls: Class[_], - val overrides: Int) extends NamedTaskImpl[T](ctx0, t) { - override def asCommand = Some(this) -} - -class Worker[+T](t: Task[T], ctx0: mill.define.Ctx) extends NamedTaskImpl[T](ctx0, t) { - override def flushDest = false - override def asWorker = Some(this) -} -class Persistent[+T](t: Task[T], - ctx0: mill.define.Ctx, - readWrite: RW[_]) - extends TargetImpl[T](t, ctx0, readWrite) { - - override def flushDest = false -} -class Input[T](t: Task[T], - ctx0: mill.define.Ctx, - val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T]{ - override def sideHash = util.Random.nextInt() -} -class Sources(t: Task[Seq[PathRef]], - ctx0: mill.define.Ctx) extends Input[Seq[PathRef]]( - t, - ctx0, - RW.join( - upickle.default.SeqLikeReader[Seq, PathRef], - upickle.default.SeqLikeWriter[Seq, PathRef] - ) -) -object Task { - - class Task0[T](t: T) extends Task[T]{ - lazy val t0 = t - val inputs = Nil - def evaluate(args: mill.api.Ctx) = t0 - } - - abstract class Ops[+T]{ this: Task[T] => - def map[V](f: T => V) = new Task.Mapped(this, f) - def mapDest[V](f: (T, mill.api.Ctx) => Result[V]) = new Task.MappedDest(this, f) - - def filter(f: T => Boolean) = this - def withFilter(f: T => Boolean) = this - def zip[V](other: Task[V]) = new Task.Zipped(this, other) - - } - - def traverse[T, V](source: Seq[T])(f: T => Task[V]) = { - new Sequence[V](source.map(f)) - } - def sequence[T](source: Seq[Task[T]]) = new Sequence[T](source) - - class Sequence[+T](inputs0: Seq[Task[T]]) extends Task[Seq[T]]{ - val inputs = inputs0 - def evaluate(args: mill.api.Ctx) = { - for (i <- 0 until args.length) - yield args(i).asInstanceOf[T] - } - - } - class Mapped[+T, +V](source: Task[T], f: T => V) extends Task[V]{ - def evaluate(args: mill.api.Ctx) = f(args(0)) - val inputs = List(source) - } - class MappedDest[+T, +V](source: Task[T], f: (T, mill.api.Ctx) => Result[V]) extends Task[V]{ - def evaluate(args: mill.api.Ctx) = f(args(0), args) - val inputs = List(source) - } - class Zipped[+T, +V](source1: Task[T], source2: Task[V]) extends Task[(T, V)]{ - def evaluate(args: mill.api.Ctx) = (args(0), args(1)) - val inputs = List(source1, source2) - } -} diff --git a/main/core/src/mill/eval/Evaluator.scala b/main/core/src/mill/eval/Evaluator.scala deleted file mode 100644 index 8709064e..00000000 --- a/main/core/src/mill/eval/Evaluator.scala +++ /dev/null @@ -1,443 +0,0 @@ -package mill.eval - -import java.net.URLClassLoader - -import scala.collection.JavaConverters._ - -import mill.util.Router.EntryPoint -import ammonite.runtime.SpecialClassLoader -import mill.define.{Ctx => _, _} -import mill.api.Result.OuterStack -import mill.util -import mill.util._ -import mill.util.Strict.Agg - -import scala.collection.mutable -import scala.util.control.NonFatal -case class Labelled[T](task: NamedTask[T], - segments: Segments){ - def format = task match{ - case t: Target[T] => 
Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]]) - case _ => None - } - def writer = task match{ - case t: mill.define.Command[T] => Some(t.writer.asInstanceOf[upickle.default.Writer[T]]) - case t: Target[T] => Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]]) - case _ => None - } -} - -case class Evaluator(home: os.Path, - outPath: os.Path, - externalOutPath: os.Path, - rootModule: mill.define.BaseModule, - log: Logger, - classLoaderSig: Seq[(Either[String, os.Path], Long)] = Evaluator.classLoaderSig, - workerCache: mutable.Map[Segments, (Int, Any)] = mutable.Map.empty, - env : Map[String, String] = Evaluator.defaultEnv){ - val classLoaderSignHash = classLoaderSig.hashCode() - def evaluate(goals: Agg[Task[_]]): Evaluator.Results = { - os.makeDir.all(outPath) - - val (sortedGroups, transitive) = Evaluator.plan(rootModule, goals) - - val evaluated = new Agg.Mutable[Task[_]] - val results = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] - - val timings = mutable.ArrayBuffer.empty[(Either[Task[_], Labelled[_]], Int, Boolean)] - for (((terminal, group), i) <- sortedGroups.items().zipWithIndex){ - val startTime = System.currentTimeMillis() - // Increment the counter message by 1 to go from 1/10 to 10/10 instead of 0/10 to 9/10 - val counterMsg = (i+1) + "/" + sortedGroups.keyCount - val (newResults, newEvaluated, cached) = evaluateGroupCached( - terminal, - group, - results, - counterMsg - ) - - for(ev <- newEvaluated){ - evaluated.append(ev) - } - for((k, v) <- newResults) { - results.put(k, v) - } - val endTime = System.currentTimeMillis() - - timings.append((terminal, (endTime - startTime).toInt, cached)) - } - - val failing = new util.MultiBiMap.Mutable[Either[Task[_], Labelled[_]], Result.Failing[_]] - for((k, vs) <- sortedGroups.items()){ - failing.addAll( - k, - vs.items.flatMap(results.get).collect{case f: Result.Failing[_] => f.map(_._1)} - ) - } - os.write.over( - outPath / "mill-profile.json", - upickle.default.write( - timings .map{case (k, v, b) => - Evaluator.Timing(k.fold(_ => null, s => s.segments.render), v, b) - }, - indent = 4 - ) - ) - Evaluator.Results( - goals.indexed.map(results(_).map(_._1)), - evaluated, - transitive, - failing, - timings, - results.map{case (k, v) => (k, v.map(_._1))} - ) - } - - - def evaluateGroupCached(terminal: Either[Task[_], Labelled[_]], - group: Agg[Task[_]], - results: collection.Map[Task[_], Result[(Any, Int)]], - counterMsg: String): (collection.Map[Task[_], Result[(Any, Int)]], Seq[Task[_]], Boolean) = { - - val externalInputsHash = scala.util.hashing.MurmurHash3.orderedHash( - group.items.flatMap(_.inputs).filter(!group.contains(_)) - .flatMap(results(_).asSuccess.map(_.value._2)) - ) - - val sideHashes = scala.util.hashing.MurmurHash3.orderedHash( - group.toIterator.map(_.sideHash) - ) - - val inputsHash = externalInputsHash + sideHashes + classLoaderSignHash - - terminal match{ - case Left(task) => - val (newResults, newEvaluated) = evaluateGroup( - group, - results, - inputsHash, - paths = None, - maybeTargetLabel = None, - counterMsg = counterMsg - ) - (newResults, newEvaluated, false) - case Right(labelledNamedTask) => - - val out = if (!labelledNamedTask.task.ctx.external) outPath - else externalOutPath - - val paths = Evaluator.resolveDestPaths( - out, - destSegments(labelledNamedTask) - ) - - if (!os.exists(paths.out)) os.makeDir.all(paths.out) - val cached = for{ - cached <- - try Some(upickle.default.read[Evaluator.Cached](paths.meta.toIO)) - catch {case e: Throwable => None} - - if 
cached.inputsHash == inputsHash - reader <- labelledNamedTask.format - parsed <- - try Some(upickle.default.read(cached.value)(reader)) - catch {case e: Throwable => None} - } yield (parsed, cached.valueHash) - - val workerCached = labelledNamedTask.task.asWorker - .flatMap{w => workerCache.get(w.ctx.segments)} - .collect{case (`inputsHash`, v) => v} - - workerCached.map((_, inputsHash)) orElse cached match{ - case Some((v, hashCode)) => - val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] - newResults(labelledNamedTask.task) = Result.Success((v, hashCode)) - - (newResults, Nil, true) - - case _ => - val Seq(first, rest @_*) = labelledNamedTask.segments.value - val msgParts = Seq(first.asInstanceOf[Segment.Label].value) ++ rest.map{ - case Segment.Label(s) => "." + s - case Segment.Cross(s) => "[" + s.mkString(",") + "]" - } - - if (labelledNamedTask.task.flushDest) os.remove.all(paths.dest) - - val (newResults, newEvaluated) = evaluateGroup( - group, - results, - inputsHash, - paths = Some(paths), - maybeTargetLabel = Some(msgParts.mkString), - counterMsg = counterMsg - ) - - newResults(labelledNamedTask.task) match{ - case Result.Failure(_, Some((v, hashCode))) => - handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask) - - case Result.Success((v, hashCode)) => - handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask) - - case _ => - // Wipe out any cached meta.json file that exists, so - // a following run won't look at the cached metadata file and - // assume it's associated with the possibly-borked state of the - // destPath after an evaluation failure. - os.remove.all(paths.meta) - } - - (newResults, newEvaluated, false) - } - } - } - - def destSegments(labelledTask : Labelled[_]) : Segments = { - import labelledTask.task.ctx - if (ctx.foreign) { - val prefix = "foreign-modules" - // Computing a path in "out" that uniquely reflects the location - // of the foreign module relatively to the current build. 
- val relative = labelledTask.task - .ctx.millSourcePath - .relativeTo(rootModule.millSourcePath) - // Encoding the number of `/..` - val ups = if (relative.ups > 0) Segments.labels(s"up-${relative.ups}") - else Segments() - Segments.labels(prefix) - .++(ups) - .++(Segments.labels(relative.segments: _*)) - .++(labelledTask.segments.last) - } else labelledTask.segments - } - - - def handleTaskResult(v: Any, - hashCode: Int, - metaPath: os.Path, - inputsHash: Int, - labelledNamedTask: Labelled[_]) = { - labelledNamedTask.task.asWorker match{ - case Some(w) => workerCache(w.ctx.segments) = (inputsHash, v) - case None => - val terminalResult = labelledNamedTask - .writer - .asInstanceOf[Option[upickle.default.Writer[Any]]] - .map(w => upickle.default.writeJs(v)(w) -> v) - - for((json, v) <- terminalResult){ - os.write.over( - metaPath, - upickle.default.write( - Evaluator.Cached(json, hashCode, inputsHash), - indent = 4 - ) - ) - } - } - } - - def evaluateGroup(group: Agg[Task[_]], - results: collection.Map[Task[_], Result[(Any, Int)]], - inputsHash: Int, - paths: Option[Evaluator.Paths], - maybeTargetLabel: Option[String], - counterMsg: String) = { - - - val newEvaluated = mutable.Buffer.empty[Task[_]] - val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]] - - val nonEvaluatedTargets = group.indexed.filterNot(results.contains) - - maybeTargetLabel.foreach { targetLabel => - val inputResults = for { - target <- nonEvaluatedTargets - item <- target.inputs.filterNot(group.contains) - } yield results(item).map(_._1) - - val logRun = inputResults.forall(_.isInstanceOf[Result.Success[_]]) - - if(logRun) { log.ticker(s"[$counterMsg] $targetLabel ") } - } - - val multiLogger = resolveLogger(paths.map(_.log)) - var usedDest = Option.empty[(Task[_], Array[StackTraceElement])] - for (task <- nonEvaluatedTargets) { - newEvaluated.append(task) - val targetInputValues = task.inputs - .map{x => newResults.getOrElse(x, results(x))} - .collect{ case Result.Success((v, hashCode)) => v } - - val res = - if (targetInputValues.length != task.inputs.length) Result.Skipped - else { - val args = new Ctx( - targetInputValues.toArray[Any], - () => usedDest match{ - case Some((earlierTask, earlierStack)) if earlierTask != task => - val inner = new Exception("Earlier usage of `dest`") - inner.setStackTrace(earlierStack) - throw new Exception( - "`dest` can only be used in one place within each Target[T]", - inner - ) - case _ => - - - paths match{ - case Some(dest) => - if (usedDest.isEmpty) os.makeDir.all(dest.dest) - usedDest = Some((task, new Exception().getStackTrace)) - dest.dest - case None => - throw new Exception("No `dest` folder available here") - } - }, - multiLogger, - home, - env - ) - - val out = System.out - val in = System.in - val err = System.err - try{ - System.setIn(multiLogger.inStream) - System.setErr(multiLogger.errorStream) - System.setOut(multiLogger.outputStream) - Console.withIn(multiLogger.inStream){ - Console.withOut(multiLogger.outputStream){ - Console.withErr(multiLogger.errorStream){ - try task.evaluate(args) - catch { case NonFatal(e) => - Result.Exception(e, new OuterStack(new Exception().getStackTrace)) - } - } - } - } - }finally{ - System.setErr(err) - System.setOut(out) - System.setIn(in) - } - } - - newResults(task) = for(v <- res) yield { - (v, - if (task.isInstanceOf[Worker[_]]) inputsHash - else v.## - ) - } - } - - multiLogger.close() - - (newResults, newEvaluated) - } - - def resolveLogger(logPath: Option[os.Path]): Logger = logPath match{ - case None => 
log - case Some(path) => MultiLogger(log.colored, log, FileLogger(log.colored, path, debugEnabled = true)) - } -} - - -object Evaluator{ - case class Cached(value: ujson.Value, - valueHash: Int, - inputsHash: Int) - object Cached{ - implicit val rw: upickle.default.ReadWriter[Cached] = upickle.default.macroRW - } - case class State(rootModule: mill.define.BaseModule, - classLoaderSig: Seq[(Either[String, os.Path], Long)], - workerCache: mutable.Map[Segments, (Int, Any)], - watched: Seq[(os.Path, Long)]) - // This needs to be a ThreadLocal because we need to pass it into the body of - // the TargetScopt#read call, which does not accept additional parameters. - // Until we migrate our CLI parsing off of Scopt (so we can pass the BaseModule - // in directly) we are forced to pass it in via a ThreadLocal - val currentEvaluator = new ThreadLocal[mill.eval.Evaluator] - - val defaultEnv: Map[String, String] = System.getenv().asScala.toMap - - case class Paths(out: os.Path, - dest: os.Path, - meta: os.Path, - log: os.Path) - def makeSegmentStrings(segments: Segments) = segments.value.flatMap{ - case Segment.Label(s) => Seq(s) - case Segment.Cross(values) => values.map(_.toString) - } - def resolveDestPaths(workspacePath: os.Path, segments: Segments): Paths = { - val segmentStrings = makeSegmentStrings(segments) - val targetPath = workspacePath / segmentStrings - Paths(targetPath, targetPath / 'dest, targetPath / "meta.json", targetPath / 'log) - } - - // check if the build itself has changed - def classLoaderSig = Thread.currentThread().getContextClassLoader match { - case scl: SpecialClassLoader => scl.classpathSignature - case ucl: URLClassLoader => - SpecialClassLoader.initialClasspathSignature(ucl) - case _ => Nil - } - case class Timing(label: String, - millis: Int, - cached: Boolean) - object Timing{ - implicit val readWrite: upickle.default.ReadWriter[Timing] = upickle.default.macroRW - } - case class Results(rawValues: Seq[Result[Any]], - evaluated: Agg[Task[_]], - transitive: Agg[Task[_]], - failing: MultiBiMap[Either[Task[_], Labelled[_]], Result.Failing[_]], - timings: IndexedSeq[(Either[Task[_], Labelled[_]], Int, Boolean)], - results: collection.Map[Task[_], Result[Any]]){ - def values = rawValues.collect{case Result.Success(v) => v} - } - def plan(rootModule: BaseModule, goals: Agg[Task[_]]) = { - val transitive = Graph.transitiveTargets(goals) - val topoSorted = Graph.topoSorted(transitive) - val sortedGroups = Graph.groupAroundImportantTargets(topoSorted){ - case t: NamedTask[Any] => - val segments = t.ctx.segments - val finalTaskOverrides = t match{ - case t: Target[_] => - rootModule.millInternal.segmentsToTargets.get(segments).fold(0)(_.ctx.overrides) - - case c: mill.define.Command[_] => - def findMatching(cls: Class[_]): Option[Seq[(Int, EntryPoint[_])]] = { - rootModule.millDiscover.value.get(cls) match{ - case Some(v) => Some(v) - case None => - cls.getSuperclass match{ - case null => None - case superCls => findMatching(superCls) - } - } - } - - findMatching(c.cls) match{ - case Some(v) => - v.find(_._2.name == c.ctx.segment.pathSegments.head).get._1 - // For now we don't properly support overrides for external modules - // that do not appear in the Evaluator's main Discovered listing - case None => 0 - } - - case c: mill.define.Worker[_] => 0 - } - - val additional = - if (finalTaskOverrides == t.ctx.overrides) Nil - else Seq(Segment.Label("overriden")) ++ t.ctx.enclosing.split("\\.|#| ").map(Segment.Label) - - Right(Labelled(t, segments ++ additional)) - case t if 
goals.contains(t) => Left(t) - } - (sortedGroups, transitive) - } -} diff --git a/main/core/src/mill/eval/Tarjans.scala b/main/core/src/mill/eval/Tarjans.scala deleted file mode 100644 index ade335a9..00000000 --- a/main/core/src/mill/eval/Tarjans.scala +++ /dev/null @@ -1,51 +0,0 @@ -package mill.eval - -import scala.collection.mutable - -// Adapted from -// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java -object Tarjans { - def apply(graph0: TraversableOnce[TraversableOnce[Int]]): Seq[Seq[Int]] = { - val graph = graph0.map(_.toArray).toArray - val n = graph.length - val visited = new Array[Boolean](n) - val stack = mutable.ArrayBuffer.empty[Integer] - var time = 0 - val lowlink = new Array[Int](n) - val components = mutable.ArrayBuffer.empty[Seq[Int]] - - - for (u <- 0 until n) { - if (!visited(u)) dfs(u) - } - - def dfs(u: Int): Unit = { - lowlink(u) = time - time += 1 - visited(u) = true - stack.append(u) - var isComponentRoot = true - for (v <- graph(u)) { - if (!visited(v)) dfs(v) - if (lowlink(u) > lowlink(v)) { - lowlink(u) = lowlink(v) - isComponentRoot = false - } - } - if (isComponentRoot) { - val component = mutable.Buffer.empty[Int] - - var done = false - while (!done) { - val x = stack.last - stack.remove(stack.length - 1) - component.append(x) - lowlink(x) = Integer.MAX_VALUE - if (x == u) done = true - } - components.append(component) - } - } - components - } -} diff --git a/main/core/src/mill/eval/package.scala b/main/core/src/mill/eval/package.scala deleted file mode 100644 index 433f9074..00000000 --- a/main/core/src/mill/eval/package.scala +++ /dev/null @@ -1,12 +0,0 @@ -package mill - -package object eval { - // Backwards compatibility forwarders - val Result = mill.api.Result - type Result[+T] = mill.api.Result[T] - - val PathRef = mill.api.PathRef - type PathRef = mill.api.PathRef - - type Logger = mill.api.Logger -} diff --git a/main/core/src/mill/util/AggWrapper.scala b/main/core/src/mill/util/AggWrapper.scala deleted file mode 100644 index 6c107875..00000000 --- a/main/core/src/mill/util/AggWrapper.scala +++ /dev/null @@ -1,119 +0,0 @@ -package mill.util - - - -import scala.collection.mutable -object Strict extends AggWrapper(true) -object Loose extends AggWrapper(false) -sealed class AggWrapper(strictUniqueness: Boolean){ - /** - * A collection with enforced uniqueness, fast contains and deterministic - * ordering. 
Raises an exception if a duplicate is found; call - * `toSeq.distinct` if you explicitly want to make it swallow duplicates - */ - trait Agg[V] extends TraversableOnce[V]{ - def contains(v: V): Boolean - def items: Iterator[V] - def indexed: IndexedSeq[V] - def flatMap[T](f: V => TraversableOnce[T]): Agg[T] - def map[T](f: V => T): Agg[T] - def filter(f: V => Boolean): Agg[V] - def withFilter(f: V => Boolean): Agg[V] - def collect[T](f: PartialFunction[V, T]): Agg[T] - def zipWithIndex: Agg[(V, Int)] - def reverse: Agg[V] - def zip[T](other: Agg[T]): Agg[(V, T)] - def ++[T >: V](other: TraversableOnce[T]): Agg[T] - def length: Int - } - - object Agg{ - def empty[V]: Agg[V] = new Agg.Mutable[V] - implicit def jsonFormat[T: upickle.default.ReadWriter]: upickle.default.ReadWriter[Agg[T]] = - upickle.default.readwriter[Seq[T]].bimap[Agg[T]]( - _.toList, - Agg.from(_) - ) - - def apply[V](items: V*) = from(items) - - implicit def from[V](items: TraversableOnce[V]): Agg[V] = { - val set = new Agg.Mutable[V]() - items.foreach(set.append) - set - } - - - class Mutable[V]() extends Agg[V]{ - - private[this] val set0 = mutable.LinkedHashSet.empty[V] - def contains(v: V) = set0.contains(v) - def append(v: V) = if (!contains(v)){ - set0.add(v) - - }else if (strictUniqueness){ - throw new Exception("Duplicated item inserted into OrderedSet: " + v) - } - def appendAll(vs: Seq[V]) = vs.foreach(append) - def items = set0.iterator - def indexed: IndexedSeq[V] = items.toIndexedSeq - def set: collection.Set[V] = set0 - - def map[T](f: V => T): Agg[T] = { - val output = new Agg.Mutable[T] - for(i <- items) output.append(f(i)) - output - } - def flatMap[T](f: V => TraversableOnce[T]): Agg[T] = { - val output = new Agg.Mutable[T] - for(i <- items) for(i0 <- f(i)) output.append(i0) - output - } - def filter(f: V => Boolean): Agg[V] = { - val output = new Agg.Mutable[V] - for(i <- items) if (f(i)) output.append(i) - output - } - def withFilter(f: V => Boolean): Agg[V] = filter(f) - - def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x)) - - def zipWithIndex = { - var i = 0 - this.map{ x => - i += 1 - (x, i-1) - } - } - - def reverse = Agg.from(indexed.reverseIterator) - - def zip[T](other: Agg[T]) = Agg.from(items.zip(other.items)) - def ++[T >: V](other: TraversableOnce[T]) = Agg.from(items ++ other) - def length: Int = set0.size - - // Members declared in scala.collection.GenTraversableOnce - def isTraversableAgain: Boolean = items.isTraversableAgain - def toIterator: Iterator[V] = items.toIterator - def toStream: Stream[V] = items.toStream - - // Members declared in scala.collection.TraversableOnce - def copyToArray[B >: V](xs: Array[B], start: Int,len: Int): Unit = items.copyToArray(xs, start, len) - def exists(p: V => Boolean): Boolean = items.exists(p) - def find(p: V => Boolean): Option[V] = items.find(p) - def forall(p: V => Boolean): Boolean = items.forall(p) - def foreach[U](f: V => U): Unit = items.foreach(f) - def hasDefiniteSize: Boolean = items.hasDefiniteSize - def isEmpty: Boolean = items.isEmpty - def seq: scala.collection.TraversableOnce[V] = items - def toTraversable: Traversable[V] = items.toTraversable - - override def hashCode() = items.map(_.hashCode()).sum - override def equals(other: Any) = other match{ - case s: Agg[_] => items.sameElements(s.items) - case _ => super.equals(other) - } - override def toString = items.mkString("Agg(", ", ", ")") - } - } -} diff --git a/main/core/src/mill/util/EitherOps.scala b/main/core/src/mill/util/EitherOps.scala deleted 
file mode 100644 index da2552c8..00000000 --- a/main/core/src/mill/util/EitherOps.scala +++ /dev/null @@ -1,18 +0,0 @@ -package mill.util - -import scala.collection.generic.CanBuildFrom -import scala.collection.mutable -import scala.language.higherKinds - -object EitherOps { - - // implementation similar to scala.concurrent.Future#sequence - def sequence[A, B, M[X] <: TraversableOnce[X]](in: M[Either[A, B]])( - implicit cbf: CanBuildFrom[M[Either[A, B]], B, M[B]]): Either[A, M[B]] = { - in.foldLeft[Either[A, mutable.Builder[B, M[B]]]](Right(cbf(in))) { - case (acc, el) => - for (a <- acc; e <- el) yield a += e - } - .map(_.result()) - } -} diff --git a/main/core/src/mill/util/EnclosingClass.scala b/main/core/src/mill/util/EnclosingClass.scala deleted file mode 100644 index a69cc525..00000000 --- a/main/core/src/mill/util/EnclosingClass.scala +++ /dev/null @@ -1,15 +0,0 @@ -package mill.util - -import sourcecode.Compat.Context -import language.experimental.macros -case class EnclosingClass(value: Class[_]) -object EnclosingClass{ - def apply()(implicit c: EnclosingClass) = c.value - implicit def generate: EnclosingClass = macro impl - def impl(c: Context): c.Tree = { - import c.universe._ - val cls = c.internal.enclosingOwner.owner.asType.asClass - // q"new _root_.mill.define.EnclosingClass(classOf[$cls])" - q"new _root_.mill.util.EnclosingClass(this.getClass)" - } -} diff --git a/main/core/src/mill/util/JsonFormatters.scala b/main/core/src/mill/util/JsonFormatters.scala deleted file mode 100644 index 830782c6..00000000 --- a/main/core/src/mill/util/JsonFormatters.scala +++ /dev/null @@ -1,10 +0,0 @@ -package mill.util - -import upickle.default.{ReadWriter => RW} - -trait JsonFormatters extends mill.api.JsonFormatters{ - implicit lazy val modFormat: RW[coursier.Module] = upickle.default.macroRW - implicit lazy val depFormat: RW[coursier.Dependency]= upickle.default.macroRW - implicit lazy val attrFormat: RW[coursier.Attributes] = upickle.default.macroRW -} -object JsonFormatters extends JsonFormatters diff --git a/main/core/src/mill/util/Loggers.scala b/main/core/src/mill/util/Loggers.scala deleted file mode 100644 index aab1a324..00000000 --- a/main/core/src/mill/util/Loggers.scala +++ /dev/null @@ -1,190 +0,0 @@ -package mill.util - -import java.io._ -import mill.api.Logger - -object DummyLogger extends Logger { - def colored = false - - object errorStream extends PrintStream(_ => ()) - object outputStream extends PrintStream(_ => ()) - val inStream = new ByteArrayInputStream(Array()) - - def info(s: String) = () - def error(s: String) = () - def ticker(s: String) = () - def debug(s: String) = () -} - -class CallbackStream(wrapped: OutputStream, - setPrintState0: PrintState => Unit) extends OutputStream{ - def setPrintState(c: Char) = { - setPrintState0( - c match{ - case '\n' => PrintState.Newline - case '\r' => PrintState.Newline - case _ => PrintState.Middle - } - ) - } - override def write(b: Array[Byte]): Unit = { - if (b.nonEmpty) setPrintState(b(b.length-1).toChar) - wrapped.write(b) - } - - override def write(b: Array[Byte], off: Int, len: Int): Unit = { - if (len != 0) setPrintState(b(off+len-1).toChar) - wrapped.write(b, off, len) - } - - def write(b: Int) = { - setPrintState(b.toChar) - wrapped.write(b) - } -} -sealed trait PrintState -object PrintState{ - case object Ticker extends PrintState - case object Newline extends PrintState - case object Middle extends PrintState -} - -case class PrintLogger( - colored: Boolean, - disableTicker: Boolean, - colors: 
ammonite.util.Colors, - outStream: PrintStream, - infoStream: PrintStream, - errStream: PrintStream, - inStream: InputStream, - debugEnabled: Boolean - ) extends Logger { - - var printState: PrintState = PrintState.Newline - - override val errorStream = new PrintStream(new CallbackStream(errStream, printState = _)) - override val outputStream = new PrintStream(new CallbackStream(outStream, printState = _)) - - - def info(s: String) = { - printState = PrintState.Newline - infoStream.println(colors.info()(s)) - } - def error(s: String) = { - printState = PrintState.Newline - errStream.println(colors.error()(s)) - } - def ticker(s: String) = { - if(!disableTicker) { - printState match{ - case PrintState.Newline => - infoStream.println(colors.info()(s)) - case PrintState.Middle => - infoStream.println() - infoStream.println(colors.info()(s)) - case PrintState.Ticker => - val p = new PrintWriter(infoStream) - val nav = new ammonite.terminal.AnsiNav(p) - nav.up(1) - nav.clearLine(2) - nav.left(9999) - p.flush() - - infoStream.println(colors.info()(s)) - } - printState = PrintState.Ticker - } - } - - def debug(s: String) = if (debugEnabled) { - printState = PrintState.Newline - errStream.println(s) - } -} - -case class FileLogger(colored: Boolean, file: os.Path, debugEnabled: Boolean) extends Logger { - private[this] var outputStreamUsed: Boolean = false - - lazy val outputStream = { - if (!outputStreamUsed) os.remove.all(file) - outputStreamUsed = true - new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath)) - } - - lazy val errorStream = { - if (!outputStreamUsed) os.remove.all(file) - outputStreamUsed = true - new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath)) - } - - def info(s: String) = outputStream.println(s) - def error(s: String) = outputStream.println(s) - def ticker(s: String) = outputStream.println(s) - def debug(s: String) = if (debugEnabled) outputStream.println(s) - val inStream: InputStream = mill.api.DummyInputStream - override def close() = { - if (outputStreamUsed) - outputStream.close() - } -} - - - -class MultiStream(stream1: OutputStream, stream2: OutputStream) extends PrintStream(new OutputStream { - def write(b: Int): Unit = { - stream1.write(b) - stream2.write(b) - } - override def write(b: Array[Byte]): Unit = { - stream1.write(b) - stream2.write(b) - } - override def write(b: Array[Byte], off: Int, len: Int) = { - stream1.write(b, off, len) - stream2.write(b, off, len) - } - override def flush() = { - stream1.flush() - stream2.flush() - } - override def close() = { - stream1.close() - stream2.close() - } -}) - -case class MultiLogger(colored: Boolean, logger1: Logger, logger2: Logger) extends Logger { - - - lazy val outputStream: PrintStream = new MultiStream(logger1.outputStream, logger2.outputStream) - - lazy val errorStream: PrintStream = new MultiStream(logger1.errorStream, logger2.errorStream) - - lazy val inStream = Seq(logger1, logger2).collectFirst{case t: PrintLogger => t} match{ - case Some(x) => x.inStream - case None => new ByteArrayInputStream(Array()) - } - - def info(s: String) = { - logger1.info(s) - logger2.info(s) - } - def error(s: String) = { - logger1.error(s) - logger2.error(s) - } - def ticker(s: String) = { - logger1.ticker(s) - logger2.ticker(s) - } - - def debug(s: String) = { - logger1.debug(s) - logger2.debug(s) - } - - override def close() = { - logger1.close() - logger2.close() - } -} diff --git a/main/core/src/mill/util/MultiBiMap.scala b/main/core/src/mill/util/MultiBiMap.scala deleted file mode 100644 index 
73bb42c4..00000000 --- a/main/core/src/mill/util/MultiBiMap.scala +++ /dev/null @@ -1,57 +0,0 @@ -package mill.util - -import scala.collection.mutable -import Strict.Agg - -/** - * A map from keys to collections of values: you can assign multiple values - * to any particular key. Also allows lookups in both directions: what values - * are assigned to a key or what key a value is assigned to. - */ -trait MultiBiMap[K, V]{ - def containsValue(v: V): Boolean - def lookupKey(k: K): Agg[V] - def lookupValue(v: V): K - def lookupValueOpt(v: V): Option[K] - def add(k: K, v: V): Unit - def removeAll(k: K): Agg[V] - def addAll(k: K, vs: TraversableOnce[V]): Unit - def keys(): Iterator[K] - def items(): Iterator[(K, Agg[V])] - def values(): Iterator[Agg[V]] - def keyCount: Int -} - -object MultiBiMap{ - - class Mutable[K, V]() extends MultiBiMap[K, V]{ - private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K] - private[this] val keyToValues = mutable.LinkedHashMap.empty[K, Agg.Mutable[V]] - def containsValue(v: V) = valueToKey.contains(v) - def lookupKey(k: K) = keyToValues(k) - def lookupKeyOpt(k: K) = keyToValues.get(k) - def lookupValue(v: V) = valueToKey(v) - def lookupValueOpt(v: V) = valueToKey.get(v) - def add(k: K, v: V): Unit = { - valueToKey(v) = k - keyToValues.getOrElseUpdate(k, new Agg.Mutable[V]()).append(v) - } - def removeAll(k: K): Agg[V] = keyToValues.get(k) match { - case None => Agg() - case Some(vs) => - vs.foreach(valueToKey.remove) - - keyToValues.remove(k) - vs - } - def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _)) - - def keys() = keyToValues.keysIterator - - def values() = keyToValues.valuesIterator - - def items() = keyToValues.iterator - - def keyCount = keyToValues.size - } -} diff --git a/main/core/src/mill/util/ParseArgs.scala b/main/core/src/mill/util/ParseArgs.scala deleted file mode 100644 index fc1a8ab3..00000000 --- a/main/core/src/mill/util/ParseArgs.scala +++ /dev/null @@ -1,137 +0,0 @@ -package mill.util - -import fastparse._, NoWhitespace._ -import mill.define.{Segment, Segments} - -object ParseArgs { - - def apply(scriptArgs: Seq[String], - multiSelect: Boolean): Either[String, (List[(Option[Segments], Segments)], Seq[String])] = { - val (selectors, args) = extractSelsAndArgs(scriptArgs, multiSelect) - for { - _ <- validateSelectors(selectors) - expandedSelectors <- EitherOps - .sequence(selectors.map(expandBraces)) - .map(_.flatten) - selectors <- EitherOps.sequence(expandedSelectors.map(extractSegments)) - } yield (selectors.toList, args) - } - - def extractSelsAndArgs(scriptArgs: Seq[String], - multiSelect: Boolean): (Seq[String], Seq[String]) = { - - if (multiSelect) { - val dd = scriptArgs.indexOf("--") - val selectors = if (dd == -1) scriptArgs else scriptArgs.take(dd) - val args = if (dd == -1) Seq.empty else scriptArgs.drop(dd + 1) - - (selectors, args) - } else { - (scriptArgs.take(1), scriptArgs.drop(1)) - } - } - - private def validateSelectors(selectors: Seq[String]): Either[String, Unit] = { - if (selectors.isEmpty || selectors.exists(_.isEmpty)) - Left("Selector cannot be empty") - else Right(()) - } - - def expandBraces(selectorString: String): Either[String, List[String]] = { - parseBraceExpansion(selectorString) match { - case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}") - case Parsed.Success(expanded, _) => Right(expanded.toList) - } - } - - private sealed trait Fragment - private object Fragment { - case class Keep(value: String) extends Fragment - case class Expand(values: 
List[List[Fragment]]) extends Fragment - - def unfold(fragments: List[Fragment]): Seq[String] = { - fragments match { - case head :: rest => - val prefixes = head match { - case Keep(v) => Seq(v) - case Expand(Nil) => Seq("{}") - case Expand(List(vs)) => unfold(vs).map("{" + _ + "}") - case Expand(vss) => vss.flatMap(unfold) - } - for { - prefix <- prefixes - suffix <- unfold(rest) - } yield prefix + suffix - - case Nil => Seq("") - } - } - } - - private object BraceExpansionParser { - def plainChars[_: P] = - P(CharsWhile(c => c != ',' && c != '{' && c != '}')).!.map(Fragment.Keep) - - def toExpand[_: P]: P[Fragment] = - P("{" ~ braceParser.rep(1).rep(sep = ",") ~ "}").map( - x => Fragment.Expand(x.toList.map(_.toList)) - ) - - def braceParser[_: P] = P(toExpand | plainChars) - - def parser[_: P] = P(braceParser.rep(1).rep(sep = ",") ~ End).map { vss => - def unfold(vss: List[Seq[String]]): Seq[String] = { - vss match { - case Nil => Seq("") - case head :: rest => - for { - str <- head - r <- unfold(rest) - } yield - r match { - case "" => str - case _ => str + "," + r - } - } - } - - val stringss = vss.map(x => Fragment.unfold(x.toList)).toList - unfold(stringss) - } - } - - private def parseBraceExpansion(input: String) = { - - - parse( - input, - BraceExpansionParser.parser(_) - ) - } - - def extractSegments(selectorString: String): Either[String, (Option[Segments], Segments)] = - parseSelector(selectorString) match { - case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}") - case Parsed.Success(selector, _) => Right(selector) - } - - private def ident[_: P] = P( CharsWhileIn("a-zA-Z0-9_\\-") ).! - - def standaloneIdent[_: P] = P(Start ~ ident ~ End ) - def isLegalIdentifier(identifier: String): Boolean = - parse(identifier, standaloneIdent(_)).isInstanceOf[Parsed.Success[_]] - - private def parseSelector(input: String) = { - def ident2[_: P] = P( CharsWhileIn("a-zA-Z0-9_\\-.") ).! - def segment[_: P] = P( ident ).map( Segment.Label) - def crossSegment[_: P] = P("[" ~ ident2.rep(1, sep = ",") ~ "]").map(Segment.Cross) - def simpleQuery[_: P] = P(segment ~ ("." ~ segment | crossSegment).rep).map { - case (h, rest) => Segments(h :: rest.toList:_*) - } - def query[_: P] = P( simpleQuery ~ ("/" ~/ simpleQuery).?).map{ - case (q, None) => (None, q) - case (q, Some(q2)) => (Some(q), q2) - } - parse(input, query(_)) - } -} diff --git a/main/core/src/mill/util/Router.scala b/main/core/src/mill/util/Router.scala deleted file mode 100644 index 5dd3c947..00000000 --- a/main/core/src/mill/util/Router.scala +++ /dev/null @@ -1,451 +0,0 @@ -package mill.util - -import ammonite.main.Compat -import language.experimental.macros - -import scala.annotation.StaticAnnotation -import scala.collection.mutable -import scala.reflect.macros.blackbox.Context - -/** - * More or less a minimal version of Autowire's Server that lets you generate - * a set of "routes" from the methods defined in an object, and call them - * using passing in name/args/kwargs via Java reflection, without having to - * generate/compile code or use Scala reflection. This saves us spinning up - * the Scala compiler and greatly reduces the startup time of cached scripts. - */ -object Router{ - /** - * Allows you to query how many things are overriden by the enclosing owner. 
- */ - case class Overrides(value: Int) - object Overrides{ - def apply()(implicit c: Overrides) = c.value - implicit def generate: Overrides = macro impl - def impl(c: Context): c.Tree = { - import c.universe._ - q"new _root_.mill.util.Router.Overrides(${c.internal.enclosingOwner.overrides.length})" - } - } - - class doc(s: String) extends StaticAnnotation - class main extends StaticAnnotation - def generateRoutes[T]: Seq[Router.EntryPoint[T]] = macro generateRoutesImpl[T] - def generateRoutesImpl[T: c.WeakTypeTag](c: Context): c.Expr[Seq[EntryPoint[T]]] = { - import c.universe._ - val r = new Router(c) - val allRoutes = r.getAllRoutesForClass( - weakTypeOf[T].asInstanceOf[r.c.Type] - ).asInstanceOf[Iterable[c.Tree]] - - c.Expr[Seq[EntryPoint[T]]](q"_root_.scala.Seq(..$allRoutes)") - } - - /** - * Models what is known by the router about a single argument: that it has - * a [[name]], a human-readable [[typeString]] describing what the type is - * (just for logging and reading, not a replacement for a `TypeTag`) and - * possible a function that can compute its default value - */ - case class ArgSig[T, V](name: String, - typeString: String, - doc: Option[String], - default: Option[T => V]) - (implicit val reads: scopt.Read[V]) - - def stripDashes(s: String) = { - if (s.startsWith("--")) s.drop(2) - else if (s.startsWith("-")) s.drop(1) - else s - } - /** - * What is known about a single endpoint for our routes. It has a [[name]], - * [[argSignatures]] for each argument, and a macro-generated [[invoke0]] - * that performs all the necessary argument parsing and de-serialization. - * - * Realistically, you will probably spend most of your time calling [[invoke]] - * instead, which provides a nicer API to call it that mimmicks the API of - * calling a Scala method. 
- */ - case class EntryPoint[T](name: String, - argSignatures: Seq[ArgSig[T, _]], - doc: Option[String], - varargs: Boolean, - invoke0: (T, Map[String, String], Seq[String], Seq[ArgSig[T, _]]) => Result[Any], - overrides: Int){ - def invoke(target: T, groupedArgs: Seq[(String, Option[String])]): Result[Any] = { - var remainingArgSignatures = argSignatures.toList.filter(_.reads.arity > 0) - - val accumulatedKeywords = mutable.Map.empty[ArgSig[T, _], mutable.Buffer[String]] - val keywordableArgs = if (varargs) argSignatures.dropRight(1) else argSignatures - - for(arg <- keywordableArgs) accumulatedKeywords(arg) = mutable.Buffer.empty - - val leftoverArgs = mutable.Buffer.empty[String] - - val lookupArgSig = Map(argSignatures.map(x => (x.name, x)):_*) - - var incomplete: Option[ArgSig[T, _]] = None - - for(group <- groupedArgs){ - - group match{ - case (value, None) => - if (value(0) == '-' && !varargs){ - lookupArgSig.get(stripDashes(value)) match{ - case None => leftoverArgs.append(value) - case Some(sig) => incomplete = Some(sig) - } - - } else remainingArgSignatures match { - case Nil => leftoverArgs.append(value) - case last :: Nil if varargs => leftoverArgs.append(value) - case next :: rest => - accumulatedKeywords(next).append(value) - remainingArgSignatures = rest - } - case (rawKey, Some(value)) => - val key = stripDashes(rawKey) - lookupArgSig.get(key) match{ - case Some(x) if accumulatedKeywords.contains(x) => - if (accumulatedKeywords(x).nonEmpty && varargs){ - leftoverArgs.append(rawKey, value) - }else{ - accumulatedKeywords(x).append(value) - remainingArgSignatures = remainingArgSignatures.filter(_.name != key) - } - case _ => - leftoverArgs.append(rawKey, value) - } - } - } - - val missing0 = remainingArgSignatures - .filter(_.default.isEmpty) - - val missing = if(varargs) { - missing0.filter(_ != argSignatures.last) - } else { - missing0.filter(x => incomplete != Some(x)) - } - val duplicates = accumulatedKeywords.toSeq.filter(_._2.length > 1) - - if ( - incomplete.nonEmpty || - missing.nonEmpty || - duplicates.nonEmpty || - (leftoverArgs.nonEmpty && !varargs) - ){ - Result.Error.MismatchedArguments( - missing = missing, - unknown = leftoverArgs, - duplicate = duplicates, - incomplete = incomplete - - ) - } else { - val mapping = accumulatedKeywords - .iterator - .collect{case (k, Seq(single)) => (k.name, single)} - .toMap - - try invoke0(target, mapping, leftoverArgs, argSignatures) - catch{case e: Throwable => - Result.Error.Exception(e) - } - } - } - } - - def tryEither[T](t: => T, error: Throwable => Result.ParamError) = { - try Right(t) - catch{ case e: Throwable => Left(error(e))} - } - def readVarargs(arg: ArgSig[_, _], - values: Seq[String], - thunk: String => Any) = { - val attempts = - for(item <- values) - yield tryEither(thunk(item), Result.ParamError.Invalid(arg, item, _)) - - - val bad = attempts.collect{ case Left(x) => x} - if (bad.nonEmpty) Left(bad) - else Right(attempts.collect{case Right(x) => x}) - } - def read(dict: Map[String, String], - default: => Option[Any], - arg: ArgSig[_, _], - thunk: String => Any): FailMaybe = { - arg.reads.arity match{ - case 0 => - tryEither(thunk(null), Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_)) - case 1 => - dict.get(arg.name) match{ - case None => - tryEither(default.get, Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_)) - - case Some(x) => - tryEither(thunk(x), Result.ParamError.Invalid(arg, x, _)).left.map(Seq(_)) - } - } - - } - - /** - * Represents what comes out of an attempt to invoke an 
[[EntryPoint]]. - * Could succeed with a value, but could fail in many different ways. - */ - sealed trait Result[+T] - object Result{ - - /** - * Invoking the [[EntryPoint]] was totally successful, and returned a - * result - */ - case class Success[T](value: T) extends Result[T] - - /** - * Invoking the [[EntryPoint]] was not successful - */ - sealed trait Error extends Result[Nothing] - object Error{ - - /** - * Invoking the [[EntryPoint]] failed with an exception while executing - * code within it. - */ - case class Exception(t: Throwable) extends Error - - /** - * Invoking the [[EntryPoint]] failed because the arguments provided - * did not line up with the arguments expected - */ - case class MismatchedArguments(missing: Seq[ArgSig[_, _]], - unknown: Seq[String], - duplicate: Seq[(ArgSig[_, _], Seq[String])], - incomplete: Option[ArgSig[_, _]]) extends Error - /** - * Invoking the [[EntryPoint]] failed because there were problems - * deserializing/parsing individual arguments - */ - case class InvalidArguments(values: Seq[ParamError]) extends Error - } - - sealed trait ParamError - object ParamError{ - /** - * Something went wrong trying to de-serialize the input parameter; - * the thrown exception is stored in [[ex]] - */ - case class Invalid(arg: ArgSig[_, _], value: String, ex: Throwable) extends ParamError - /** - * Something went wrong trying to evaluate the default value - * for this input parameter - */ - case class DefaultFailed(arg: ArgSig[_, _], ex: Throwable) extends ParamError - } - } - - - type FailMaybe = Either[Seq[Result.ParamError], Any] - type FailAll = Either[Seq[Result.ParamError], Seq[Any]] - - def validate(args: Seq[FailMaybe]): Result[Seq[Any]] = { - val lefts = args.collect{case Left(x) => x}.flatten - - if (lefts.nonEmpty) Result.Error.InvalidArguments(lefts) - else { - val rights = args.collect{case Right(x) => x} - Result.Success(rights) - } - } - - def makeReadCall(dict: Map[String, String], - default: => Option[Any], - arg: ArgSig[_, _]) = { - read(dict, default, arg, arg.reads.reads(_)) - } - def makeReadVarargsCall(arg: ArgSig[_, _], values: Seq[String]) = { - readVarargs(arg, values, arg.reads.reads(_)) - } -} - - -class Router [C <: Context](val c: C) { - import c.universe._ - def getValsOrMeths(curCls: Type): Iterable[MethodSymbol] = { - def isAMemberOfAnyRef(member: Symbol) = { - // AnyRef is an alias symbol, we go to the real "owner" of these methods - val anyRefSym = c.mirror.universe.definitions.ObjectClass - member.owner == anyRefSym - } - val extractableMembers = for { - member <- curCls.members.toList.reverse - if !isAMemberOfAnyRef(member) - if !member.isSynthetic - if member.isPublic - if member.isTerm - memTerm = member.asTerm - if memTerm.isMethod - if !memTerm.isModule - } yield memTerm.asMethod - - extractableMembers flatMap { case memTerm => - if (memTerm.isSetter || memTerm.isConstructor || memTerm.isGetter) Nil - else Seq(memTerm) - - } - } - - - - def extractMethod(meth: MethodSymbol, curCls: c.universe.Type): c.universe.Tree = { - val baseArgSym = TermName(c.freshName()) - val flattenedArgLists = meth.paramss.flatten - def hasDefault(i: Int) = { - val defaultName = s"${meth.name}$$default$$${i + 1}" - if (curCls.members.exists(_.name.toString == defaultName)) Some(defaultName) - else None - } - val argListSymbol = q"${c.fresh[TermName]("argsList")}" - val extrasSymbol = q"${c.fresh[TermName]("extras")}" - val defaults = for ((arg, i) <- flattenedArgLists.zipWithIndex) yield { - val arg = TermName(c.freshName()) - 
hasDefault(i).map(defaultName => q"($arg: $curCls) => $arg.${newTermName(defaultName)}") - } - - def getDocAnnotation(annotations: List[Annotation]) = { - val (docTrees, remaining) = annotations.partition(_.tpe =:= typeOf[Router.doc]) - val docValues = for { - doc <- docTrees - if doc.scalaArgs.head.isInstanceOf[Literal] - l = doc.scalaArgs.head.asInstanceOf[Literal] - if l.value.value.isInstanceOf[String] - } yield l.value.value.asInstanceOf[String] - (remaining, docValues.headOption) - } - - def unwrapVarargType(arg: Symbol) = { - val vararg = arg.typeSignature.typeSymbol == definitions.RepeatedParamClass - val unwrappedType = - if (!vararg) arg.typeSignature - else arg.typeSignature.asInstanceOf[TypeRef].args(0) - - (vararg, unwrappedType) - } - - val argSigSymbol = q"${c.fresh[TermName]("argSigs")}" - - val (_, methodDoc) = getDocAnnotation(meth.annotations) - val readArgSigs = for( - ((arg, defaultOpt), i) <- flattenedArgLists.zip(defaults).zipWithIndex - ) yield { - - val (vararg, varargUnwrappedType) = unwrapVarargType(arg) - - val default = - if (vararg) q"scala.Some(scala.Nil)" - else defaultOpt match { - case Some(defaultExpr) => q"scala.Some($defaultExpr($baseArgSym))" - case None => q"scala.None" - } - - val (docUnwrappedType, docOpt) = varargUnwrappedType match{ - case t: AnnotatedType => - - val (remaining, docValue) = getDocAnnotation(t.annotations) - if (remaining.isEmpty) (t.underlying, docValue) - else (Compat.copyAnnotatedType(c)(t, remaining), docValue) - - case t => (t, None) - } - - val docTree = docOpt match{ - case None => q"scala.None" - case Some(s) => q"scala.Some($s)" - } - - - val argSig = q""" - mill.util.Router.ArgSig[$curCls, $docUnwrappedType]( - ${arg.name.toString}, - ${docUnwrappedType.toString + (if(vararg) "*" else "")}, - $docTree, - $defaultOpt - ) - """ - - val reader = - if(vararg) q""" - mill.util.Router.makeReadVarargsCall( - $argSigSymbol($i), - $extrasSymbol - ) - """ else q""" - mill.util.Router.makeReadCall( - $argListSymbol, - $default, - $argSigSymbol($i) - ) - """ - c.internal.setPos(reader, meth.pos) - (reader, argSig, vararg) - } - - val readArgs = readArgSigs.map(_._1) - val argSigs = readArgSigs.map(_._2) - val varargs = readArgSigs.map(_._3) - val (argNames, argNameCasts) = flattenedArgLists.map { arg => - val (vararg, unwrappedType) = unwrapVarargType(arg) - ( - pq"${arg.name.toTermName}", - if (!vararg) q"${arg.name.toTermName}.asInstanceOf[$unwrappedType]" - else q"${arg.name.toTermName}.asInstanceOf[Seq[$unwrappedType]]: _*" - - ) - }.unzip - - - val res = q""" - mill.util.Router.EntryPoint[$curCls]( - ${meth.name.toString}, - scala.Seq(..$argSigs), - ${methodDoc match{ - case None => q"scala.None" - case Some(s) => q"scala.Some($s)" - }}, - ${varargs.contains(true)}, - ( - $baseArgSym: $curCls, - $argListSymbol: Map[String, String], - $extrasSymbol: Seq[String], - $argSigSymbol: Seq[mill.util.Router.ArgSig[$curCls, _]] - ) => - mill.util.Router.validate(Seq(..$readArgs)) match{ - case mill.util.Router.Result.Success(List(..$argNames)) => - mill.util.Router.Result.Success( - $baseArgSym.${meth.name.toTermName}(..$argNameCasts) - ) - case x: mill.util.Router.Result.Error => x - }, - ammonite.main.Router.Overrides() - ) - """ - res - } - - def hasMainAnnotation(t: MethodSymbol) = { - t.annotations.exists(_.tpe =:= typeOf[Router.main]) - } - def getAllRoutesForClass(curCls: Type, - pred: MethodSymbol => Boolean = hasMainAnnotation) - : Iterable[c.universe.Tree] = { - for{ - t <- getValsOrMeths(curCls) - if pred(t) - } yield { - 
extractMethod(t, curCls) - } - } -} diff --git a/main/core/src/mill/util/Scripts.scala b/main/core/src/mill/util/Scripts.scala deleted file mode 100644 index 65eb6b2b..00000000 --- a/main/core/src/mill/util/Scripts.scala +++ /dev/null @@ -1,330 +0,0 @@ -package mill.util - -import java.nio.file.NoSuchFileException - - -import ammonite.runtime.Evaluator.AmmoniteExit -import ammonite.util.Name.backtickWrap -import ammonite.util.Util.CodeSource -import ammonite.util.{Name, Res, Util} -import fastparse.internal.Util.literalize -import mill.util.Router.{ArgSig, EntryPoint} - -/** - * Logic around using Ammonite as a script-runner; invoking scripts via the - * macro-generated [[Router]], and pretty-printing any output or error messages - */ -object Scripts { - def groupArgs(flatArgs: List[String]): Seq[(String, Option[String])] = { - var keywordTokens = flatArgs - var scriptArgs = Vector.empty[(String, Option[String])] - - while(keywordTokens.nonEmpty) keywordTokens match{ - case List(head, next, rest@_*) if head.startsWith("-") => - scriptArgs = scriptArgs :+ (head, Some(next)) - keywordTokens = rest.toList - case List(head, rest@_*) => - scriptArgs = scriptArgs :+ (head, None) - keywordTokens = rest.toList - - } - scriptArgs - } - - def runScript(wd: os.Path, - path: os.Path, - interp: ammonite.interp.Interpreter, - scriptArgs: Seq[(String, Option[String])] = Nil) = { - interp.watch(path) - val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd) - - for{ - scriptTxt <- try Res.Success(Util.normalizeNewlines(os.read(path))) catch{ - case e: NoSuchFileException => Res.Failure("Script file not found: " + path) - } - - processed <- interp.processModule( - scriptTxt, - CodeSource(wrapper, pkg, Seq(Name("ammonite"), Name("$file")), Some(path)), - autoImport = true, - // Not sure why we need to wrap this in a separate `$routes` object, - // but if we don't do it for some reason the `generateRoutes` macro - // does not see the annotations on the methods of the outer-wrapper. - // It can inspect the type and its methods fine, it's just the - // `methodsymbol.annotations` ends up being empty. 
- extraCode = Util.normalizeNewlines( - s""" - |val $$routesOuter = this - |object $$routes - |extends scala.Function0[scala.Seq[ammonite.main.Router.EntryPoint[$$routesOuter.type]]]{ - | def apply() = ammonite.main.Router.generateRoutes[$$routesOuter.type] - |} - """.stripMargin - ), - hardcoded = true - ) - - routeClsName <- processed.blockInfo.lastOption match{ - case Some(meta) => Res.Success(meta.id.wrapperPath) - case None => Res.Skip - } - - mainCls = - interp - .evalClassloader - .loadClass(processed.blockInfo.last.id.wrapperPath + "$") - - routesCls = - interp - .evalClassloader - .loadClass(routeClsName + "$$routes$") - - scriptMains = - routesCls - .getField("MODULE$") - .get(null) - .asInstanceOf[() => Seq[Router.EntryPoint[Any]]] - .apply() - - - mainObj = mainCls.getField("MODULE$").get(null) - - res <- Util.withContextClassloader(interp.evalClassloader){ - scriptMains match { - // If there are no @main methods, there's nothing to do - case Seq() => - if (scriptArgs.isEmpty) Res.Success(()) - else { - val scriptArgString = - scriptArgs.flatMap{case (a, b) => Seq(a) ++ b}.map(literalize(_)) - .mkString(" ") - - Res.Failure("Script " + path.last + " does not take arguments: " + scriptArgString) - } - - // If there's one @main method, we run it with all args - case Seq(main) => runMainMethod(mainObj, main, scriptArgs) - - // If there are multiple @main methods, we use the first arg to decide - // which method to run, and pass the rest to that main method - case mainMethods => - val suffix = formatMainMethods(mainObj, mainMethods) - scriptArgs match{ - case Seq() => - Res.Failure( - s"Need to specify a subcommand to call when running " + path.last + suffix - ) - case Seq((head, Some(_)), tail @ _*) => - Res.Failure( - "To select a subcommand to run, you don't need --s." + Util.newLine + - s"Did you mean `${head.drop(2)}` instead of `$head`?" 
- ) - case Seq((head, None), tail @ _*) => - mainMethods.find(_.name == head) match{ - case None => - Res.Failure( - s"Unable to find subcommand: " + backtickWrap(head) + suffix - ) - case Some(main) => - runMainMethod(mainObj, main, tail) - } - } - } - } - } yield res - } - def formatMainMethods[T](base: T, mainMethods: Seq[Router.EntryPoint[T]]) = { - if (mainMethods.isEmpty) "" - else{ - val leftColWidth = getLeftColWidth(mainMethods.flatMap(_.argSignatures)) - - val methods = - for(main <- mainMethods) - yield formatMainMethodSignature(base, main, 2, leftColWidth) - - Util.normalizeNewlines( - s""" - | - |Available subcommands: - | - |${methods.mkString(Util.newLine)}""".stripMargin - ) - } - } - def getLeftColWidth[T](items: Seq[ArgSig[T, _]]) = { - items.map(_.name.length + 2) match{ - case Nil => 0 - case x => x.max - } - } - def formatMainMethodSignature[T](base: T, - main: Router.EntryPoint[T], - leftIndent: Int, - leftColWidth: Int) = { - // +2 for space on right of left col - val args = main.argSignatures.map(renderArg(base, _, leftColWidth + leftIndent + 2 + 2, 80)) - - val leftIndentStr = " " * leftIndent - val argStrings = - for((lhs, rhs) <- args) - yield { - val lhsPadded = lhs.padTo(leftColWidth, ' ') - val rhsPadded = rhs.linesIterator.mkString(Util.newLine) - s"$leftIndentStr $lhsPadded $rhsPadded" - } - val mainDocSuffix = main.doc match{ - case Some(d) => Util.newLine + leftIndentStr + softWrap(d, leftIndent, 80) - case None => "" - } - - s"""$leftIndentStr${main.name}$mainDocSuffix - |${argStrings.map(_ + Util.newLine).mkString}""".stripMargin - } - def runMainMethod[T](base: T, - mainMethod: Router.EntryPoint[T], - scriptArgs: Seq[(String, Option[String])]): Res[Any] = { - val leftColWidth = getLeftColWidth(mainMethod.argSignatures) - - def expectedMsg = formatMainMethodSignature(base: T, mainMethod, 0, leftColWidth) - - def pluralize(s: String, n: Int) = { - if (n == 1) s else s + "s" - } - - mainMethod.invoke(base, scriptArgs) match{ - case Router.Result.Success(x) => Res.Success(x) - case Router.Result.Error.Exception(x: AmmoniteExit) => Res.Success(x.value) - case Router.Result.Error.Exception(x) => Res.Exception(x, "") - case Router.Result.Error.MismatchedArguments(missing, unknown, duplicate, incomplete) => - val missingStr = - if (missing.isEmpty) "" - else { - val chunks = - for (x <- missing) - yield "--" + x.name + ": " + x.typeString - - val argumentsStr = pluralize("argument", chunks.length) - s"Missing $argumentsStr: (${chunks.mkString(", ")})" + Util.newLine - } - - - val unknownStr = - if (unknown.isEmpty) "" - else { - val argumentsStr = pluralize("argument", unknown.length) - s"Unknown $argumentsStr: " + unknown.map(literalize(_)).mkString(" ") + Util.newLine - } - - val duplicateStr = - if (duplicate.isEmpty) "" - else { - val lines = - for ((sig, options) <- duplicate) - yield { - s"Duplicate arguments for (--${sig.name}: ${sig.typeString}): " + - options.map(literalize(_)).mkString(" ") + Util.newLine - } - - lines.mkString - - } - val incompleteStr = incomplete match{ - case None => "" - case Some(sig) => - s"Option (--${sig.name}: ${sig.typeString}) is missing a corresponding value" + - Util.newLine - - } - - Res.Failure( - Util.normalizeNewlines( - s"""$missingStr$unknownStr$duplicateStr$incompleteStr - |Arguments provided did not match expected signature: - | - |$expectedMsg - |""".stripMargin - ) - ) - - case Router.Result.Error.InvalidArguments(x) => - val argumentsStr = pluralize("argument", x.length) - val thingies = x.map{ - case 
Router.Result.ParamError.Invalid(p, v, ex) => - val literalV = literalize(v) - val rendered = {renderArgShort(p)} - s"$rendered: ${p.typeString} = $literalV failed to parse with $ex" - case Router.Result.ParamError.DefaultFailed(p, ex) => - s"${renderArgShort(p)}'s default value failed to evaluate with $ex" - } - - Res.Failure( - Util.normalizeNewlines( - s"""The following $argumentsStr failed to parse: - | - |${thingies.mkString(Util.newLine)} - | - |expected signature: - | - |$expectedMsg - """.stripMargin - ) - ) - } - } - - def softWrap(s: String, leftOffset: Int, maxWidth: Int) = { - val oneLine = s.linesIterator.mkString(" ").split(' ') - - lazy val indent = " " * leftOffset - - val output = new StringBuilder(oneLine.head) - var currentLineWidth = oneLine.head.length - for(chunk <- oneLine.tail){ - val addedWidth = currentLineWidth + chunk.length + 1 - if (addedWidth > maxWidth){ - output.append(Util.newLine + indent) - output.append(chunk) - currentLineWidth = chunk.length - } else{ - currentLineWidth = addedWidth - output.append(' ') - output.append(chunk) - } - } - output.mkString - } - def renderArgShort[T](arg: ArgSig[T, _]) = "--" + backtickWrap(arg.name) - def renderArg[T](base: T, - arg: ArgSig[T, _], - leftOffset: Int, - wrappedWidth: Int): (String, String) = { - val suffix = arg.default match{ - case Some(f) => " (default " + f(base) + ")" - case None => "" - } - val docSuffix = arg.doc match{ - case Some(d) => ": " + d - case None => "" - } - val wrapped = softWrap( - arg.typeString + suffix + docSuffix, - leftOffset, - wrappedWidth - leftOffset - ) - (renderArgShort(arg), wrapped) - } - - - def mainMethodDetails[T](ep: EntryPoint[T]) = { - ep.argSignatures.collect{ - case ArgSig(name, tpe, Some(doc), default) => - Util.newLine + name + " // " + doc - }.mkString - } - - /** - * Additional [[scopt.Read]] instance to teach it how to read Ammonite paths - */ - implicit def pathScoptRead: scopt.Read[os.Path] = scopt.Read.stringRead.map(os.Path(_, os.pwd)) - -} diff --git a/main/core/src/mill/util/Watched.scala b/main/core/src/mill/util/Watched.scala deleted file mode 100644 index 29be53c3..00000000 --- a/main/core/src/mill/util/Watched.scala +++ /dev/null @@ -1,8 +0,0 @@ -package mill.util - -import mill.api.PathRef - -case class Watched[T](value: T, watched: Seq[PathRef]) -object Watched{ - implicit def readWrite[T: upickle.default.ReadWriter] = upickle.default.macroRW[Watched[T]] -} diff --git a/main/core/src/mill/util/package.scala b/main/core/src/mill/util/package.scala deleted file mode 100644 index ec5d2efc..00000000 --- a/main/core/src/mill/util/package.scala +++ /dev/null @@ -1,7 +0,0 @@ -package mill - -package object util { - // Backwards compat stubs - val Ctx = mill.api.Ctx - type Ctx = mill.api.Ctx -} diff --git a/main/core/src/util/AggWrapper.scala b/main/core/src/util/AggWrapper.scala new file mode 100644 index 00000000..6c107875 --- /dev/null +++ b/main/core/src/util/AggWrapper.scala @@ -0,0 +1,119 @@ +package mill.util + + + +import scala.collection.mutable +object Strict extends AggWrapper(true) +object Loose extends AggWrapper(false) +sealed class AggWrapper(strictUniqueness: Boolean){ + /** + * A collection with enforced uniqueness, fast contains and deterministic + * ordering. 
Raises an exception if a duplicate is found; call + * `toSeq.distinct` if you explicitly want to make it swallow duplicates + */ + trait Agg[V] extends TraversableOnce[V]{ + def contains(v: V): Boolean + def items: Iterator[V] + def indexed: IndexedSeq[V] + def flatMap[T](f: V => TraversableOnce[T]): Agg[T] + def map[T](f: V => T): Agg[T] + def filter(f: V => Boolean): Agg[V] + def withFilter(f: V => Boolean): Agg[V] + def collect[T](f: PartialFunction[V, T]): Agg[T] + def zipWithIndex: Agg[(V, Int)] + def reverse: Agg[V] + def zip[T](other: Agg[T]): Agg[(V, T)] + def ++[T >: V](other: TraversableOnce[T]): Agg[T] + def length: Int + } + + object Agg{ + def empty[V]: Agg[V] = new Agg.Mutable[V] + implicit def jsonFormat[T: upickle.default.ReadWriter]: upickle.default.ReadWriter[Agg[T]] = + upickle.default.readwriter[Seq[T]].bimap[Agg[T]]( + _.toList, + Agg.from(_) + ) + + def apply[V](items: V*) = from(items) + + implicit def from[V](items: TraversableOnce[V]): Agg[V] = { + val set = new Agg.Mutable[V]() + items.foreach(set.append) + set + } + + + class Mutable[V]() extends Agg[V]{ + + private[this] val set0 = mutable.LinkedHashSet.empty[V] + def contains(v: V) = set0.contains(v) + def append(v: V) = if (!contains(v)){ + set0.add(v) + + }else if (strictUniqueness){ + throw new Exception("Duplicated item inserted into OrderedSet: " + v) + } + def appendAll(vs: Seq[V]) = vs.foreach(append) + def items = set0.iterator + def indexed: IndexedSeq[V] = items.toIndexedSeq + def set: collection.Set[V] = set0 + + def map[T](f: V => T): Agg[T] = { + val output = new Agg.Mutable[T] + for(i <- items) output.append(f(i)) + output + } + def flatMap[T](f: V => TraversableOnce[T]): Agg[T] = { + val output = new Agg.Mutable[T] + for(i <- items) for(i0 <- f(i)) output.append(i0) + output + } + def filter(f: V => Boolean): Agg[V] = { + val output = new Agg.Mutable[V] + for(i <- items) if (f(i)) output.append(i) + output + } + def withFilter(f: V => Boolean): Agg[V] = filter(f) + + def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x)) + + def zipWithIndex = { + var i = 0 + this.map{ x => + i += 1 + (x, i-1) + } + } + + def reverse = Agg.from(indexed.reverseIterator) + + def zip[T](other: Agg[T]) = Agg.from(items.zip(other.items)) + def ++[T >: V](other: TraversableOnce[T]) = Agg.from(items ++ other) + def length: Int = set0.size + + // Members declared in scala.collection.GenTraversableOnce + def isTraversableAgain: Boolean = items.isTraversableAgain + def toIterator: Iterator[V] = items.toIterator + def toStream: Stream[V] = items.toStream + + // Members declared in scala.collection.TraversableOnce + def copyToArray[B >: V](xs: Array[B], start: Int,len: Int): Unit = items.copyToArray(xs, start, len) + def exists(p: V => Boolean): Boolean = items.exists(p) + def find(p: V => Boolean): Option[V] = items.find(p) + def forall(p: V => Boolean): Boolean = items.forall(p) + def foreach[U](f: V => U): Unit = items.foreach(f) + def hasDefiniteSize: Boolean = items.hasDefiniteSize + def isEmpty: Boolean = items.isEmpty + def seq: scala.collection.TraversableOnce[V] = items + def toTraversable: Traversable[V] = items.toTraversable + + override def hashCode() = items.map(_.hashCode()).sum + override def equals(other: Any) = other match{ + case s: Agg[_] => items.sameElements(s.items) + case _ => super.equals(other) + } + override def toString = items.mkString("Agg(", ", ", ")") + } + } +} diff --git a/main/core/src/util/EitherOps.scala b/main/core/src/util/EitherOps.scala new file mode 
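// Editorial example (not part of the patch): a minimal sketch of how the Agg wrapper
// above behaves. `Strict` and `Loose` are the two AggWrapper instances defined in
// AggWrapper.scala; both preserve insertion order, but only Strict rejects duplicates.
import mill.util.{Loose, Strict}

object AggExample {
  def main(args: Array[String]): Unit = {
    val loose = Loose.Agg(1, 2, 2, 3)        // the duplicate 2 is silently dropped
    assert(loose.toList == List(1, 2, 3))

    val strict = Strict.Agg("a", "b")
    assert(strict.contains("a"))
    // Strict.Agg("a", "a") would throw: "Duplicated item inserted into OrderedSet: a"
  }
}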
100644 index 00000000..da2552c8 --- /dev/null +++ b/main/core/src/util/EitherOps.scala @@ -0,0 +1,18 @@ +package mill.util + +import scala.collection.generic.CanBuildFrom +import scala.collection.mutable +import scala.language.higherKinds + +object EitherOps { + + // implementation similar to scala.concurrent.Future#sequence + def sequence[A, B, M[X] <: TraversableOnce[X]](in: M[Either[A, B]])( + implicit cbf: CanBuildFrom[M[Either[A, B]], B, M[B]]): Either[A, M[B]] = { + in.foldLeft[Either[A, mutable.Builder[B, M[B]]]](Right(cbf(in))) { + case (acc, el) => + for (a <- acc; e <- el) yield a += e + } + .map(_.result()) + } +} diff --git a/main/core/src/util/EnclosingClass.scala b/main/core/src/util/EnclosingClass.scala new file mode 100644 index 00000000..a69cc525 --- /dev/null +++ b/main/core/src/util/EnclosingClass.scala @@ -0,0 +1,15 @@ +package mill.util + +import sourcecode.Compat.Context +import language.experimental.macros +case class EnclosingClass(value: Class[_]) +object EnclosingClass{ + def apply()(implicit c: EnclosingClass) = c.value + implicit def generate: EnclosingClass = macro impl + def impl(c: Context): c.Tree = { + import c.universe._ + val cls = c.internal.enclosingOwner.owner.asType.asClass + // q"new _root_.mill.define.EnclosingClass(classOf[$cls])" + q"new _root_.mill.util.EnclosingClass(this.getClass)" + } +} diff --git a/main/core/src/util/JsonFormatters.scala b/main/core/src/util/JsonFormatters.scala new file mode 100644 index 00000000..830782c6 --- /dev/null +++ b/main/core/src/util/JsonFormatters.scala @@ -0,0 +1,10 @@ +package mill.util + +import upickle.default.{ReadWriter => RW} + +trait JsonFormatters extends mill.api.JsonFormatters{ + implicit lazy val modFormat: RW[coursier.Module] = upickle.default.macroRW + implicit lazy val depFormat: RW[coursier.Dependency]= upickle.default.macroRW + implicit lazy val attrFormat: RW[coursier.Attributes] = upickle.default.macroRW +} +object JsonFormatters extends JsonFormatters diff --git a/main/core/src/util/Loggers.scala b/main/core/src/util/Loggers.scala new file mode 100644 index 00000000..aab1a324 --- /dev/null +++ b/main/core/src/util/Loggers.scala @@ -0,0 +1,190 @@ +package mill.util + +import java.io._ +import mill.api.Logger + +object DummyLogger extends Logger { + def colored = false + + object errorStream extends PrintStream(_ => ()) + object outputStream extends PrintStream(_ => ()) + val inStream = new ByteArrayInputStream(Array()) + + def info(s: String) = () + def error(s: String) = () + def ticker(s: String) = () + def debug(s: String) = () +} + +class CallbackStream(wrapped: OutputStream, + setPrintState0: PrintState => Unit) extends OutputStream{ + def setPrintState(c: Char) = { + setPrintState0( + c match{ + case '\n' => PrintState.Newline + case '\r' => PrintState.Newline + case _ => PrintState.Middle + } + ) + } + override def write(b: Array[Byte]): Unit = { + if (b.nonEmpty) setPrintState(b(b.length-1).toChar) + wrapped.write(b) + } + + override def write(b: Array[Byte], off: Int, len: Int): Unit = { + if (len != 0) setPrintState(b(off+len-1).toChar) + wrapped.write(b, off, len) + } + + def write(b: Int) = { + setPrintState(b.toChar) + wrapped.write(b) + } +} +sealed trait PrintState +object PrintState{ + case object Ticker extends PrintState + case object Newline extends PrintState + case object Middle extends PrintState +} + +case class PrintLogger( + colored: Boolean, + disableTicker: Boolean, + colors: ammonite.util.Colors, + outStream: PrintStream, + infoStream: PrintStream, + errStream: 
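// Editorial example (not part of the patch): EitherOps.sequence above short-circuits
// on the first Left and otherwise collects every Right into the source collection type.
import mill.util.EitherOps

object EitherOpsExample {
  def main(args: Array[String]): Unit = {
    assert(EitherOps.sequence(List[Either[String, Int]](Right(1), Right(2))) == Right(List(1, 2)))
    assert(EitherOps.sequence(List[Either[String, Int]](Right(1), Left("boom"), Right(3))) == Left("boom"))
  }
}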
PrintStream, + inStream: InputStream, + debugEnabled: Boolean + ) extends Logger { + + var printState: PrintState = PrintState.Newline + + override val errorStream = new PrintStream(new CallbackStream(errStream, printState = _)) + override val outputStream = new PrintStream(new CallbackStream(outStream, printState = _)) + + + def info(s: String) = { + printState = PrintState.Newline + infoStream.println(colors.info()(s)) + } + def error(s: String) = { + printState = PrintState.Newline + errStream.println(colors.error()(s)) + } + def ticker(s: String) = { + if(!disableTicker) { + printState match{ + case PrintState.Newline => + infoStream.println(colors.info()(s)) + case PrintState.Middle => + infoStream.println() + infoStream.println(colors.info()(s)) + case PrintState.Ticker => + val p = new PrintWriter(infoStream) + val nav = new ammonite.terminal.AnsiNav(p) + nav.up(1) + nav.clearLine(2) + nav.left(9999) + p.flush() + + infoStream.println(colors.info()(s)) + } + printState = PrintState.Ticker + } + } + + def debug(s: String) = if (debugEnabled) { + printState = PrintState.Newline + errStream.println(s) + } +} + +case class FileLogger(colored: Boolean, file: os.Path, debugEnabled: Boolean) extends Logger { + private[this] var outputStreamUsed: Boolean = false + + lazy val outputStream = { + if (!outputStreamUsed) os.remove.all(file) + outputStreamUsed = true + new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath)) + } + + lazy val errorStream = { + if (!outputStreamUsed) os.remove.all(file) + outputStreamUsed = true + new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath)) + } + + def info(s: String) = outputStream.println(s) + def error(s: String) = outputStream.println(s) + def ticker(s: String) = outputStream.println(s) + def debug(s: String) = if (debugEnabled) outputStream.println(s) + val inStream: InputStream = mill.api.DummyInputStream + override def close() = { + if (outputStreamUsed) + outputStream.close() + } +} + + + +class MultiStream(stream1: OutputStream, stream2: OutputStream) extends PrintStream(new OutputStream { + def write(b: Int): Unit = { + stream1.write(b) + stream2.write(b) + } + override def write(b: Array[Byte]): Unit = { + stream1.write(b) + stream2.write(b) + } + override def write(b: Array[Byte], off: Int, len: Int) = { + stream1.write(b, off, len) + stream2.write(b, off, len) + } + override def flush() = { + stream1.flush() + stream2.flush() + } + override def close() = { + stream1.close() + stream2.close() + } +}) + +case class MultiLogger(colored: Boolean, logger1: Logger, logger2: Logger) extends Logger { + + + lazy val outputStream: PrintStream = new MultiStream(logger1.outputStream, logger2.outputStream) + + lazy val errorStream: PrintStream = new MultiStream(logger1.errorStream, logger2.errorStream) + + lazy val inStream = Seq(logger1, logger2).collectFirst{case t: PrintLogger => t} match{ + case Some(x) => x.inStream + case None => new ByteArrayInputStream(Array()) + } + + def info(s: String) = { + logger1.info(s) + logger2.info(s) + } + def error(s: String) = { + logger1.error(s) + logger2.error(s) + } + def ticker(s: String) = { + logger1.ticker(s) + logger2.ticker(s) + } + + def debug(s: String) = { + logger1.debug(s) + logger2.debug(s) + } + + override def close() = { + logger1.close() + logger2.close() + } +} diff --git a/main/core/src/util/MultiBiMap.scala b/main/core/src/util/MultiBiMap.scala new file mode 100644 index 00000000..73bb42c4 --- /dev/null +++ b/main/core/src/util/MultiBiMap.scala @@ -0,0 +1,57 @@ +package 
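// Editorial example (not part of the patch): a hedged sketch of composing the loggers
// above. FileLogger writes everything to a single file (the "build.log" path here is
// arbitrary), and MultiLogger tees each call to two underlying loggers; DummyLogger
// stands in for a console logger, assuming the base Logger trait supplies a no-op close().
import mill.util.{DummyLogger, FileLogger, MultiLogger}

object LoggerExample {
  def main(args: Array[String]): Unit = {
    val fileLog = FileLogger(colored = false, file = os.pwd / "build.log", debugEnabled = false)
    val log = MultiLogger(colored = false, logger1 = DummyLogger, logger2 = fileLog)
    log.info("compiling...")      // forwarded to both loggers; DummyLogger discards it
    log.ticker("[1/1] compile")   // FileLogger records tickers as plain lines
    log.close()                   // closes both underlying loggers
  }
}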
mill.util + +import scala.collection.mutable +import Strict.Agg + +/** + * A map from keys to collections of values: you can assign multiple values + * to any particular key. Also allows lookups in both directions: what values + * are assigned to a key or what key a value is assigned to. + */ +trait MultiBiMap[K, V]{ + def containsValue(v: V): Boolean + def lookupKey(k: K): Agg[V] + def lookupValue(v: V): K + def lookupValueOpt(v: V): Option[K] + def add(k: K, v: V): Unit + def removeAll(k: K): Agg[V] + def addAll(k: K, vs: TraversableOnce[V]): Unit + def keys(): Iterator[K] + def items(): Iterator[(K, Agg[V])] + def values(): Iterator[Agg[V]] + def keyCount: Int +} + +object MultiBiMap{ + + class Mutable[K, V]() extends MultiBiMap[K, V]{ + private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K] + private[this] val keyToValues = mutable.LinkedHashMap.empty[K, Agg.Mutable[V]] + def containsValue(v: V) = valueToKey.contains(v) + def lookupKey(k: K) = keyToValues(k) + def lookupKeyOpt(k: K) = keyToValues.get(k) + def lookupValue(v: V) = valueToKey(v) + def lookupValueOpt(v: V) = valueToKey.get(v) + def add(k: K, v: V): Unit = { + valueToKey(v) = k + keyToValues.getOrElseUpdate(k, new Agg.Mutable[V]()).append(v) + } + def removeAll(k: K): Agg[V] = keyToValues.get(k) match { + case None => Agg() + case Some(vs) => + vs.foreach(valueToKey.remove) + + keyToValues.remove(k) + vs + } + def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _)) + + def keys() = keyToValues.keysIterator + + def values() = keyToValues.valuesIterator + + def items() = keyToValues.iterator + + def keyCount = keyToValues.size + } +} diff --git a/main/core/src/util/ParseArgs.scala b/main/core/src/util/ParseArgs.scala new file mode 100644 index 00000000..fc1a8ab3 --- /dev/null +++ b/main/core/src/util/ParseArgs.scala @@ -0,0 +1,137 @@ +package mill.util + +import fastparse._, NoWhitespace._ +import mill.define.{Segment, Segments} + +object ParseArgs { + + def apply(scriptArgs: Seq[String], + multiSelect: Boolean): Either[String, (List[(Option[Segments], Segments)], Seq[String])] = { + val (selectors, args) = extractSelsAndArgs(scriptArgs, multiSelect) + for { + _ <- validateSelectors(selectors) + expandedSelectors <- EitherOps + .sequence(selectors.map(expandBraces)) + .map(_.flatten) + selectors <- EitherOps.sequence(expandedSelectors.map(extractSegments)) + } yield (selectors.toList, args) + } + + def extractSelsAndArgs(scriptArgs: Seq[String], + multiSelect: Boolean): (Seq[String], Seq[String]) = { + + if (multiSelect) { + val dd = scriptArgs.indexOf("--") + val selectors = if (dd == -1) scriptArgs else scriptArgs.take(dd) + val args = if (dd == -1) Seq.empty else scriptArgs.drop(dd + 1) + + (selectors, args) + } else { + (scriptArgs.take(1), scriptArgs.drop(1)) + } + } + + private def validateSelectors(selectors: Seq[String]): Either[String, Unit] = { + if (selectors.isEmpty || selectors.exists(_.isEmpty)) + Left("Selector cannot be empty") + else Right(()) + } + + def expandBraces(selectorString: String): Either[String, List[String]] = { + parseBraceExpansion(selectorString) match { + case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}") + case Parsed.Success(expanded, _) => Right(expanded.toList) + } + } + + private sealed trait Fragment + private object Fragment { + case class Keep(value: String) extends Fragment + case class Expand(values: List[List[Fragment]]) extends Fragment + + def unfold(fragments: List[Fragment]): Seq[String] = { + fragments match { + case head :: rest => + 
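// Editorial example (not part of the patch): MultiBiMap.Mutable above maps each key to
// many values while also supporting reverse lookup from a value back to its key.
import mill.util.MultiBiMap

object MultiBiMapExample {
  def main(args: Array[String]): Unit = {
    val m = new MultiBiMap.Mutable[String, Int]()
    m.add("evens", 2)
    m.add("evens", 4)
    m.add("odds", 3)
    assert(m.lookupKey("evens").toList == List(2, 4))   // insertion order is preserved
    assert(m.lookupValue(4) == "evens")
    assert(m.removeAll("odds").toList == List(3))
    assert(m.keyCount == 1)
  }
}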
val prefixes = head match { + case Keep(v) => Seq(v) + case Expand(Nil) => Seq("{}") + case Expand(List(vs)) => unfold(vs).map("{" + _ + "}") + case Expand(vss) => vss.flatMap(unfold) + } + for { + prefix <- prefixes + suffix <- unfold(rest) + } yield prefix + suffix + + case Nil => Seq("") + } + } + } + + private object BraceExpansionParser { + def plainChars[_: P] = + P(CharsWhile(c => c != ',' && c != '{' && c != '}')).!.map(Fragment.Keep) + + def toExpand[_: P]: P[Fragment] = + P("{" ~ braceParser.rep(1).rep(sep = ",") ~ "}").map( + x => Fragment.Expand(x.toList.map(_.toList)) + ) + + def braceParser[_: P] = P(toExpand | plainChars) + + def parser[_: P] = P(braceParser.rep(1).rep(sep = ",") ~ End).map { vss => + def unfold(vss: List[Seq[String]]): Seq[String] = { + vss match { + case Nil => Seq("") + case head :: rest => + for { + str <- head + r <- unfold(rest) + } yield + r match { + case "" => str + case _ => str + "," + r + } + } + } + + val stringss = vss.map(x => Fragment.unfold(x.toList)).toList + unfold(stringss) + } + } + + private def parseBraceExpansion(input: String) = { + + + parse( + input, + BraceExpansionParser.parser(_) + ) + } + + def extractSegments(selectorString: String): Either[String, (Option[Segments], Segments)] = + parseSelector(selectorString) match { + case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}") + case Parsed.Success(selector, _) => Right(selector) + } + + private def ident[_: P] = P( CharsWhileIn("a-zA-Z0-9_\\-") ).! + + def standaloneIdent[_: P] = P(Start ~ ident ~ End ) + def isLegalIdentifier(identifier: String): Boolean = + parse(identifier, standaloneIdent(_)).isInstanceOf[Parsed.Success[_]] + + private def parseSelector(input: String) = { + def ident2[_: P] = P( CharsWhileIn("a-zA-Z0-9_\\-.") ).! + def segment[_: P] = P( ident ).map( Segment.Label) + def crossSegment[_: P] = P("[" ~ ident2.rep(1, sep = ",") ~ "]").map(Segment.Cross) + def simpleQuery[_: P] = P(segment ~ ("." ~ segment | crossSegment).rep).map { + case (h, rest) => Segments(h :: rest.toList:_*) + } + def query[_: P] = P( simpleQuery ~ ("/" ~/ simpleQuery).?).map{ + case (q, None) => (None, q) + case (q, Some(q2)) => (Some(q), q2) + } + parse(input, query(_)) + } +} diff --git a/main/core/src/util/Router.scala b/main/core/src/util/Router.scala new file mode 100644 index 00000000..5dd3c947 --- /dev/null +++ b/main/core/src/util/Router.scala @@ -0,0 +1,451 @@ +package mill.util + +import ammonite.main.Compat +import language.experimental.macros + +import scala.annotation.StaticAnnotation +import scala.collection.mutable +import scala.reflect.macros.blackbox.Context + +/** + * More or less a minimal version of Autowire's Server that lets you generate + * a set of "routes" from the methods defined in an object, and call them + * using passing in name/args/kwargs via Java reflection, without having to + * generate/compile code or use Scala reflection. This saves us spinning up + * the Scala compiler and greatly reduces the startup time of cached scripts. + */ +object Router{ + /** + * Allows you to query how many things are overriden by the enclosing owner. 
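// Editorial example (not part of the patch): a sketch of the ParseArgs helpers above.
// Brace expansion mirrors shell-style {a,b} syntax, and selectors parse into
// mill.define.Segments; the expected values in the comments are approximate renderings.
import mill.util.ParseArgs

object ParseArgsExample {
  def main(args: Array[String]): Unit = {
    // Right(List("foo.bar.compile", "foo.baz.compile"))
    println(ParseArgs.expandBraces("foo.{bar,baz}.compile"))

    // With multiSelect = true, everything before "--" is a selector and the rest are
    // arguments: roughly Right((List((None, <Segments for foo.bar>)), Seq("arg1")))
    println(ParseArgs(Seq("foo.bar", "--", "arg1"), multiSelect = true))
  }
}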
+ */ + case class Overrides(value: Int) + object Overrides{ + def apply()(implicit c: Overrides) = c.value + implicit def generate: Overrides = macro impl + def impl(c: Context): c.Tree = { + import c.universe._ + q"new _root_.mill.util.Router.Overrides(${c.internal.enclosingOwner.overrides.length})" + } + } + + class doc(s: String) extends StaticAnnotation + class main extends StaticAnnotation + def generateRoutes[T]: Seq[Router.EntryPoint[T]] = macro generateRoutesImpl[T] + def generateRoutesImpl[T: c.WeakTypeTag](c: Context): c.Expr[Seq[EntryPoint[T]]] = { + import c.universe._ + val r = new Router(c) + val allRoutes = r.getAllRoutesForClass( + weakTypeOf[T].asInstanceOf[r.c.Type] + ).asInstanceOf[Iterable[c.Tree]] + + c.Expr[Seq[EntryPoint[T]]](q"_root_.scala.Seq(..$allRoutes)") + } + + /** + * Models what is known by the router about a single argument: that it has + * a [[name]], a human-readable [[typeString]] describing what the type is + * (just for logging and reading, not a replacement for a `TypeTag`) and + * possible a function that can compute its default value + */ + case class ArgSig[T, V](name: String, + typeString: String, + doc: Option[String], + default: Option[T => V]) + (implicit val reads: scopt.Read[V]) + + def stripDashes(s: String) = { + if (s.startsWith("--")) s.drop(2) + else if (s.startsWith("-")) s.drop(1) + else s + } + /** + * What is known about a single endpoint for our routes. It has a [[name]], + * [[argSignatures]] for each argument, and a macro-generated [[invoke0]] + * that performs all the necessary argument parsing and de-serialization. + * + * Realistically, you will probably spend most of your time calling [[invoke]] + * instead, which provides a nicer API to call it that mimmicks the API of + * calling a Scala method. 
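// Editorial example (not part of the patch): a hedged sketch of how the Router above is
// meant to be consumed. The `Targets` object and its `hello` method are hypothetical;
// @main marks an entry point, @doc attaches help text, and generateRoutes materializes
// one EntryPoint (defined just below) per annotated method, assuming scopt.Read
// instances exist for every parameter type.
import mill.util.Router
import mill.util.Router.{doc, main}

object Targets {
  @main
  @doc("Greet somebody a configurable number of times")
  def hello(name: String, times: Int = 1): String =
    Seq.fill(times)(s"hello $name").mkString("\n")
}

object RouterExample {
  val routes: Seq[Router.EntryPoint[Targets.type]] = Router.generateRoutes[Targets.type]

  // EntryPoint.invoke takes (flag, Option[value]) pairs, e.g. as produced by
  // Scripts.groupArgs; this should evaluate to Result.Success("hello world\nhello world").
  val result = routes.head.invoke(Targets, Seq(("--name", Some("world")), ("--times", Some("2"))))
}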
+ */ + case class EntryPoint[T](name: String, + argSignatures: Seq[ArgSig[T, _]], + doc: Option[String], + varargs: Boolean, + invoke0: (T, Map[String, String], Seq[String], Seq[ArgSig[T, _]]) => Result[Any], + overrides: Int){ + def invoke(target: T, groupedArgs: Seq[(String, Option[String])]): Result[Any] = { + var remainingArgSignatures = argSignatures.toList.filter(_.reads.arity > 0) + + val accumulatedKeywords = mutable.Map.empty[ArgSig[T, _], mutable.Buffer[String]] + val keywordableArgs = if (varargs) argSignatures.dropRight(1) else argSignatures + + for(arg <- keywordableArgs) accumulatedKeywords(arg) = mutable.Buffer.empty + + val leftoverArgs = mutable.Buffer.empty[String] + + val lookupArgSig = Map(argSignatures.map(x => (x.name, x)):_*) + + var incomplete: Option[ArgSig[T, _]] = None + + for(group <- groupedArgs){ + + group match{ + case (value, None) => + if (value(0) == '-' && !varargs){ + lookupArgSig.get(stripDashes(value)) match{ + case None => leftoverArgs.append(value) + case Some(sig) => incomplete = Some(sig) + } + + } else remainingArgSignatures match { + case Nil => leftoverArgs.append(value) + case last :: Nil if varargs => leftoverArgs.append(value) + case next :: rest => + accumulatedKeywords(next).append(value) + remainingArgSignatures = rest + } + case (rawKey, Some(value)) => + val key = stripDashes(rawKey) + lookupArgSig.get(key) match{ + case Some(x) if accumulatedKeywords.contains(x) => + if (accumulatedKeywords(x).nonEmpty && varargs){ + leftoverArgs.append(rawKey, value) + }else{ + accumulatedKeywords(x).append(value) + remainingArgSignatures = remainingArgSignatures.filter(_.name != key) + } + case _ => + leftoverArgs.append(rawKey, value) + } + } + } + + val missing0 = remainingArgSignatures + .filter(_.default.isEmpty) + + val missing = if(varargs) { + missing0.filter(_ != argSignatures.last) + } else { + missing0.filter(x => incomplete != Some(x)) + } + val duplicates = accumulatedKeywords.toSeq.filter(_._2.length > 1) + + if ( + incomplete.nonEmpty || + missing.nonEmpty || + duplicates.nonEmpty || + (leftoverArgs.nonEmpty && !varargs) + ){ + Result.Error.MismatchedArguments( + missing = missing, + unknown = leftoverArgs, + duplicate = duplicates, + incomplete = incomplete + + ) + } else { + val mapping = accumulatedKeywords + .iterator + .collect{case (k, Seq(single)) => (k.name, single)} + .toMap + + try invoke0(target, mapping, leftoverArgs, argSignatures) + catch{case e: Throwable => + Result.Error.Exception(e) + } + } + } + } + + def tryEither[T](t: => T, error: Throwable => Result.ParamError) = { + try Right(t) + catch{ case e: Throwable => Left(error(e))} + } + def readVarargs(arg: ArgSig[_, _], + values: Seq[String], + thunk: String => Any) = { + val attempts = + for(item <- values) + yield tryEither(thunk(item), Result.ParamError.Invalid(arg, item, _)) + + + val bad = attempts.collect{ case Left(x) => x} + if (bad.nonEmpty) Left(bad) + else Right(attempts.collect{case Right(x) => x}) + } + def read(dict: Map[String, String], + default: => Option[Any], + arg: ArgSig[_, _], + thunk: String => Any): FailMaybe = { + arg.reads.arity match{ + case 0 => + tryEither(thunk(null), Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_)) + case 1 => + dict.get(arg.name) match{ + case None => + tryEither(default.get, Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_)) + + case Some(x) => + tryEither(thunk(x), Result.ParamError.Invalid(arg, x, _)).left.map(Seq(_)) + } + } + + } + + /** + * Represents what comes out of an attempt to invoke an 
[[EntryPoint]]. + * Could succeed with a value, but could fail in many different ways. + */ + sealed trait Result[+T] + object Result{ + + /** + * Invoking the [[EntryPoint]] was totally successful, and returned a + * result + */ + case class Success[T](value: T) extends Result[T] + + /** + * Invoking the [[EntryPoint]] was not successful + */ + sealed trait Error extends Result[Nothing] + object Error{ + + /** + * Invoking the [[EntryPoint]] failed with an exception while executing + * code within it. + */ + case class Exception(t: Throwable) extends Error + + /** + * Invoking the [[EntryPoint]] failed because the arguments provided + * did not line up with the arguments expected + */ + case class MismatchedArguments(missing: Seq[ArgSig[_, _]], + unknown: Seq[String], + duplicate: Seq[(ArgSig[_, _], Seq[String])], + incomplete: Option[ArgSig[_, _]]) extends Error + /** + * Invoking the [[EntryPoint]] failed because there were problems + * deserializing/parsing individual arguments + */ + case class InvalidArguments(values: Seq[ParamError]) extends Error + } + + sealed trait ParamError + object ParamError{ + /** + * Something went wrong trying to de-serialize the input parameter; + * the thrown exception is stored in [[ex]] + */ + case class Invalid(arg: ArgSig[_, _], value: String, ex: Throwable) extends ParamError + /** + * Something went wrong trying to evaluate the default value + * for this input parameter + */ + case class DefaultFailed(arg: ArgSig[_, _], ex: Throwable) extends ParamError + } + } + + + type FailMaybe = Either[Seq[Result.ParamError], Any] + type FailAll = Either[Seq[Result.ParamError], Seq[Any]] + + def validate(args: Seq[FailMaybe]): Result[Seq[Any]] = { + val lefts = args.collect{case Left(x) => x}.flatten + + if (lefts.nonEmpty) Result.Error.InvalidArguments(lefts) + else { + val rights = args.collect{case Right(x) => x} + Result.Success(rights) + } + } + + def makeReadCall(dict: Map[String, String], + default: => Option[Any], + arg: ArgSig[_, _]) = { + read(dict, default, arg, arg.reads.reads(_)) + } + def makeReadVarargsCall(arg: ArgSig[_, _], values: Seq[String]) = { + readVarargs(arg, values, arg.reads.reads(_)) + } +} + + +class Router [C <: Context](val c: C) { + import c.universe._ + def getValsOrMeths(curCls: Type): Iterable[MethodSymbol] = { + def isAMemberOfAnyRef(member: Symbol) = { + // AnyRef is an alias symbol, we go to the real "owner" of these methods + val anyRefSym = c.mirror.universe.definitions.ObjectClass + member.owner == anyRefSym + } + val extractableMembers = for { + member <- curCls.members.toList.reverse + if !isAMemberOfAnyRef(member) + if !member.isSynthetic + if member.isPublic + if member.isTerm + memTerm = member.asTerm + if memTerm.isMethod + if !memTerm.isModule + } yield memTerm.asMethod + + extractableMembers flatMap { case memTerm => + if (memTerm.isSetter || memTerm.isConstructor || memTerm.isGetter) Nil + else Seq(memTerm) + + } + } + + + + def extractMethod(meth: MethodSymbol, curCls: c.universe.Type): c.universe.Tree = { + val baseArgSym = TermName(c.freshName()) + val flattenedArgLists = meth.paramss.flatten + def hasDefault(i: Int) = { + val defaultName = s"${meth.name}$$default$$${i + 1}" + if (curCls.members.exists(_.name.toString == defaultName)) Some(defaultName) + else None + } + val argListSymbol = q"${c.fresh[TermName]("argsList")}" + val extrasSymbol = q"${c.fresh[TermName]("extras")}" + val defaults = for ((arg, i) <- flattenedArgLists.zipWithIndex) yield { + val arg = TermName(c.freshName()) + 
hasDefault(i).map(defaultName => q"($arg: $curCls) => $arg.${newTermName(defaultName)}") + } + + def getDocAnnotation(annotations: List[Annotation]) = { + val (docTrees, remaining) = annotations.partition(_.tpe =:= typeOf[Router.doc]) + val docValues = for { + doc <- docTrees + if doc.scalaArgs.head.isInstanceOf[Literal] + l = doc.scalaArgs.head.asInstanceOf[Literal] + if l.value.value.isInstanceOf[String] + } yield l.value.value.asInstanceOf[String] + (remaining, docValues.headOption) + } + + def unwrapVarargType(arg: Symbol) = { + val vararg = arg.typeSignature.typeSymbol == definitions.RepeatedParamClass + val unwrappedType = + if (!vararg) arg.typeSignature + else arg.typeSignature.asInstanceOf[TypeRef].args(0) + + (vararg, unwrappedType) + } + + val argSigSymbol = q"${c.fresh[TermName]("argSigs")}" + + val (_, methodDoc) = getDocAnnotation(meth.annotations) + val readArgSigs = for( + ((arg, defaultOpt), i) <- flattenedArgLists.zip(defaults).zipWithIndex + ) yield { + + val (vararg, varargUnwrappedType) = unwrapVarargType(arg) + + val default = + if (vararg) q"scala.Some(scala.Nil)" + else defaultOpt match { + case Some(defaultExpr) => q"scala.Some($defaultExpr($baseArgSym))" + case None => q"scala.None" + } + + val (docUnwrappedType, docOpt) = varargUnwrappedType match{ + case t: AnnotatedType => + + val (remaining, docValue) = getDocAnnotation(t.annotations) + if (remaining.isEmpty) (t.underlying, docValue) + else (Compat.copyAnnotatedType(c)(t, remaining), docValue) + + case t => (t, None) + } + + val docTree = docOpt match{ + case None => q"scala.None" + case Some(s) => q"scala.Some($s)" + } + + + val argSig = q""" + mill.util.Router.ArgSig[$curCls, $docUnwrappedType]( + ${arg.name.toString}, + ${docUnwrappedType.toString + (if(vararg) "*" else "")}, + $docTree, + $defaultOpt + ) + """ + + val reader = + if(vararg) q""" + mill.util.Router.makeReadVarargsCall( + $argSigSymbol($i), + $extrasSymbol + ) + """ else q""" + mill.util.Router.makeReadCall( + $argListSymbol, + $default, + $argSigSymbol($i) + ) + """ + c.internal.setPos(reader, meth.pos) + (reader, argSig, vararg) + } + + val readArgs = readArgSigs.map(_._1) + val argSigs = readArgSigs.map(_._2) + val varargs = readArgSigs.map(_._3) + val (argNames, argNameCasts) = flattenedArgLists.map { arg => + val (vararg, unwrappedType) = unwrapVarargType(arg) + ( + pq"${arg.name.toTermName}", + if (!vararg) q"${arg.name.toTermName}.asInstanceOf[$unwrappedType]" + else q"${arg.name.toTermName}.asInstanceOf[Seq[$unwrappedType]]: _*" + + ) + }.unzip + + + val res = q""" + mill.util.Router.EntryPoint[$curCls]( + ${meth.name.toString}, + scala.Seq(..$argSigs), + ${methodDoc match{ + case None => q"scala.None" + case Some(s) => q"scala.Some($s)" + }}, + ${varargs.contains(true)}, + ( + $baseArgSym: $curCls, + $argListSymbol: Map[String, String], + $extrasSymbol: Seq[String], + $argSigSymbol: Seq[mill.util.Router.ArgSig[$curCls, _]] + ) => + mill.util.Router.validate(Seq(..$readArgs)) match{ + case mill.util.Router.Result.Success(List(..$argNames)) => + mill.util.Router.Result.Success( + $baseArgSym.${meth.name.toTermName}(..$argNameCasts) + ) + case x: mill.util.Router.Result.Error => x + }, + ammonite.main.Router.Overrides() + ) + """ + res + } + + def hasMainAnnotation(t: MethodSymbol) = { + t.annotations.exists(_.tpe =:= typeOf[Router.main]) + } + def getAllRoutesForClass(curCls: Type, + pred: MethodSymbol => Boolean = hasMainAnnotation) + : Iterable[c.universe.Tree] = { + for{ + t <- getValsOrMeths(curCls) + if pred(t) + } yield { + 
extractMethod(t, curCls) + } + } +} diff --git a/main/core/src/util/Scripts.scala b/main/core/src/util/Scripts.scala new file mode 100644 index 00000000..65eb6b2b --- /dev/null +++ b/main/core/src/util/Scripts.scala @@ -0,0 +1,330 @@ +package mill.util + +import java.nio.file.NoSuchFileException + + +import ammonite.runtime.Evaluator.AmmoniteExit +import ammonite.util.Name.backtickWrap +import ammonite.util.Util.CodeSource +import ammonite.util.{Name, Res, Util} +import fastparse.internal.Util.literalize +import mill.util.Router.{ArgSig, EntryPoint} + +/** + * Logic around using Ammonite as a script-runner; invoking scripts via the + * macro-generated [[Router]], and pretty-printing any output or error messages + */ +object Scripts { + def groupArgs(flatArgs: List[String]): Seq[(String, Option[String])] = { + var keywordTokens = flatArgs + var scriptArgs = Vector.empty[(String, Option[String])] + + while(keywordTokens.nonEmpty) keywordTokens match{ + case List(head, next, rest@_*) if head.startsWith("-") => + scriptArgs = scriptArgs :+ (head, Some(next)) + keywordTokens = rest.toList + case List(head, rest@_*) => + scriptArgs = scriptArgs :+ (head, None) + keywordTokens = rest.toList + + } + scriptArgs + } + + def runScript(wd: os.Path, + path: os.Path, + interp: ammonite.interp.Interpreter, + scriptArgs: Seq[(String, Option[String])] = Nil) = { + interp.watch(path) + val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd) + + for{ + scriptTxt <- try Res.Success(Util.normalizeNewlines(os.read(path))) catch{ + case e: NoSuchFileException => Res.Failure("Script file not found: " + path) + } + + processed <- interp.processModule( + scriptTxt, + CodeSource(wrapper, pkg, Seq(Name("ammonite"), Name("$file")), Some(path)), + autoImport = true, + // Not sure why we need to wrap this in a separate `$routes` object, + // but if we don't do it for some reason the `generateRoutes` macro + // does not see the annotations on the methods of the outer-wrapper. + // It can inspect the type and its methods fine, it's just the + // `methodsymbol.annotations` ends up being empty. 
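// Editorial example (not part of the patch): Scripts.groupArgs above pairs each token
// starting with "-" with the token that follows it, and leaves bare tokens unpaired.
import mill.util.Scripts

object GroupArgsExample {
  def main(args: Array[String]): Unit = {
    val grouped = Scripts.groupArgs(List("--name", "world", "positional", "--times", "2"))
    assert(grouped == Vector(("--name", Some("world")), ("positional", None), ("--times", Some("2"))))
  }
}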
+ extraCode = Util.normalizeNewlines( + s""" + |val $$routesOuter = this + |object $$routes + |extends scala.Function0[scala.Seq[ammonite.main.Router.EntryPoint[$$routesOuter.type]]]{ + | def apply() = ammonite.main.Router.generateRoutes[$$routesOuter.type] + |} + """.stripMargin + ), + hardcoded = true + ) + + routeClsName <- processed.blockInfo.lastOption match{ + case Some(meta) => Res.Success(meta.id.wrapperPath) + case None => Res.Skip + } + + mainCls = + interp + .evalClassloader + .loadClass(processed.blockInfo.last.id.wrapperPath + "$") + + routesCls = + interp + .evalClassloader + .loadClass(routeClsName + "$$routes$") + + scriptMains = + routesCls + .getField("MODULE$") + .get(null) + .asInstanceOf[() => Seq[Router.EntryPoint[Any]]] + .apply() + + + mainObj = mainCls.getField("MODULE$").get(null) + + res <- Util.withContextClassloader(interp.evalClassloader){ + scriptMains match { + // If there are no @main methods, there's nothing to do + case Seq() => + if (scriptArgs.isEmpty) Res.Success(()) + else { + val scriptArgString = + scriptArgs.flatMap{case (a, b) => Seq(a) ++ b}.map(literalize(_)) + .mkString(" ") + + Res.Failure("Script " + path.last + " does not take arguments: " + scriptArgString) + } + + // If there's one @main method, we run it with all args + case Seq(main) => runMainMethod(mainObj, main, scriptArgs) + + // If there are multiple @main methods, we use the first arg to decide + // which method to run, and pass the rest to that main method + case mainMethods => + val suffix = formatMainMethods(mainObj, mainMethods) + scriptArgs match{ + case Seq() => + Res.Failure( + s"Need to specify a subcommand to call when running " + path.last + suffix + ) + case Seq((head, Some(_)), tail @ _*) => + Res.Failure( + "To select a subcommand to run, you don't need --s." + Util.newLine + + s"Did you mean `${head.drop(2)}` instead of `$head`?" 
+ ) + case Seq((head, None), tail @ _*) => + mainMethods.find(_.name == head) match{ + case None => + Res.Failure( + s"Unable to find subcommand: " + backtickWrap(head) + suffix + ) + case Some(main) => + runMainMethod(mainObj, main, tail) + } + } + } + } + } yield res + } + def formatMainMethods[T](base: T, mainMethods: Seq[Router.EntryPoint[T]]) = { + if (mainMethods.isEmpty) "" + else{ + val leftColWidth = getLeftColWidth(mainMethods.flatMap(_.argSignatures)) + + val methods = + for(main <- mainMethods) + yield formatMainMethodSignature(base, main, 2, leftColWidth) + + Util.normalizeNewlines( + s""" + | + |Available subcommands: + | + |${methods.mkString(Util.newLine)}""".stripMargin + ) + } + } + def getLeftColWidth[T](items: Seq[ArgSig[T, _]]) = { + items.map(_.name.length + 2) match{ + case Nil => 0 + case x => x.max + } + } + def formatMainMethodSignature[T](base: T, + main: Router.EntryPoint[T], + leftIndent: Int, + leftColWidth: Int) = { + // +2 for space on right of left col + val args = main.argSignatures.map(renderArg(base, _, leftColWidth + leftIndent + 2 + 2, 80)) + + val leftIndentStr = " " * leftIndent + val argStrings = + for((lhs, rhs) <- args) + yield { + val lhsPadded = lhs.padTo(leftColWidth, ' ') + val rhsPadded = rhs.linesIterator.mkString(Util.newLine) + s"$leftIndentStr $lhsPadded $rhsPadded" + } + val mainDocSuffix = main.doc match{ + case Some(d) => Util.newLine + leftIndentStr + softWrap(d, leftIndent, 80) + case None => "" + } + + s"""$leftIndentStr${main.name}$mainDocSuffix + |${argStrings.map(_ + Util.newLine).mkString}""".stripMargin + } + def runMainMethod[T](base: T, + mainMethod: Router.EntryPoint[T], + scriptArgs: Seq[(String, Option[String])]): Res[Any] = { + val leftColWidth = getLeftColWidth(mainMethod.argSignatures) + + def expectedMsg = formatMainMethodSignature(base: T, mainMethod, 0, leftColWidth) + + def pluralize(s: String, n: Int) = { + if (n == 1) s else s + "s" + } + + mainMethod.invoke(base, scriptArgs) match{ + case Router.Result.Success(x) => Res.Success(x) + case Router.Result.Error.Exception(x: AmmoniteExit) => Res.Success(x.value) + case Router.Result.Error.Exception(x) => Res.Exception(x, "") + case Router.Result.Error.MismatchedArguments(missing, unknown, duplicate, incomplete) => + val missingStr = + if (missing.isEmpty) "" + else { + val chunks = + for (x <- missing) + yield "--" + x.name + ": " + x.typeString + + val argumentsStr = pluralize("argument", chunks.length) + s"Missing $argumentsStr: (${chunks.mkString(", ")})" + Util.newLine + } + + + val unknownStr = + if (unknown.isEmpty) "" + else { + val argumentsStr = pluralize("argument", unknown.length) + s"Unknown $argumentsStr: " + unknown.map(literalize(_)).mkString(" ") + Util.newLine + } + + val duplicateStr = + if (duplicate.isEmpty) "" + else { + val lines = + for ((sig, options) <- duplicate) + yield { + s"Duplicate arguments for (--${sig.name}: ${sig.typeString}): " + + options.map(literalize(_)).mkString(" ") + Util.newLine + } + + lines.mkString + + } + val incompleteStr = incomplete match{ + case None => "" + case Some(sig) => + s"Option (--${sig.name}: ${sig.typeString}) is missing a corresponding value" + + Util.newLine + + } + + Res.Failure( + Util.normalizeNewlines( + s"""$missingStr$unknownStr$duplicateStr$incompleteStr + |Arguments provided did not match expected signature: + | + |$expectedMsg + |""".stripMargin + ) + ) + + case Router.Result.Error.InvalidArguments(x) => + val argumentsStr = pluralize("argument", x.length) + val thingies = x.map{ + case 
Router.Result.ParamError.Invalid(p, v, ex) => + val literalV = literalize(v) + val rendered = {renderArgShort(p)} + s"$rendered: ${p.typeString} = $literalV failed to parse with $ex" + case Router.Result.ParamError.DefaultFailed(p, ex) => + s"${renderArgShort(p)}'s default value failed to evaluate with $ex" + } + + Res.Failure( + Util.normalizeNewlines( + s"""The following $argumentsStr failed to parse: + | + |${thingies.mkString(Util.newLine)} + | + |expected signature: + | + |$expectedMsg + """.stripMargin + ) + ) + } + } + + def softWrap(s: String, leftOffset: Int, maxWidth: Int) = { + val oneLine = s.linesIterator.mkString(" ").split(' ') + + lazy val indent = " " * leftOffset + + val output = new StringBuilder(oneLine.head) + var currentLineWidth = oneLine.head.length + for(chunk <- oneLine.tail){ + val addedWidth = currentLineWidth + chunk.length + 1 + if (addedWidth > maxWidth){ + output.append(Util.newLine + indent) + output.append(chunk) + currentLineWidth = chunk.length + } else{ + currentLineWidth = addedWidth + output.append(' ') + output.append(chunk) + } + } + output.mkString + } + def renderArgShort[T](arg: ArgSig[T, _]) = "--" + backtickWrap(arg.name) + def renderArg[T](base: T, + arg: ArgSig[T, _], + leftOffset: Int, + wrappedWidth: Int): (String, String) = { + val suffix = arg.default match{ + case Some(f) => " (default " + f(base) + ")" + case None => "" + } + val docSuffix = arg.doc match{ + case Some(d) => ": " + d + case None => "" + } + val wrapped = softWrap( + arg.typeString + suffix + docSuffix, + leftOffset, + wrappedWidth - leftOffset + ) + (renderArgShort(arg), wrapped) + } + + + def mainMethodDetails[T](ep: EntryPoint[T]) = { + ep.argSignatures.collect{ + case ArgSig(name, tpe, Some(doc), default) => + Util.newLine + name + " // " + doc + }.mkString + } + + /** + * Additional [[scopt.Read]] instance to teach it how to read Ammonite paths + */ + implicit def pathScoptRead: scopt.Read[os.Path] = scopt.Read.stringRead.map(os.Path(_, os.pwd)) + +} diff --git a/main/core/src/util/Watched.scala b/main/core/src/util/Watched.scala new file mode 100644 index 00000000..29be53c3 --- /dev/null +++ b/main/core/src/util/Watched.scala @@ -0,0 +1,8 @@ +package mill.util + +import mill.api.PathRef + +case class Watched[T](value: T, watched: Seq[PathRef]) +object Watched{ + implicit def readWrite[T: upickle.default.ReadWriter] = upickle.default.macroRW[Watched[T]] +} diff --git a/main/core/src/util/package.scala b/main/core/src/util/package.scala new file mode 100644 index 00000000..ec5d2efc --- /dev/null +++ b/main/core/src/util/package.scala @@ -0,0 +1,7 @@ +package mill + +package object util { + // Backwards compat stubs + val Ctx = mill.api.Ctx + type Ctx = mill.api.Ctx +} diff --git a/main/graphviz/src/GraphvizTools.scala b/main/graphviz/src/GraphvizTools.scala new file mode 100644 index 00000000..9812c81f --- /dev/null +++ b/main/graphviz/src/GraphvizTools.scala @@ -0,0 +1,71 @@ +package mill.main.graphviz +import guru.nidi.graphviz.attribute.Style +import mill.define.{Graph, NamedTask} +import org.jgrapht.graph.{DefaultEdge, SimpleDirectedGraph} +object GraphvizTools{ + def apply(targets: Seq[NamedTask[Any]], rs: Seq[NamedTask[Any]], dest: os.Path) = { + val transitive = Graph.transitiveTargets(rs.distinct) + val topoSorted = Graph.topoSorted(transitive) + val goalSet = rs.toSet + val sortedGroups = Graph.groupAroundImportantTargets(topoSorted){ + case x: NamedTask[Any] if goalSet.contains(x) => x + } + import guru.nidi.graphviz.engine.{Format, Graphviz} + import 
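// Editorial example (not part of the patch): Scripts.softWrap above re-flows help text
// to a maximum width with a hanging indent. Assuming Util.newLine is "\n" on this
// platform, the call below prints "a b c" and then "  d e" on the next line.
import mill.util.Scripts

object SoftWrapExample {
  def main(args: Array[String]): Unit = {
    println(Scripts.softWrap("a b c d e", leftOffset = 2, maxWidth = 5))
  }
}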
guru.nidi.graphviz.model.Factory._ + + val edgesIterator = + for((k, vs) <- sortedGroups.items()) + yield ( + k, + for { + v <- vs.items + dest <- v.inputs.collect { case v: NamedTask[Any] => v } + if goalSet.contains(dest) + } yield dest + ) + + val edges = edgesIterator.map{case (k, v) => (k, v.toArray.distinct)}.toArray + + val indexToTask = edges.flatMap{case (k, vs) => Iterator(k) ++ vs}.distinct + val taskToIndex = indexToTask.zipWithIndex.toMap + + val jgraph = new SimpleDirectedGraph[Int, DefaultEdge](classOf[DefaultEdge]) + + for(i <- indexToTask.indices) jgraph.addVertex(i) + for((src, dests) <- edges; dest <- dests) { + jgraph.addEdge(taskToIndex(src), taskToIndex(dest)) + } + + + org.jgrapht.alg.TransitiveReduction.INSTANCE.reduce(jgraph) + val nodes = indexToTask.map(t => + node(t.ctx.segments.render).`with`{ + if(targets.contains(t)) Style.SOLID + else Style.DOTTED + } + ) + + var g = graph("example1").directed + for(i <- indexToTask.indices){ + val outgoing = for{ + e <- edges(i)._2 + j = taskToIndex(e) + if jgraph.containsEdge(i, j) + } yield nodes(j) + g = g.`with`(nodes(i).link(outgoing:_*)) + } + + val gv = Graphviz.fromGraph(g).totalMemory(100 * 1000 * 1000) + val outputs = Seq( + Format.PLAIN -> "out.txt", + Format.XDOT -> "out.dot", + Format.JSON -> "out.json", + Format.PNG -> "out.png", + Format.SVG -> "out.svg" + ) + for((fmt, name) <- outputs) { + gv.render(fmt).toFile((dest / name).toIO) + } + outputs.map(x => mill.PathRef(dest / x._2)) + } +} \ No newline at end of file diff --git a/main/graphviz/src/mill/main/graphviz/GraphvizTools.scala b/main/graphviz/src/mill/main/graphviz/GraphvizTools.scala deleted file mode 100644 index 9812c81f..00000000 --- a/main/graphviz/src/mill/main/graphviz/GraphvizTools.scala +++ /dev/null @@ -1,71 +0,0 @@ -package mill.main.graphviz -import guru.nidi.graphviz.attribute.Style -import mill.define.{Graph, NamedTask} -import org.jgrapht.graph.{DefaultEdge, SimpleDirectedGraph} -object GraphvizTools{ - def apply(targets: Seq[NamedTask[Any]], rs: Seq[NamedTask[Any]], dest: os.Path) = { - val transitive = Graph.transitiveTargets(rs.distinct) - val topoSorted = Graph.topoSorted(transitive) - val goalSet = rs.toSet - val sortedGroups = Graph.groupAroundImportantTargets(topoSorted){ - case x: NamedTask[Any] if goalSet.contains(x) => x - } - import guru.nidi.graphviz.engine.{Format, Graphviz} - import guru.nidi.graphviz.model.Factory._ - - val edgesIterator = - for((k, vs) <- sortedGroups.items()) - yield ( - k, - for { - v <- vs.items - dest <- v.inputs.collect { case v: NamedTask[Any] => v } - if goalSet.contains(dest) - } yield dest - ) - - val edges = edgesIterator.map{case (k, v) => (k, v.toArray.distinct)}.toArray - - val indexToTask = edges.flatMap{case (k, vs) => Iterator(k) ++ vs}.distinct - val taskToIndex = indexToTask.zipWithIndex.toMap - - val jgraph = new SimpleDirectedGraph[Int, DefaultEdge](classOf[DefaultEdge]) - - for(i <- indexToTask.indices) jgraph.addVertex(i) - for((src, dests) <- edges; dest <- dests) { - jgraph.addEdge(taskToIndex(src), taskToIndex(dest)) - } - - - org.jgrapht.alg.TransitiveReduction.INSTANCE.reduce(jgraph) - val nodes = indexToTask.map(t => - node(t.ctx.segments.render).`with`{ - if(targets.contains(t)) Style.SOLID - else Style.DOTTED - } - ) - - var g = graph("example1").directed - for(i <- indexToTask.indices){ - val outgoing = for{ - e <- edges(i)._2 - j = taskToIndex(e) - if jgraph.containsEdge(i, j) - } yield nodes(j) - g = g.`with`(nodes(i).link(outgoing:_*)) - } - - val gv = 
Graphviz.fromGraph(g).totalMemory(100 * 1000 * 1000) - val outputs = Seq( - Format.PLAIN -> "out.txt", - Format.XDOT -> "out.dot", - Format.JSON -> "out.json", - Format.PNG -> "out.png", - Format.SVG -> "out.svg" - ) - for((fmt, name) <- outputs) { - gv.render(fmt).toFile((dest / name).toIO) - } - outputs.map(x => mill.PathRef(dest / x._2)) - } -} \ No newline at end of file diff --git a/main/moduledefs/src/AutoOverridePlugin.scala b/main/moduledefs/src/AutoOverridePlugin.scala new file mode 100644 index 00000000..a870e7ec --- /dev/null +++ b/main/moduledefs/src/AutoOverridePlugin.scala @@ -0,0 +1,172 @@ +package mill.moduledefs + + +import scala.collection.mutable.ListBuffer +import scala.reflect.internal.Flags +import scala.tools.nsc.doc.ScaladocSyntaxAnalyzer +import scala.tools.nsc.io.VirtualFile +import scala.tools.nsc.util.BatchSourceFile +import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} +import scala.tools.nsc.transform.Transform + +class AutoOverridePlugin(val global: Global) extends Plugin { + import global._ + override def init(options: List[String], error: String => Unit): Boolean = true + + val name = "auto-override-plugin" + val description = "automatically inserts `override` keywords for you" + + val components = List[PluginComponent]( + new PluginComponent with Transform { + type GT = AutoOverridePlugin.this.global.type + override val global: GT = AutoOverridePlugin.this.global + override val phaseName: String = "EmbedScaladocAnnotation" + override val runsAfter: List[String] = List("parser") + override def newTransformer(unit: global.CompilationUnit): global.Transformer = { + new ScaladocTransformer + } + import global._ + + + class ScaladocTransformer extends global.Transformer { + + val comments = new Comments() + + override def transformUnit(unit: CompilationUnit)= { + if (unit.source.file.name.endsWith(".scala") || + unit.source.file.name.endsWith(".sc")){ + comments.parseComments(unit) + super.transformUnit(unit) + } + } + + override def transform(tree: global.Tree): global.Tree = { + super.transform(tree match { + case x: global.ClassDef => + comments.getComment(x.pos) match { + case Some(comment) => + global.treeCopy.ClassDef(tree, newMods(x.mods, comment), x.name, x.tparams, x.impl) + case None => x + } + + case x: global.ModuleDef => + comments.getComment(x.pos) match { + case Some(comment) => + global.treeCopy.ModuleDef(tree, newMods(x.mods, comment), x.name, x.impl) + case None => x + } + + case x: global.DefDef => + comments.getComment(x.pos) match { + case Some(comment) => + global.treeCopy.DefDef(tree, newMods(x.mods, comment), x.name, x.tparams, x.vparamss, x.tpt, x.rhs) + case None => x + } + + case x: global.ValDef => + comments.getComment(x.pos) match { + case Some(comment) => + global.treeCopy.ValDef(tree, newMods(x.mods, comment), x.name, x.tpt, x.rhs) + case None => x + } + + case x => x + }) + } + + def newMods(old: global.Modifiers, comment: String) = { + old.copy( + annotations = createAnnotation(comment) :: old.annotations + ) + } + + private def createAnnotation(comment: String): global.Tree = + global.Apply( + global.Select( + global.New( + global.Select( + global.Select( + global.Ident( + global.newTermName("mill") + ), + global.newTermName("moduledefs") + ), + global.newTypeName("Scaladoc") + ) + ), + global.nme.CONSTRUCTOR + ), + List(Literal(Constant(comment))) + ) + + } + + class Comments extends ScaladocSyntaxAnalyzer[global.type](global){ + val comments = ListBuffer[(Position, String)]() + + 
def getComment(pos: Position): Option[String] = { + val tookComments = comments.takeWhile { case (x, _) => x.end < pos.start } + comments --= (tookComments) + tookComments.lastOption.map(_._2) + } + + def parseComments(unit: CompilationUnit): Unit = { + comments.clear() + + new ScaladocUnitParser(unit, Nil) { + override def newScanner = new ScaladocUnitScanner(unit, Nil) { + override def registerDocComment(str: String, pos: Position) = { + comments += ((pos, str)) + } + } + }.parse() + } + + override val runsAfter: List[String] = Nil + override val runsRightAfter: Option[String] = None + } + }, + new PluginComponent { + + val global = AutoOverridePlugin.this.global + import global._ + + override val runsAfter = List("typer") + override val runsBefore = List("patmat") + + val phaseName = "auto-override" + + override def newPhase(prev: Phase) = new GlobalPhase(prev) { + + def name: String = phaseName + + def isCacher(owner: Symbol) = { + val baseClasses = + if (owner.isClass) Some(owner.asClass.baseClasses) + else if (owner.isModule) Some(owner.asModule.baseClasses) + else None + baseClasses.exists(_.exists(_.fullName == "mill.moduledefs.Cacher")) + } + + def apply(unit: global.CompilationUnit): Unit = { + object AutoOverrider extends global.Transformer { + override def transform(tree: global.Tree) = tree match{ + case d: DefDef + if d.symbol.overrideChain.count(!_.isAbstract) > 1 + && !d.mods.isOverride + && isCacher(d.symbol.owner) => + + d.symbol.flags = d.symbol.flags | Flags.OVERRIDE + copyDefDef(d)(mods = d.mods | Flags.OVERRIDE) + case _ => super.transform(tree) + + } + } + + unit.body = AutoOverrider.transform(unit.body) + } + } + } + ) +} \ No newline at end of file diff --git a/main/moduledefs/src/Cacher.scala b/main/moduledefs/src/Cacher.scala new file mode 100644 index 00000000..023f03be --- /dev/null +++ b/main/moduledefs/src/Cacher.scala @@ -0,0 +1,35 @@ +package mill.moduledefs + +import scala.collection.mutable +import scala.reflect.macros.blackbox.Context + + +trait Cacher{ + private[this] lazy val cacherLazyMap = mutable.Map.empty[sourcecode.Enclosing, Any] + + protected[this] def cachedTarget[T](t: => T) + (implicit c: sourcecode.Enclosing): T = synchronized{ + cacherLazyMap.getOrElseUpdate(c, t).asInstanceOf[T] + } +} + +object Cacher{ + def impl0[T: c.WeakTypeTag](c: Context) + (t: c.Expr[T]): c.Expr[T] = { + c.Expr[T](wrapCached[T](c)(t.tree)) + } + def wrapCached[R: c.WeakTypeTag](c: Context)(t: c.Tree) = { + + import c.universe._ + val owner = c.internal.enclosingOwner + val ownerIsCacherClass = + owner.owner.isClass && + owner.owner.asClass.baseClasses.exists(_.fullName == "mill.moduledefs.Cacher") + + if (ownerIsCacherClass && owner.isMethod) q"this.cachedTarget[${weakTypeTag[R]}]($t)" + else c.abort( + c.enclosingPosition, + "T{} members must be defs defined in a Cacher class/trait/object body" + ) + } +} \ No newline at end of file diff --git a/main/moduledefs/src/Scaladoc.java b/main/moduledefs/src/Scaladoc.java new file mode 100644 index 00000000..7a7d700b --- /dev/null +++ b/main/moduledefs/src/Scaladoc.java @@ -0,0 +1,11 @@ +package mill.moduledefs; + + +import java.lang.annotation.*; + +@Target({ElementType.TYPE, ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface Scaladoc { + String value(); +} diff --git a/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala b/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala deleted file mode 100644 index a870e7ec..00000000 --- 
a/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala +++ /dev/null @@ -1,172 +0,0 @@ -package mill.moduledefs - - -import scala.collection.mutable.ListBuffer -import scala.reflect.internal.Flags -import scala.tools.nsc.doc.ScaladocSyntaxAnalyzer -import scala.tools.nsc.io.VirtualFile -import scala.tools.nsc.util.BatchSourceFile -import scala.tools.nsc.{Global, Phase} -import scala.tools.nsc.plugins.{Plugin, PluginComponent} -import scala.tools.nsc.transform.Transform - -class AutoOverridePlugin(val global: Global) extends Plugin { - import global._ - override def init(options: List[String], error: String => Unit): Boolean = true - - val name = "auto-override-plugin" - val description = "automatically inserts `override` keywords for you" - - val components = List[PluginComponent]( - new PluginComponent with Transform { - type GT = AutoOverridePlugin.this.global.type - override val global: GT = AutoOverridePlugin.this.global - override val phaseName: String = "EmbedScaladocAnnotation" - override val runsAfter: List[String] = List("parser") - override def newTransformer(unit: global.CompilationUnit): global.Transformer = { - new ScaladocTransformer - } - import global._ - - - class ScaladocTransformer extends global.Transformer { - - val comments = new Comments() - - override def transformUnit(unit: CompilationUnit)= { - if (unit.source.file.name.endsWith(".scala") || - unit.source.file.name.endsWith(".sc")){ - comments.parseComments(unit) - super.transformUnit(unit) - } - } - - override def transform(tree: global.Tree): global.Tree = { - super.transform(tree match { - case x: global.ClassDef => - comments.getComment(x.pos) match { - case Some(comment) => - global.treeCopy.ClassDef(tree, newMods(x.mods, comment), x.name, x.tparams, x.impl) - case None => x - } - - case x: global.ModuleDef => - comments.getComment(x.pos) match { - case Some(comment) => - global.treeCopy.ModuleDef(tree, newMods(x.mods, comment), x.name, x.impl) - case None => x - } - - case x: global.DefDef => - comments.getComment(x.pos) match { - case Some(comment) => - global.treeCopy.DefDef(tree, newMods(x.mods, comment), x.name, x.tparams, x.vparamss, x.tpt, x.rhs) - case None => x - } - - case x: global.ValDef => - comments.getComment(x.pos) match { - case Some(comment) => - global.treeCopy.ValDef(tree, newMods(x.mods, comment), x.name, x.tpt, x.rhs) - case None => x - } - - case x => x - }) - } - - def newMods(old: global.Modifiers, comment: String) = { - old.copy( - annotations = createAnnotation(comment) :: old.annotations - ) - } - - private def createAnnotation(comment: String): global.Tree = - global.Apply( - global.Select( - global.New( - global.Select( - global.Select( - global.Ident( - global.newTermName("mill") - ), - global.newTermName("moduledefs") - ), - global.newTypeName("Scaladoc") - ) - ), - global.nme.CONSTRUCTOR - ), - List(Literal(Constant(comment))) - ) - - } - - class Comments extends ScaladocSyntaxAnalyzer[global.type](global){ - val comments = ListBuffer[(Position, String)]() - - def getComment(pos: Position): Option[String] = { - val tookComments = comments.takeWhile { case (x, _) => x.end < pos.start } - comments --= (tookComments) - tookComments.lastOption.map(_._2) - } - - def parseComments(unit: CompilationUnit): Unit = { - comments.clear() - - new ScaladocUnitParser(unit, Nil) { - override def newScanner = new ScaladocUnitScanner(unit, Nil) { - override def registerDocComment(str: String, pos: Position) = { - comments += ((pos, str)) - } - } - }.parse() - } - - override val 
runsAfter: List[String] = Nil - override val runsRightAfter: Option[String] = None - } - }, - new PluginComponent { - - val global = AutoOverridePlugin.this.global - import global._ - - override val runsAfter = List("typer") - override val runsBefore = List("patmat") - - val phaseName = "auto-override" - - override def newPhase(prev: Phase) = new GlobalPhase(prev) { - - def name: String = phaseName - - def isCacher(owner: Symbol) = { - val baseClasses = - if (owner.isClass) Some(owner.asClass.baseClasses) - else if (owner.isModule) Some(owner.asModule.baseClasses) - else None - baseClasses.exists(_.exists(_.fullName == "mill.moduledefs.Cacher")) - } - - def apply(unit: global.CompilationUnit): Unit = { - object AutoOverrider extends global.Transformer { - override def transform(tree: global.Tree) = tree match{ - case d: DefDef - if d.symbol.overrideChain.count(!_.isAbstract) > 1 - && !d.mods.isOverride - && isCacher(d.symbol.owner) => - - d.symbol.flags = d.symbol.flags | Flags.OVERRIDE - copyDefDef(d)(mods = d.mods | Flags.OVERRIDE) - case _ => super.transform(tree) - - } - } - - unit.body = AutoOverrider.transform(unit.body) - } - } - } - ) -} \ No newline at end of file diff --git a/main/moduledefs/src/mill/moduledefs/Cacher.scala b/main/moduledefs/src/mill/moduledefs/Cacher.scala deleted file mode 100644 index 023f03be..00000000 --- a/main/moduledefs/src/mill/moduledefs/Cacher.scala +++ /dev/null @@ -1,35 +0,0 @@ -package mill.moduledefs - -import scala.collection.mutable -import scala.reflect.macros.blackbox.Context - - -trait Cacher{ - private[this] lazy val cacherLazyMap = mutable.Map.empty[sourcecode.Enclosing, Any] - - protected[this] def cachedTarget[T](t: => T) - (implicit c: sourcecode.Enclosing): T = synchronized{ - cacherLazyMap.getOrElseUpdate(c, t).asInstanceOf[T] - } -} - -object Cacher{ - def impl0[T: c.WeakTypeTag](c: Context) - (t: c.Expr[T]): c.Expr[T] = { - c.Expr[T](wrapCached[T](c)(t.tree)) - } - def wrapCached[R: c.WeakTypeTag](c: Context)(t: c.Tree) = { - - import c.universe._ - val owner = c.internal.enclosingOwner - val ownerIsCacherClass = - owner.owner.isClass && - owner.owner.asClass.baseClasses.exists(_.fullName == "mill.moduledefs.Cacher") - - if (ownerIsCacherClass && owner.isMethod) q"this.cachedTarget[${weakTypeTag[R]}]($t)" - else c.abort( - c.enclosingPosition, - "T{} members must be defs defined in a Cacher class/trait/object body" - ) - } -} \ No newline at end of file diff --git a/main/moduledefs/src/mill/moduledefs/Scaladoc.java b/main/moduledefs/src/mill/moduledefs/Scaladoc.java deleted file mode 100644 index 7a7d700b..00000000 --- a/main/moduledefs/src/mill/moduledefs/Scaladoc.java +++ /dev/null @@ -1,11 +0,0 @@ -package mill.moduledefs; - - -import java.lang.annotation.*; - -@Target({ElementType.TYPE, ElementType.METHOD}) -@Retention(RetentionPolicy.RUNTIME) -@Documented -public @interface Scaladoc { - String value(); -} diff --git a/main/src/MillMain.scala b/main/src/MillMain.scala new file mode 100644 index 00000000..e953e65d --- /dev/null +++ b/main/src/MillMain.scala @@ -0,0 +1,155 @@ +package mill + +import java.io.{InputStream, PrintStream} + +import scala.collection.JavaConverters._ +import ammonite.main.Cli._ +import io.github.retronym.java9rtexport.Export +import mill.eval.Evaluator +import mill.api.DummyInputStream + +object MillMain { + + def main(args: Array[String]): Unit = { + val as = args match { + case Array(s, _*) if s == "-i" || s == "--interactive" => args.tail + case _ => args + } + val (result, _) = main0( + as, + None, + 
ammonite.Main.isInteractive(), + System.in, + System.out, + System.err, + System.getenv().asScala.toMap, + b => () + ) + System.exit(if(result) 0 else 1) + } + + def main0(args: Array[String], + stateCache: Option[Evaluator.State], + mainInteractive: Boolean, + stdin: InputStream, + stdout: PrintStream, + stderr: PrintStream, + env: Map[String, String], + setIdle: Boolean => Unit): (Boolean, Option[Evaluator.State]) = { + import ammonite.main.Cli + + val millHome = mill.api.Ctx.defaultHome + + val removed = Set("predef-code", "no-home-predef") + var interactive = false + val interactiveSignature = Arg[Config, Unit]( + "interactive", Some('i'), + "Run Mill in interactive mode, suitable for opening REPLs and taking user input. In this mode, no mill server will be used.", + (c, v) =>{ + interactive = true + c + } + ) + + + + var disableTicker = false + val disableTickerSignature = Arg[Config, Unit]( + "disable-ticker", None, + "Disable ticker log (e.g. short-lived prints of stages and progress bars)", + (c, v) =>{ + disableTicker = true + c + } + ) + + var debugLog = false + val debugLogSignature = Arg[Config, Unit]( + name = "debug", shortName = Some('d'), + doc = "Show debug output on STDOUT", + (c, v) => { + debugLog = true + c + } + ) + + val millArgSignature = + Cli.genericSignature.filter(a => !removed(a.name)) ++ Seq(interactiveSignature, disableTickerSignature, debugLogSignature) + + Cli.groupArgs( + args.toList, + millArgSignature, + Cli.Config(home = millHome, remoteLogging = false) + ) match{ + case _ if interactive => + stderr.println("-i/--interactive must be passed in as the first argument") + (false, None) + case Left(msg) => + stderr.println(msg) + (false, None) + case Right((cliConfig, _)) if cliConfig.help => + val leftMargin = millArgSignature.map(ammonite.main.Cli.showArg(_).length).max + 2 + stdout.println( + s"""Mill Build Tool + |usage: mill [mill-options] [target [target-options]] + | + |${formatBlock(millArgSignature, leftMargin).mkString(ammonite.util.Util.newLine)}""".stripMargin + ) + (true, None) + case Right((cliConfig, leftoverArgs)) => + + val repl = leftoverArgs.isEmpty + if (repl && stdin == DummyInputStream) { + stderr.println("Build repl needs to be run with the -i/--interactive flag") + (false, stateCache) + }else{ + val config = + if(!repl) cliConfig + else cliConfig.copy( + predefCode = + s"""import $$file.build, build._ + |implicit val replApplyHandler = mill.main.ReplApplyHandler( + | os.Path(${pprint.apply(cliConfig.home.toIO.getCanonicalPath.replaceAllLiterally("$", "$$")).plainText}), + | $disableTicker, + | interp.colors(), + | repl.pprinter(), + | build.millSelf.get, + | build.millDiscover, + | $debugLog + |) + |repl.pprinter() = replApplyHandler.pprinter + |import replApplyHandler.generatedEval._ + | + """.stripMargin, + welcomeBanner = None + ) + + val runner = new mill.main.MainRunner( + config.copy(colored = config.colored orElse Option(mainInteractive)), + disableTicker, + stdout, stderr, stdin, + stateCache, + env, + setIdle, + debugLog + ) + + if (mill.main.client.Util.isJava9OrAbove) { + val rt = cliConfig.home / Export.rtJarName + if (!os.exists(rt)) { + runner.printInfo(s"Preparing Java ${System.getProperty("java.version")} runtime; this may take a minute or two ...") + Export.rtTo(rt.toIO, false) + } + } + + if (repl){ + runner.printInfo("Loading...") + (runner.watchLoop(isRepl = true, printing = false, _.run()), runner.stateCache) + } else { + (runner.runScript(os.pwd / "build.sc", leftoverArgs), runner.stateCache) + } + } + + } + } +} 
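The `MillMain.main` entry point above consumes a leading `-i`/`--interactive` flag itself before handing the remaining arguments to the signature-based parser, which is why `main0` rejects the flag when it shows up anywhere but first. Below is a minimal, self-contained sketch of that flag-stripping pattern; the `FlagStripExample` and `stripInteractive` names are illustrative only and are not part of this patch.

object FlagStripExample {
  // Consume a leading -i / --interactive flag, mirroring the `args match`
  // at the top of MillMain.main; everything after it is left untouched.
  def stripInteractive(args: Array[String]): (Boolean, Array[String]) = args match {
    case Array(s, _*) if s == "-i" || s == "--interactive" => (true, args.tail)
    case _ => (false, args)
  }

  def main(args: Array[String]): Unit = {
    // e.g. `mill -i foo.run`: interactive mode, with the flag stripped off
    val (interactive, remaining) = stripInteractive(Array("-i", "foo.run"))
    assert(interactive)
    assert(remaining.sameElements(Array("foo.run")))
  }
}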
diff --git a/main/src/main/MainModule.scala b/main/src/main/MainModule.scala new file mode 100644 index 00000000..34145668 --- /dev/null +++ b/main/src/main/MainModule.scala @@ -0,0 +1,269 @@ +package mill.main + +import java.util.concurrent.LinkedBlockingQueue + +import mill.T +import mill.define.{NamedTask, Task} +import mill.eval.{Evaluator, PathRef, Result} +import mill.util.{Ctx, PrintLogger, Watched} +import pprint.{Renderer, Truncated} +object MainModule{ + def resolveTasks[T](evaluator: Evaluator, targets: Seq[String], multiSelect: Boolean) + (f: List[NamedTask[Any]] => T) = { + RunScript.resolveTasks(mill.main.ResolveTasks, evaluator, targets, multiSelect) match{ + case Left(err) => Result.Failure(err) + case Right(tasks) => Result.Success(f(tasks)) + } + } + def evaluateTasks[T](evaluator: Evaluator, targets: Seq[String], multiSelect: Boolean) + (f: Seq[(Any, Option[ujson.Value])] => T) = { + RunScript.evaluateTasks(evaluator, targets, multiSelect) match{ + case Left(err) => Result.Failure(err) + case Right((watched, Left(err))) => Result.Failure(err, Some(Watched((), watched))) + case Right((watched, Right(res))) => + f(res) + Result.Success(Watched((), watched)) + } + } +} + +trait MainModule extends mill.Module{ + + implicit def millDiscover: mill.define.Discover[_] + implicit def millScoptTasksReads[T] = new mill.main.Tasks.Scopt[T]() + implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() + def version() = mill.T.command { + val res = System.getProperty("MILL_VERSION") + println(res) + res + } + + private val OutDir: String = "out" + + /** + * Resolves a mill query string and prints out the tasks it resolves to. + */ + def resolve(evaluator: Evaluator, targets: String*) = mill.T.command{ + val resolved = RunScript.resolveTasks( + mill.main.ResolveMetadata, evaluator, targets, multiSelect = true + ) + + resolved match{ + case Left(err) => Result.Failure(err) + case Right(rs) => + for(r <- rs.sorted) { + println(r) + } + Result.Success(()) + } + } + + /** + * Given a set of tasks, prints out the execution plan of what tasks will be + * executed in what order, without actually executing them. + */ + def plan(evaluator: Evaluator, targets: String*) = mill.T.command{ + plan0(evaluator, targets) match{ + case Right(success) => { + val renderedTasks = success.map{ _.segments.render} + renderedTasks.foreach(println) + Result.Success(renderedTasks) + } + case Left(err) => Result.Failure(err) + } + } + + private def plan0(evaluator: Evaluator, targets: Seq[String]) = { + RunScript.resolveTasks( + mill.main.ResolveTasks, evaluator, targets, multiSelect = true + ) match { + case Left(err) => Left(err) + case Right(rs) => + val (sortedGroups, _) = Evaluator.plan(evaluator.rootModule, rs) + Right(sortedGroups.keys().collect{ case Right(r) => r}.toArray) + } + } + + /** + * Prints out some dependency path from the `src` task to the `dest` task. + * + * If there are multiple dependency paths between `src` and `dest`, the path + * chosen is arbitrary. 
+ */ + def path(evaluator: Evaluator, src: String, dest: String) = mill.T.command{ + val resolved = RunScript.resolveTasks( + mill.main.ResolveTasks, evaluator, List(src, dest), multiSelect = true + ) + + resolved match{ + case Left(err) => Result.Failure(err) + case Right(Seq(src1, dest1)) => + val queue = collection.mutable.Queue[List[Task[_]]](List(src1)) + var found = Option.empty[List[Task[_]]] + val seen = collection.mutable.Set.empty[Task[_]] + while(queue.nonEmpty && found.isEmpty){ + val current = queue.dequeue() + if (current.head == dest1) found = Some(current) + else{ + for{ + next <- current.head.inputs + if !seen.contains(next) + }{ + seen.add(next) + queue.enqueue(next :: current) + } + } + } + found match{ + case None => + Result.Failure(s"No path found between $src and $dest") + case Some(list) => + val labels = list + .collect{case n: NamedTask[_] => n.ctx.segments.render} + + labels.foreach(mill.T.ctx().log.outputStream.println(_)) + + Result.Success(labels) + } + } + } + + /** + * Displays metadata about the given task without actually running it. + */ + def inspect(evaluator: Evaluator, targets: String*) = mill.T.command{ + MainModule.resolveTasks(evaluator, targets, multiSelect = true){ tasks => + val output = new StringBuilder + for{ + task <- tasks + tree = ReplApplyHandler.pprintTask(task, evaluator) + val defaults = pprint.PPrinter() + val renderer = new Renderer( + defaults.defaultWidth, + defaults.colorApplyPrefix, + defaults.colorLiteral, + defaults.defaultIndent + ) + val rendered = renderer.rec(tree, 0, 0).iter + val truncated = new Truncated(rendered, defaults.defaultWidth, defaults.defaultHeight) + str <- truncated ++ Iterator("\n") + } { + output.append(str) + } + println(output) + output.toString + } + } + + /** + * Runs multiple tasks in a single call. + * + * + */ + def all(evaluator: Evaluator, targets: String*) = mill.T.command{ + MainModule.evaluateTasks(evaluator, targets, multiSelect = true) {res => + res.flatMap(_._2) + } + } + + /** + * Runs a given task and prints the JSON result to stdout. This is useful + * to integrate Mill into external scripts and tooling. + */ + def show(evaluator: Evaluator, targets: String*) = mill.T.command{ + MainModule.evaluateTasks( + evaluator.copy( + // When using `show`, redirect all stdout of the evaluated tasks so the + // printed JSON is the only thing printed to stdout. + log = evaluator.log match{ + case PrintLogger(c1, d, c2, o, i, e, in, de) => PrintLogger(c1, d, c2, e, i, e, in, de) + case l => l + } + ), + targets, + multiSelect = false + ) {res => + for(json <- res.flatMap(_._2)){ + println(json.render(indent = 4)) + } + } + } + + /** + * Deletes the given targets from the out directory. Providing no targets + * will clean everything. 
+ */ + def clean(evaluator: Evaluator, targets: String*) = mill.T.command { + val rootDir = ammonite.ops.pwd / OutDir + + val KeepPattern = "(mill-.+)".r.anchored + + def keepPath(path: os.Path) = path.segments.toSeq.lastOption match { + case Some(KeepPattern(_)) => true + case _ => false + } + + val pathsToRemove = + if (targets.isEmpty) + Right(ammonite.ops.ls(rootDir).filterNot(keepPath)) + else + RunScript.resolveTasks( + mill.main.ResolveSegments, evaluator, targets, multiSelect = true + ).map( + _.map { segments => + Evaluator.resolveDestPaths(rootDir, segments).out + }) + + pathsToRemove match { + case Left(err) => + Result.Failure(err) + case Right(paths) => + paths.foreach(os.remove.all) + Result.Success(()) + } + } + + + /** + * Renders the dependencies between the given tasks as a SVG for you to look at + */ + def visualize(evaluator: Evaluator, targets: String*) = mill.T.command{ + visualize0(evaluator, targets, T.ctx(), mill.main.VisualizeModule.worker()) + } + + /** + * Renders the dependencies between the given tasks, and all their dependencies, as a SVG + */ + def visualizePlan(evaluator: Evaluator, targets: String*) = mill.T.command{ + plan0(evaluator, targets) match { + case Left(err) => Result.Failure(err) + case Right(planResults) => visualize0( + evaluator, targets, T.ctx(), mill.main.VisualizeModule.worker(), Some(planResults.toList.map(_.task)) + ) + } + } + + private type VizWorker = (LinkedBlockingQueue[(scala.Seq[_], scala.Seq[_], os.Path)], + LinkedBlockingQueue[Result[scala.Seq[PathRef]]]) + + private def visualize0(evaluator: Evaluator, targets: Seq[String], ctx: Ctx, vizWorker: VizWorker, + planTasks: Option[List[NamedTask[_]]] = None) = { + def callVisualizeModule(rs: List[NamedTask[Any]], allRs: List[NamedTask[Any]]) = { + val (in, out) = vizWorker + in.put((rs, allRs, ctx.dest)) + out.take() + } + + RunScript.resolveTasks( + mill.main.ResolveTasks, evaluator, targets, multiSelect = true + ) match { + case Left(err) => Result.Failure(err) + case Right(rs) => planTasks match { + case Some(allRs) => { + callVisualizeModule(rs, allRs) + } + case None => callVisualizeModule(rs, rs) + } + } + } +} diff --git a/main/src/main/MainRunner.scala b/main/src/main/MainRunner.scala new file mode 100644 index 00000000..e50ed370 --- /dev/null +++ b/main/src/main/MainRunner.scala @@ -0,0 +1,170 @@ +package mill.main +import java.io.{InputStream, PrintStream} + +import ammonite.Main +import ammonite.interp.{Interpreter, Preprocessor} +import ammonite.util.Util.CodeSource +import ammonite.util._ +import mill.eval.{Evaluator, PathRef} +import mill.util.PrintLogger + +import scala.annotation.tailrec + + +/** + * Customized version of [[ammonite.MainRunner]], allowing us to run Mill + * `build.sc` scripts with mill-specific tweaks such as a custom + * `scriptCodeWrapper` or with a persistent evaluator between runs. + */ +class MainRunner(val config: ammonite.main.Cli.Config, + disableTicker: Boolean, + outprintStream: PrintStream, + errPrintStream: PrintStream, + stdIn: InputStream, + stateCache0: Option[Evaluator.State] = None, + env : Map[String, String], + setIdle: Boolean => Unit, + debugLog: Boolean) + extends ammonite.MainRunner( + config, outprintStream, errPrintStream, + stdIn, outprintStream, errPrintStream + ){ + + var stateCache = stateCache0 + + override def watchAndWait(watched: Seq[(os.Path, Long)]) = { + printInfo(s"Watching for changes to ${watched.length} files... 
(Ctrl-C to exit)") + def statAll() = watched.forall{ case (file, lastMTime) => + Interpreter.pathSignature(file) == lastMTime + } + setIdle(true) + while(statAll()) Thread.sleep(100) + setIdle(false) + } + + /** + * Custom version of [[watchLoop]] that lets us generate the watched-file + * signature only on demand, so if we don't have config.watch enabled we do + * not pay the cost of generating it + */ + @tailrec final def watchLoop2[T](isRepl: Boolean, + printing: Boolean, + run: Main => (Res[T], () => Seq[(os.Path, Long)])): Boolean = { + val (result, watched) = run(initMain(isRepl)) + + val success = handleWatchRes(result, printing) + if (!config.watch) success + else{ + watchAndWait(watched()) + watchLoop2(isRepl, printing, run) + } + } + + + override def runScript(scriptPath: os.Path, scriptArgs: List[String]) = + watchLoop2( + isRepl = false, + printing = true, + mainCfg => { + val (result, interpWatched) = RunScript.runScript( + config.home, + mainCfg.wd, + scriptPath, + mainCfg.instantiateInterpreter(), + scriptArgs, + stateCache, + new PrintLogger( + colors != ammonite.util.Colors.BlackWhite, + disableTicker, + colors, + outprintStream, + errPrintStream, + errPrintStream, + stdIn, + debugEnabled = debugLog + ), + env + ) + + result match{ + case Res.Success(data) => + val (eval, evalWatches, res) = data + + stateCache = Some(Evaluator.State(eval.rootModule, eval.classLoaderSig, eval.workerCache, interpWatched)) + val watched = () => { + val alreadyStale = evalWatches.exists(p => p.sig != PathRef(p.path, p.quick).sig) + // If the file changed between the creation of the original + // `PathRef` and the current moment, use random junk .sig values + // to force an immediate re-run. Otherwise calculate the + // pathSignatures the same way Ammonite would and hand over the + // values, so Ammonite can watch them and only re-run if they + // subsequently change + if (alreadyStale) evalWatches.map(_.path -> util.Random.nextLong()) + else evalWatches.map(p => p.path -> Interpreter.pathSignature(p.path)) + } + (Res(res), () => interpWatched ++ watched()) + case _ => (result, () => interpWatched) + } + } + ) + + override def handleWatchRes[T](res: Res[T], printing: Boolean) = { + res match{ + case Res.Success(value) => true + case _ => super.handleWatchRes(res, printing) + } + } + + override def initMain(isRepl: Boolean) = { + super.initMain(isRepl).copy( + scriptCodeWrapper = CustomCodeWrapper, + // Ammonite does not properly forward the wd from CliConfig to Main, so + // force forward it outselves + wd = config.wd + ) + } + + object CustomCodeWrapper extends Preprocessor.CodeWrapper { + def apply(code: String, + source: CodeSource, + imports: ammonite.util.Imports, + printCode: String, + indexedWrapperName: ammonite.util.Name, + extraCode: String): (String, String, Int) = { + import source.pkgName + val wrapName = indexedWrapperName.backticked + val path = source + .path + .map(path => path.toNIO.getParent) + .getOrElse(config.wd.toNIO) + val literalPath = pprint.Util.literalize(path.toString) + val external = !(path.compareTo(config.wd.toNIO) == 0) + val top = s""" + |package ${pkgName.head.encoded} + |package ${Util.encodeScalaSourcePath(pkgName.tail)} + |$imports + |import mill._ + |object $wrapName + |extends mill.define.BaseModule(os.Path($literalPath), foreign0 = $external)( + | implicitly, implicitly, implicitly, implicitly, mill.define.Caller(()) + |) + |with $wrapName{ + | // Stub to make sure Ammonite has something to call after it evaluates a script, + | // even if it does 
nothing... + | def $$main() = Iterator[String]() + | + | // Need to wrap the returned Module in Some(...) to make sure it + | // doesn't get picked up during reflective child-module discovery + | def millSelf = Some(this) + | + | implicit lazy val millDiscover: mill.define.Discover[this.type] = mill.define.Discover[this.type] + |} + | + |sealed trait $wrapName extends mill.main.MainModule{ + |""".stripMargin + val bottom = "}" + + (top, bottom, 1) + } + } +} diff --git a/main/src/main/MainScopts.scala b/main/src/main/MainScopts.scala new file mode 100644 index 00000000..718a30e6 --- /dev/null +++ b/main/src/main/MainScopts.scala @@ -0,0 +1,31 @@ +package mill.main +import mill.eval.Evaluator + +case class Tasks[T](value: Seq[mill.define.NamedTask[T]]) + +object Tasks{ + + class Scopt[T]() extends scopt.Read[Tasks[T]] { + def arity = 1 + + def reads = s => { + RunScript.resolveTasks( + mill.main.ResolveTasks, + Evaluator.currentEvaluator.get, + Seq(s), + multiSelect = false + ) match{ + case Left(err) => throw new Exception(err) + case Right(tasks) => Tasks(tasks).asInstanceOf[Tasks[T]] + } + } + } +} + +class EvaluatorScopt[T]() + extends scopt.Read[mill.eval.Evaluator]{ + def arity = 0 + def reads = s => { + Evaluator.currentEvaluator.get.asInstanceOf[mill.eval.Evaluator] + } +} diff --git a/main/src/main/MillServerMain.scala b/main/src/main/MillServerMain.scala new file mode 100644 index 00000000..26ca99e6 --- /dev/null +++ b/main/src/main/MillServerMain.scala @@ -0,0 +1,227 @@ +package mill.main + +import java.io._ +import java.net.Socket + +import mill.MillMain + +import scala.collection.JavaConverters._ +import org.scalasbt.ipcsocket._ +import mill.main.client._ +import mill.eval.Evaluator +import mill.api.DummyInputStream +import sun.misc.{Signal, SignalHandler} + +trait MillServerMain[T]{ + var stateCache = Option.empty[T] + def main0(args: Array[String], + stateCache: Option[T], + mainInteractive: Boolean, + stdin: InputStream, + stdout: PrintStream, + stderr: PrintStream, + env : Map[String, String], + setIdle: Boolean => Unit): (Boolean, Option[T]) +} + +object MillServerMain extends mill.main.MillServerMain[Evaluator.State]{ + def main(args0: Array[String]): Unit = { + // Disable SIGINT interrupt signal in the Mill server. + // + // This gets passed through from the client to server whenever the user + // hits `Ctrl-C`, which by default kills the server, which defeats the purpose + // of running a background server. 
Furthermore, the background server already + // can detect when the Mill client goes away, which is necessary to handle + // the case when a Mill client that did *not* spawn the server gets `CTRL-C`ed + Signal.handle(new Signal("INT"), new SignalHandler () { + def handle(sig: Signal) = {} // do nothing + }) + new Server( + args0(0), + this, + () => System.exit(0), + 300000, + mill.main.client.Locks.files(args0(0)) + ).run() + } + def main0(args: Array[String], + stateCache: Option[Evaluator.State], + mainInteractive: Boolean, + stdin: InputStream, + stdout: PrintStream, + stderr: PrintStream, + env : Map[String, String], + setIdle: Boolean => Unit) = { + MillMain.main0( + args, + stateCache, + mainInteractive, + DummyInputStream, + stdout, + stderr, + env, + setIdle + ) + } +} + + +class Server[T](lockBase: String, + sm: MillServerMain[T], + interruptServer: () => Unit, + acceptTimeout: Int, + locks: Locks) { + + val originalStdout = System.out + def run() = { + Server.tryLockBlock(locks.processLock){ + var running = true + while (running) { + Server.lockBlock(locks.serverLock){ + val (serverSocket, socketClose) = if (Util.isWindows) { + val socketName = Util.WIN32_PIPE_PREFIX + new File(lockBase).getName + (new Win32NamedPipeServerSocket(socketName), () => new Win32NamedPipeSocket(socketName).close()) + } else { + val socketName = lockBase + "/io" + new File(socketName).delete() + (new UnixDomainServerSocket(socketName), () => new UnixDomainSocket(socketName).close()) + } + + val sockOpt = Server.interruptWith( + "MillSocketTimeoutInterruptThread", + acceptTimeout, + socketClose(), + serverSocket.accept() + ) + + sockOpt match{ + case None => running = false + case Some(sock) => + try { + handleRun(sock) + serverSocket.close() + } + catch{case e: Throwable => e.printStackTrace(originalStdout) } + } + } + // Make sure you give an opportunity for the client to probe the lock + // and realize the server has released it to signal completion + Thread.sleep(10) + } + }.getOrElse(throw new Exception("PID already present")) + } + + def handleRun(clientSocket: Socket) = { + + val currentOutErr = clientSocket.getOutputStream + val stdout = new PrintStream(new ProxyOutputStream(currentOutErr, 1), true) + val stderr = new PrintStream(new ProxyOutputStream(currentOutErr, -1), true) + val socketIn = clientSocket.getInputStream + val argStream = new FileInputStream(lockBase + "/run") + val interactive = argStream.read() != 0 + val clientMillVersion = Util.readString(argStream) + val serverMillVersion = sys.props("MILL_VERSION") + if (clientMillVersion != serverMillVersion) { + stdout.println(s"Mill version changed ($serverMillVersion -> $clientMillVersion), re-starting server") + System.exit(0) + } + val args = Util.parseArgs(argStream) + val env = Util.parseMap(argStream) + argStream.close() + + @volatile var done = false + @volatile var idle = false + val t = new Thread(() => + try { + val (result, newStateCache) = sm.main0( + args, + sm.stateCache, + interactive, + socketIn, + stdout, + stderr, + env.asScala.toMap, + idle = _ + ) + + sm.stateCache = newStateCache + java.nio.file.Files.write( + java.nio.file.Paths.get(lockBase + "/exitCode"), + (if (result) 0 else 1).toString.getBytes + ) + } finally{ + done = true + idle = true + }, + "MillServerActionRunner" + ) + t.start() + // We cannot simply use Lock#await here, because the filesystem doesn't + // realize the clientLock/serverLock are held by different threads in the + // two processes and gives a spurious deadlock error + while(!done && 
!locks.clientLock.probe()) Thread.sleep(3) + + if (!idle) interruptServer() + + + t.interrupt() + t.stop() + + if (Util.isWindows) { + // Closing Win32NamedPipeSocket can often take ~5s + // It seems OK to exit the client early and subsequently + // start up mill client again (perhaps closing the server + // socket helps speed up the process). + val t = new Thread(() => clientSocket.close()) + t.setDaemon(true) + t.start() + } else clientSocket.close() + } +} +object Server{ + def lockBlock[T](lock: Lock)(t: => T): T = { + val l = lock.lock() + try t + finally l.release() + } + def tryLockBlock[T](lock: Lock)(t: => T): Option[T] = { + lock.tryLock() match{ + case null => None + case l => + try Some(t) + finally l.release() + } + + } + def interruptWith[T](threadName: String, millis: Int, close: => Unit, t: => T): Option[T] = { + @volatile var interrupt = true + @volatile var interrupted = false + val thread = new Thread( + () => { + try Thread.sleep(millis) + catch{ case t: InterruptedException => /* Do Nothing */ } + if (interrupt) { + interrupted = true + close + } + }, + threadName + ) + + thread.start() + try { + val res = + try Some(t) + catch {case e: Throwable => None} + + if (interrupted) None + else res + + } finally { + thread.interrupt() + interrupt = false + } + } +} + + diff --git a/main/src/main/ReplApplyHandler.scala b/main/src/main/ReplApplyHandler.scala new file mode 100644 index 00000000..a8e467d4 --- /dev/null +++ b/main/src/main/ReplApplyHandler.scala @@ -0,0 +1,163 @@ +package mill.main + + +import mill.define.Applicative.ApplyHandler +import mill.define.Segment.Label +import mill.define._ +import mill.eval.{Evaluator, Result} + +import mill.util.Strict.Agg + +import scala.collection.mutable +object ReplApplyHandler{ + def apply[T](home: os.Path, + disableTicker: Boolean, + colors: ammonite.util.Colors, + pprinter0: pprint.PPrinter, + rootModule: mill.define.BaseModule, + discover: Discover[_], + debugLog: Boolean) = { + new ReplApplyHandler( + pprinter0, + new Evaluator( + home, + ammonite.ops.pwd / 'out, + ammonite.ops.pwd / 'out, + rootModule, + new mill.util.PrintLogger( + colors != ammonite.util.Colors.BlackWhite, + disableTicker, + colors, + System.out, + System.err, + System.err, + System.in, + debugEnabled = debugLog + ) + ) + ) + } + def pprintCross(c: mill.define.Cross[_], evaluator: Evaluator) = { + pprint.Tree.Lazy( ctx => + Iterator(c.millOuterCtx.enclosing , ":", c.millOuterCtx.lineNum.toString, ctx.applyPrefixColor("\nChildren:").toString) ++ + c.items.iterator.map(x => + "\n (" + x._1.map(pprint.PPrinter.BlackWhite.apply(_)).mkString(", ") + ")" + ) + ) + } + def pprintModule(m: mill.define.Module, evaluator: Evaluator) = { + pprint.Tree.Lazy( ctx => + Iterator(m.millInternal.millModuleEnclosing, ":", m.millInternal.millModuleLine.toString) ++ + (if (m.millInternal.reflectAll[mill.Module].isEmpty) Nil + else + ctx.applyPrefixColor("\nChildren:").toString +: + m.millInternal.reflectAll[mill.Module].map("\n ." + _.millOuterCtx.segment.pathSegments.mkString("."))) ++ + (evaluator.rootModule.millDiscover.value.get(m.getClass) match{ + case None => Nil + case Some(commands) => + ctx.applyPrefixColor("\nCommands:").toString +: commands.map{c => + "\n ." + c._2.name + "(" + + c._2.argSignatures.map(s => s.name + ": " + s.typeString).mkString(", ") + + ")()" + } + }) ++ + (if (m.millInternal.reflectAll[Target[_]].isEmpty) Nil + else { + Seq(ctx.applyPrefixColor("\nTargets:").toString) ++ + m.millInternal.reflectAll[Target[_]].map(t => + "\n ." 
+ t.label + "()" + ) + }) + + ) + } + + def resolveParents(c: Class[_]): Seq[Class[_]] = { + Seq(c) ++ Option(c.getSuperclass).toSeq.flatMap(resolveParents) ++ c.getInterfaces.flatMap(resolveParents) + } + + def pprintTask(t: NamedTask[_], evaluator: Evaluator) = { + val seen = mutable.Set.empty[Task[_]] + def rec(t: Task[_]): Seq[Segments] = { + if (seen(t)) Nil // do nothing + else t match { + case t: Target[_] if evaluator.rootModule.millInternal.targets.contains(t) => + Seq(t.ctx.segments) + case _ => + seen.add(t) + t.inputs.flatMap(rec) + } + } + + val annots = for { + c <- resolveParents(t.ctx.enclosingCls) + m <- c.getMethods + if m.getName == t.ctx.segment.pathSegments.head + a = m.getAnnotation(classOf[mill.moduledefs.Scaladoc]) + if a != null + }yield a + + val allDocs = + for(a <- annots.distinct) + yield mill.modules.Util.cleanupScaladoc(a.value).map("\n " + _).mkString + + pprint.Tree.Lazy(ctx => + Iterator( + ctx.applyPrefixColor(t.toString).toString, "(", t.ctx.fileName.split('/').last, ":", t.ctx.lineNum.toString, ")", + allDocs.mkString("\n"), "\n", + "\n", ctx.applyPrefixColor("Inputs").toString, ":" + ) ++ t.inputs.iterator.flatMap(rec).map("\n " + _.render) + ) + } + +} +class ReplApplyHandler(pprinter0: pprint.PPrinter, + val evaluator: Evaluator) extends ApplyHandler[Task] { + // Evaluate classLoaderSig only once in the REPL to avoid busting caches + // as the user enters more REPL commands and changes the classpath + val classLoaderSig = Evaluator.classLoaderSig + override def apply[V](t: Task[V]) = { + val res = evaluator.evaluate(Agg(t)) + res.values match{ + case Seq(head: V) => head + case Nil => + val msg = new mutable.StringBuilder() + msg.append(res.failing.keyCount + " targets failed\n") + for((k, vs) <- res.failing.items){ + msg.append(k match{ + case Left(t) => "Anonymous Task\n" + case Right(k) => k.segments.render + "\n" + }) + + for(v <- vs){ + v match{ + case Result.Failure(m, _) => msg.append(m + "\n") + case Result.Exception(t, outerStack) => + msg.append( + t.toString + + t.getStackTrace.dropRight(outerStack.value.length).map("\n " + _).mkString + + "\n" + ) + + } + } + } + throw new Exception(msg.toString) + } + } + + val generatedEval = new EvalGenerated(evaluator) + + val millHandlers: PartialFunction[Any, pprint.Tree] = { + case c: Cross[_] => + ReplApplyHandler.pprintCross(c, evaluator) + case m: mill.Module if evaluator.rootModule.millInternal.modules.contains(m) => + ReplApplyHandler.pprintModule(m, evaluator) + case t: mill.define.Target[_] if evaluator.rootModule.millInternal.targets.contains(t) => + ReplApplyHandler.pprintTask(t, evaluator) + + } + val pprinter = pprinter0.copy( + additionalHandlers = millHandlers orElse pprinter0.additionalHandlers + ) +} diff --git a/main/src/main/Resolve.scala b/main/src/main/Resolve.scala new file mode 100644 index 00000000..a2c186ed --- /dev/null +++ b/main/src/main/Resolve.scala @@ -0,0 +1,443 @@ +package mill.main + +import mill.define._ +import mill.define.TaskModule +import ammonite.util.Res +import mill.main.ResolveMetadata.singleModuleMeta +import mill.util.Router.EntryPoint +import mill.util.Scripts + +import scala.reflect.ClassTag + +object ResolveMetadata extends Resolve[String]{ + def singleModuleMeta(obj: Module, discover: Discover[_], isRootModule: Boolean) = { + val modules = obj.millModuleDirectChildren.map(_.toString) + val targets = + obj + .millInternal + .reflectAll[Target[_]] + .map(_.toString) + val commands = for{ + (cls, entryPoints) <- discover.value + if 
cls.isAssignableFrom(obj.getClass) + ep <- entryPoints + } yield { + if (isRootModule) ep._2.name + else obj + "." + ep._2.name + } + modules ++ targets ++ commands + } + + def endResolveLabel(obj: Module, + revSelectorsSoFar: List[Segment], + last: String, + discover: Discover[_], + rest: Seq[String]): Either[String, List[String]] = { + + val direct = singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty) + last match{ + case "__" => + Right( + // Filter out our own module in + obj.millInternal.modules + .filter(_ != obj) + .flatMap(m => singleModuleMeta(m, discover, m != obj)) + .toList + ) + case "_" => Right(direct.toList) + case _ => + direct.find(_.split('.').last == last) match{ + case None => Resolve.errorMsgLabel(direct, last, revSelectorsSoFar) + case Some(s) => Right(List(s)) + } + } + } + + def endResolveCross(obj: Module, + revSelectorsSoFar: List[Segment], + last: List[String], + discover: Discover[_], + rest: Seq[String]): Either[String, List[String]] = { + obj match{ + case c: Cross[Module] => + last match{ + case List("__") => Right(c.items.map(_._2.toString)) + case items => + c.items + .filter(_._1.length == items.length) + .filter(_._1.zip(last).forall{case (a, b) => b == "_" || a.toString == b}) + .map(_._2.toString) match{ + case Nil => + Resolve.errorMsgCross( + c.items.map(_._1.map(_.toString)), + last, + revSelectorsSoFar + ) + case res => Right(res) + } + + } + case _ => + Left( + Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + + Resolve.hintListLabel(revSelectorsSoFar) + ) + } + } +} + +object ResolveSegments extends Resolve[Segments] { + + override def endResolveCross(obj: Module, + revSelectorsSoFar: List[Segment], + last: List[String], + discover: Discover[_], + rest: Seq[String]): Either[String, Seq[Segments]] = { + obj match{ + case c: Cross[Module] => + last match{ + case List("__") => Right(c.items.map(_._2.millModuleSegments)) + case items => + c.items + .filter(_._1.length == items.length) + .filter(_._1.zip(last).forall{case (a, b) => b == "_" || a.toString == b}) + .map(_._2.millModuleSegments) match { + case Nil => + Resolve.errorMsgCross( + c.items.map(_._1.map(_.toString)), + last, + revSelectorsSoFar + ) + case res => Right(res) + } + } + case _ => + Left( + Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + + Resolve.hintListLabel(revSelectorsSoFar) + ) + } + } + + def endResolveLabel(obj: Module, + revSelectorsSoFar: List[Segment], + last: String, + discover: Discover[_], + rest: Seq[String]): Either[String, Seq[Segments]] = { + val target = + obj + .millInternal + .reflectSingle[Target[_]](last) + .map(t => Right(t.ctx.segments)) + + val command = + Resolve + .invokeCommand(obj, last, discover, rest) + .headOption + .map(_.map(_.ctx.segments)) + + val module = + obj.millInternal + .reflectNestedObjects[Module] + .find(_.millOuterCtx.segment == Segment.Label(last)) + .map(m => Right(m.millModuleSegments)) + + command orElse target orElse module match { + case None => + Resolve.errorMsgLabel( + singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), + last, + revSelectorsSoFar + ) + + case Some(either) => either.right.map(Seq(_)) + } + } +} + +object ResolveTasks extends Resolve[NamedTask[Any]]{ + + + def endResolveCross(obj: Module, + revSelectorsSoFar: List[Segment], + last: List[String], + discover: Discover[_], + rest: Seq[String])= { + + obj match{ + case c: Cross[Module] => + + Resolve.runDefault(obj, Segment.Cross(last), discover, rest).flatten.headOption match{ + case None => + Left( + "Cannot find 
default task to evaluate for module " + + Segments((Segment.Cross(last) :: revSelectorsSoFar).reverse:_*).render + ) + case Some(v) => v.map(Seq(_)) + } + case _ => + Left( + Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + + Resolve.hintListLabel(revSelectorsSoFar) + ) + } + } + + def endResolveLabel(obj: Module, + revSelectorsSoFar: List[Segment], + last: String, + discover: Discover[_], + rest: Seq[String]) = last match{ + case "__" => + Right( + obj.millInternal.modules + .filter(_ != obj) + .flatMap(m => m.millInternal.reflectAll[Target[_]]) + ) + case "_" => Right(obj.millInternal.reflectAll[Target[_]]) + + case _ => + val target = + obj + .millInternal + .reflectSingle[Target[_]](last) + .map(Right(_)) + + val command = Resolve.invokeCommand(obj, last, discover, rest).headOption + + command orElse target orElse Resolve.runDefault(obj, Segment.Label(last), discover, rest).flatten.headOption match { + case None => + Resolve.errorMsgLabel( + singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), + last, + revSelectorsSoFar + ) + + // Contents of `either` *must* be a `Task`, because we only select + // methods returning `Task` in the discovery process + case Some(either) => either.right.map(Seq(_)) + } + } +} + +object Resolve{ + def minimum(i1: Int, i2: Int, i3: Int)= math.min(math.min(i1, i2), i3) + + /** + * Short Levenshtein distance algorithm, based on + * + * https://rosettacode.org/wiki/Levenshtein_distance#Scala + */ + def editDistance(s1: String, s2: String) = { + val dist = Array.tabulate(s2.length+1, s1.length+1){(j, i) => if(j==0) i else if (i==0) j else 0} + + for(j <- 1 to s2.length; i <- 1 to s1.length) + dist(j)(i) = if(s2(j - 1) == s1(i-1)) dist(j - 1)(i-1) + else minimum(dist(j - 1)(i) + 1, dist(j)(i - 1) + 1, dist(j - 1)(i - 1) + 1) + + dist(s2.length)(s1.length) + } + + def unableToResolve(last: Segment, revSelectorsSoFar: List[Segment]): String = { + unableToResolve(Segments((last :: revSelectorsSoFar).reverse: _*).render) + } + + def unableToResolve(segments: String): String = "Cannot resolve " + segments + "." + + def hintList(revSelectorsSoFar: List[Segment]) = { + val search = Segments(revSelectorsSoFar.reverse: _*).render + s" Try `mill resolve $search` to see what's available." + } + + def hintListLabel(revSelectorsSoFar: List[Segment]) = { + hintList(Segment.Label("_") :: revSelectorsSoFar) + } + + def hintListCross(revSelectorsSoFar: List[Segment]) = { + hintList(Segment.Cross(Seq("__")) :: revSelectorsSoFar) + } + + def errorMsgBase[T](direct: Seq[T], + last0: T, + revSelectorsSoFar: List[Segment], + editSplit: String => String, + defaultErrorMsg: String) + (strings: T => Seq[String], + render: T => String): Left[String, Nothing] = { + val last = strings(last0) + val similar = + direct + .map(x => (x, strings(x))) + .filter(_._2.length == last.length) + .map{ case (d, s) => (d, s.zip(last).map{case (a, b) => Resolve.editDistance(editSplit(a), b)}.sum)} + .filter(_._2 < 3) + .sortBy(_._2) + + if (similar.headOption.exists(_._1 == last0)){ + // Special case: if the most similar segment is the desired segment itself, + // this means we are trying to resolve a module where a task is present. + // Special case the error message to make it something meaningful + Left("Task " + last0 + " is not a module and has no children.") + }else{ + + val hint = similar match{ + case Nil => defaultErrorMsg + case items => " Did you mean " + render(items.head._1) + "?" 
+ } + Left(unableToResolve(render(last0)) + hint) + } + } + + def errorMsgLabel(direct: Seq[String], last: String, revSelectorsSoFar: List[Segment]) = { + errorMsgBase( + direct, + Segments((Segment.Label(last) :: revSelectorsSoFar).reverse:_*).render, + revSelectorsSoFar, + _.split('.').last, + hintListLabel(revSelectorsSoFar) + )( + rendered => Seq(rendered.split('.').last), + x => x + ) + } + + def errorMsgCross(crossKeys: Seq[Seq[String]], + last: Seq[String], + revSelectorsSoFar: List[Segment]) = { + errorMsgBase( + crossKeys, + last, + revSelectorsSoFar, + x => x, + hintListCross(revSelectorsSoFar) + )( + crossKeys => crossKeys, + crossKeys => Segments((Segment.Cross(crossKeys) :: revSelectorsSoFar).reverse:_*).render + ) + } + + def invokeCommand(target: Module, + name: String, + discover: Discover[_], + rest: Seq[String]) = for { + (cls, entryPoints) <- discover.value + if cls.isAssignableFrom(target.getClass) + ep <- entryPoints + if ep._2.name == name + } yield Scripts.runMainMethod( + target, + ep._2.asInstanceOf[EntryPoint[Module]], + ammonite.main.Scripts.groupArgs(rest.toList) + ) match { + case Res.Success(v: Command[_]) => Right(v) + case Res.Failure(msg) => Left(msg) + case Res.Exception(ex, msg) => + val sw = new java.io.StringWriter() + ex.printStackTrace(new java.io.PrintWriter(sw)) + val prefix = if (msg.nonEmpty) msg + "\n" else msg + Left(prefix + sw.toString) + + } + + def runDefault(obj: Module, last: Segment, discover: Discover[_], rest: Seq[String]) = for { + child <- obj.millInternal.reflectNestedObjects[Module] + if child.millOuterCtx.segment == last + res <- child match { + case taskMod: TaskModule => + Some(invokeCommand(child, taskMod.defaultCommandName(), discover, rest).headOption) + case _ => None + } + } yield res + +} +abstract class Resolve[R: ClassTag] { + def endResolveCross(obj: Module, + revSelectorsSoFar: List[Segment], + last: List[String], + discover: Discover[_], + rest: Seq[String]): Either[String, Seq[R]] + def endResolveLabel(obj: Module, + revSelectorsSoFar: List[Segment], + last: String, + discover: Discover[_], + rest: Seq[String]): Either[String, Seq[R]] + + def resolve(remainingSelector: List[Segment], + obj: mill.Module, + discover: Discover[_], + rest: Seq[String], + remainingCrossSelectors: List[List[String]], + revSelectorsSoFar: List[Segment]): Either[String, Seq[R]] = { + + remainingSelector match{ + case Segment.Cross(last) :: Nil => + endResolveCross(obj, revSelectorsSoFar, last.map(_.toString).toList, discover, rest) + case Segment.Label(last) :: Nil => + endResolveLabel(obj, revSelectorsSoFar, last, discover, rest) + + case head :: tail => + val newRevSelectorsSoFar = head :: revSelectorsSoFar + + def recurse(searchModules: Seq[Module], resolveFailureMsg: => Left[String, Nothing]) = { + val matching = searchModules + .map(resolve(tail, _, discover, rest, remainingCrossSelectors, newRevSelectorsSoFar)) + + matching match{ + case Seq(Left(err)) => Left(err) + case items => + items.collect{case Right(v) => v} match{ + case Nil => resolveFailureMsg + case values => Right(values.flatten) + } + } + } + head match{ + case Segment.Label(singleLabel) => + recurse( + if (singleLabel == "__") obj.millInternal.modules + else if (singleLabel == "_") obj.millModuleDirectChildren.toSeq + else{ + obj.millInternal.reflectNestedObjects[mill.Module] + .find(_.millOuterCtx.segment == Segment.Label(singleLabel)) + .toSeq + }, + if (singleLabel != "_") Resolve.errorMsgLabel( + singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), + 
singleLabel, + revSelectorsSoFar + ) + else Left( + "Cannot resolve " + Segments((remainingSelector.reverse ++ revSelectorsSoFar).reverse:_*).render + + ". Try `mill resolve " + Segments((Segment.Label("_") :: revSelectorsSoFar).reverse:_*).render + "` to see what's available" + ) + ) + case Segment.Cross(cross) => + obj match{ + case c: Cross[Module] => + recurse( + if(cross == Seq("__")) for ((k, v) <- c.items) yield v + else if (cross.contains("_")){ + for { + (k, v) <- c.items + if k.length == cross.length + if k.zip(cross).forall { case (l, r) => l == r || r == "_" } + } yield v + }else c.itemMap.get(cross.toList).toSeq, + Resolve.errorMsgCross( + c.items.map(_._1.map(_.toString)), + cross.map(_.toString), + revSelectorsSoFar + ) + ) + case _ => + Left( + Resolve.unableToResolve(Segment.Cross(cross.map(_.toString)), tail) + + Resolve.hintListLabel(tail) + ) + } + } + + case Nil => Left("Selector cannot be empty") + } + } +} diff --git a/main/src/main/RunScript.scala b/main/src/main/RunScript.scala new file mode 100644 index 00000000..47526631 --- /dev/null +++ b/main/src/main/RunScript.scala @@ -0,0 +1,262 @@ +package mill.main + +import java.nio.file.NoSuchFileException + +import ammonite.interp.Interpreter +import ammonite.runtime.SpecialClassLoader +import ammonite.util.Util.CodeSource +import ammonite.util.{Name, Res, Util} +import mill.define +import mill.define._ +import mill.eval.{Evaluator, PathRef, Result} +import mill.util.{EitherOps, ParseArgs, Watched} +import mill.api.Logger +import mill.util.Strict.Agg + +import scala.collection.mutable +import scala.reflect.ClassTag + +/** + * Custom version of ammonite.main.Scripts, letting us run the build.sc script + * directly without going through Ammonite's main-method/argument-parsing + * subsystem + */ +object RunScript{ + def runScript(home: os.Path, + wd: os.Path, + path: os.Path, + instantiateInterpreter: => Either[(Res.Failing, Seq[(os.Path, Long)]), ammonite.interp.Interpreter], + scriptArgs: Seq[String], + stateCache: Option[Evaluator.State], + log: Logger, + env : Map[String, String]) + : (Res[(Evaluator, Seq[PathRef], Either[String, Seq[ujson.Value]])], Seq[(os.Path, Long)]) = { + + val (evalState, interpWatched) = stateCache match{ + case Some(s) if watchedSigUnchanged(s.watched) => Res.Success(s) -> s.watched + case _ => + instantiateInterpreter match{ + case Left((res, watched)) => (res, watched) + case Right(interp) => + interp.watch(path) + val eval = + for(rootModule <- evaluateRootModule(wd, path, interp, log)) + yield Evaluator.State( + rootModule, + rootModule.getClass.getClassLoader.asInstanceOf[SpecialClassLoader].classpathSignature, + mutable.Map.empty[Segments, (Int, Any)], + interp.watchedFiles + ) + (eval, interp.watchedFiles) + } + } + + val evalRes = + for(s <- evalState) + yield new Evaluator(home, wd / 'out, wd / 'out, s.rootModule, log, + s.classLoaderSig, s.workerCache, env) + + val evaluated = for{ + evaluator <- evalRes + (evalWatches, res) <- Res(evaluateTasks(evaluator, scriptArgs, multiSelect = false)) + } yield { + (evaluator, evalWatches, res.map(_.flatMap(_._2))) + } + (evaluated, interpWatched) + } + + def watchedSigUnchanged(sig: Seq[(os.Path, Long)]) = { + sig.forall{case (p, l) => Interpreter.pathSignature(p) == l} + } + + def evaluateRootModule(wd: os.Path, + path: os.Path, + interp: ammonite.interp.Interpreter, + log: Logger + ): Res[mill.define.BaseModule] = { + + val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd) + + for { + scriptTxt <- + try 
Res.Success(Util.normalizeNewlines(os.read(path))) + catch { case _: NoSuchFileException => + log.info("No build file found, you should create build.sc to do something useful") + Res.Success("") + } + + processed <- interp.processModule( + scriptTxt, + CodeSource(wrapper, pkg, Seq(Name("ammonite"), Name("$file")), Some(path)), + autoImport = true, + extraCode = "", + hardcoded = true + ) + + buildClsName <- processed.blockInfo.lastOption match { + case Some(meta) => Res.Success(meta.id.wrapperPath) + case None => Res.Skip + } + + buildCls = interp + .evalClassloader + .loadClass(buildClsName) + + module <- try { + Util.withContextClassloader(interp.evalClassloader) { + Res.Success( + buildCls.getMethod("millSelf") + .invoke(null) + .asInstanceOf[Some[mill.define.BaseModule]] + .get + ) + } + } catch { + case e: Throwable => Res.Exception(e, "") + } +// _ <- Res(consistencyCheck(mapping)) + } yield module + } + + def resolveTasks[T, R: ClassTag](resolver: mill.main.Resolve[R], + evaluator: Evaluator, + scriptArgs: Seq[String], + multiSelect: Boolean) = { + for { + parsed <- ParseArgs(scriptArgs, multiSelect = multiSelect) + (selectors, args) = parsed + taskss <- { + val selected = selectors.map { case (scopedSel, sel) => + for(res <- prepareResolve(evaluator, scopedSel, sel)) + yield { + val (rootModule, crossSelectors) = res + + + try { + // We inject the `evaluator.rootModule` into the TargetScopt, rather + // than the `rootModule`, because even if you are running an external + // module we still want you to be able to resolve targets from your + // main build. Resolving targets from external builds as CLI arguments + // is not currently supported + mill.eval.Evaluator.currentEvaluator.set(evaluator) + resolver.resolve( + sel.value.toList, rootModule, rootModule.millDiscover, + args, crossSelectors.toList, Nil + ) + } finally { + mill.eval.Evaluator.currentEvaluator.set(null) + } + } + } + EitherOps.sequence(selected) + } + res <- EitherOps.sequence(taskss) + } yield res.flatten + } + + def resolveRootModule[T](evaluator: Evaluator, scopedSel: Option[Segments]) = { + scopedSel match { + case None => Right(evaluator.rootModule) + case Some(scoping) => + for { + moduleCls <- + try Right(evaluator.rootModule.getClass.getClassLoader.loadClass(scoping.render + "$")) + catch {case e: ClassNotFoundException => Left ("Cannot resolve external module " + scoping.render)} + rootModule <- moduleCls.getField("MODULE$").get(moduleCls) match { + case rootModule: ExternalModule => Right(rootModule) + case _ => Left("Class " + scoping.render + " is not an external module") + } + } yield rootModule + } + } + + def prepareResolve[T](evaluator: Evaluator, scopedSel: Option[Segments], sel: Segments) = { + for (rootModule<- resolveRootModule(evaluator, scopedSel)) + yield { + val crossSelectors = sel.value.map { + case Segment.Cross(x) => x.toList.map(_.toString) + case _ => Nil + } + (rootModule, crossSelectors) + } + } + + def evaluateTasks[T](evaluator: Evaluator, + scriptArgs: Seq[String], + multiSelect: Boolean) = { + for (targets <- resolveTasks(mill.main.ResolveTasks, evaluator, scriptArgs, multiSelect)) yield { + val (watched, res) = evaluate(evaluator, Agg.from(targets.distinct)) + + val watched2 = for{ + x <- res.right.toSeq + (Watched(_, extraWatched), _) <- x + w <- extraWatched + } yield w + + (watched ++ watched2, res) + } + } + + def evaluate(evaluator: Evaluator, + targets: Agg[Task[Any]]): (Seq[PathRef], Either[String, Seq[(Any, Option[ujson.Value])]]) = { + val evaluated = 
evaluator.evaluate(targets) + val watched = evaluated.results + .iterator + .collect { + case (t: define.Sources, Result.Success(p: Seq[PathRef])) => p + } + .flatten + .toSeq + + val errorStr = + (for((k, fs) <- evaluated.failing.items()) yield { + val ks = k match{ + case Left(t) => t.toString + case Right(t) => t.segments.render + } + val fss = fs.map{ + case Result.Exception(t, outerStack) => + var current = List(t) + while(current.head.getCause != null){ + current = current.head.getCause :: current + } + current.reverse + .flatMap( ex => + Seq(ex.toString) ++ + ex.getStackTrace.dropRight(outerStack.value.length).map(" " + _) + ) + .mkString("\n") + case Result.Failure(t, _) => t + } + s"$ks ${fss.mkString(", ")}" + }).mkString("\n") + + evaluated.failing.keyCount match { + case 0 => + val json = for(t <- targets.toSeq) yield { + t match { + case t: mill.define.NamedTask[_] => + val jsonFile = Evaluator + .resolveDestPaths(evaluator.outPath, t.ctx.segments) + .meta + val metadata = upickle.default.read[Evaluator.Cached](ujson.read(jsonFile.toIO)) + Some(metadata.value) + + case _ => None + } + } + + watched -> Right(evaluated.values.zip(json)) + case n => watched -> Left(s"$n targets failed\n$errorStr") + } + } + +// def consistencyCheck[T](mapping: Discovered.Mapping[T]): Either[String, Unit] = { +// val consistencyErrors = Discovered.consistencyCheck(mapping) +// if (consistencyErrors.nonEmpty) { +// Left(s"Failed Discovered.consistencyCheck: ${consistencyErrors.map(_.render)}") +// } else { +// Right(()) +// } +// } +} diff --git a/main/src/main/VisualizeModule.scala b/main/src/main/VisualizeModule.scala new file mode 100644 index 00000000..e950973f --- /dev/null +++ b/main/src/main/VisualizeModule.scala @@ -0,0 +1,60 @@ +package mill.main + +import java.util.concurrent.LinkedBlockingQueue + +import coursier.Cache +import coursier.core.Repository +import coursier.maven.MavenRepository +import mill.T +import mill.define.{Discover, ExternalModule} +import mill.eval.{PathRef, Result} + +object VisualizeModule extends ExternalModule with VisualizeModule { + def repositories = Seq( + Cache.ivy2Local, + MavenRepository("https://repo1.maven.org/maven2"), + MavenRepository("https://oss.sonatype.org/content/repositories/releases") + ) + + implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() + lazy val millDiscover = Discover[this.type] +} +trait VisualizeModule extends mill.define.TaskModule{ + def repositories: Seq[Repository] + def defaultCommandName() = "run" + def classpath = T{ + mill.modules.Util.millProjectModule("MILL_GRAPHVIZ", "mill-main-graphviz", repositories) + } + + /** + * The J2V8-based Graphviz library has a limitation that it can only ever + * be called from a single thread. Since Mill forks off a new thread every + * time you execute something, we need to keep around a worker thread that + * everyone can use to call into Graphviz, which the Mill execution threads + * can communicate via in/out queues. 
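   * A rough sketch of how a caller drives the queue pair (mirroring the
   * callVisualizeModule helper in MainModule; the value names here are only
   * illustrative):
   *
   * {{{
   * val (in, out) = VisualizeModule.worker()
   * in.put((resolvedTasks, allTasks, dest)) // enqueue one render request
   * val rendered = out.take()               // Result[Seq[PathRef]] from the Graphviz thread
   * }}}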
+ */ + def worker = T.worker{ + val in = new LinkedBlockingQueue[(Seq[_], Seq[_], os.Path)]() + val out = new LinkedBlockingQueue[Result[Seq[PathRef]]]() + + val cl = mill.api.ClassLoader.create( + classpath().map(_.path.toNIO.toUri.toURL).toVector, + getClass.getClassLoader + ) + val visualizeThread = new java.lang.Thread(() => + while(true){ + val res = Result.create{ + val (targets, tasks, dest) = in.take() + cl.loadClass("mill.main.graphviz.GraphvizTools") + .getMethod("apply", classOf[Seq[_]], classOf[Seq[_]], classOf[os.Path]) + .invoke(null, targets, tasks, dest) + .asInstanceOf[Seq[PathRef]] + } + out.put(res) + } + ) + visualizeThread.setDaemon(true) + visualizeThread.start() + (in, out) + } +} diff --git a/main/src/mill/MillMain.scala b/main/src/mill/MillMain.scala deleted file mode 100644 index e953e65d..00000000 --- a/main/src/mill/MillMain.scala +++ /dev/null @@ -1,155 +0,0 @@ -package mill - -import java.io.{InputStream, PrintStream} - -import scala.collection.JavaConverters._ -import ammonite.main.Cli._ -import io.github.retronym.java9rtexport.Export -import mill.eval.Evaluator -import mill.api.DummyInputStream - -object MillMain { - - def main(args: Array[String]): Unit = { - val as = args match { - case Array(s, _*) if s == "-i" || s == "--interactive" => args.tail - case _ => args - } - val (result, _) = main0( - as, - None, - ammonite.Main.isInteractive(), - System.in, - System.out, - System.err, - System.getenv().asScala.toMap, - b => () - ) - System.exit(if(result) 0 else 1) - } - - def main0(args: Array[String], - stateCache: Option[Evaluator.State], - mainInteractive: Boolean, - stdin: InputStream, - stdout: PrintStream, - stderr: PrintStream, - env: Map[String, String], - setIdle: Boolean => Unit): (Boolean, Option[Evaluator.State]) = { - import ammonite.main.Cli - - val millHome = mill.api.Ctx.defaultHome - - val removed = Set("predef-code", "no-home-predef") - var interactive = false - val interactiveSignature = Arg[Config, Unit]( - "interactive", Some('i'), - "Run Mill in interactive mode, suitable for opening REPLs and taking user input. In this mode, no mill server will be used.", - (c, v) =>{ - interactive = true - c - } - ) - - - - var disableTicker = false - val disableTickerSignature = Arg[Config, Unit]( - "disable-ticker", None, - "Disable ticker log (e.g. 
short-lived prints of stages and progress bars)", - (c, v) =>{ - disableTicker = true - c - } - ) - - var debugLog = false - val debugLogSignature = Arg[Config, Unit]( - name = "debug", shortName = Some('d'), - doc = "Show debug output on STDOUT", - (c, v) => { - debugLog = true - c - } - ) - - val millArgSignature = - Cli.genericSignature.filter(a => !removed(a.name)) ++ Seq(interactiveSignature, disableTickerSignature, debugLogSignature) - - Cli.groupArgs( - args.toList, - millArgSignature, - Cli.Config(home = millHome, remoteLogging = false) - ) match{ - case _ if interactive => - stderr.println("-i/--interactive must be passed in as the first argument") - (false, None) - case Left(msg) => - stderr.println(msg) - (false, None) - case Right((cliConfig, _)) if cliConfig.help => - val leftMargin = millArgSignature.map(ammonite.main.Cli.showArg(_).length).max + 2 - stdout.println( - s"""Mill Build Tool - |usage: mill [mill-options] [target [target-options]] - | - |${formatBlock(millArgSignature, leftMargin).mkString(ammonite.util.Util.newLine)}""".stripMargin - ) - (true, None) - case Right((cliConfig, leftoverArgs)) => - - val repl = leftoverArgs.isEmpty - if (repl && stdin == DummyInputStream) { - stderr.println("Build repl needs to be run with the -i/--interactive flag") - (false, stateCache) - }else{ - val config = - if(!repl) cliConfig - else cliConfig.copy( - predefCode = - s"""import $$file.build, build._ - |implicit val replApplyHandler = mill.main.ReplApplyHandler( - | os.Path(${pprint.apply(cliConfig.home.toIO.getCanonicalPath.replaceAllLiterally("$", "$$")).plainText}), - | $disableTicker, - | interp.colors(), - | repl.pprinter(), - | build.millSelf.get, - | build.millDiscover, - | $debugLog - |) - |repl.pprinter() = replApplyHandler.pprinter - |import replApplyHandler.generatedEval._ - | - """.stripMargin, - welcomeBanner = None - ) - - val runner = new mill.main.MainRunner( - config.copy(colored = config.colored orElse Option(mainInteractive)), - disableTicker, - stdout, stderr, stdin, - stateCache, - env, - setIdle, - debugLog - ) - - if (mill.main.client.Util.isJava9OrAbove) { - val rt = cliConfig.home / Export.rtJarName - if (!os.exists(rt)) { - runner.printInfo(s"Preparing Java ${System.getProperty("java.version")} runtime; this may take a minute or two ...") - Export.rtTo(rt.toIO, false) - } - } - - if (repl){ - runner.printInfo("Loading...") - (runner.watchLoop(isRepl = true, printing = false, _.run()), runner.stateCache) - } else { - (runner.runScript(os.pwd / "build.sc", leftoverArgs), runner.stateCache) - } - } - - } - } -} diff --git a/main/src/mill/main/MainModule.scala b/main/src/mill/main/MainModule.scala deleted file mode 100644 index 34145668..00000000 --- a/main/src/mill/main/MainModule.scala +++ /dev/null @@ -1,269 +0,0 @@ -package mill.main - -import java.util.concurrent.LinkedBlockingQueue - -import mill.T -import mill.define.{NamedTask, Task} -import mill.eval.{Evaluator, PathRef, Result} -import mill.util.{Ctx, PrintLogger, Watched} -import pprint.{Renderer, Truncated} -object MainModule{ - def resolveTasks[T](evaluator: Evaluator, targets: Seq[String], multiSelect: Boolean) - (f: List[NamedTask[Any]] => T) = { - RunScript.resolveTasks(mill.main.ResolveTasks, evaluator, targets, multiSelect) match{ - case Left(err) => Result.Failure(err) - case Right(tasks) => Result.Success(f(tasks)) - } - } - def evaluateTasks[T](evaluator: Evaluator, targets: Seq[String], multiSelect: Boolean) - (f: Seq[(Any, Option[ujson.Value])] => T) = { - 
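    // Delegates to RunScript.evaluateTasks and adapts its Either-shaped result:
    // resolution or evaluation errors become Result.Failure, successful values
    // are mapped through `f`, and the watched files are threaded back via
    // Watched so callers can re-trigger when they change.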
RunScript.evaluateTasks(evaluator, targets, multiSelect) match{ - case Left(err) => Result.Failure(err) - case Right((watched, Left(err))) => Result.Failure(err, Some(Watched((), watched))) - case Right((watched, Right(res))) => - f(res) - Result.Success(Watched((), watched)) - } - } -} - -trait MainModule extends mill.Module{ - - implicit def millDiscover: mill.define.Discover[_] - implicit def millScoptTasksReads[T] = new mill.main.Tasks.Scopt[T]() - implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() - def version() = mill.T.command { - val res = System.getProperty("MILL_VERSION") - println(res) - res - } - - private val OutDir: String = "out" - - /** - * Resolves a mill query string and prints out the tasks it resolves to. - */ - def resolve(evaluator: Evaluator, targets: String*) = mill.T.command{ - val resolved = RunScript.resolveTasks( - mill.main.ResolveMetadata, evaluator, targets, multiSelect = true - ) - - resolved match{ - case Left(err) => Result.Failure(err) - case Right(rs) => - for(r <- rs.sorted) { - println(r) - } - Result.Success(()) - } - } - - /** - * Given a set of tasks, prints out the execution plan of what tasks will be - * executed in what order, without actually executing them. - */ - def plan(evaluator: Evaluator, targets: String*) = mill.T.command{ - plan0(evaluator, targets) match{ - case Right(success) => { - val renderedTasks = success.map{ _.segments.render} - renderedTasks.foreach(println) - Result.Success(renderedTasks) - } - case Left(err) => Result.Failure(err) - } - } - - private def plan0(evaluator: Evaluator, targets: Seq[String]) = { - RunScript.resolveTasks( - mill.main.ResolveTasks, evaluator, targets, multiSelect = true - ) match { - case Left(err) => Left(err) - case Right(rs) => - val (sortedGroups, _) = Evaluator.plan(evaluator.rootModule, rs) - Right(sortedGroups.keys().collect{ case Right(r) => r}.toArray) - } - } - - /** - * Prints out some dependency path from the `src` task to the `dest` task. - * - * If there are multiple dependency paths between `src` and `dest`, the path - * chosen is arbitrary. - */ - def path(evaluator: Evaluator, src: String, dest: String) = mill.T.command{ - val resolved = RunScript.resolveTasks( - mill.main.ResolveTasks, evaluator, List(src, dest), multiSelect = true - ) - - resolved match{ - case Left(err) => Result.Failure(err) - case Right(Seq(src1, dest1)) => - val queue = collection.mutable.Queue[List[Task[_]]](List(src1)) - var found = Option.empty[List[Task[_]]] - val seen = collection.mutable.Set.empty[Task[_]] - while(queue.nonEmpty && found.isEmpty){ - val current = queue.dequeue() - if (current.head == dest1) found = Some(current) - else{ - for{ - next <- current.head.inputs - if !seen.contains(next) - }{ - seen.add(next) - queue.enqueue(next :: current) - } - } - } - found match{ - case None => - Result.Failure(s"No path found between $src and $dest") - case Some(list) => - val labels = list - .collect{case n: NamedTask[_] => n.ctx.segments.render} - - labels.foreach(mill.T.ctx().log.outputStream.println(_)) - - Result.Success(labels) - } - } - } - - /** - * Displays metadata about the given task without actually running it. 
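   * Example invocation, with a hypothetical task name:
   *
   * {{{
   * mill inspect foo.compile
   * }}}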
- */ - def inspect(evaluator: Evaluator, targets: String*) = mill.T.command{ - MainModule.resolveTasks(evaluator, targets, multiSelect = true){ tasks => - val output = new StringBuilder - for{ - task <- tasks - tree = ReplApplyHandler.pprintTask(task, evaluator) - val defaults = pprint.PPrinter() - val renderer = new Renderer( - defaults.defaultWidth, - defaults.colorApplyPrefix, - defaults.colorLiteral, - defaults.defaultIndent - ) - val rendered = renderer.rec(tree, 0, 0).iter - val truncated = new Truncated(rendered, defaults.defaultWidth, defaults.defaultHeight) - str <- truncated ++ Iterator("\n") - } { - output.append(str) - } - println(output) - output.toString - } - } - - /** - * Runs multiple tasks in a single call. - * - * - */ - def all(evaluator: Evaluator, targets: String*) = mill.T.command{ - MainModule.evaluateTasks(evaluator, targets, multiSelect = true) {res => - res.flatMap(_._2) - } - } - - /** - * Runs a given task and prints the JSON result to stdout. This is useful - * to integrate Mill into external scripts and tooling. - */ - def show(evaluator: Evaluator, targets: String*) = mill.T.command{ - MainModule.evaluateTasks( - evaluator.copy( - // When using `show`, redirect all stdout of the evaluated tasks so the - // printed JSON is the only thing printed to stdout. - log = evaluator.log match{ - case PrintLogger(c1, d, c2, o, i, e, in, de) => PrintLogger(c1, d, c2, e, i, e, in, de) - case l => l - } - ), - targets, - multiSelect = false - ) {res => - for(json <- res.flatMap(_._2)){ - println(json.render(indent = 4)) - } - } - } - - /** - * Deletes the given targets from the out directory. Providing no targets - * will clean everything. - */ - def clean(evaluator: Evaluator, targets: String*) = mill.T.command { - val rootDir = ammonite.ops.pwd / OutDir - - val KeepPattern = "(mill-.+)".r.anchored - - def keepPath(path: os.Path) = path.segments.toSeq.lastOption match { - case Some(KeepPattern(_)) => true - case _ => false - } - - val pathsToRemove = - if (targets.isEmpty) - Right(ammonite.ops.ls(rootDir).filterNot(keepPath)) - else - RunScript.resolveTasks( - mill.main.ResolveSegments, evaluator, targets, multiSelect = true - ).map( - _.map { segments => - Evaluator.resolveDestPaths(rootDir, segments).out - }) - - pathsToRemove match { - case Left(err) => - Result.Failure(err) - case Right(paths) => - paths.foreach(os.remove.all) - Result.Success(()) - } - } - - - /** - * Renders the dependencies between the given tasks as a SVG for you to look at - */ - def visualize(evaluator: Evaluator, targets: String*) = mill.T.command{ - visualize0(evaluator, targets, T.ctx(), mill.main.VisualizeModule.worker()) - } - - /** - * Renders the dependencies between the given tasks, and all their dependencies, as a SVG - */ - def visualizePlan(evaluator: Evaluator, targets: String*) = mill.T.command{ - plan0(evaluator, targets) match { - case Left(err) => Result.Failure(err) - case Right(planResults) => visualize0( - evaluator, targets, T.ctx(), mill.main.VisualizeModule.worker(), Some(planResults.toList.map(_.task)) - ) - } - } - - private type VizWorker = (LinkedBlockingQueue[(scala.Seq[_], scala.Seq[_], os.Path)], - LinkedBlockingQueue[Result[scala.Seq[PathRef]]]) - - private def visualize0(evaluator: Evaluator, targets: Seq[String], ctx: Ctx, vizWorker: VizWorker, - planTasks: Option[List[NamedTask[_]]] = None) = { - def callVisualizeModule(rs: List[NamedTask[Any]], allRs: List[NamedTask[Any]]) = { - val (in, out) = vizWorker - in.put((rs, allRs, ctx.dest)) - out.take() - } - - 
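    // Resolve the requested targets, then render either the supplied plan
    // (visualizePlan passes planTasks) or just the resolved targets themselves
    // (visualize), round-tripping through the worker's in/out queues via
    // callVisualizeModule above.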
RunScript.resolveTasks( - mill.main.ResolveTasks, evaluator, targets, multiSelect = true - ) match { - case Left(err) => Result.Failure(err) - case Right(rs) => planTasks match { - case Some(allRs) => { - callVisualizeModule(rs, allRs) - } - case None => callVisualizeModule(rs, rs) - } - } - } -} diff --git a/main/src/mill/main/MainRunner.scala b/main/src/mill/main/MainRunner.scala deleted file mode 100644 index e50ed370..00000000 --- a/main/src/mill/main/MainRunner.scala +++ /dev/null @@ -1,170 +0,0 @@ -package mill.main -import java.io.{InputStream, PrintStream} - -import ammonite.Main -import ammonite.interp.{Interpreter, Preprocessor} -import ammonite.util.Util.CodeSource -import ammonite.util._ -import mill.eval.{Evaluator, PathRef} -import mill.util.PrintLogger - -import scala.annotation.tailrec - - -/** - * Customized version of [[ammonite.MainRunner]], allowing us to run Mill - * `build.sc` scripts with mill-specific tweaks such as a custom - * `scriptCodeWrapper` or with a persistent evaluator between runs. - */ -class MainRunner(val config: ammonite.main.Cli.Config, - disableTicker: Boolean, - outprintStream: PrintStream, - errPrintStream: PrintStream, - stdIn: InputStream, - stateCache0: Option[Evaluator.State] = None, - env : Map[String, String], - setIdle: Boolean => Unit, - debugLog: Boolean) - extends ammonite.MainRunner( - config, outprintStream, errPrintStream, - stdIn, outprintStream, errPrintStream - ){ - - var stateCache = stateCache0 - - override def watchAndWait(watched: Seq[(os.Path, Long)]) = { - printInfo(s"Watching for changes to ${watched.length} files... (Ctrl-C to exit)") - def statAll() = watched.forall{ case (file, lastMTime) => - Interpreter.pathSignature(file) == lastMTime - } - setIdle(true) - while(statAll()) Thread.sleep(100) - setIdle(false) - } - - /** - * Custom version of [[watchLoop]] that lets us generate the watched-file - * signature only on demand, so if we don't have config.watch enabled we do - * not pay the cost of generating it - */ - @tailrec final def watchLoop2[T](isRepl: Boolean, - printing: Boolean, - run: Main => (Res[T], () => Seq[(os.Path, Long)])): Boolean = { - val (result, watched) = run(initMain(isRepl)) - - val success = handleWatchRes(result, printing) - if (!config.watch) success - else{ - watchAndWait(watched()) - watchLoop2(isRepl, printing, run) - } - } - - - override def runScript(scriptPath: os.Path, scriptArgs: List[String]) = - watchLoop2( - isRepl = false, - printing = true, - mainCfg => { - val (result, interpWatched) = RunScript.runScript( - config.home, - mainCfg.wd, - scriptPath, - mainCfg.instantiateInterpreter(), - scriptArgs, - stateCache, - new PrintLogger( - colors != ammonite.util.Colors.BlackWhite, - disableTicker, - colors, - outprintStream, - errPrintStream, - errPrintStream, - stdIn, - debugEnabled = debugLog - ), - env - ) - - result match{ - case Res.Success(data) => - val (eval, evalWatches, res) = data - - stateCache = Some(Evaluator.State(eval.rootModule, eval.classLoaderSig, eval.workerCache, interpWatched)) - val watched = () => { - val alreadyStale = evalWatches.exists(p => p.sig != PathRef(p.path, p.quick).sig) - // If the file changed between the creation of the original - // `PathRef` and the current moment, use random junk .sig values - // to force an immediate re-run. 
Otherwise calculate the - // pathSignatures the same way Ammonite would and hand over the - // values, so Ammonite can watch them and only re-run if they - // subsequently change - if (alreadyStale) evalWatches.map(_.path -> util.Random.nextLong()) - else evalWatches.map(p => p.path -> Interpreter.pathSignature(p.path)) - } - (Res(res), () => interpWatched ++ watched()) - case _ => (result, () => interpWatched) - } - } - ) - - override def handleWatchRes[T](res: Res[T], printing: Boolean) = { - res match{ - case Res.Success(value) => true - case _ => super.handleWatchRes(res, printing) - } - } - - override def initMain(isRepl: Boolean) = { - super.initMain(isRepl).copy( - scriptCodeWrapper = CustomCodeWrapper, - // Ammonite does not properly forward the wd from CliConfig to Main, so - // force forward it outselves - wd = config.wd - ) - } - - object CustomCodeWrapper extends Preprocessor.CodeWrapper { - def apply(code: String, - source: CodeSource, - imports: ammonite.util.Imports, - printCode: String, - indexedWrapperName: ammonite.util.Name, - extraCode: String): (String, String, Int) = { - import source.pkgName - val wrapName = indexedWrapperName.backticked - val path = source - .path - .map(path => path.toNIO.getParent) - .getOrElse(config.wd.toNIO) - val literalPath = pprint.Util.literalize(path.toString) - val external = !(path.compareTo(config.wd.toNIO) == 0) - val top = s""" - |package ${pkgName.head.encoded} - |package ${Util.encodeScalaSourcePath(pkgName.tail)} - |$imports - |import mill._ - |object $wrapName - |extends mill.define.BaseModule(os.Path($literalPath), foreign0 = $external)( - | implicitly, implicitly, implicitly, implicitly, mill.define.Caller(()) - |) - |with $wrapName{ - | // Stub to make sure Ammonite has something to call after it evaluates a script, - | // even if it does nothing... - | def $$main() = Iterator[String]() - | - | // Need to wrap the returned Module in Some(...) 
to make sure it - | // doesn't get picked up during reflective child-module discovery - | def millSelf = Some(this) - | - | implicit lazy val millDiscover: mill.define.Discover[this.type] = mill.define.Discover[this.type] - |} - | - |sealed trait $wrapName extends mill.main.MainModule{ - |""".stripMargin - val bottom = "}" - - (top, bottom, 1) - } - } -} diff --git a/main/src/mill/main/MainScopts.scala b/main/src/mill/main/MainScopts.scala deleted file mode 100644 index 718a30e6..00000000 --- a/main/src/mill/main/MainScopts.scala +++ /dev/null @@ -1,31 +0,0 @@ -package mill.main -import mill.eval.Evaluator - -case class Tasks[T](value: Seq[mill.define.NamedTask[T]]) - -object Tasks{ - - class Scopt[T]() extends scopt.Read[Tasks[T]] { - def arity = 1 - - def reads = s => { - RunScript.resolveTasks( - mill.main.ResolveTasks, - Evaluator.currentEvaluator.get, - Seq(s), - multiSelect = false - ) match{ - case Left(err) => throw new Exception(err) - case Right(tasks) => Tasks(tasks).asInstanceOf[Tasks[T]] - } - } - } -} - -class EvaluatorScopt[T]() - extends scopt.Read[mill.eval.Evaluator]{ - def arity = 0 - def reads = s => { - Evaluator.currentEvaluator.get.asInstanceOf[mill.eval.Evaluator] - } -} diff --git a/main/src/mill/main/MillServerMain.scala b/main/src/mill/main/MillServerMain.scala deleted file mode 100644 index 26ca99e6..00000000 --- a/main/src/mill/main/MillServerMain.scala +++ /dev/null @@ -1,227 +0,0 @@ -package mill.main - -import java.io._ -import java.net.Socket - -import mill.MillMain - -import scala.collection.JavaConverters._ -import org.scalasbt.ipcsocket._ -import mill.main.client._ -import mill.eval.Evaluator -import mill.api.DummyInputStream -import sun.misc.{Signal, SignalHandler} - -trait MillServerMain[T]{ - var stateCache = Option.empty[T] - def main0(args: Array[String], - stateCache: Option[T], - mainInteractive: Boolean, - stdin: InputStream, - stdout: PrintStream, - stderr: PrintStream, - env : Map[String, String], - setIdle: Boolean => Unit): (Boolean, Option[T]) -} - -object MillServerMain extends mill.main.MillServerMain[Evaluator.State]{ - def main(args0: Array[String]): Unit = { - // Disable SIGINT interrupt signal in the Mill server. - // - // This gets passed through from the client to server whenever the user - // hits `Ctrl-C`, which by default kills the server, which defeats the purpose - // of running a background server. 
Furthermore, the background server already - // can detect when the Mill client goes away, which is necessary to handle - // the case when a Mill client that did *not* spawn the server gets `CTRL-C`ed - Signal.handle(new Signal("INT"), new SignalHandler () { - def handle(sig: Signal) = {} // do nothing - }) - new Server( - args0(0), - this, - () => System.exit(0), - 300000, - mill.main.client.Locks.files(args0(0)) - ).run() - } - def main0(args: Array[String], - stateCache: Option[Evaluator.State], - mainInteractive: Boolean, - stdin: InputStream, - stdout: PrintStream, - stderr: PrintStream, - env : Map[String, String], - setIdle: Boolean => Unit) = { - MillMain.main0( - args, - stateCache, - mainInteractive, - DummyInputStream, - stdout, - stderr, - env, - setIdle - ) - } -} - - -class Server[T](lockBase: String, - sm: MillServerMain[T], - interruptServer: () => Unit, - acceptTimeout: Int, - locks: Locks) { - - val originalStdout = System.out - def run() = { - Server.tryLockBlock(locks.processLock){ - var running = true - while (running) { - Server.lockBlock(locks.serverLock){ - val (serverSocket, socketClose) = if (Util.isWindows) { - val socketName = Util.WIN32_PIPE_PREFIX + new File(lockBase).getName - (new Win32NamedPipeServerSocket(socketName), () => new Win32NamedPipeSocket(socketName).close()) - } else { - val socketName = lockBase + "/io" - new File(socketName).delete() - (new UnixDomainServerSocket(socketName), () => new UnixDomainSocket(socketName).close()) - } - - val sockOpt = Server.interruptWith( - "MillSocketTimeoutInterruptThread", - acceptTimeout, - socketClose(), - serverSocket.accept() - ) - - sockOpt match{ - case None => running = false - case Some(sock) => - try { - handleRun(sock) - serverSocket.close() - } - catch{case e: Throwable => e.printStackTrace(originalStdout) } - } - } - // Make sure you give an opportunity for the client to probe the lock - // and realize the server has released it to signal completion - Thread.sleep(10) - } - }.getOrElse(throw new Exception("PID already present")) - } - - def handleRun(clientSocket: Socket) = { - - val currentOutErr = clientSocket.getOutputStream - val stdout = new PrintStream(new ProxyOutputStream(currentOutErr, 1), true) - val stderr = new PrintStream(new ProxyOutputStream(currentOutErr, -1), true) - val socketIn = clientSocket.getInputStream - val argStream = new FileInputStream(lockBase + "/run") - val interactive = argStream.read() != 0 - val clientMillVersion = Util.readString(argStream) - val serverMillVersion = sys.props("MILL_VERSION") - if (clientMillVersion != serverMillVersion) { - stdout.println(s"Mill version changed ($serverMillVersion -> $clientMillVersion), re-starting server") - System.exit(0) - } - val args = Util.parseArgs(argStream) - val env = Util.parseMap(argStream) - argStream.close() - - @volatile var done = false - @volatile var idle = false - val t = new Thread(() => - try { - val (result, newStateCache) = sm.main0( - args, - sm.stateCache, - interactive, - socketIn, - stdout, - stderr, - env.asScala.toMap, - idle = _ - ) - - sm.stateCache = newStateCache - java.nio.file.Files.write( - java.nio.file.Paths.get(lockBase + "/exitCode"), - (if (result) 0 else 1).toString.getBytes - ) - } finally{ - done = true - idle = true - }, - "MillServerActionRunner" - ) - t.start() - // We cannot simply use Lock#await here, because the filesystem doesn't - // realize the clientLock/serverLock are held by different threads in the - // two processes and gives a spurious deadlock error - while(!done && 
!locks.clientLock.probe()) Thread.sleep(3) - - if (!idle) interruptServer() - - - t.interrupt() - t.stop() - - if (Util.isWindows) { - // Closing Win32NamedPipeSocket can often take ~5s - // It seems OK to exit the client early and subsequently - // start up mill client again (perhaps closing the server - // socket helps speed up the process). - val t = new Thread(() => clientSocket.close()) - t.setDaemon(true) - t.start() - } else clientSocket.close() - } -} -object Server{ - def lockBlock[T](lock: Lock)(t: => T): T = { - val l = lock.lock() - try t - finally l.release() - } - def tryLockBlock[T](lock: Lock)(t: => T): Option[T] = { - lock.tryLock() match{ - case null => None - case l => - try Some(t) - finally l.release() - } - - } - def interruptWith[T](threadName: String, millis: Int, close: => Unit, t: => T): Option[T] = { - @volatile var interrupt = true - @volatile var interrupted = false - val thread = new Thread( - () => { - try Thread.sleep(millis) - catch{ case t: InterruptedException => /* Do Nothing */ } - if (interrupt) { - interrupted = true - close - } - }, - threadName - ) - - thread.start() - try { - val res = - try Some(t) - catch {case e: Throwable => None} - - if (interrupted) None - else res - - } finally { - thread.interrupt() - interrupt = false - } - } -} - - diff --git a/main/src/mill/main/ReplApplyHandler.scala b/main/src/mill/main/ReplApplyHandler.scala deleted file mode 100644 index a8e467d4..00000000 --- a/main/src/mill/main/ReplApplyHandler.scala +++ /dev/null @@ -1,163 +0,0 @@ -package mill.main - - -import mill.define.Applicative.ApplyHandler -import mill.define.Segment.Label -import mill.define._ -import mill.eval.{Evaluator, Result} - -import mill.util.Strict.Agg - -import scala.collection.mutable -object ReplApplyHandler{ - def apply[T](home: os.Path, - disableTicker: Boolean, - colors: ammonite.util.Colors, - pprinter0: pprint.PPrinter, - rootModule: mill.define.BaseModule, - discover: Discover[_], - debugLog: Boolean) = { - new ReplApplyHandler( - pprinter0, - new Evaluator( - home, - ammonite.ops.pwd / 'out, - ammonite.ops.pwd / 'out, - rootModule, - new mill.util.PrintLogger( - colors != ammonite.util.Colors.BlackWhite, - disableTicker, - colors, - System.out, - System.err, - System.err, - System.in, - debugEnabled = debugLog - ) - ) - ) - } - def pprintCross(c: mill.define.Cross[_], evaluator: Evaluator) = { - pprint.Tree.Lazy( ctx => - Iterator(c.millOuterCtx.enclosing , ":", c.millOuterCtx.lineNum.toString, ctx.applyPrefixColor("\nChildren:").toString) ++ - c.items.iterator.map(x => - "\n (" + x._1.map(pprint.PPrinter.BlackWhite.apply(_)).mkString(", ") + ")" - ) - ) - } - def pprintModule(m: mill.define.Module, evaluator: Evaluator) = { - pprint.Tree.Lazy( ctx => - Iterator(m.millInternal.millModuleEnclosing, ":", m.millInternal.millModuleLine.toString) ++ - (if (m.millInternal.reflectAll[mill.Module].isEmpty) Nil - else - ctx.applyPrefixColor("\nChildren:").toString +: - m.millInternal.reflectAll[mill.Module].map("\n ." + _.millOuterCtx.segment.pathSegments.mkString("."))) ++ - (evaluator.rootModule.millDiscover.value.get(m.getClass) match{ - case None => Nil - case Some(commands) => - ctx.applyPrefixColor("\nCommands:").toString +: commands.map{c => - "\n ." 
+ c._2.name + "(" + - c._2.argSignatures.map(s => s.name + ": " + s.typeString).mkString(", ") + - ")()" - } - }) ++ - (if (m.millInternal.reflectAll[Target[_]].isEmpty) Nil - else { - Seq(ctx.applyPrefixColor("\nTargets:").toString) ++ - m.millInternal.reflectAll[Target[_]].map(t => - "\n ." + t.label + "()" - ) - }) - - ) - } - - def resolveParents(c: Class[_]): Seq[Class[_]] = { - Seq(c) ++ Option(c.getSuperclass).toSeq.flatMap(resolveParents) ++ c.getInterfaces.flatMap(resolveParents) - } - - def pprintTask(t: NamedTask[_], evaluator: Evaluator) = { - val seen = mutable.Set.empty[Task[_]] - def rec(t: Task[_]): Seq[Segments] = { - if (seen(t)) Nil // do nothing - else t match { - case t: Target[_] if evaluator.rootModule.millInternal.targets.contains(t) => - Seq(t.ctx.segments) - case _ => - seen.add(t) - t.inputs.flatMap(rec) - } - } - - val annots = for { - c <- resolveParents(t.ctx.enclosingCls) - m <- c.getMethods - if m.getName == t.ctx.segment.pathSegments.head - a = m.getAnnotation(classOf[mill.moduledefs.Scaladoc]) - if a != null - }yield a - - val allDocs = - for(a <- annots.distinct) - yield mill.modules.Util.cleanupScaladoc(a.value).map("\n " + _).mkString - - pprint.Tree.Lazy(ctx => - Iterator( - ctx.applyPrefixColor(t.toString).toString, "(", t.ctx.fileName.split('/').last, ":", t.ctx.lineNum.toString, ")", - allDocs.mkString("\n"), "\n", - "\n", ctx.applyPrefixColor("Inputs").toString, ":" - ) ++ t.inputs.iterator.flatMap(rec).map("\n " + _.render) - ) - } - -} -class ReplApplyHandler(pprinter0: pprint.PPrinter, - val evaluator: Evaluator) extends ApplyHandler[Task] { - // Evaluate classLoaderSig only once in the REPL to avoid busting caches - // as the user enters more REPL commands and changes the classpath - val classLoaderSig = Evaluator.classLoaderSig - override def apply[V](t: Task[V]) = { - val res = evaluator.evaluate(Agg(t)) - res.values match{ - case Seq(head: V) => head - case Nil => - val msg = new mutable.StringBuilder() - msg.append(res.failing.keyCount + " targets failed\n") - for((k, vs) <- res.failing.items){ - msg.append(k match{ - case Left(t) => "Anonymous Task\n" - case Right(k) => k.segments.render + "\n" - }) - - for(v <- vs){ - v match{ - case Result.Failure(m, _) => msg.append(m + "\n") - case Result.Exception(t, outerStack) => - msg.append( - t.toString + - t.getStackTrace.dropRight(outerStack.value.length).map("\n " + _).mkString + - "\n" - ) - - } - } - } - throw new Exception(msg.toString) - } - } - - val generatedEval = new EvalGenerated(evaluator) - - val millHandlers: PartialFunction[Any, pprint.Tree] = { - case c: Cross[_] => - ReplApplyHandler.pprintCross(c, evaluator) - case m: mill.Module if evaluator.rootModule.millInternal.modules.contains(m) => - ReplApplyHandler.pprintModule(m, evaluator) - case t: mill.define.Target[_] if evaluator.rootModule.millInternal.targets.contains(t) => - ReplApplyHandler.pprintTask(t, evaluator) - - } - val pprinter = pprinter0.copy( - additionalHandlers = millHandlers orElse pprinter0.additionalHandlers - ) -} diff --git a/main/src/mill/main/Resolve.scala b/main/src/mill/main/Resolve.scala deleted file mode 100644 index a2c186ed..00000000 --- a/main/src/mill/main/Resolve.scala +++ /dev/null @@ -1,443 +0,0 @@ -package mill.main - -import mill.define._ -import mill.define.TaskModule -import ammonite.util.Res -import mill.main.ResolveMetadata.singleModuleMeta -import mill.util.Router.EntryPoint -import mill.util.Scripts - -import scala.reflect.ClassTag - -object ResolveMetadata extends Resolve[String]{ - def 
singleModuleMeta(obj: Module, discover: Discover[_], isRootModule: Boolean) = { - val modules = obj.millModuleDirectChildren.map(_.toString) - val targets = - obj - .millInternal - .reflectAll[Target[_]] - .map(_.toString) - val commands = for{ - (cls, entryPoints) <- discover.value - if cls.isAssignableFrom(obj.getClass) - ep <- entryPoints - } yield { - if (isRootModule) ep._2.name - else obj + "." + ep._2.name - } - modules ++ targets ++ commands - } - - def endResolveLabel(obj: Module, - revSelectorsSoFar: List[Segment], - last: String, - discover: Discover[_], - rest: Seq[String]): Either[String, List[String]] = { - - val direct = singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty) - last match{ - case "__" => - Right( - // Filter out our own module in - obj.millInternal.modules - .filter(_ != obj) - .flatMap(m => singleModuleMeta(m, discover, m != obj)) - .toList - ) - case "_" => Right(direct.toList) - case _ => - direct.find(_.split('.').last == last) match{ - case None => Resolve.errorMsgLabel(direct, last, revSelectorsSoFar) - case Some(s) => Right(List(s)) - } - } - } - - def endResolveCross(obj: Module, - revSelectorsSoFar: List[Segment], - last: List[String], - discover: Discover[_], - rest: Seq[String]): Either[String, List[String]] = { - obj match{ - case c: Cross[Module] => - last match{ - case List("__") => Right(c.items.map(_._2.toString)) - case items => - c.items - .filter(_._1.length == items.length) - .filter(_._1.zip(last).forall{case (a, b) => b == "_" || a.toString == b}) - .map(_._2.toString) match{ - case Nil => - Resolve.errorMsgCross( - c.items.map(_._1.map(_.toString)), - last, - revSelectorsSoFar - ) - case res => Right(res) - } - - } - case _ => - Left( - Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + - Resolve.hintListLabel(revSelectorsSoFar) - ) - } - } -} - -object ResolveSegments extends Resolve[Segments] { - - override def endResolveCross(obj: Module, - revSelectorsSoFar: List[Segment], - last: List[String], - discover: Discover[_], - rest: Seq[String]): Either[String, Seq[Segments]] = { - obj match{ - case c: Cross[Module] => - last match{ - case List("__") => Right(c.items.map(_._2.millModuleSegments)) - case items => - c.items - .filter(_._1.length == items.length) - .filter(_._1.zip(last).forall{case (a, b) => b == "_" || a.toString == b}) - .map(_._2.millModuleSegments) match { - case Nil => - Resolve.errorMsgCross( - c.items.map(_._1.map(_.toString)), - last, - revSelectorsSoFar - ) - case res => Right(res) - } - } - case _ => - Left( - Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + - Resolve.hintListLabel(revSelectorsSoFar) - ) - } - } - - def endResolveLabel(obj: Module, - revSelectorsSoFar: List[Segment], - last: String, - discover: Discover[_], - rest: Seq[String]): Either[String, Seq[Segments]] = { - val target = - obj - .millInternal - .reflectSingle[Target[_]](last) - .map(t => Right(t.ctx.segments)) - - val command = - Resolve - .invokeCommand(obj, last, discover, rest) - .headOption - .map(_.map(_.ctx.segments)) - - val module = - obj.millInternal - .reflectNestedObjects[Module] - .find(_.millOuterCtx.segment == Segment.Label(last)) - .map(m => Right(m.millModuleSegments)) - - command orElse target orElse module match { - case None => - Resolve.errorMsgLabel( - singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), - last, - revSelectorsSoFar - ) - - case Some(either) => either.right.map(Seq(_)) - } - } -} - -object ResolveTasks extends Resolve[NamedTask[Any]]{ - - - def 
endResolveCross(obj: Module, - revSelectorsSoFar: List[Segment], - last: List[String], - discover: Discover[_], - rest: Seq[String])= { - - obj match{ - case c: Cross[Module] => - - Resolve.runDefault(obj, Segment.Cross(last), discover, rest).flatten.headOption match{ - case None => - Left( - "Cannot find default task to evaluate for module " + - Segments((Segment.Cross(last) :: revSelectorsSoFar).reverse:_*).render - ) - case Some(v) => v.map(Seq(_)) - } - case _ => - Left( - Resolve.unableToResolve(Segment.Cross(last), revSelectorsSoFar) + - Resolve.hintListLabel(revSelectorsSoFar) - ) - } - } - - def endResolveLabel(obj: Module, - revSelectorsSoFar: List[Segment], - last: String, - discover: Discover[_], - rest: Seq[String]) = last match{ - case "__" => - Right( - obj.millInternal.modules - .filter(_ != obj) - .flatMap(m => m.millInternal.reflectAll[Target[_]]) - ) - case "_" => Right(obj.millInternal.reflectAll[Target[_]]) - - case _ => - val target = - obj - .millInternal - .reflectSingle[Target[_]](last) - .map(Right(_)) - - val command = Resolve.invokeCommand(obj, last, discover, rest).headOption - - command orElse target orElse Resolve.runDefault(obj, Segment.Label(last), discover, rest).flatten.headOption match { - case None => - Resolve.errorMsgLabel( - singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), - last, - revSelectorsSoFar - ) - - // Contents of `either` *must* be a `Task`, because we only select - // methods returning `Task` in the discovery process - case Some(either) => either.right.map(Seq(_)) - } - } -} - -object Resolve{ - def minimum(i1: Int, i2: Int, i3: Int)= math.min(math.min(i1, i2), i3) - - /** - * Short Levenshtein distance algorithm, based on - * - * https://rosettacode.org/wiki/Levenshtein_distance#Scala - */ - def editDistance(s1: String, s2: String) = { - val dist = Array.tabulate(s2.length+1, s1.length+1){(j, i) => if(j==0) i else if (i==0) j else 0} - - for(j <- 1 to s2.length; i <- 1 to s1.length) - dist(j)(i) = if(s2(j - 1) == s1(i-1)) dist(j - 1)(i-1) - else minimum(dist(j - 1)(i) + 1, dist(j)(i - 1) + 1, dist(j - 1)(i - 1) + 1) - - dist(s2.length)(s1.length) - } - - def unableToResolve(last: Segment, revSelectorsSoFar: List[Segment]): String = { - unableToResolve(Segments((last :: revSelectorsSoFar).reverse: _*).render) - } - - def unableToResolve(segments: String): String = "Cannot resolve " + segments + "." - - def hintList(revSelectorsSoFar: List[Segment]) = { - val search = Segments(revSelectorsSoFar.reverse: _*).render - s" Try `mill resolve $search` to see what's available." - } - - def hintListLabel(revSelectorsSoFar: List[Segment]) = { - hintList(Segment.Label("_") :: revSelectorsSoFar) - } - - def hintListCross(revSelectorsSoFar: List[Segment]) = { - hintList(Segment.Cross(Seq("__")) :: revSelectorsSoFar) - } - - def errorMsgBase[T](direct: Seq[T], - last0: T, - revSelectorsSoFar: List[Segment], - editSplit: String => String, - defaultErrorMsg: String) - (strings: T => Seq[String], - render: T => String): Left[String, Nothing] = { - val last = strings(last0) - val similar = - direct - .map(x => (x, strings(x))) - .filter(_._2.length == last.length) - .map{ case (d, s) => (d, s.zip(last).map{case (a, b) => Resolve.editDistance(editSplit(a), b)}.sum)} - .filter(_._2 < 3) - .sortBy(_._2) - - if (similar.headOption.exists(_._1 == last0)){ - // Special case: if the most similar segment is the desired segment itself, - // this means we are trying to resolve a module where a task is present. 
- // Special case the error message to make it something meaningful - Left("Task " + last0 + " is not a module and has no children.") - }else{ - - val hint = similar match{ - case Nil => defaultErrorMsg - case items => " Did you mean " + render(items.head._1) + "?" - } - Left(unableToResolve(render(last0)) + hint) - } - } - - def errorMsgLabel(direct: Seq[String], last: String, revSelectorsSoFar: List[Segment]) = { - errorMsgBase( - direct, - Segments((Segment.Label(last) :: revSelectorsSoFar).reverse:_*).render, - revSelectorsSoFar, - _.split('.').last, - hintListLabel(revSelectorsSoFar) - )( - rendered => Seq(rendered.split('.').last), - x => x - ) - } - - def errorMsgCross(crossKeys: Seq[Seq[String]], - last: Seq[String], - revSelectorsSoFar: List[Segment]) = { - errorMsgBase( - crossKeys, - last, - revSelectorsSoFar, - x => x, - hintListCross(revSelectorsSoFar) - )( - crossKeys => crossKeys, - crossKeys => Segments((Segment.Cross(crossKeys) :: revSelectorsSoFar).reverse:_*).render - ) - } - - def invokeCommand(target: Module, - name: String, - discover: Discover[_], - rest: Seq[String]) = for { - (cls, entryPoints) <- discover.value - if cls.isAssignableFrom(target.getClass) - ep <- entryPoints - if ep._2.name == name - } yield Scripts.runMainMethod( - target, - ep._2.asInstanceOf[EntryPoint[Module]], - ammonite.main.Scripts.groupArgs(rest.toList) - ) match { - case Res.Success(v: Command[_]) => Right(v) - case Res.Failure(msg) => Left(msg) - case Res.Exception(ex, msg) => - val sw = new java.io.StringWriter() - ex.printStackTrace(new java.io.PrintWriter(sw)) - val prefix = if (msg.nonEmpty) msg + "\n" else msg - Left(prefix + sw.toString) - - } - - def runDefault(obj: Module, last: Segment, discover: Discover[_], rest: Seq[String]) = for { - child <- obj.millInternal.reflectNestedObjects[Module] - if child.millOuterCtx.segment == last - res <- child match { - case taskMod: TaskModule => - Some(invokeCommand(child, taskMod.defaultCommandName(), discover, rest).headOption) - case _ => None - } - } yield res - -} -abstract class Resolve[R: ClassTag] { - def endResolveCross(obj: Module, - revSelectorsSoFar: List[Segment], - last: List[String], - discover: Discover[_], - rest: Seq[String]): Either[String, Seq[R]] - def endResolveLabel(obj: Module, - revSelectorsSoFar: List[Segment], - last: String, - discover: Discover[_], - rest: Seq[String]): Either[String, Seq[R]] - - def resolve(remainingSelector: List[Segment], - obj: mill.Module, - discover: Discover[_], - rest: Seq[String], - remainingCrossSelectors: List[List[String]], - revSelectorsSoFar: List[Segment]): Either[String, Seq[R]] = { - - remainingSelector match{ - case Segment.Cross(last) :: Nil => - endResolveCross(obj, revSelectorsSoFar, last.map(_.toString).toList, discover, rest) - case Segment.Label(last) :: Nil => - endResolveLabel(obj, revSelectorsSoFar, last, discover, rest) - - case head :: tail => - val newRevSelectorsSoFar = head :: revSelectorsSoFar - - def recurse(searchModules: Seq[Module], resolveFailureMsg: => Left[String, Nothing]) = { - val matching = searchModules - .map(resolve(tail, _, discover, rest, remainingCrossSelectors, newRevSelectorsSoFar)) - - matching match{ - case Seq(Left(err)) => Left(err) - case items => - items.collect{case Right(v) => v} match{ - case Nil => resolveFailureMsg - case values => Right(values.flatten) - } - } - } - head match{ - case Segment.Label(singleLabel) => - recurse( - if (singleLabel == "__") obj.millInternal.modules - else if (singleLabel == "_") 
obj.millModuleDirectChildren.toSeq - else{ - obj.millInternal.reflectNestedObjects[mill.Module] - .find(_.millOuterCtx.segment == Segment.Label(singleLabel)) - .toSeq - }, - if (singleLabel != "_") Resolve.errorMsgLabel( - singleModuleMeta(obj, discover, revSelectorsSoFar.isEmpty), - singleLabel, - revSelectorsSoFar - ) - else Left( - "Cannot resolve " + Segments((remainingSelector.reverse ++ revSelectorsSoFar).reverse:_*).render + - ". Try `mill resolve " + Segments((Segment.Label("_") :: revSelectorsSoFar).reverse:_*).render + "` to see what's available" - ) - ) - case Segment.Cross(cross) => - obj match{ - case c: Cross[Module] => - recurse( - if(cross == Seq("__")) for ((k, v) <- c.items) yield v - else if (cross.contains("_")){ - for { - (k, v) <- c.items - if k.length == cross.length - if k.zip(cross).forall { case (l, r) => l == r || r == "_" } - } yield v - }else c.itemMap.get(cross.toList).toSeq, - Resolve.errorMsgCross( - c.items.map(_._1.map(_.toString)), - cross.map(_.toString), - revSelectorsSoFar - ) - ) - case _ => - Left( - Resolve.unableToResolve(Segment.Cross(cross.map(_.toString)), tail) + - Resolve.hintListLabel(tail) - ) - } - } - - case Nil => Left("Selector cannot be empty") - } - } -} diff --git a/main/src/mill/main/RunScript.scala b/main/src/mill/main/RunScript.scala deleted file mode 100644 index 47526631..00000000 --- a/main/src/mill/main/RunScript.scala +++ /dev/null @@ -1,262 +0,0 @@ -package mill.main - -import java.nio.file.NoSuchFileException - -import ammonite.interp.Interpreter -import ammonite.runtime.SpecialClassLoader -import ammonite.util.Util.CodeSource -import ammonite.util.{Name, Res, Util} -import mill.define -import mill.define._ -import mill.eval.{Evaluator, PathRef, Result} -import mill.util.{EitherOps, ParseArgs, Watched} -import mill.api.Logger -import mill.util.Strict.Agg - -import scala.collection.mutable -import scala.reflect.ClassTag - -/** - * Custom version of ammonite.main.Scripts, letting us run the build.sc script - * directly without going through Ammonite's main-method/argument-parsing - * subsystem - */ -object RunScript{ - def runScript(home: os.Path, - wd: os.Path, - path: os.Path, - instantiateInterpreter: => Either[(Res.Failing, Seq[(os.Path, Long)]), ammonite.interp.Interpreter], - scriptArgs: Seq[String], - stateCache: Option[Evaluator.State], - log: Logger, - env : Map[String, String]) - : (Res[(Evaluator, Seq[PathRef], Either[String, Seq[ujson.Value]])], Seq[(os.Path, Long)]) = { - - val (evalState, interpWatched) = stateCache match{ - case Some(s) if watchedSigUnchanged(s.watched) => Res.Success(s) -> s.watched - case _ => - instantiateInterpreter match{ - case Left((res, watched)) => (res, watched) - case Right(interp) => - interp.watch(path) - val eval = - for(rootModule <- evaluateRootModule(wd, path, interp, log)) - yield Evaluator.State( - rootModule, - rootModule.getClass.getClassLoader.asInstanceOf[SpecialClassLoader].classpathSignature, - mutable.Map.empty[Segments, (Int, Any)], - interp.watchedFiles - ) - (eval, interp.watchedFiles) - } - } - - val evalRes = - for(s <- evalState) - yield new Evaluator(home, wd / 'out, wd / 'out, s.rootModule, log, - s.classLoaderSig, s.workerCache, env) - - val evaluated = for{ - evaluator <- evalRes - (evalWatches, res) <- Res(evaluateTasks(evaluator, scriptArgs, multiSelect = false)) - } yield { - (evaluator, evalWatches, res.map(_.flatMap(_._2))) - } - (evaluated, interpWatched) - } - - def watchedSigUnchanged(sig: Seq[(os.Path, Long)]) = { - sig.forall{case (p, l) => 
Interpreter.pathSignature(p) == l} - } - - def evaluateRootModule(wd: os.Path, - path: os.Path, - interp: ammonite.interp.Interpreter, - log: Logger - ): Res[mill.define.BaseModule] = { - - val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd) - - for { - scriptTxt <- - try Res.Success(Util.normalizeNewlines(os.read(path))) - catch { case _: NoSuchFileException => - log.info("No build file found, you should create build.sc to do something useful") - Res.Success("") - } - - processed <- interp.processModule( - scriptTxt, - CodeSource(wrapper, pkg, Seq(Name("ammonite"), Name("$file")), Some(path)), - autoImport = true, - extraCode = "", - hardcoded = true - ) - - buildClsName <- processed.blockInfo.lastOption match { - case Some(meta) => Res.Success(meta.id.wrapperPath) - case None => Res.Skip - } - - buildCls = interp - .evalClassloader - .loadClass(buildClsName) - - module <- try { - Util.withContextClassloader(interp.evalClassloader) { - Res.Success( - buildCls.getMethod("millSelf") - .invoke(null) - .asInstanceOf[Some[mill.define.BaseModule]] - .get - ) - } - } catch { - case e: Throwable => Res.Exception(e, "") - } -// _ <- Res(consistencyCheck(mapping)) - } yield module - } - - def resolveTasks[T, R: ClassTag](resolver: mill.main.Resolve[R], - evaluator: Evaluator, - scriptArgs: Seq[String], - multiSelect: Boolean) = { - for { - parsed <- ParseArgs(scriptArgs, multiSelect = multiSelect) - (selectors, args) = parsed - taskss <- { - val selected = selectors.map { case (scopedSel, sel) => - for(res <- prepareResolve(evaluator, scopedSel, sel)) - yield { - val (rootModule, crossSelectors) = res - - - try { - // We inject the `evaluator.rootModule` into the TargetScopt, rather - // than the `rootModule`, because even if you are running an external - // module we still want you to be able to resolve targets from your - // main build. 
Resolving targets from external builds as CLI arguments - // is not currently supported - mill.eval.Evaluator.currentEvaluator.set(evaluator) - resolver.resolve( - sel.value.toList, rootModule, rootModule.millDiscover, - args, crossSelectors.toList, Nil - ) - } finally { - mill.eval.Evaluator.currentEvaluator.set(null) - } - } - } - EitherOps.sequence(selected) - } - res <- EitherOps.sequence(taskss) - } yield res.flatten - } - - def resolveRootModule[T](evaluator: Evaluator, scopedSel: Option[Segments]) = { - scopedSel match { - case None => Right(evaluator.rootModule) - case Some(scoping) => - for { - moduleCls <- - try Right(evaluator.rootModule.getClass.getClassLoader.loadClass(scoping.render + "$")) - catch {case e: ClassNotFoundException => Left ("Cannot resolve external module " + scoping.render)} - rootModule <- moduleCls.getField("MODULE$").get(moduleCls) match { - case rootModule: ExternalModule => Right(rootModule) - case _ => Left("Class " + scoping.render + " is not an external module") - } - } yield rootModule - } - } - - def prepareResolve[T](evaluator: Evaluator, scopedSel: Option[Segments], sel: Segments) = { - for (rootModule<- resolveRootModule(evaluator, scopedSel)) - yield { - val crossSelectors = sel.value.map { - case Segment.Cross(x) => x.toList.map(_.toString) - case _ => Nil - } - (rootModule, crossSelectors) - } - } - - def evaluateTasks[T](evaluator: Evaluator, - scriptArgs: Seq[String], - multiSelect: Boolean) = { - for (targets <- resolveTasks(mill.main.ResolveTasks, evaluator, scriptArgs, multiSelect)) yield { - val (watched, res) = evaluate(evaluator, Agg.from(targets.distinct)) - - val watched2 = for{ - x <- res.right.toSeq - (Watched(_, extraWatched), _) <- x - w <- extraWatched - } yield w - - (watched ++ watched2, res) - } - } - - def evaluate(evaluator: Evaluator, - targets: Agg[Task[Any]]): (Seq[PathRef], Either[String, Seq[(Any, Option[ujson.Value])]]) = { - val evaluated = evaluator.evaluate(targets) - val watched = evaluated.results - .iterator - .collect { - case (t: define.Sources, Result.Success(p: Seq[PathRef])) => p - } - .flatten - .toSeq - - val errorStr = - (for((k, fs) <- evaluated.failing.items()) yield { - val ks = k match{ - case Left(t) => t.toString - case Right(t) => t.segments.render - } - val fss = fs.map{ - case Result.Exception(t, outerStack) => - var current = List(t) - while(current.head.getCause != null){ - current = current.head.getCause :: current - } - current.reverse - .flatMap( ex => - Seq(ex.toString) ++ - ex.getStackTrace.dropRight(outerStack.value.length).map(" " + _) - ) - .mkString("\n") - case Result.Failure(t, _) => t - } - s"$ks ${fss.mkString(", ")}" - }).mkString("\n") - - evaluated.failing.keyCount match { - case 0 => - val json = for(t <- targets.toSeq) yield { - t match { - case t: mill.define.NamedTask[_] => - val jsonFile = Evaluator - .resolveDestPaths(evaluator.outPath, t.ctx.segments) - .meta - val metadata = upickle.default.read[Evaluator.Cached](ujson.read(jsonFile.toIO)) - Some(metadata.value) - - case _ => None - } - } - - watched -> Right(evaluated.values.zip(json)) - case n => watched -> Left(s"$n targets failed\n$errorStr") - } - } - -// def consistencyCheck[T](mapping: Discovered.Mapping[T]): Either[String, Unit] = { -// val consistencyErrors = Discovered.consistencyCheck(mapping) -// if (consistencyErrors.nonEmpty) { -// Left(s"Failed Discovered.consistencyCheck: ${consistencyErrors.map(_.render)}") -// } else { -// Right(()) -// } -// } -} diff --git a/main/src/mill/main/VisualizeModule.scala 
b/main/src/mill/main/VisualizeModule.scala deleted file mode 100644 index e950973f..00000000 --- a/main/src/mill/main/VisualizeModule.scala +++ /dev/null @@ -1,60 +0,0 @@ -package mill.main - -import java.util.concurrent.LinkedBlockingQueue - -import coursier.Cache -import coursier.core.Repository -import coursier.maven.MavenRepository -import mill.T -import mill.define.{Discover, ExternalModule} -import mill.eval.{PathRef, Result} - -object VisualizeModule extends ExternalModule with VisualizeModule { - def repositories = Seq( - Cache.ivy2Local, - MavenRepository("https://repo1.maven.org/maven2"), - MavenRepository("https://oss.sonatype.org/content/repositories/releases") - ) - - implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() - lazy val millDiscover = Discover[this.type] -} -trait VisualizeModule extends mill.define.TaskModule{ - def repositories: Seq[Repository] - def defaultCommandName() = "run" - def classpath = T{ - mill.modules.Util.millProjectModule("MILL_GRAPHVIZ", "mill-main-graphviz", repositories) - } - - /** - * The J2V8-based Graphviz library has a limitation that it can only ever - * be called from a single thread. Since Mill forks off a new thread every - * time you execute something, we need to keep around a worker thread that - * everyone can use to call into Graphviz, which the Mill execution threads - * can communicate via in/out queues. - */ - def worker = T.worker{ - val in = new LinkedBlockingQueue[(Seq[_], Seq[_], os.Path)]() - val out = new LinkedBlockingQueue[Result[Seq[PathRef]]]() - - val cl = mill.api.ClassLoader.create( - classpath().map(_.path.toNIO.toUri.toURL).toVector, - getClass.getClassLoader - ) - val visualizeThread = new java.lang.Thread(() => - while(true){ - val res = Result.create{ - val (targets, tasks, dest) = in.take() - cl.loadClass("mill.main.graphviz.GraphvizTools") - .getMethod("apply", classOf[Seq[_]], classOf[Seq[_]], classOf[os.Path]) - .invoke(null, targets, tasks, dest) - .asInstanceOf[Seq[PathRef]] - } - out.put(res) - } - ) - visualizeThread.setDaemon(true) - visualizeThread.start() - (in, out) - } -} diff --git a/main/src/mill/modules/Assembly.scala b/main/src/mill/modules/Assembly.scala deleted file mode 100644 index 141bc226..00000000 --- a/main/src/mill/modules/Assembly.scala +++ /dev/null @@ -1,126 +0,0 @@ -package mill.modules - -import java.io.InputStream -import java.util.jar.JarFile -import java.util.regex.Pattern - -import geny.Generator -import mill.Agg - -import scala.collection.JavaConverters._ - -object Assembly { - - val defaultRules: Seq[Rule] = Seq( - Rule.Append("reference.conf"), - Rule.Exclude(JarFile.MANIFEST_NAME), - Rule.ExcludePattern(".*\\.[sS][fF]"), - Rule.ExcludePattern(".*\\.[dD][sS][aA]"), - Rule.ExcludePattern(".*\\.[rR][sS][aA]") - ) - - sealed trait Rule extends Product with Serializable - object Rule { - case class Append(path: String) extends Rule - - object AppendPattern { - def apply(pattern: String): AppendPattern = AppendPattern(Pattern.compile(pattern)) - } - case class AppendPattern(pattern: Pattern) extends Rule - - case class Exclude(path: String) extends Rule - - object ExcludePattern { - def apply(pattern: String): ExcludePattern = ExcludePattern(Pattern.compile(pattern)) - } - case class ExcludePattern(pattern: Pattern) extends Rule - } - - def groupAssemblyEntries(inputPaths: Agg[os.Path], assemblyRules: Seq[Assembly.Rule]): Map[String, GroupedEntry] = { - val rulesMap = assemblyRules.collect { - case r@Rule.Append(path) => path -> r - case r@Rule.Exclude(path) 
=> path -> r - }.toMap - - val appendPatterns = assemblyRules.collect { - case Rule.AppendPattern(pattern) => pattern.asPredicate().test(_) - } - - val excludePatterns = assemblyRules.collect { - case Rule.ExcludePattern(pattern) => pattern.asPredicate().test(_) - } - - classpathIterator(inputPaths).foldLeft(Map.empty[String, GroupedEntry]) { - case (entries, entry) => - val mapping = entry.mapping - - rulesMap.get(mapping) match { - case Some(_: Assembly.Rule.Exclude) => - entries - case Some(_: Assembly.Rule.Append) => - val newEntry = entries.getOrElse(mapping, AppendEntry.empty).append(entry) - entries + (mapping -> newEntry) - - case _ if excludePatterns.exists(_(mapping)) => - entries - case _ if appendPatterns.exists(_(mapping)) => - val newEntry = entries.getOrElse(mapping, AppendEntry.empty).append(entry) - entries + (mapping -> newEntry) - - case _ if !entries.contains(mapping) => - entries + (mapping -> WriteOnceEntry(entry)) - case _ => - entries - } - } - } - - private def classpathIterator(inputPaths: Agg[os.Path]): Generator[AssemblyEntry] = { - Generator.from(inputPaths) - .filter(os.exists) - .flatMap { - p => - if (os.isFile(p)) { - val jf = new JarFile(p.toIO) - Generator.from( - for(entry <- jf.entries().asScala if !entry.isDirectory) - yield JarFileEntry(entry.getName, () => jf.getInputStream(entry)) - ) - } - else { - os.walk.stream(p) - .filter(os.isFile) - .map(sub => PathEntry(sub.relativeTo(p).toString, sub)) - } - } - } -} - -private[modules] sealed trait GroupedEntry { - def append(entry: AssemblyEntry): GroupedEntry -} - -private[modules] object AppendEntry { - val empty: AppendEntry = AppendEntry(Nil) -} - -private[modules] case class AppendEntry(entries: List[AssemblyEntry]) extends GroupedEntry { - def append(entry: AssemblyEntry): GroupedEntry = copy(entries = entry :: this.entries) -} - -private[modules] case class WriteOnceEntry(entry: AssemblyEntry) extends GroupedEntry { - def append(entry: AssemblyEntry): GroupedEntry = this -} - -private[this] sealed trait AssemblyEntry { - def mapping: String - def inputStream: InputStream -} - -private[this] case class PathEntry(mapping: String, path: os.Path) extends AssemblyEntry { - def inputStream: InputStream = os.read.inputStream(path) -} - -private[this] case class JarFileEntry(mapping: String, getIs: () => InputStream) extends AssemblyEntry { - def inputStream: InputStream = getIs() -} diff --git a/main/src/mill/modules/Jvm.scala b/main/src/mill/modules/Jvm.scala deleted file mode 100644 index 1a51ed8b..00000000 --- a/main/src/mill/modules/Jvm.scala +++ /dev/null @@ -1,481 +0,0 @@ -package mill.modules - -import java.io._ -import java.lang.reflect.Modifier -import java.net.URI -import java.nio.file.{FileSystems, Files, StandardOpenOption} -import java.nio.file.attribute.PosixFilePermission -import java.util.Collections -import java.util.jar.{JarEntry, JarFile, JarOutputStream} - -import coursier.{Cache, Dependency, Fetch, Repository, Resolution} -import coursier.util.{Gather, Task} -import geny.Generator -import mill.main.client.InputPumper -import mill.eval.{PathRef, Result} -import mill.util.Ctx -import mill.api.IO -import mill.util.Loose.Agg - -import scala.collection.mutable -import scala.collection.JavaConverters._ - -object Jvm { - /** - * Runs a JVM subprocess with the given configuration and returns a - * [[os.CommandResult]] with it's aggregated output and error streams - */ - def callSubprocess(mainClass: String, - classPath: Agg[os.Path], - jvmArgs: Seq[String] = Seq.empty, - envArgs: Map[String, 
String] = Map.empty, - mainArgs: Seq[String] = Seq.empty, - workingDir: os.Path = null, - streamOut: Boolean = true) - (implicit ctx: Ctx) = { - - val commandArgs = - Vector("java") ++ - jvmArgs ++ - Vector("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ - mainArgs - - val workingDir1 = Option(workingDir).getOrElse(ctx.dest) - os.makeDir.all(workingDir1) - - os.proc(commandArgs).call(cwd = workingDir1, env = envArgs) - } - - /** - * Runs a JVM subprocess with the given configuration and streams - * it's stdout and stderr to the console. - */ - def runSubprocess(mainClass: String, - classPath: Agg[os.Path], - jvmArgs: Seq[String] = Seq.empty, - envArgs: Map[String, String] = Map.empty, - mainArgs: Seq[String] = Seq.empty, - workingDir: os.Path = null, - background: Boolean = false): Unit = { - val args = - Vector("java") ++ - jvmArgs ++ - Vector("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ - mainArgs - - if (background) spawnSubprocess(args, envArgs, workingDir) - else runSubprocess(args, envArgs, workingDir) - } - - @deprecated("Use runSubprocess instead") - def baseInteractiveSubprocess(commandArgs: Seq[String], - envArgs: Map[String, String], - workingDir: os.Path) = { - runSubprocess(commandArgs, envArgs, workingDir) - } - - /** - * Runs a generic subprocess and waits for it to terminate. - */ - def runSubprocess(commandArgs: Seq[String], - envArgs: Map[String, String], - workingDir: os.Path) = { - val process = spawnSubprocess(commandArgs, envArgs, workingDir) - - process.waitFor() - if (process.exitCode() == 0) () - else throw new Exception("Interactive Subprocess Failed") - } - - /** - * Spawns a generic subprocess, streaming the stdout and stderr to the - * console. If the System.out/System.err have been substituted, makes sure - * that the subprocess's stdout and stderr streams go to the subtituted - * streams - */ - def spawnSubprocess(commandArgs: Seq[String], - envArgs: Map[String, String], - workingDir: os.Path) = { - // If System.in is fake, then we pump output manually rather than relying - // on `os.Inherit`. 
That is because `os.Inherit` does not follow changes - // to System.in/System.out/System.err, so the subprocess's streams get sent - // to the parent process's origin outputs even if we want to direct them - // elsewhere - if (System.in.isInstanceOf[ByteArrayInputStream]){ - val process = os.proc(commandArgs).spawn( - cwd = workingDir, - env = envArgs, - stdin = os.Pipe, - stdout = os.Pipe, - stderr = os.Pipe - ) - - val sources = Seq( - process.stdout -> System.out, - process.stderr -> System.err, - System.in -> process.stdin - ) - - for((std, dest) <- sources){ - new Thread(new InputPumper(std, dest, false)).start() - } - - process - }else{ - os.proc(commandArgs).spawn( - cwd = workingDir, - env = envArgs, - stdin = os.Inherit, - stdout = os.Inherit, - stderr = os.Inherit - ) - } - } - - - def runLocal(mainClass: String, - classPath: Agg[os.Path], - mainArgs: Seq[String] = Seq.empty) - (implicit ctx: Ctx): Unit = { - inprocess(classPath, classLoaderOverrideSbtTesting = false, isolated = true, closeContextClassLoaderWhenDone = true, cl => { - getMainMethod(mainClass, cl).invoke(null, mainArgs.toArray) - }) - } - - private def getMainMethod(mainClassName: String, cl: ClassLoader) = { - val mainClass = cl.loadClass(mainClassName) - val method = mainClass.getMethod("main", classOf[Array[String]]) - // jvm allows the actual main class to be non-public and to run a method in the non-public class, - // we need to make it accessible - method.setAccessible(true) - val modifiers = method.getModifiers - if (!Modifier.isPublic(modifiers)) - throw new NoSuchMethodException(mainClassName + ".main is not public") - if (!Modifier.isStatic(modifiers)) - throw new NoSuchMethodException(mainClassName + ".main is not static") - method - } - - - def inprocess[T](classPath: Agg[os.Path], - classLoaderOverrideSbtTesting: Boolean, - isolated: Boolean, - closeContextClassLoaderWhenDone: Boolean, - body: ClassLoader => T) - (implicit ctx: Ctx.Home): T = { - val urls = classPath.map(_.toIO.toURI.toURL) - val cl = if (classLoaderOverrideSbtTesting) { - val outerClassLoader = getClass.getClassLoader - mill.api.ClassLoader.create(urls.toVector, null, customFindClass = { name => - if (name.startsWith("sbt.testing.")) - Some(outerClassLoader.loadClass(name)) - else None - }) - } else if (isolated) { - mill.api.ClassLoader.create(urls.toVector, null) - } else { - mill.api.ClassLoader.create(urls.toVector, getClass.getClassLoader) - } - - val oldCl = Thread.currentThread().getContextClassLoader - Thread.currentThread().setContextClassLoader(cl) - try { - body(cl) - } finally { - if (closeContextClassLoaderWhenDone) { - Thread.currentThread().setContextClassLoader(oldCl) - cl.close() - } - } - } - - - private def createManifest(mainClass: Option[String]) = { - val m = new java.util.jar.Manifest() - m.getMainAttributes.put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.0") - m.getMainAttributes.putValue( "Created-By", "Scala mill" ) - mainClass.foreach( - m.getMainAttributes.put(java.util.jar.Attributes.Name.MAIN_CLASS, _) - ) - m - } - - /** - * Create a jar file containing all files from the specified input Paths, - * called out.jar in the implicit ctx.dest folder. An optional main class may - * be provided for the jar. An optional filter function may also be provided to - * selectively include/exclude specific files. 
- * @param inputPaths - `Agg` of `os.Path`s containing files to be included in the jar - * @param mainClass - optional main class for the jar - * @param fileFilter - optional file filter to select files to be included. - * Given a `os.Path` (from inputPaths) and a `os.RelPath` for the individual file, - * return true if the file is to be included in the jar. - * @param ctx - implicit `Ctx.Dest` used to determine the output directory for the jar. - * @return - a `PathRef` for the created jar. - */ - def createJar(inputPaths: Agg[os.Path], - mainClass: Option[String] = None, - fileFilter: (os.Path, os.RelPath) => Boolean = (p: os.Path, r: os.RelPath) => true) - (implicit ctx: Ctx.Dest): PathRef = { - val outputPath = ctx.dest / "out.jar" - os.remove.all(outputPath) - - val seen = mutable.Set.empty[os.RelPath] - seen.add(os.rel / "META-INF" / "MANIFEST.MF") - val jar = new JarOutputStream( - new FileOutputStream(outputPath.toIO), - createManifest(mainClass) - ) - - try{ - assert(inputPaths.forall(os.exists(_))) - for{ - p <- inputPaths - (file, mapping) <- - if (os.isFile(p)) Iterator(p -> os.rel / p.last) - else os.walk(p).filter(os.isFile).map(sub => sub -> sub.relativeTo(p)).sorted - if !seen(mapping) && fileFilter(p, mapping) - } { - seen.add(mapping) - val entry = new JarEntry(mapping.toString) - entry.setTime(os.mtime(file)) - jar.putNextEntry(entry) - jar.write(os.read.bytes(file)) - jar.closeEntry() - } - } finally { - jar.close() - } - - PathRef(outputPath) - } - - def createAssembly(inputPaths: Agg[os.Path], - mainClass: Option[String] = None, - prependShellScript: String = "", - base: Option[os.Path] = None, - assemblyRules: Seq[Assembly.Rule] = Assembly.defaultRules) - (implicit ctx: Ctx.Dest with Ctx.Log): PathRef = { - - val tmp = ctx.dest / "out-tmp.jar" - - val baseUri = "jar:" + tmp.toIO.getCanonicalFile.toURI.toASCIIString - val hm = new java.util.HashMap[String, String]() - - base match{ - case Some(b) => os.copy(b, tmp) - case None => hm.put("create", "true") - } - - val zipFs = FileSystems.newFileSystem(URI.create(baseUri), hm) - - val manifest = createManifest(mainClass) - val manifestPath = zipFs.getPath(JarFile.MANIFEST_NAME) - Files.createDirectories(manifestPath.getParent) - val manifestOut = Files.newOutputStream( - manifestPath, - StandardOpenOption.TRUNCATE_EXISTING, - StandardOpenOption.CREATE - ) - manifest.write(manifestOut) - manifestOut.close() - - Assembly.groupAssemblyEntries(inputPaths, assemblyRules).view - .foreach { - case (mapping, AppendEntry(entries)) => - val path = zipFs.getPath(mapping).toAbsolutePath - val concatenated = new SequenceInputStream( - Collections.enumeration(entries.map(_.inputStream).asJava)) - writeEntry(path, concatenated, append = true) - case (mapping, WriteOnceEntry(entry)) => - val path = zipFs.getPath(mapping).toAbsolutePath - if (Files.notExists(path)) { - writeEntry(path, entry.inputStream, append = false) - } - } - - zipFs.close() - val output = ctx.dest / "out.jar" - - // Prepend shell script and make it executable - if (prependShellScript.isEmpty) os.move(tmp, output) - else{ - val lineSep = if (!prependShellScript.endsWith("\n")) "\n\r\n" else "" - os.write( - output, - Seq[os.Source]( - prependShellScript + lineSep, - os.read.inputStream(tmp) - ) - ) - - if (!scala.util.Properties.isWin) { - os.perms.set( - output, - os.perms(output) - + PosixFilePermission.GROUP_EXECUTE - + PosixFilePermission.OWNER_EXECUTE - + PosixFilePermission.OTHERS_EXECUTE - ) - } - } - - PathRef(output) - } - - private def writeEntry(p: 
java.nio.file.Path, is: InputStream, append: Boolean): Unit = { - if (p.getParent != null) Files.createDirectories(p.getParent) - val options = - if(append) Seq(StandardOpenOption.APPEND, StandardOpenOption.CREATE) - else Seq(StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE) - - val outputStream = java.nio.file.Files.newOutputStream(p, options:_*) - IO.stream(is, outputStream) - outputStream.close() - is.close() - } - def universalScript(shellCommands: String, - cmdCommands: String, - shebang: Boolean = false): String = { - Seq( - if (shebang) "#!/usr/bin/env sh" else "", - "@ 2>/dev/null # 2>nul & echo off & goto BOF\r", - ":", - shellCommands.replaceAll("\r\n|\n", "\n"), - "exit", - Seq( - "", - ":BOF", - "@echo off", - cmdCommands.replaceAll("\r\n|\n", "\r\n"), - "exit /B %errorlevel%", - "" - ).mkString("\r\n") - ).filterNot(_.isEmpty).mkString("\n") - } - - def launcherUniversalScript(mainClass: String, - shellClassPath: Agg[String], - cmdClassPath: Agg[String], - jvmArgs: Seq[String], - shebang: Boolean = false) = { - universalScript( - shellCommands = - s"""exec java ${jvmArgs.mkString(" ")} $$JAVA_OPTS -cp "${shellClassPath.mkString(":")}" $mainClass "$$@"""", - cmdCommands = - s"""java ${jvmArgs.mkString(" ")} %JAVA_OPTS% -cp "${cmdClassPath.mkString(";")}" $mainClass %*""", - shebang = shebang - ) - } - def createLauncher(mainClass: String, - classPath: Agg[os.Path], - jvmArgs: Seq[String]) - (implicit ctx: Ctx.Dest)= { - val isWin = scala.util.Properties.isWin - val isBatch = isWin && - !(org.jline.utils.OSUtils.IS_CYGWIN - || org.jline.utils.OSUtils.IS_MINGW - || "MSYS" == System.getProperty("MSYSTEM")) - val outputPath = ctx.dest / (if (isBatch) "run.bat" else "run") - val classPathStrs = classPath.map(_.toString) - - os.write(outputPath, launcherUniversalScript(mainClass, classPathStrs, classPathStrs, jvmArgs)) - - if (!isWin) { - val perms = Files.getPosixFilePermissions(outputPath.toNIO) - perms.add(PosixFilePermission.GROUP_EXECUTE) - perms.add(PosixFilePermission.OWNER_EXECUTE) - perms.add(PosixFilePermission.OTHERS_EXECUTE) - Files.setPosixFilePermissions(outputPath.toNIO, perms) - } - PathRef(outputPath) - } - - /** - * Resolve dependencies using Coursier. - * - * We do not bother breaking this out into the separate ZincWorkerApi classpath, - * because Coursier is already bundled with mill/Ammonite to support the - * `import $ivy` syntax. 
- */ - def resolveDependencies(repositories: Seq[Repository], - deps: TraversableOnce[coursier.Dependency], - force: TraversableOnce[coursier.Dependency], - sources: Boolean = false, - mapDependencies: Option[Dependency => Dependency] = None): Result[Agg[PathRef]] = { - - val (_, resolution) = resolveDependenciesMetadata( - repositories, deps, force, mapDependencies - ) - val errs = resolution.metadataErrors - if(errs.nonEmpty) { - val header = - s"""| - |Resolution failed for ${errs.length} modules: - |-------------------------------------------- - |""".stripMargin - - val errLines = errs.map { - case ((module, vsn), errMsgs) => s" ${module.trim}:$vsn \n\t" + errMsgs.mkString("\n\t") - }.mkString("\n") - val msg = header + errLines + "\n" - Result.Failure(msg) - } else { - - def load(artifacts: Seq[coursier.Artifact]) = { - val logger = None - - import scala.concurrent.ExecutionContext.Implicits.global - val loadedArtifacts = Gather[Task].gather( - for (a <- artifacts) - yield coursier.Cache.file[Task](a, logger = logger).run - .map(a.isOptional -> _) - ).unsafeRun - - val errors = loadedArtifacts.collect { - case (false, Left(x)) => x - case (true, Left(x)) if !x.notFound => x - } - val successes = loadedArtifacts.collect { case (_, Right(x)) => x } - (errors, successes) - } - - val sourceOrJar = - if (sources) resolution.classifiersArtifacts(Seq("sources")) - else resolution.artifacts(true) - val (errors, successes) = load(sourceOrJar) - if(errors.isEmpty){ - mill.Agg.from( - successes.map(p => PathRef(os.Path(p), quick = true)).filter(_.path.ext == "jar") - ) - }else{ - val errorDetails = errors.map(e => s"${ammonite.util.Util.newLine} ${e.describe}").mkString - Result.Failure("Failed to load source dependencies" + errorDetails) - } - } - } - - - def resolveDependenciesMetadata(repositories: Seq[Repository], - deps: TraversableOnce[coursier.Dependency], - force: TraversableOnce[coursier.Dependency], - mapDependencies: Option[Dependency => Dependency] = None) = { - - val forceVersions = force - .map(mapDependencies.getOrElse(identity[Dependency](_))) - .map{d => d.module -> d.version} - .toMap - - val start = Resolution( - deps.map(mapDependencies.getOrElse(identity[Dependency](_))).toSet, - forceVersions = forceVersions, - mapDependencies = mapDependencies - ) - - val fetch = Fetch.from(repositories, Cache.fetch[Task]()) - - import scala.concurrent.ExecutionContext.Implicits.global - val resolution = start.process.run(fetch).unsafeRun() - (deps.toSeq, resolution) - } -} diff --git a/main/src/mill/modules/Util.scala b/main/src/mill/modules/Util.scala deleted file mode 100644 index 2b98a304..00000000 --- a/main/src/mill/modules/Util.scala +++ /dev/null @@ -1,73 +0,0 @@ -package mill.modules - - -import coursier.Repository -import mill.api.{PathRef, IO} -import mill.util.{Ctx, Loose} - - -object Util { - def cleanupScaladoc(v: String) = { - v.linesIterator.map( - _.dropWhile(_.isWhitespace) - .stripPrefix("/**") - .stripPrefix("*/") - .stripPrefix("*") - .dropWhile(_.isWhitespace) - ).toArray - .dropWhile(_.isEmpty) - .reverse - .dropWhile(_.isEmpty) - .reverse - } - def download(url: String, dest: os.RelPath = "download")(implicit ctx: Ctx.Dest) = { - val out = ctx.dest / dest - - val website = new java.net.URI(url).toURL - val rbc = java.nio.channels.Channels.newChannel(website.openStream) - try{ - val fos = new java.io.FileOutputStream(out.toIO) - try{ - fos.getChannel.transferFrom(rbc, 0, java.lang.Long.MAX_VALUE) - PathRef(out) - } finally{ - fos.close() - } - } finally{ - 
rbc.close() - } - } - - def downloadUnpackZip(url: String, dest: os.RelPath = "unpacked") - (implicit ctx: Ctx.Dest) = { - - val tmpName = if (dest == os.rel / "tmp.zip") "tmp2.zip" else "tmp.zip" - val downloaded = download(url, tmpName) - IO.unpackZip(downloaded.path, dest) - } - - - def millProjectModule(key: String, - artifact: String, - repositories: Seq[Repository], - resolveFilter: os.Path => Boolean = _ => true, - artifactSuffix: String = "_2.12") = { - val localPath = sys.props(key) - if (localPath != null) { - mill.api.Result.Success( - Loose.Agg.from(localPath.split(',').map(p => PathRef(os.Path(p), quick = true))) - ) - } else { - mill.modules.Jvm.resolveDependencies( - repositories, - Seq( - coursier.Dependency( - coursier.Module("com.lihaoyi", artifact + artifactSuffix), - sys.props("MILL_VERSION") - ) - ), - Nil - ).map(_.filter(x => resolveFilter(x.path))) - } - } -} diff --git a/main/src/mill/package.scala b/main/src/mill/package.scala deleted file mode 100644 index 0ccd094f..00000000 --- a/main/src/mill/package.scala +++ /dev/null @@ -1,12 +0,0 @@ -import mill.util.JsonFormatters - -package object mill extends JsonFormatters{ - val T = define.Target - type T[T] = define.Target[T] - val PathRef = mill.api.PathRef - type PathRef = mill.api.PathRef - type Module = define.Module - type Cross[T] = define.Cross[T] - type Agg[T] = util.Loose.Agg[T] - val Agg = util.Loose.Agg -} diff --git a/main/src/modules/Assembly.scala b/main/src/modules/Assembly.scala new file mode 100644 index 00000000..141bc226 --- /dev/null +++ b/main/src/modules/Assembly.scala @@ -0,0 +1,126 @@ +package mill.modules + +import java.io.InputStream +import java.util.jar.JarFile +import java.util.regex.Pattern + +import geny.Generator +import mill.Agg + +import scala.collection.JavaConverters._ + +object Assembly { + + val defaultRules: Seq[Rule] = Seq( + Rule.Append("reference.conf"), + Rule.Exclude(JarFile.MANIFEST_NAME), + Rule.ExcludePattern(".*\\.[sS][fF]"), + Rule.ExcludePattern(".*\\.[dD][sS][aA]"), + Rule.ExcludePattern(".*\\.[rR][sS][aA]") + ) + + sealed trait Rule extends Product with Serializable + object Rule { + case class Append(path: String) extends Rule + + object AppendPattern { + def apply(pattern: String): AppendPattern = AppendPattern(Pattern.compile(pattern)) + } + case class AppendPattern(pattern: Pattern) extends Rule + + case class Exclude(path: String) extends Rule + + object ExcludePattern { + def apply(pattern: String): ExcludePattern = ExcludePattern(Pattern.compile(pattern)) + } + case class ExcludePattern(pattern: Pattern) extends Rule + } + + def groupAssemblyEntries(inputPaths: Agg[os.Path], assemblyRules: Seq[Assembly.Rule]): Map[String, GroupedEntry] = { + val rulesMap = assemblyRules.collect { + case r@Rule.Append(path) => path -> r + case r@Rule.Exclude(path) => path -> r + }.toMap + + val appendPatterns = assemblyRules.collect { + case Rule.AppendPattern(pattern) => pattern.asPredicate().test(_) + } + + val excludePatterns = assemblyRules.collect { + case Rule.ExcludePattern(pattern) => pattern.asPredicate().test(_) + } + + classpathIterator(inputPaths).foldLeft(Map.empty[String, GroupedEntry]) { + case (entries, entry) => + val mapping = entry.mapping + + rulesMap.get(mapping) match { + case Some(_: Assembly.Rule.Exclude) => + entries + case Some(_: Assembly.Rule.Append) => + val newEntry = entries.getOrElse(mapping, AppendEntry.empty).append(entry) + entries + (mapping -> newEntry) + + case _ if excludePatterns.exists(_(mapping)) => + entries + case _ if 
appendPatterns.exists(_(mapping)) => + val newEntry = entries.getOrElse(mapping, AppendEntry.empty).append(entry) + entries + (mapping -> newEntry) + + case _ if !entries.contains(mapping) => + entries + (mapping -> WriteOnceEntry(entry)) + case _ => + entries + } + } + } + + private def classpathIterator(inputPaths: Agg[os.Path]): Generator[AssemblyEntry] = { + Generator.from(inputPaths) + .filter(os.exists) + .flatMap { + p => + if (os.isFile(p)) { + val jf = new JarFile(p.toIO) + Generator.from( + for(entry <- jf.entries().asScala if !entry.isDirectory) + yield JarFileEntry(entry.getName, () => jf.getInputStream(entry)) + ) + } + else { + os.walk.stream(p) + .filter(os.isFile) + .map(sub => PathEntry(sub.relativeTo(p).toString, sub)) + } + } + } +} + +private[modules] sealed trait GroupedEntry { + def append(entry: AssemblyEntry): GroupedEntry +} + +private[modules] object AppendEntry { + val empty: AppendEntry = AppendEntry(Nil) +} + +private[modules] case class AppendEntry(entries: List[AssemblyEntry]) extends GroupedEntry { + def append(entry: AssemblyEntry): GroupedEntry = copy(entries = entry :: this.entries) +} + +private[modules] case class WriteOnceEntry(entry: AssemblyEntry) extends GroupedEntry { + def append(entry: AssemblyEntry): GroupedEntry = this +} + +private[this] sealed trait AssemblyEntry { + def mapping: String + def inputStream: InputStream +} + +private[this] case class PathEntry(mapping: String, path: os.Path) extends AssemblyEntry { + def inputStream: InputStream = os.read.inputStream(path) +} + +private[this] case class JarFileEntry(mapping: String, getIs: () => InputStream) extends AssemblyEntry { + def inputStream: InputStream = getIs() +} diff --git a/main/src/modules/Jvm.scala b/main/src/modules/Jvm.scala new file mode 100644 index 00000000..1a51ed8b --- /dev/null +++ b/main/src/modules/Jvm.scala @@ -0,0 +1,481 @@ +package mill.modules + +import java.io._ +import java.lang.reflect.Modifier +import java.net.URI +import java.nio.file.{FileSystems, Files, StandardOpenOption} +import java.nio.file.attribute.PosixFilePermission +import java.util.Collections +import java.util.jar.{JarEntry, JarFile, JarOutputStream} + +import coursier.{Cache, Dependency, Fetch, Repository, Resolution} +import coursier.util.{Gather, Task} +import geny.Generator +import mill.main.client.InputPumper +import mill.eval.{PathRef, Result} +import mill.util.Ctx +import mill.api.IO +import mill.util.Loose.Agg + +import scala.collection.mutable +import scala.collection.JavaConverters._ + +object Jvm { + /** + * Runs a JVM subprocess with the given configuration and returns a + * [[os.CommandResult]] with it's aggregated output and error streams + */ + def callSubprocess(mainClass: String, + classPath: Agg[os.Path], + jvmArgs: Seq[String] = Seq.empty, + envArgs: Map[String, String] = Map.empty, + mainArgs: Seq[String] = Seq.empty, + workingDir: os.Path = null, + streamOut: Boolean = true) + (implicit ctx: Ctx) = { + + val commandArgs = + Vector("java") ++ + jvmArgs ++ + Vector("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ + mainArgs + + val workingDir1 = Option(workingDir).getOrElse(ctx.dest) + os.makeDir.all(workingDir1) + + os.proc(commandArgs).call(cwd = workingDir1, env = envArgs) + } + + /** + * Runs a JVM subprocess with the given configuration and streams + * it's stdout and stderr to the console. 
+ */ + def runSubprocess(mainClass: String, + classPath: Agg[os.Path], + jvmArgs: Seq[String] = Seq.empty, + envArgs: Map[String, String] = Map.empty, + mainArgs: Seq[String] = Seq.empty, + workingDir: os.Path = null, + background: Boolean = false): Unit = { + val args = + Vector("java") ++ + jvmArgs ++ + Vector("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ + mainArgs + + if (background) spawnSubprocess(args, envArgs, workingDir) + else runSubprocess(args, envArgs, workingDir) + } + + @deprecated("Use runSubprocess instead") + def baseInteractiveSubprocess(commandArgs: Seq[String], + envArgs: Map[String, String], + workingDir: os.Path) = { + runSubprocess(commandArgs, envArgs, workingDir) + } + + /** + * Runs a generic subprocess and waits for it to terminate. + */ + def runSubprocess(commandArgs: Seq[String], + envArgs: Map[String, String], + workingDir: os.Path) = { + val process = spawnSubprocess(commandArgs, envArgs, workingDir) + + process.waitFor() + if (process.exitCode() == 0) () + else throw new Exception("Interactive Subprocess Failed") + } + + /** + * Spawns a generic subprocess, streaming the stdout and stderr to the + * console. If the System.out/System.err have been substituted, makes sure + * that the subprocess's stdout and stderr streams go to the subtituted + * streams + */ + def spawnSubprocess(commandArgs: Seq[String], + envArgs: Map[String, String], + workingDir: os.Path) = { + // If System.in is fake, then we pump output manually rather than relying + // on `os.Inherit`. That is because `os.Inherit` does not follow changes + // to System.in/System.out/System.err, so the subprocess's streams get sent + // to the parent process's origin outputs even if we want to direct them + // elsewhere + if (System.in.isInstanceOf[ByteArrayInputStream]){ + val process = os.proc(commandArgs).spawn( + cwd = workingDir, + env = envArgs, + stdin = os.Pipe, + stdout = os.Pipe, + stderr = os.Pipe + ) + + val sources = Seq( + process.stdout -> System.out, + process.stderr -> System.err, + System.in -> process.stdin + ) + + for((std, dest) <- sources){ + new Thread(new InputPumper(std, dest, false)).start() + } + + process + }else{ + os.proc(commandArgs).spawn( + cwd = workingDir, + env = envArgs, + stdin = os.Inherit, + stdout = os.Inherit, + stderr = os.Inherit + ) + } + } + + + def runLocal(mainClass: String, + classPath: Agg[os.Path], + mainArgs: Seq[String] = Seq.empty) + (implicit ctx: Ctx): Unit = { + inprocess(classPath, classLoaderOverrideSbtTesting = false, isolated = true, closeContextClassLoaderWhenDone = true, cl => { + getMainMethod(mainClass, cl).invoke(null, mainArgs.toArray) + }) + } + + private def getMainMethod(mainClassName: String, cl: ClassLoader) = { + val mainClass = cl.loadClass(mainClassName) + val method = mainClass.getMethod("main", classOf[Array[String]]) + // jvm allows the actual main class to be non-public and to run a method in the non-public class, + // we need to make it accessible + method.setAccessible(true) + val modifiers = method.getModifiers + if (!Modifier.isPublic(modifiers)) + throw new NoSuchMethodException(mainClassName + ".main is not public") + if (!Modifier.isStatic(modifiers)) + throw new NoSuchMethodException(mainClassName + ".main is not static") + method + } + + + def inprocess[T](classPath: Agg[os.Path], + classLoaderOverrideSbtTesting: Boolean, + isolated: Boolean, + closeContextClassLoaderWhenDone: Boolean, + body: ClassLoader => T) + (implicit ctx: Ctx.Home): T = { + val urls = classPath.map(_.toIO.toURI.toURL) + val cl 
= if (classLoaderOverrideSbtTesting) { + val outerClassLoader = getClass.getClassLoader + mill.api.ClassLoader.create(urls.toVector, null, customFindClass = { name => + if (name.startsWith("sbt.testing.")) + Some(outerClassLoader.loadClass(name)) + else None + }) + } else if (isolated) { + mill.api.ClassLoader.create(urls.toVector, null) + } else { + mill.api.ClassLoader.create(urls.toVector, getClass.getClassLoader) + } + + val oldCl = Thread.currentThread().getContextClassLoader + Thread.currentThread().setContextClassLoader(cl) + try { + body(cl) + } finally { + if (closeContextClassLoaderWhenDone) { + Thread.currentThread().setContextClassLoader(oldCl) + cl.close() + } + } + } + + + private def createManifest(mainClass: Option[String]) = { + val m = new java.util.jar.Manifest() + m.getMainAttributes.put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.0") + m.getMainAttributes.putValue( "Created-By", "Scala mill" ) + mainClass.foreach( + m.getMainAttributes.put(java.util.jar.Attributes.Name.MAIN_CLASS, _) + ) + m + } + + /** + * Create a jar file containing all files from the specified input Paths, + * called out.jar in the implicit ctx.dest folder. An optional main class may + * be provided for the jar. An optional filter function may also be provided to + * selectively include/exclude specific files. + * @param inputPaths - `Agg` of `os.Path`s containing files to be included in the jar + * @param mainClass - optional main class for the jar + * @param fileFilter - optional file filter to select files to be included. + * Given a `os.Path` (from inputPaths) and a `os.RelPath` for the individual file, + * return true if the file is to be included in the jar. + * @param ctx - implicit `Ctx.Dest` used to determine the output directory for the jar. + * @return - a `PathRef` for the created jar. 
+ */ + def createJar(inputPaths: Agg[os.Path], + mainClass: Option[String] = None, + fileFilter: (os.Path, os.RelPath) => Boolean = (p: os.Path, r: os.RelPath) => true) + (implicit ctx: Ctx.Dest): PathRef = { + val outputPath = ctx.dest / "out.jar" + os.remove.all(outputPath) + + val seen = mutable.Set.empty[os.RelPath] + seen.add(os.rel / "META-INF" / "MANIFEST.MF") + val jar = new JarOutputStream( + new FileOutputStream(outputPath.toIO), + createManifest(mainClass) + ) + + try{ + assert(inputPaths.forall(os.exists(_))) + for{ + p <- inputPaths + (file, mapping) <- + if (os.isFile(p)) Iterator(p -> os.rel / p.last) + else os.walk(p).filter(os.isFile).map(sub => sub -> sub.relativeTo(p)).sorted + if !seen(mapping) && fileFilter(p, mapping) + } { + seen.add(mapping) + val entry = new JarEntry(mapping.toString) + entry.setTime(os.mtime(file)) + jar.putNextEntry(entry) + jar.write(os.read.bytes(file)) + jar.closeEntry() + } + } finally { + jar.close() + } + + PathRef(outputPath) + } + + def createAssembly(inputPaths: Agg[os.Path], + mainClass: Option[String] = None, + prependShellScript: String = "", + base: Option[os.Path] = None, + assemblyRules: Seq[Assembly.Rule] = Assembly.defaultRules) + (implicit ctx: Ctx.Dest with Ctx.Log): PathRef = { + + val tmp = ctx.dest / "out-tmp.jar" + + val baseUri = "jar:" + tmp.toIO.getCanonicalFile.toURI.toASCIIString + val hm = new java.util.HashMap[String, String]() + + base match{ + case Some(b) => os.copy(b, tmp) + case None => hm.put("create", "true") + } + + val zipFs = FileSystems.newFileSystem(URI.create(baseUri), hm) + + val manifest = createManifest(mainClass) + val manifestPath = zipFs.getPath(JarFile.MANIFEST_NAME) + Files.createDirectories(manifestPath.getParent) + val manifestOut = Files.newOutputStream( + manifestPath, + StandardOpenOption.TRUNCATE_EXISTING, + StandardOpenOption.CREATE + ) + manifest.write(manifestOut) + manifestOut.close() + + Assembly.groupAssemblyEntries(inputPaths, assemblyRules).view + .foreach { + case (mapping, AppendEntry(entries)) => + val path = zipFs.getPath(mapping).toAbsolutePath + val concatenated = new SequenceInputStream( + Collections.enumeration(entries.map(_.inputStream).asJava)) + writeEntry(path, concatenated, append = true) + case (mapping, WriteOnceEntry(entry)) => + val path = zipFs.getPath(mapping).toAbsolutePath + if (Files.notExists(path)) { + writeEntry(path, entry.inputStream, append = false) + } + } + + zipFs.close() + val output = ctx.dest / "out.jar" + + // Prepend shell script and make it executable + if (prependShellScript.isEmpty) os.move(tmp, output) + else{ + val lineSep = if (!prependShellScript.endsWith("\n")) "\n\r\n" else "" + os.write( + output, + Seq[os.Source]( + prependShellScript + lineSep, + os.read.inputStream(tmp) + ) + ) + + if (!scala.util.Properties.isWin) { + os.perms.set( + output, + os.perms(output) + + PosixFilePermission.GROUP_EXECUTE + + PosixFilePermission.OWNER_EXECUTE + + PosixFilePermission.OTHERS_EXECUTE + ) + } + } + + PathRef(output) + } + + private def writeEntry(p: java.nio.file.Path, is: InputStream, append: Boolean): Unit = { + if (p.getParent != null) Files.createDirectories(p.getParent) + val options = + if(append) Seq(StandardOpenOption.APPEND, StandardOpenOption.CREATE) + else Seq(StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE) + + val outputStream = java.nio.file.Files.newOutputStream(p, options:_*) + IO.stream(is, outputStream) + outputStream.close() + is.close() + } + def universalScript(shellCommands: String, + cmdCommands: String, + 
shebang: Boolean = false): String = { + Seq( + if (shebang) "#!/usr/bin/env sh" else "", + "@ 2>/dev/null # 2>nul & echo off & goto BOF\r", + ":", + shellCommands.replaceAll("\r\n|\n", "\n"), + "exit", + Seq( + "", + ":BOF", + "@echo off", + cmdCommands.replaceAll("\r\n|\n", "\r\n"), + "exit /B %errorlevel%", + "" + ).mkString("\r\n") + ).filterNot(_.isEmpty).mkString("\n") + } + + def launcherUniversalScript(mainClass: String, + shellClassPath: Agg[String], + cmdClassPath: Agg[String], + jvmArgs: Seq[String], + shebang: Boolean = false) = { + universalScript( + shellCommands = + s"""exec java ${jvmArgs.mkString(" ")} $$JAVA_OPTS -cp "${shellClassPath.mkString(":")}" $mainClass "$$@"""", + cmdCommands = + s"""java ${jvmArgs.mkString(" ")} %JAVA_OPTS% -cp "${cmdClassPath.mkString(";")}" $mainClass %*""", + shebang = shebang + ) + } + def createLauncher(mainClass: String, + classPath: Agg[os.Path], + jvmArgs: Seq[String]) + (implicit ctx: Ctx.Dest)= { + val isWin = scala.util.Properties.isWin + val isBatch = isWin && + !(org.jline.utils.OSUtils.IS_CYGWIN + || org.jline.utils.OSUtils.IS_MINGW + || "MSYS" == System.getProperty("MSYSTEM")) + val outputPath = ctx.dest / (if (isBatch) "run.bat" else "run") + val classPathStrs = classPath.map(_.toString) + + os.write(outputPath, launcherUniversalScript(mainClass, classPathStrs, classPathStrs, jvmArgs)) + + if (!isWin) { + val perms = Files.getPosixFilePermissions(outputPath.toNIO) + perms.add(PosixFilePermission.GROUP_EXECUTE) + perms.add(PosixFilePermission.OWNER_EXECUTE) + perms.add(PosixFilePermission.OTHERS_EXECUTE) + Files.setPosixFilePermissions(outputPath.toNIO, perms) + } + PathRef(outputPath) + } + + /** + * Resolve dependencies using Coursier. + * + * We do not bother breaking this out into the separate ZincWorkerApi classpath, + * because Coursier is already bundled with mill/Ammonite to support the + * `import $ivy` syntax. 
+ */ + def resolveDependencies(repositories: Seq[Repository], + deps: TraversableOnce[coursier.Dependency], + force: TraversableOnce[coursier.Dependency], + sources: Boolean = false, + mapDependencies: Option[Dependency => Dependency] = None): Result[Agg[PathRef]] = { + + val (_, resolution) = resolveDependenciesMetadata( + repositories, deps, force, mapDependencies + ) + val errs = resolution.metadataErrors + if(errs.nonEmpty) { + val header = + s"""| + |Resolution failed for ${errs.length} modules: + |-------------------------------------------- + |""".stripMargin + + val errLines = errs.map { + case ((module, vsn), errMsgs) => s" ${module.trim}:$vsn \n\t" + errMsgs.mkString("\n\t") + }.mkString("\n") + val msg = header + errLines + "\n" + Result.Failure(msg) + } else { + + def load(artifacts: Seq[coursier.Artifact]) = { + val logger = None + + import scala.concurrent.ExecutionContext.Implicits.global + val loadedArtifacts = Gather[Task].gather( + for (a <- artifacts) + yield coursier.Cache.file[Task](a, logger = logger).run + .map(a.isOptional -> _) + ).unsafeRun + + val errors = loadedArtifacts.collect { + case (false, Left(x)) => x + case (true, Left(x)) if !x.notFound => x + } + val successes = loadedArtifacts.collect { case (_, Right(x)) => x } + (errors, successes) + } + + val sourceOrJar = + if (sources) resolution.classifiersArtifacts(Seq("sources")) + else resolution.artifacts(true) + val (errors, successes) = load(sourceOrJar) + if(errors.isEmpty){ + mill.Agg.from( + successes.map(p => PathRef(os.Path(p), quick = true)).filter(_.path.ext == "jar") + ) + }else{ + val errorDetails = errors.map(e => s"${ammonite.util.Util.newLine} ${e.describe}").mkString + Result.Failure("Failed to load source dependencies" + errorDetails) + } + } + } + + + def resolveDependenciesMetadata(repositories: Seq[Repository], + deps: TraversableOnce[coursier.Dependency], + force: TraversableOnce[coursier.Dependency], + mapDependencies: Option[Dependency => Dependency] = None) = { + + val forceVersions = force + .map(mapDependencies.getOrElse(identity[Dependency](_))) + .map{d => d.module -> d.version} + .toMap + + val start = Resolution( + deps.map(mapDependencies.getOrElse(identity[Dependency](_))).toSet, + forceVersions = forceVersions, + mapDependencies = mapDependencies + ) + + val fetch = Fetch.from(repositories, Cache.fetch[Task]()) + + import scala.concurrent.ExecutionContext.Implicits.global + val resolution = start.process.run(fetch).unsafeRun() + (deps.toSeq, resolution) + } +} diff --git a/main/src/modules/Util.scala b/main/src/modules/Util.scala new file mode 100644 index 00000000..2b98a304 --- /dev/null +++ b/main/src/modules/Util.scala @@ -0,0 +1,73 @@ +package mill.modules + + +import coursier.Repository +import mill.api.{PathRef, IO} +import mill.util.{Ctx, Loose} + + +object Util { + def cleanupScaladoc(v: String) = { + v.linesIterator.map( + _.dropWhile(_.isWhitespace) + .stripPrefix("/**") + .stripPrefix("*/") + .stripPrefix("*") + .dropWhile(_.isWhitespace) + ).toArray + .dropWhile(_.isEmpty) + .reverse + .dropWhile(_.isEmpty) + .reverse + } + def download(url: String, dest: os.RelPath = "download")(implicit ctx: Ctx.Dest) = { + val out = ctx.dest / dest + + val website = new java.net.URI(url).toURL + val rbc = java.nio.channels.Channels.newChannel(website.openStream) + try{ + val fos = new java.io.FileOutputStream(out.toIO) + try{ + fos.getChannel.transferFrom(rbc, 0, java.lang.Long.MAX_VALUE) + PathRef(out) + } finally{ + fos.close() + } + } finally{ + rbc.close() + } + } + + def 
downloadUnpackZip(url: String, dest: os.RelPath = "unpacked") + (implicit ctx: Ctx.Dest) = { + + val tmpName = if (dest == os.rel / "tmp.zip") "tmp2.zip" else "tmp.zip" + val downloaded = download(url, tmpName) + IO.unpackZip(downloaded.path, dest) + } + + + def millProjectModule(key: String, + artifact: String, + repositories: Seq[Repository], + resolveFilter: os.Path => Boolean = _ => true, + artifactSuffix: String = "_2.12") = { + val localPath = sys.props(key) + if (localPath != null) { + mill.api.Result.Success( + Loose.Agg.from(localPath.split(',').map(p => PathRef(os.Path(p), quick = true))) + ) + } else { + mill.modules.Jvm.resolveDependencies( + repositories, + Seq( + coursier.Dependency( + coursier.Module("com.lihaoyi", artifact + artifactSuffix), + sys.props("MILL_VERSION") + ) + ), + Nil + ).map(_.filter(x => resolveFilter(x.path))) + } + } +} diff --git a/main/src/package.scala b/main/src/package.scala new file mode 100644 index 00000000..0ccd094f --- /dev/null +++ b/main/src/package.scala @@ -0,0 +1,12 @@ +import mill.util.JsonFormatters + +package object mill extends JsonFormatters{ + val T = define.Target + type T[T] = define.Target[T] + val PathRef = mill.api.PathRef + type PathRef = mill.api.PathRef + type Module = define.Module + type Cross[T] = define.Cross[T] + type Agg[T] = util.Loose.Agg[T] + val Agg = util.Loose.Agg +} diff --git a/main/test/src/TestMain.scala b/main/test/src/TestMain.scala new file mode 100644 index 00000000..80e7e627 --- /dev/null +++ b/main/test/src/TestMain.scala @@ -0,0 +1,6 @@ +package mill + +object TestMain { + def main(args: Array[String]): Unit = { + } +} diff --git a/main/test/src/UTestFramework.scala b/main/test/src/UTestFramework.scala new file mode 100644 index 00000000..c234151b --- /dev/null +++ b/main/test/src/UTestFramework.scala @@ -0,0 +1,11 @@ +package mill + +class UTestFramework extends utest.runner.Framework { + override def exceptionStackFrameHighlighter(s: StackTraceElement) = { + s.getClassName.startsWith("mill.") + } + override def setup() = { + + os.remove.all(os.pwd / 'target / 'workspace) + } +} diff --git a/main/test/src/define/ApplicativeTests.scala b/main/test/src/define/ApplicativeTests.scala new file mode 100644 index 00000000..9dd2132f --- /dev/null +++ b/main/test/src/define/ApplicativeTests.scala @@ -0,0 +1,125 @@ +package mill.define + +import mill.api.Ctx.ImplicitStub +import utest._ + +import scala.annotation.compileTimeOnly +import scala.language.experimental.macros + + +object ApplicativeTests extends TestSuite { + implicit def optionToOpt[T](o: Option[T]): Opt[T] = new Opt(o) + class Opt[T](val self: Option[T]) extends Applicative.Applyable[Option, T] + object Opt extends OptGenerated with Applicative.Applyer[Opt, Option, Applicative.Id, String]{ + + val injectedCtx = "helloooo" + def underlying[A](v: Opt[A]) = v.self + def apply[T](t: T): Option[T] = macro Applicative.impl[Option, T, String] + + def mapCtx[A, B](a: Option[A])(f: (A, String) => B): Option[B] = a.map(f(_, injectedCtx)) + def zip() = Some(()) + def zip[A](a: Option[A]) = a.map(Tuple1(_)) + } + class Counter{ + var value = 0 + def apply() = { + value += 1 + value + } + } + @compileTimeOnly("Target.ctx() can only be used with a T{...} block") + @ImplicitStub + implicit def taskCtx: String = ??? 
+ + val tests = Tests{ + + 'selfContained - { + + 'simple - assert(Opt("lol " + 1) == Some("lol 1")) + 'singleSome - assert(Opt("lol " + Some("hello")()) == Some("lol hello")) + 'twoSomes - assert(Opt(Some("lol ")() + Some("hello")()) == Some("lol hello")) + 'singleNone - assert(Opt("lol " + None()) == None) + 'twoNones - assert(Opt("lol " + None() + None()) == None) + } + 'context - { + assert(Opt(Opt.ctx() + Some("World")()) == Some("hellooooWorld")) + } + 'capturing - { + val lol = "lol " + def hell(o: String) = "hell" + o + 'simple - assert(Opt(lol + 1) == Some("lol 1")) + 'singleSome - assert(Opt(lol + Some(hell("o"))()) == Some("lol hello")) + 'twoSomes - assert(Opt(Some(lol)() + Some(hell("o"))()) == Some("lol hello")) + 'singleNone - assert(Opt(lol + None()) == None) + 'twoNones - assert(Opt(lol + None() + None()) == None) + } + 'allowedLocalDef - { + // Although x is defined inside the Opt{...} block, it is also defined + // within the LHS of the Applyable#apply call, so it is safe to life it + // out into the `zipMap` arguments list. + val res = Opt{ "lol " + Some("hello").flatMap(x => Some(x)).apply() } + assert(res == Some("lol hello")) + } + 'upstreamAlwaysEvaluated - { + // Whether or not control-flow reaches the Applyable#apply call inside an + // Opt{...} block, we always evaluate the LHS of the Applyable#apply + // because it gets lifted out of any control flow statements + val counter = new Counter() + def up = Opt{ "lol " + counter() } + val down = Opt{ if ("lol".length > 10) up() else "fail" } + assert( + down == Some("fail"), + counter.value == 1 + ) + } + 'upstreamEvaluatedOnlyOnce - { + // Even if control-flow reaches the Applyable#apply call more than once, + // it only gets evaluated once due to its lifting out of the Opt{...} block + val counter = new Counter() + def up = Opt{ "lol " + counter() } + def runTwice[T](t: => T) = (t, t) + val down = Opt{ runTwice(up()) } + assert( + down == Some(("lol 1", "lol 1")), + counter.value == 1 + ) + } + 'evaluationsInsideLambdasWork - { + // This required some fiddling with owner chains inside the macro to get + // working, so ensure it doesn't regress + val counter = new Counter() + def up = Opt{ "hello" + counter() } + val down1 = Opt{ (() => up())() } + val down2 = Opt{ Seq(1, 2, 3).map(n => up() * n) } + assert( + down1 == Some("hello1"), + down2 == Some(Seq("hello2", "hello2hello2", "hello2hello2hello2")) + ) + } + 'appliesEvaluatedOncePerLexicalCallsite - { + // If you have multiple Applyable#apply() lexically in the source code of + // your Opt{...} call, each one gets evaluated once, even if the LHS of each + // apply() call is identical. It's up to the downstream zipMap() + // implementation to decide if it wants to dedup them or do other things. 
+ val counter = new Counter() + def up = Opt{ "hello" + counter() } + val down = Opt{ Seq(1, 2, 3).map(n => n + up() + up()) } + assert(down == Some(Seq("1hello1hello2", "2hello1hello2", "3hello1hello2"))) + } + 'appliesEvaluateBeforehand - { + // Every Applyable#apply() within a Opt{...} block evaluates before any + // other logic within that block, even if they would happen first in the + // normal Scala evaluation order + val counter = new Counter() + def up = Opt{ counter() } + val down = Opt{ + val res = counter() + val one = up() + val two = up() + val three = up() + (res, one, two, three) + } + assert(down == Some((4, 1, 2, 3))) + } + } +} diff --git a/main/test/src/define/BasePathTests.scala b/main/test/src/define/BasePathTests.scala new file mode 100644 index 00000000..b8a653c8 --- /dev/null +++ b/main/test/src/define/BasePathTests.scala @@ -0,0 +1,73 @@ +package mill.define + +import mill.util.{TestGraphs, TestUtil} +import utest._ +import mill.{Module, T} +object BasePathTests extends TestSuite{ + val testGraphs = new TestGraphs + val tests = Tests{ + def check[T <: Module](m: T)(f: T => Module, segments: String*) = { + val remaining = f(m).millSourcePath.relativeTo(m.millSourcePath).segments + assert(remaining == segments) + } + 'singleton - { + check(testGraphs.singleton)(identity) + } + 'backtickIdentifiers - { + check(testGraphs.bactickIdentifiers)( + _.`nested-module`, + "nested-module" + ) + } + 'separateGroups - { + check(TestGraphs.triangleTask)(identity) + } + 'TraitWithModuleObject - { + check(TestGraphs.TraitWithModuleObject)( + _.TraitModule, + "TraitModule" + ) + } + 'nestedModuleNested - { + check(TestGraphs.nestedModule)(_.nested, "nested") + } + 'nestedModuleInstance - { + check(TestGraphs.nestedModule)(_.classInstance, "classInstance") + } + 'singleCross - { + check(TestGraphs.singleCross)(_.cross, "cross") + check(TestGraphs.singleCross)(_.cross("210"), "cross", "210") + check(TestGraphs.singleCross)(_.cross("211"), "cross", "211") + } + 'doubleCross - { + check(TestGraphs.doubleCross)(_.cross, "cross") + check(TestGraphs.doubleCross)(_.cross("210", "jvm"), "cross", "210", "jvm") + check(TestGraphs.doubleCross)(_.cross("212", "js"), "cross", "212", "js") + } + 'nestedCrosses - { + check(TestGraphs.nestedCrosses)(_.cross, "cross") + check(TestGraphs.nestedCrosses)( + _.cross("210").cross2("js"), + "cross", "210", "cross2", "js" + ) + } + 'overriden - { + object overridenBasePath extends TestUtil.BaseModule { + override def millSourcePath = os.pwd / 'overridenBasePathRootValue + object nested extends Module{ + override def millSourcePath = super.millSourcePath / 'overridenBasePathNested + object nested extends Module{ + override def millSourcePath = super.millSourcePath / 'overridenBasePathDoubleNested + } + } + } + assert( + overridenBasePath.millSourcePath == os.pwd / 'overridenBasePathRootValue, + overridenBasePath.nested.millSourcePath == os.pwd / 'overridenBasePathRootValue / 'nested / 'overridenBasePathNested, + overridenBasePath.nested.nested.millSourcePath == os.pwd / 'overridenBasePathRootValue / 'nested / 'overridenBasePathNested / 'nested / 'overridenBasePathDoubleNested + ) + } + + } +} + diff --git a/main/test/src/define/CacherTests.scala b/main/test/src/define/CacherTests.scala new file mode 100644 index 00000000..59ebf3f6 --- /dev/null +++ b/main/test/src/define/CacherTests.scala @@ -0,0 +1,75 @@ +package mill.define + +import mill.util.{DummyLogger, TestEvaluator, TestUtil} +import mill.util.Strict.Agg +import mill.T +import 
mill.api.Result.Success +import utest._ +import utest.framework.TestPath + + +object CacherTests extends TestSuite{ + object Base extends Base + trait Base extends TestUtil.BaseModule{ + def value = T{ 1 } + def result = T{ Success(1) } + } + object Middle extends Middle + trait Middle extends Base{ + override def value = T{ super.value() + 2} + def overriden = T{ super.value()} + } + object Terminal extends Terminal + trait Terminal extends Middle{ + override def value = T{ super.value() + 4} + } + + val tests = Tests{ + def eval[T <: TestUtil.BaseModule, V](mapping: T, v: Task[V]) + (implicit tp: TestPath) = { + val evaluator = new TestEvaluator(mapping) + evaluator(v).right.get._1 + } + def check(x: Any, y: Any) = assert(x == y) + + 'simpleDefIsCached - { + Predef.assert(Base.value eq Base.value) + Predef.assert(eval(Base, Base.value) == 1) + } + + 'resultDefIsCached - { + Predef.assert(Base.result eq Base.result) + Predef.assert(eval(Base, Base.result) == 1) + } + + + 'overridingDefIsAlsoCached - { + Predef.assert(eval(Middle, Middle.value) == 3) + Predef.assert(Middle.value eq Middle.value) + } + + 'overridenDefRemainsAvailable - { + Predef.assert(eval(Middle, Middle.overriden) == 1) + } + + + 'multipleOverridesWork- { + Predef.assert(eval(Terminal, Terminal.value) == 7) + Predef.assert(eval(Terminal, Terminal.overriden) == 1) + } + // Doesn't fail, presumably compileError doesn't go far enough in the + // compilation pipeline to hit the override checks + // + // 'overrideOutsideModuleFails - { + // compileError(""" + // trait Foo{ + // def x = 1 + // } + // object Bar extends Foo{ + // def x = 2 + // } + // """) + // } + } +} + diff --git a/main/test/src/define/DiscoverTests.scala b/main/test/src/define/DiscoverTests.scala new file mode 100644 index 00000000..248d6afe --- /dev/null +++ b/main/test/src/define/DiscoverTests.scala @@ -0,0 +1,63 @@ +package mill.define + +import mill.util.TestGraphs +import utest._ + +object DiscoverTests extends TestSuite{ + val testGraphs = new TestGraphs + val tests = Tests{ + def check[T <: Module](m: T)(targets: (T => Target[_])*) = { + val discovered = m.millInternal.targets + val expected = targets.map(_(m)).toSet + assert(discovered == expected) + } + 'singleton - { + check(testGraphs.singleton)(_.single) + } + 'backtickIdentifiers { + check(testGraphs.bactickIdentifiers)(_.`up-target`, _.`a-down-target`, _.`nested-module`.`nested-target`) + } + 'separateGroups - { + check(TestGraphs.triangleTask)(_.left, _.right) + } + 'TraitWithModuleObject - { + check(TestGraphs.TraitWithModuleObject)(_.TraitModule.testFrameworks) + } + 'nestedModule - { + check(TestGraphs.nestedModule)(_.single, _.nested.single, _.classInstance.single) + } + 'singleCross - { + check(TestGraphs.singleCross)( + _.cross("210").suffix, + _.cross("211").suffix, + _.cross("212").suffix + ) + } + 'doubleCross - { + check(TestGraphs.doubleCross)( + _.cross("210", "jvm").suffix, + _.cross("210", "js").suffix, + _.cross("211", "jvm").suffix, + _.cross("211", "js").suffix, + _.cross("212", "jvm").suffix, + _.cross("212", "js").suffix, + _.cross("212", "native").suffix + ) + } + 'nestedCrosses - { + check(TestGraphs.nestedCrosses)( + _.cross("210").cross2("jvm").suffix, + _.cross("210").cross2("js").suffix, + _.cross("210").cross2("native").suffix, + _.cross("211").cross2("jvm").suffix, + _.cross("211").cross2("js").suffix, + _.cross("211").cross2("native").suffix, + _.cross("212").cross2("jvm").suffix, + _.cross("212").cross2("js").suffix, + _.cross("212").cross2("native").suffix + ) + 
} + + } +} + diff --git a/main/test/src/define/GraphTests.scala b/main/test/src/define/GraphTests.scala new file mode 100644 index 00000000..224ce59f --- /dev/null +++ b/main/test/src/define/GraphTests.scala @@ -0,0 +1,211 @@ +package mill.define + + +import mill.eval.Evaluator +import mill.util.{TestGraphs, TestUtil} +import utest._ +import mill.util.Strict.Agg +object GraphTests extends TestSuite{ + + val tests = Tests{ + + + val graphs = new TestGraphs() + import graphs._ + import TestGraphs._ + + 'topoSortedTransitiveTargets - { + def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { + val result = Graph.topoSorted(Graph.transitiveTargets(targets)).values + TestUtil.checkTopological(result) + assert(result == expected) + } + + 'singleton - check( + targets = Agg(singleton.single), + expected = Agg(singleton.single) + ) + 'backtickIdentifiers - check( + targets = Agg(bactickIdentifiers.`a-down-target`), + expected = Agg(bactickIdentifiers.`up-target`, bactickIdentifiers.`a-down-target`) + ) + 'pair - check( + targets = Agg(pair.down), + expected = Agg(pair.up, pair.down) + ) + 'anonTriple - check( + targets = Agg(anonTriple.down), + expected = Agg(anonTriple.up, anonTriple.down.inputs(0), anonTriple.down) + ) + 'diamond - check( + targets = Agg(diamond.down), + expected = Agg(diamond.up, diamond.left, diamond.right, diamond.down) + ) + 'anonDiamond - check( + targets = Agg(diamond.down), + expected = Agg( + diamond.up, + diamond.down.inputs(0), + diamond.down.inputs(1), + diamond.down + ) + ) + 'defCachedDiamond - check( + targets = Agg(defCachedDiamond.down), + expected = Agg( + defCachedDiamond.up.inputs(0), + defCachedDiamond.up, + defCachedDiamond.down.inputs(0).inputs(0).inputs(0), + defCachedDiamond.down.inputs(0).inputs(0), + defCachedDiamond.down.inputs(0).inputs(1).inputs(0), + defCachedDiamond.down.inputs(0).inputs(1), + defCachedDiamond.down.inputs(0), + defCachedDiamond.down + ) + ) + 'bigSingleTerminal - { + val result = Graph.topoSorted(Graph.transitiveTargets(Agg(bigSingleTerminal.j))).values + TestUtil.checkTopological(result) + assert(result.size == 28) + } + } + + 'groupAroundNamedTargets - { + def check[T, R <: Target[Int]](base: T) + (target: T => R, + important0: Agg[T => Target[_]], + expected: Agg[(R, Int)]) = { + + val topoSorted = Graph.topoSorted(Graph.transitiveTargets(Agg(target(base)))) + + val important = important0.map(_ (base)) + val grouped = Graph.groupAroundImportantTargets(topoSorted) { + case t: Target[_] if important.contains(t) => t + } + val flattened = Agg.from(grouped.values().flatMap(_.items)) + + TestUtil.checkTopological(flattened) + for ((terminal, expectedSize) <- expected) { + val grouping = grouped.lookupKey(terminal) + assert( + grouping.size == expectedSize, + grouping.flatMap(_.asTarget: Option[Target[_]]).filter(important.contains) == Agg(terminal) + ) + } + } + + 'singleton - check(singleton)( + _.single, + Agg(_.single), + Agg(singleton.single -> 1) + ) + 'backtickIdentifiers - check(bactickIdentifiers)( + _.`a-down-target`, + Agg(_.`up-target`, _.`a-down-target`), + Agg( + bactickIdentifiers.`up-target` -> 1, + bactickIdentifiers.`a-down-target` -> 1 + ) + ) + 'pair - check(pair)( + _.down, + Agg(_.up, _.down), + Agg(pair.up -> 1, pair.down -> 1) + ) + 'anonTriple - check(anonTriple)( + _.down, + Agg(_.up, _.down), + Agg(anonTriple.up -> 1, anonTriple.down -> 2) + ) + 'diamond - check(diamond)( + _.down, + Agg(_.up, _.left, _.right, _.down), + Agg( + diamond.up -> 1, + diamond.left -> 1, + diamond.right -> 1, + diamond.down 
-> 1 + ) + ) + + 'defCachedDiamond - check(defCachedDiamond)( + _.down, + Agg(_.up, _.left, _.right, _.down), + Agg( + defCachedDiamond.up -> 2, + defCachedDiamond.left -> 2, + defCachedDiamond.right -> 2, + defCachedDiamond.down -> 2 + ) + ) + + 'anonDiamond - check(anonDiamond)( + _.down, + Agg(_.down, _.up), + Agg( + anonDiamond.up -> 1, + anonDiamond.down -> 3 + ) + ) + 'bigSingleTerminal - check(bigSingleTerminal)( + _.j, + Agg(_.a, _.b, _.e, _.f, _.i, _.j), + Agg( + bigSingleTerminal.a -> 3, + bigSingleTerminal.b -> 2, + bigSingleTerminal.e -> 9, + bigSingleTerminal.i -> 6, + bigSingleTerminal.f -> 4, + bigSingleTerminal.j -> 4 + ) + ) + } + 'multiTerminalGroupCounts - { + def countGroups(goals: Task[_]*) = { + + val topoSorted = Graph.topoSorted( + Graph.transitiveTargets(Agg.from(goals)) + ) + val grouped = Graph.groupAroundImportantTargets(topoSorted) { + case t: NamedTask[Any] => t + case t if goals.contains(t) => t + } + grouped.keyCount + } + + 'separateGroups - { + import separateGroups._ + val groupCount = countGroups(right, left) + assert(groupCount == 3) + } + + 'triangleTask - { + // Make sure the following graph ends up as a single group, since although + // `right` depends on `left`, both of them depend on the un-cached `task` + // which would force them both to re-compute every time `task` changes + import triangleTask._ + val groupCount = countGroups(right, left) + assert(groupCount == 2) + } + + + 'multiTerminalGroup - { + // Make sure the following graph ends up as two groups + import multiTerminalGroup._ + val groupCount = countGroups(right, left) + assert(groupCount == 2) + } + + + 'multiTerminalBoundary - { + // Make sure the following graph ends up as a three groups: one for + // each cached target, and one for the downstream task we are running + import multiTerminalBoundary._ + val groupCount = countGroups(task2) + assert(groupCount == 3) + } + } + + + } +} diff --git a/main/test/src/define/MacroErrorTests.scala b/main/test/src/define/MacroErrorTests.scala new file mode 100644 index 00000000..c8b140fa --- /dev/null +++ b/main/test/src/define/MacroErrorTests.scala @@ -0,0 +1,145 @@ +package mill.define + +import utest._ +import mill.{T, Module} +import mill.util.TestUtil +object MacroErrorTests extends TestSuite{ + + val tests = Tests{ + + 'errors{ + val expectedMsg = + "T{} members must be defs defined in a Cacher class/trait/object body" + + val err = compileError("object Foo extends TestUtil.BaseModule{ val x = T{1} }") + assert(err.msg == expectedMsg) + } + + 'badParameterSets - { + 'command - { + val e = compileError(""" + object foo extends mill.util.TestUtil.BaseModule{ + def w = T.command{1} + } + mill.define.Discover[foo.type] + """) + assert( + e.msg.contains("`T.command` definitions must have 1 parameter list"), + e.pos.contains("def w = ") + ) + } + 'target - { + val e = compileError(""" + object foo extends mill.util.TestUtil.BaseModule{ + def x() = T{1} + } + mill.define.Discover[foo.type] + """) + assert( + e.msg.contains("`T{...}` definitions must have 0 parameter lists"), + e.pos.contains("def x() = ") + ) + } + 'input - { + val e = compileError(""" + object foo extends mill.util.TestUtil.BaseModule{ + def y() = T.input{1} + } + mill.define.Discover[foo.type] + """) + assert( + e.msg.contains("`T.input` definitions must have 0 parameter lists"), + e.pos.contains("def y() = ") + ) + } + 'sources - { + val e = compileError(""" + object foo extends mill.util.TestUtil.BaseModule{ + def z() = T.sources{ammonite.ops.pwd} + } + 
mill.define.Discover[foo.type] + """) + assert( + e.msg.contains("`T.sources` definitions must have 0 parameter lists"), + e.pos.contains("def z() = ") + ) + } + 'persistent - { + val e = compileError(""" + object foo extends mill.util.TestUtil.BaseModule{ + def a() = T.persistent{1} + } + mill.define.Discover[foo.type] + """) + assert( + e.msg.contains("`T.persistent` definitions must have 0 parameter lists"), + e.pos.contains("def a() = ") + ) + } + } + 'badTmacro - { + // Make sure we can reference values from outside the T{...} block as part + // of our `Target#apply()` calls, but we cannot reference any values that + // come from inside the T{...} block + 'pos - { + val e = compileError(""" + val a = T{ 1 } + val arr = Array(a) + val b = { + val c = 0 + T{ + arr(c)() + } + } + """) + assert(e.msg.contains( + "Modules, Targets and Commands can only be defined within a mill Module") + ) + } + 'neg - { + + val expectedMsg = + "Target#apply() call cannot use `value n` defined within the T{...} block" + val err = compileError("""new Module{ + def a = T{ 1 } + val arr = Array(a) + def b = { + T{ + val n = 0 + arr(n)() + } + } + }""") + assert(err.msg == expectedMsg) + } + 'neg2 - { + + val expectedMsg = + "Target#apply() call cannot use `value x` defined within the T{...} block" + val err = compileError("""new Module{ + def a = T{ 1 } + val arr = Array(a) + def b = { + T{ + arr.map{x => x()} + } + } + }""") + assert(err.msg == expectedMsg) + } + 'neg3{ + val borkedCachedDiamond1 = utest.compileError(""" + object borkedCachedDiamond1 { + def up = T{ TestUtil.test() } + def left = T{ TestUtil.test(up) } + def right = T{ TestUtil.test(up) } + def down = T{ TestUtil.test(left, right) } + } + """) + assert(borkedCachedDiamond1.msg.contains( + "Modules, Targets and Commands can only be defined within a mill Module") + ) + } + } + } +} diff --git a/main/test/src/eval/CrossTests.scala b/main/test/src/eval/CrossTests.scala new file mode 100644 index 00000000..f194924e --- /dev/null +++ b/main/test/src/eval/CrossTests.scala @@ -0,0 +1,56 @@ +package mill.eval + + +import mill.define.Discover +import mill.util.TestEvaluator + +import mill.util.TestGraphs.{crossResolved, doubleCross, nestedCrosses, singleCross} +import utest._ +object CrossTests extends TestSuite{ + val tests = Tests{ + 'singleCross - { + val check = new TestEvaluator(singleCross) + + val Right(("210", 1)) = check.apply(singleCross.cross("210").suffix) + val Right(("211", 1)) = check.apply(singleCross.cross("211").suffix) + val Right(("212", 1)) = check.apply(singleCross.cross("212").suffix) + } + + 'crossResolved - { + val check = new TestEvaluator(crossResolved) + + val Right(("2.10", 1)) = check.apply(crossResolved.foo("2.10").suffix) + val Right(("2.11", 1)) = check.apply(crossResolved.foo("2.11").suffix) + val Right(("2.12", 1)) = check.apply(crossResolved.foo("2.12").suffix) + + val Right(("_2.10", 1)) = check.apply(crossResolved.bar("2.10").longSuffix) + val Right(("_2.11", 1)) = check.apply(crossResolved.bar("2.11").longSuffix) + val Right(("_2.12", 1)) = check.apply(crossResolved.bar("2.12").longSuffix) + } + + + 'doubleCross - { + val check = new TestEvaluator(doubleCross) + + val Right(("210_jvm", 1)) = check.apply(doubleCross.cross("210", "jvm").suffix) + val Right(("210_js", 1)) = check.apply(doubleCross.cross("210", "js").suffix) + val Right(("211_jvm", 1)) = check.apply(doubleCross.cross("211", "jvm").suffix) + val Right(("211_js", 1)) = check.apply(doubleCross.cross("211", "js").suffix) + val Right(("212_jvm", 1)) = 
check.apply(doubleCross.cross("212", "jvm").suffix) + val Right(("212_js", 1)) = check.apply(doubleCross.cross("212", "js").suffix) + val Right(("212_native", 1)) = check.apply(doubleCross.cross("212", "native").suffix) + } + + 'nestedCrosses - { + val check = new TestEvaluator(nestedCrosses) + + val Right(("210_jvm", 1)) = check.apply(nestedCrosses.cross("210").cross2("jvm").suffix) + val Right(("210_js", 1)) = check.apply(nestedCrosses.cross("210").cross2("js").suffix) + val Right(("211_jvm", 1)) = check.apply(nestedCrosses.cross("211").cross2("jvm").suffix) + val Right(("211_js", 1)) = check.apply(nestedCrosses.cross("211").cross2("js").suffix) + val Right(("212_jvm", 1)) = check.apply(nestedCrosses.cross("212").cross2("jvm").suffix) + val Right(("212_js", 1)) = check.apply(nestedCrosses.cross("212").cross2("js").suffix) + val Right(("212_native", 1)) = check.apply(nestedCrosses.cross("212").cross2("native").suffix) + } + } +} diff --git a/main/test/src/eval/EvaluationTests.scala b/main/test/src/eval/EvaluationTests.scala new file mode 100644 index 00000000..74f9088c --- /dev/null +++ b/main/test/src/eval/EvaluationTests.scala @@ -0,0 +1,354 @@ +package mill.eval + + +import mill.util.TestUtil.{Test, test} +import mill.define.{Discover, Graph, Target, Task} +import mill.{Module, T} +import mill.util.{DummyLogger, TestEvaluator, TestGraphs, TestUtil} +import mill.util.Strict.Agg +import utest._ +import utest.framework.TestPath + + + +object EvaluationTests extends TestSuite{ + class Checker[T <: TestUtil.BaseModule](module: T)(implicit tp: TestPath) { + // Make sure data is persisted even if we re-create the evaluator each time + + def evaluator = new TestEvaluator(module).evaluator + + def apply(target: Task[_], expValue: Any, + expEvaled: Agg[Task[_]], + // How many "other" tasks were evaluated other than those listed above. + // Pass in -1 to skip the check entirely + extraEvaled: Int = 0, + // Perform a second evaluation of the same tasks, and make sure the + // outputs are the same but nothing was evaluated. 
Disable this if you + // are directly evaluating tasks which need to re-evaluate every time + secondRunNoOp: Boolean = true) = { + + val evaled = evaluator.evaluate(Agg(target)) + + val (matchingReturnedEvaled, extra) = + evaled.evaluated.indexed.partition(expEvaled.contains) + + assert( + evaled.values == Seq(expValue), + matchingReturnedEvaled.toSet == expEvaled.toSet, + extraEvaled == -1 || extra.length == extraEvaled + ) + + // Second time the value is already cached, so no evaluation needed + if (secondRunNoOp){ + val evaled2 = evaluator.evaluate(Agg(target)) + val expecteSecondRunEvaluated = Agg() + assert( + evaled2.values == evaled.values, + evaled2.evaluated == expecteSecondRunEvaluated + ) + } + } + } + + + val tests = Tests{ + object graphs extends TestGraphs() + import graphs._ + import TestGraphs._ + 'evaluateSingle - { + + 'singleton - { + import singleton._ + val check = new Checker(singleton) + // First time the target is evaluated + check(single, expValue = 0, expEvaled = Agg(single)) + + single.counter += 1 + // After incrementing the counter, it forces re-evaluation + check(single, expValue = 1, expEvaled = Agg(single)) + } + 'backtickIdentifiers - { + import graphs.bactickIdentifiers._ + val check = new Checker(bactickIdentifiers) + + check(`a-down-target`, expValue = 0, expEvaled = Agg(`up-target`, `a-down-target`)) + + `a-down-target`.counter += 1 + check(`a-down-target`, expValue = 1, expEvaled = Agg(`a-down-target`)) + + `up-target`.counter += 1 + check(`a-down-target`, expValue = 2, expEvaled = Agg(`up-target`, `a-down-target`)) + } + 'pair - { + import pair._ + val check = new Checker(pair) + check(down, expValue = 0, expEvaled = Agg(up, down)) + + down.counter += 1 + check(down, expValue = 1, expEvaled = Agg(down)) + + up.counter += 1 + check(down, expValue = 2, expEvaled = Agg(up, down)) + } + 'anonTriple - { + import anonTriple._ + val check = new Checker(anonTriple) + val middle = down.inputs(0) + check(down, expValue = 0, expEvaled = Agg(up, middle, down)) + + down.counter += 1 + check(down, expValue = 1, expEvaled = Agg(middle, down)) + + up.counter += 1 + check(down, expValue = 2, expEvaled = Agg(up, middle, down)) + + middle.asInstanceOf[TestUtil.Test].counter += 1 + + check(down, expValue = 3, expEvaled = Agg(middle, down)) + } + 'diamond - { + import diamond._ + val check = new Checker(diamond) + check(down, expValue = 0, expEvaled = Agg(up, left, right, down)) + + down.counter += 1 + check(down, expValue = 1, expEvaled = Agg(down)) + + up.counter += 1 + // Increment by 2 because up is referenced twice: once by left once by right + check(down, expValue = 3, expEvaled = Agg(up, left, right, down)) + + left.counter += 1 + check(down, expValue = 4, expEvaled = Agg(left, down)) + + right.counter += 1 + check(down, expValue = 5, expEvaled = Agg(right, down)) + } + 'anonDiamond - { + import anonDiamond._ + val check = new Checker(anonDiamond) + val left = down.inputs(0).asInstanceOf[TestUtil.Test] + val right = down.inputs(1).asInstanceOf[TestUtil.Test] + check(down, expValue = 0, expEvaled = Agg(up, left, right, down)) + + down.counter += 1 + check(down, expValue = 1, expEvaled = Agg(left, right, down)) + + up.counter += 1 + // Increment by 2 because up is referenced twice: once by left once by right + check(down, expValue = 3, expEvaled = Agg(up, left, right, down)) + + left.counter += 1 + check(down, expValue = 4, expEvaled = Agg(left, right, down)) + + right.counter += 1 + check(down, expValue = 5, expEvaled = Agg(left, right, down)) + } + + 
'bigSingleTerminal - { + import bigSingleTerminal._ + val check = new Checker(bigSingleTerminal) + + check(j, expValue = 0, expEvaled = Agg(a, b, e, f, i, j), extraEvaled = 22) + + j.counter += 1 + check(j, expValue = 1, expEvaled = Agg(j), extraEvaled = 3) + + i.counter += 1 + // increment value by 2 because `i` is used twice on the way to `j` + check(j, expValue = 3, expEvaled = Agg(j, i), extraEvaled = 8) + + b.counter += 1 + // increment value by 4 because `b` is used four times on the way to `j` + check(j, expValue = 7, expEvaled = Agg(b, e, f, i, j), extraEvaled = 20) + } + } + + 'evaluateMixed - { + 'separateGroups - { + // Make sure that `left` and `right` are able to recompute separately, + // even though one depends on the other + + import separateGroups._ + val checker = new Checker(separateGroups) + val evaled1 = checker.evaluator.evaluate(Agg(right, left)) + val filtered1 = evaled1.evaluated.filter(_.isInstanceOf[Target[_]]) + assert(filtered1 == Agg(change, left, right)) + val evaled2 = checker.evaluator.evaluate(Agg(right, left)) + val filtered2 = evaled2.evaluated.filter(_.isInstanceOf[Target[_]]) + assert(filtered2 == Agg()) + change.counter += 1 + val evaled3 = checker.evaluator.evaluate(Agg(right, left)) + val filtered3 = evaled3.evaluated.filter(_.isInstanceOf[Target[_]]) + assert(filtered3 == Agg(change, right)) + + + } + 'triangleTask - { + + import triangleTask._ + val checker = new Checker(triangleTask) + checker(right, 3, Agg(left, right), extraEvaled = -1) + checker(left, 1, Agg(), extraEvaled = -1) + + } + 'multiTerminalGroup - { + import multiTerminalGroup._ + + val checker = new Checker(multiTerminalGroup) + checker(right, 1, Agg(right), extraEvaled = -1) + checker(left, 1, Agg(left), extraEvaled = -1) + } + + 'multiTerminalBoundary - { + + import multiTerminalBoundary._ + + val checker = new Checker(multiTerminalBoundary) + checker(task2, 4, Agg(right, left), extraEvaled = -1, secondRunNoOp = false) + checker(task2, 4, Agg(), extraEvaled = -1, secondRunNoOp = false) + } + + 'overrideSuperTask - { + // Make sure you can override targets, call their supers, and have the + // overriden target be allocated a spot within the overriden/ folder of + // the main publically-available target + import canOverrideSuper._ + + val checker = new Checker(canOverrideSuper) + checker(foo, Seq("base", "object"), Agg(foo), extraEvaled = -1) + + + val public = ammonite.ops.read(checker.evaluator.outPath / 'foo / "meta.json") + val overriden = ammonite.ops.read( + checker.evaluator.outPath / 'foo / + 'overriden / "mill" / "util" / "TestGraphs" / "BaseModule" / "foo" / "meta.json" + ) + assert( + public.contains("base"), + public.contains("object"), + overriden.contains("base"), + !overriden.contains("object") + ) + } + 'overrideSuperCommand - { + // Make sure you can override commands, call their supers, and have the + // overriden command be allocated a spot within the overriden/ folder of + // the main publically-available command + import canOverrideSuper._ + + val checker = new Checker(canOverrideSuper) + val runCmd = cmd(1) + checker( + runCmd, + Seq("base1", "object1"), + Agg(runCmd), + extraEvaled = -1, + secondRunNoOp = false + ) + + val public = ammonite.ops.read(checker.evaluator.outPath / 'cmd / "meta.json") + val overriden = ammonite.ops.read( + checker.evaluator.outPath / 'cmd / + 'overriden / "mill" / "util" / "TestGraphs" / "BaseModule"/ "cmd" / "meta.json" + ) + assert( + public.contains("base1"), + public.contains("object1"), + overriden.contains("base1"), + 
!overriden.contains("object1") + ) + } + 'nullTasks - { + import nullTasks._ + val checker = new Checker(nullTasks) + checker(nullTarget1, null, Agg(nullTarget1), extraEvaled = -1) + checker(nullTarget1, null, Agg(), extraEvaled = -1) + checker(nullTarget2, null, Agg(nullTarget2), extraEvaled = -1) + checker(nullTarget2, null, Agg(), extraEvaled = -1) + checker(nullTarget3, null, Agg(nullTarget3), extraEvaled = -1) + checker(nullTarget3, null, Agg(), extraEvaled = -1) + checker(nullTarget4, null, Agg(nullTarget4), extraEvaled = -1) + checker(nullTarget4, null, Agg(), extraEvaled = -1) + + val nc1 = nullCommand1() + val nc2 = nullCommand2() + val nc3 = nullCommand3() + val nc4 = nullCommand4() + + checker(nc1, null, Agg(nc1), extraEvaled = -1, secondRunNoOp = false) + checker(nc1, null, Agg(nc1), extraEvaled = -1, secondRunNoOp = false) + checker(nc2, null, Agg(nc2), extraEvaled = -1, secondRunNoOp = false) + checker(nc2, null, Agg(nc2), extraEvaled = -1, secondRunNoOp = false) + checker(nc3, null, Agg(nc3), extraEvaled = -1, secondRunNoOp = false) + checker(nc3, null, Agg(nc3), extraEvaled = -1, secondRunNoOp = false) + checker(nc4, null, Agg(nc4), extraEvaled = -1, secondRunNoOp = false) + checker(nc4, null, Agg(nc4), extraEvaled = -1, secondRunNoOp = false) + } + + 'tasksAreUncached - { + // Make sure the tasks `left` and `middle` re-compute every time, while + // the target `right` does not + // + // ___ left ___ + // / \ + // up middle -- down + // / + // right + object build extends TestUtil.BaseModule{ + var leftCount = 0 + var rightCount = 0 + var middleCount = 0 + def up = T{ test.anon() } + def left = T.task{ leftCount += 1; up() + 1 } + def middle = T.task{ middleCount += 1; 100 } + def right = T{ rightCount += 1; 10000 } + def down = T{ left() + middle() + right() } + } + + import build._ + + // Ensure task objects themselves are not cached, and recomputed each time + assert( + up eq up, + left ne left, + middle ne middle, + right eq right, + down eq down + ) + + // During the first evaluation, they get computed normally like any + // cached target + val check = new Checker(build) + assert(leftCount == 0, rightCount == 0) + check(down, expValue = 10101, expEvaled = Agg(up, right, down), extraEvaled = 8) + assert(leftCount == 1, middleCount == 1, rightCount == 1) + + // If the upstream `up` doesn't change, the entire block of tasks + // doesn't need to recompute + check(down, expValue = 10101, expEvaled = Agg()) + assert(leftCount == 1, middleCount == 1, rightCount == 1) + + // But if `up` changes, the entire block of downstream tasks needs to + // recompute together, including `middle` which doesn't depend on `up`, + // because tasks have no cached value that can be used. 
`right`, which + // is a cached Target, does not recompute + up.inputs(0).asInstanceOf[Test].counter += 1 + check(down, expValue = 10102, expEvaled = Agg(up, down), extraEvaled = 6) + assert(leftCount == 2, middleCount == 2, rightCount == 1) + + // Running the tasks themselves results in them being recomputed every + // single time, even if nothing changes + check(left, expValue = 2, expEvaled = Agg(), extraEvaled = 1, secondRunNoOp = false) + assert(leftCount == 3, middleCount == 2, rightCount == 1) + check(left, expValue = 2, expEvaled = Agg(), extraEvaled = 1, secondRunNoOp = false) + assert(leftCount == 4, middleCount == 2, rightCount == 1) + + check(middle, expValue = 100, expEvaled = Agg(), extraEvaled = 2, secondRunNoOp = false) + assert(leftCount == 4, middleCount == 3, rightCount == 1) + check(middle, expValue = 100, expEvaled = Agg(), extraEvaled = 2, secondRunNoOp = false) + assert(leftCount == 4, middleCount == 4, rightCount == 1) + } + } + } +} diff --git a/main/test/src/eval/FailureTests.scala b/main/test/src/eval/FailureTests.scala new file mode 100644 index 00000000..dcfbcb60 --- /dev/null +++ b/main/test/src/eval/FailureTests.scala @@ -0,0 +1,132 @@ +package mill.eval +import mill.T +import mill.util.{TestEvaluator, TestUtil} +import mill.api.Result.OuterStack +import utest._ +import utest.framework.TestPath + + +object FailureTests extends TestSuite{ + + val tests = Tests{ + val graphs = new mill.util.TestGraphs() + import graphs._ + + 'evaluateSingle - { + val check = new TestEvaluator(singleton) + check.fail( + target = singleton.single, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + singleton.single.failure = Some("lols") + + check.fail( + target = singleton.single, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Failure("lols")) + ) + + singleton.single.failure = None + + check.fail( + target = singleton.single, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + + val ex = new IndexOutOfBoundsException() + singleton.single.exception = Some(ex) + + + check.fail( + target = singleton.single, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Exception(ex, new OuterStack(Nil))) + ) + } + 'evaluatePair - { + val check = new TestEvaluator(pair) + check.fail( + pair.down, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + pair.up.failure = Some("lols") + + check.fail( + pair.down, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Skipped) + ) + + pair.up.failure = None + + check.fail( + pair.down, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + pair.up.exception = Some(new IndexOutOfBoundsException()) + + check.fail( + pair.down, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Skipped) + ) + } + 'evaluateBacktickIdentifiers - { + val check = new TestEvaluator(bactickIdentifiers) + import bactickIdentifiers._ + check.fail( + `a-down-target`, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + `up-target`.failure = Some("lols") + + check.fail( + `a-down-target`, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Skipped) + ) + + `up-target`.failure = None + + check.fail( + `a-down-target`, + expectedFailCount = 0, + expectedRawValues = Seq(Result.Success(0)) + ) + + `up-target`.exception = Some(new IndexOutOfBoundsException()) + + check.fail( + `a-down-target`, + expectedFailCount = 1, + expectedRawValues = Seq(Result.Skipped) + ) + } + 'multipleUsesOfDest - { + object build extends 
TestUtil.BaseModule { + // Using `T.ctx( ).dest` twice in a single task is ok + def left = T{ + T.ctx().dest.toString.length + T.ctx().dest.toString.length } + + // Using `T.ctx( ).dest` once in two different tasks is not ok + val task = T.task{ T.ctx().dest.toString.length } + def right = T{ task() + left() + T.ctx().dest.toString().length } + } + + val check = new TestEvaluator(build) + val Right(_) = check(build.left) + val Left(Result.Exception(e, _)) = check(build.right) + assert(e.getMessage.contains("`dest` can only be used in one place")) + } + } +} + diff --git a/main/test/src/eval/JavaCompileJarTests.scala b/main/test/src/eval/JavaCompileJarTests.scala new file mode 100644 index 00000000..426c6ea6 --- /dev/null +++ b/main/test/src/eval/JavaCompileJarTests.scala @@ -0,0 +1,164 @@ +package mill.eval + +import mill.define.{Discover, Input, Target, Task} +import mill.modules.Jvm +import mill.api.Ctx.Dest +import mill.{Module, T} +import mill.util.{DummyLogger, Loose, TestEvaluator, TestUtil} +import mill.util.Strict.Agg +import utest._ +import mill._ +object JavaCompileJarTests extends TestSuite{ + def compileAll(sources: mill.util.Loose.Agg[PathRef])(implicit ctx: Dest) = { + os.makeDir.all(ctx.dest) + + os.proc("javac", sources.map(_.path.toString()).toSeq, "-d", ctx.dest).call(ctx.dest) + PathRef(ctx.dest) + } + + val tests = Tests{ + 'javac { + val javacSrcPath = os.pwd / 'main / 'test / 'resources / 'examples / 'javac + val javacDestPath = TestUtil.getOutPath() / 'src + + os.makeDir.all(javacDestPath / os.up) + os.copy(javacSrcPath, javacDestPath) + + object Build extends TestUtil.BaseModule{ + def sourceRootPath = javacDestPath / 'src + def resourceRootPath = javacDestPath / 'resources + + // sourceRoot -> allSources -> classFiles + // | + // v + // resourceRoot ----> jar + def sourceRoot = T.sources{ sourceRootPath } + def resourceRoot = T.sources{ resourceRootPath } + def allSources = T{ sourceRoot().flatMap(p => os.walk(p.path)).map(PathRef(_)) } + def classFiles = T{ compileAll(allSources()) } + def jar = T{ Jvm.createJar(Loose.Agg(classFiles().path) ++ resourceRoot().map(_.path)) } + // Test createJar() with optional file filter. + def filterJar(fileFilter: (os.Path, os.RelPath) => Boolean) = T{ Jvm.createJar(Loose.Agg(classFiles().path) ++ resourceRoot().map(_.path), None, fileFilter) } + + def run(mainClsName: String) = T.command{ + os.proc('java, "-Duser.language=en", "-cp", classFiles().path, mainClsName).call() + } + } + + import Build._ + + var evaluator = new TestEvaluator(Build) + def eval[T](t: Task[T]) = { + evaluator.apply(t) + } + def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { + evaluator.check(targets, expected) + } + + def append(path: os.Path, txt: String) = ammonite.ops.write.append(path, txt) + + + check( + targets = Agg(jar), + expected = Agg(allSources, classFiles, jar) + ) + + // Re-running with no changes results in nothing being evaluated + check(targets = Agg(jar), expected = Agg()) + // Appending an empty string gets ignored due to file-content hashing + append(sourceRootPath / "Foo.java", "") + check(targets = Agg(jar), expected = Agg()) + + // Appending whitespace forces a recompile, but the classfilesend up + // exactly the same so no re-jarring. 
+ append(sourceRootPath / "Foo.java", " ") + // Note that `sourceRoot` and `resourceRoot` never turn up in the `expected` + list, because they are `Source`s not `Target`s + check(targets = Agg(jar), expected = Agg(/*sourceRoot, */allSources, classFiles)) + + // Appending a new class changes the classfiles, which forces us to + re-create the final jar + append(sourceRootPath / "Foo.java", "\nclass FooTwo{}") + check(targets = Agg(jar), expected = Agg(allSources, classFiles, jar)) + + // Tweaking the resources forces rebuild of the final jar, without + recompiling classfiles + append(resourceRootPath / "hello.txt", " ") + check(targets = Agg(jar), expected = Agg(jar)) + + // You can swap evaluators halfway without any ill effects + evaluator = new TestEvaluator(Build) + + // Asking for an intermediate target forces things to be built up to that + target only; these are re-used for any downstream targets requested + append(sourceRootPath / "Bar.java", "\nclass BarTwo{}") + append(resourceRootPath / "hello.txt", " ") + check(targets = Agg(classFiles), expected = Agg(allSources, classFiles)) + check(targets = Agg(jar), expected = Agg(jar)) + check(targets = Agg(allSources), expected = Agg()) + + append(sourceRootPath / "Bar.java", "\nclass BarThree{}") + append(resourceRootPath / "hello.txt", " ") + check(targets = Agg(resourceRoot), expected = Agg()) + check(targets = Agg(allSources), expected = Agg(allSources)) + check(targets = Agg(jar), expected = Agg(classFiles, jar)) + + val jarContents = os.proc('jar, "-tf", evaluator.outPath/'jar/'dest/"out.jar").call(evaluator.outPath).out.string + val expectedJarContents = + """META-INF/MANIFEST.MF + |test/Bar.class + |test/BarThree.class + |test/BarTwo.class + |test/Foo.class + |test/FooTwo.class + |hello.txt + |""".stripMargin + assert(jarContents.linesIterator.toSeq == expectedJarContents.linesIterator.toSeq) + + // Create the Jar again, but this time, filter out the Foo files. + def noFoos(s: String) = !s.contains("Foo") + val filterFunc = (p: os.Path, r: os.RelPath) => noFoos(r.last) + eval(filterJar(filterFunc)) + val filteredJarContents = os.proc('jar, "-tf", evaluator.outPath/'filterJar/'dest/"out.jar").call(evaluator.outPath).out.string + assert(filteredJarContents.linesIterator.toSeq == expectedJarContents.linesIterator.filter(noFoos(_)).toSeq) + + val executed = os.proc('java, "-cp", evaluator.outPath/'jar/'dest/"out.jar", "test.Foo").call(evaluator.outPath).out.string + assert(executed == (31337 + 271828) + System.lineSeparator) + + for(i <- 0 until 3){ + // Build.run is not cached, so every time we eval it, it has to + re-evaluate + val Right((runOutput, evalCount)) = eval(Build.run("test.Foo")) + assert( + runOutput.out.string == (31337 + 271828) + System.lineSeparator, + evalCount == 1 + ) + } + + val Left(Result.Exception(ex, _)) = eval(Build.run("test.BarFour")) + + assert(ex.getMessage.contains("Could not find or load main class")) + + append( + sourceRootPath / "Bar.java", + """ + class BarFour{ + public static void main(String[] args){ + System.out.println("New Cls!"); + } + } + """ + ) + val Right((runOutput2, evalCount2)) = eval(Build.run("test.BarFour")) + assert( + runOutput2.out.string == "New Cls!" + System.lineSeparator, + evalCount2 == 3 + ) + val Right((runOutput3, evalCount3)) = eval(Build.run("test.BarFour")) + assert( + runOutput3.out.string == "New Cls!" 
+ System.lineSeparator, + evalCount3 == 1 + ) + } + } +} diff --git a/main/test/src/eval/ModuleTests.scala b/main/test/src/eval/ModuleTests.scala new file mode 100644 index 00000000..f28fc9b6 --- /dev/null +++ b/main/test/src/eval/ModuleTests.scala @@ -0,0 +1,45 @@ +package mill.eval + + +import mill.util.{TestEvaluator, TestUtil} +import mill.T +import mill.define.Discover + +import utest._ + +object ModuleTests extends TestSuite{ + object ExternalModule extends mill.define.ExternalModule { + def x = T{13} + object inner extends mill.Module{ + def y = T{17} + } + lazy val millDiscover = Discover[this.type] + } + object Build extends TestUtil.BaseModule{ + def z = T{ ExternalModule.x() + ExternalModule.inner.y() } + } + val tests = Tests { + os.remove.all(TestEvaluator.externalOutPath) + 'externalModuleTargetsAreNamespacedByModulePackagePath - { + val check = new TestEvaluator(Build) + val zresult = check.apply(Build.z) + assert( + zresult == Right((30, 1)), + os.read(check.evaluator.outPath / 'z / "meta.json").contains("30"), + os.read(TestEvaluator.externalOutPath / 'mill / 'eval / 'ModuleTests / 'ExternalModule / 'x / "meta.json").contains("13"), + os.read(TestEvaluator.externalOutPath / 'mill / 'eval / 'ModuleTests / 'ExternalModule / 'inner / 'y / "meta.json").contains("17") + ) + } + 'externalModuleMustBeGlobalStatic - { + + + object Build extends mill.define.ExternalModule { + + def z = T{ ExternalModule.x() + ExternalModule.inner.y() } + lazy val millDiscover = Discover[this.type] + } + + intercept[java.lang.AssertionError]{ Build } + } + } +} diff --git a/main/test/src/eval/TarjanTests.scala b/main/test/src/eval/TarjanTests.scala new file mode 100644 index 00000000..2f9d0a4d --- /dev/null +++ b/main/test/src/eval/TarjanTests.scala @@ -0,0 +1,91 @@ +package mill.eval + +import utest._ + +object TarjanTests extends TestSuite{ + def check(input: Seq[Seq[Int]], expected: Seq[Seq[Int]]) = { + val result = Tarjans(input).map(_.sorted) + val sortedExpected = expected.map(_.sorted) + assert(result == sortedExpected) + } + val tests = Tests{ + // + 'empty - check(Seq(), Seq()) + + // (0) + 'singleton - check(Seq(Seq()), Seq(Seq(0))) + + + // (0)-. + // ^._/ + 'selfCycle - check(Seq(Seq(0)), Seq(Seq(0))) + + // (0) <-> (1) + 'simpleCycle- check(Seq(Seq(1), Seq(0)), Seq(Seq(1, 0))) + + // (0) (1) (2) + 'multipleSingletons - check( + Seq(Seq(), Seq(), Seq()), + Seq(Seq(0), Seq(1), Seq(2)) + ) + + // (0) -> (1) -> (2) + 'straightLineNoCycles- check( + Seq(Seq(1), Seq(2), Seq()), + Seq(Seq(2), Seq(1), Seq(0)) + ) + + // (0) <- (1) <- (2) + 'straightLineNoCyclesReversed- check( + Seq(Seq(), Seq(0), Seq(1)), + Seq(Seq(0), Seq(1), Seq(2)) + ) + + // (0) <-> (1) (2) -> (3) -> (4) + // ^.____________/ + 'independentSimpleCycles - check( + Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2)), + Seq(Seq(1, 0), Seq(4, 3, 2)) + ) + + // ___________________ + // v \ + // (0) <-> (1) (2) -> (3) -> (4) + // ^.____________/ + 'independentLinkedCycles - check( + Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2, 1)), + Seq(Seq(1, 0), Seq(4, 3, 2)) + ) + // _____________ + // / v + // (0) <-> (1) (2) -> (3) -> (4) + // ^.____________/ + 'independentLinkedCycles2 - check( + Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2)), + Seq(Seq(4, 3, 2), Seq(1, 0)) + ) + + // _____________ + // / v + // (0) <-> (1) (2) -> (3) -> (4) + // ^. 
^.____________/ + // \________________/ + 'combinedCycles - check( + Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2, 1)), + Seq(Seq(4, 3, 2, 1, 0)) + ) + // + // (0) <-> (1) <- (2) <- (3) <-> (4) <- (5) + // ^.____________/ / / + // / / + // (6) <- (7) <-/ (8) <-' + // / / + // v / + // (9) <--------' + 'combinedCycles - check( + Seq(Seq(1), Seq(0), Seq(0, 1), Seq(2, 4, 7, 9), Seq(3), Seq(4, 8), Seq(9), Seq(6), Seq(), Seq()), + Seq(Seq(0, 1), Seq(2), Seq(9), Seq(6), Seq(7), Seq(3, 4), Seq(8), Seq(5)) + ) + + } +} \ No newline at end of file diff --git a/main/test/src/eval/TaskTests.scala b/main/test/src/eval/TaskTests.scala new file mode 100644 index 00000000..0bfd8efc --- /dev/null +++ b/main/test/src/eval/TaskTests.scala @@ -0,0 +1,95 @@ +package mill.eval + +import utest._ + +import mill.T + +import mill.util.TestEvaluator +object TaskTests extends TestSuite{ + val tests = Tests{ + object build extends mill.util.TestUtil.BaseModule{ + var count = 0 + // Explicitly instantiate `Function1` objects to make sure we get + // different instances each time + def staticWorker = T.worker{ + new Function1[Int, Int] { + def apply(v1: Int) = v1 + 1 + } + } + def noisyWorker = T.worker{ + new Function1[Int, Int] { + def apply(v1: Int) = input() + 1 + } + } + def input = T.input{ + count += 1 + count + } + def task = T.task{ + count += 1 + count + } + def taskInput = T{ input() } + def taskNoInput = T{ task() } + + def persistent = T.persistent{ + input() // force re-computation + os.makeDir.all(T.ctx().dest) + os.write.append(T.ctx().dest/'count, "hello\n") + os.read.lines(T.ctx().dest/'count).length + } + def nonPersistent = T{ + input() // force re-computation + os.makeDir.all(T.ctx().dest) + os.write.append(T.ctx().dest/'count, "hello\n") + os.read.lines(T.ctx().dest/'count).length + } + + def staticWorkerDownstream = T{ + staticWorker().apply(1) + } + def noisyWorkerDownstream = T{ + noisyWorker().apply(1) + } + } + + 'inputs - { + // Inputs always re-evaluate, including forcing downstream cached Targets + // to re-evaluate, but normal Tasks behind a Target run once then are cached + val check = new TestEvaluator(build) + + val Right((1, 1)) = check.apply(build.taskInput) + val Right((2, 1)) = check.apply(build.taskInput) + val Right((3, 1)) = check.apply(build.taskInput) + + val Right((4, 1)) = check.apply(build.taskNoInput) + val Right((4, 0)) = check.apply(build.taskNoInput) + val Right((4, 0)) = check.apply(build.taskNoInput) + } + + 'persistent - { + // Persistent tasks keep the working dir around between runs + val check = new TestEvaluator(build) + val Right((1, 1)) = check.apply(build.persistent) + val Right((2, 1)) = check.apply(build.persistent) + val Right((3, 1)) = check.apply(build.persistent) + + val Right((1, 1)) = check.apply(build.nonPersistent) + val Right((1, 1)) = check.apply(build.nonPersistent) + val Right((1, 1)) = check.apply(build.nonPersistent) + } + + 'worker - { + // Persistent task + def check = new TestEvaluator(build) + + val Right((2, 1)) = check.apply(build.noisyWorkerDownstream) + val Right((3, 1)) = check.apply(build.noisyWorkerDownstream) + val Right((4, 1)) = check.apply(build.noisyWorkerDownstream) + + val Right((2, 1)) = check.apply(build.staticWorkerDownstream) + val Right((2, 0)) = check.apply(build.staticWorkerDownstream) + val Right((2, 0)) = check.apply(build.staticWorkerDownstream) + } + } +} diff --git a/main/test/src/main/ClientServerTests.scala b/main/test/src/main/ClientServerTests.scala new file mode 100644 index 00000000..05238a5f --- /dev/null 
+++ b/main/test/src/main/ClientServerTests.scala @@ -0,0 +1,214 @@ +package mill.main +import java.io._ + +import mill.main.client.{Util, Locks} + +import scala.collection.JavaConverters._ +import utest._ +class EchoServer extends MillServerMain[Int]{ + def main0(args: Array[String], + stateCache: Option[Int], + mainInteractive: Boolean, + stdin: InputStream, + stdout: PrintStream, + stderr: PrintStream, + env: Map[String, String], + setIdle: Boolean => Unit) = { + + val reader = new BufferedReader(new InputStreamReader(stdin)) + val str = reader.readLine() + if (args.nonEmpty){ + stdout.println(str + args(0)) + } + env.toSeq.sortBy(_._1).foreach{ + case (key, value) => stdout.println(s"$key=$value") + } + stdout.flush() + if (args.nonEmpty){ + stderr.println(str.toUpperCase + args(0)) + } + stderr.flush() + (true, None) + } +} + +object ClientServerTests extends TestSuite{ + def initStreams() = { + val in = new ByteArrayInputStream("hello\n".getBytes()) + val out = new ByteArrayOutputStream() + val err = new ByteArrayOutputStream() + (in, out, err) + } + def init() = { + val tmpDir = java.nio.file.Files.createTempDirectory("") + val locks = Locks.memory() + + (tmpDir, locks) + } + + def spawnEchoServer(tmpDir : java.nio.file.Path, locks: Locks): Unit = { + new Thread(() => new Server( + tmpDir.toString, + new EchoServer(), + () => (), + 1000, + locks + ).run()).start() + } + + def runClientAux(tmpDir : java.nio.file.Path, locks: Locks) + (env : Map[String, String], args: Array[String]) = { + val (in, out, err) = initStreams() + Server.lockBlock(locks.clientLock){ + mill.main.client.MillClientMain.run( + tmpDir.toString, + () => spawnEchoServer(tmpDir, locks), + locks, + in, + out, + err, + args, + env.asJava + ) + Thread.sleep(100) + (new String(out.toByteArray), new String(err.toByteArray)) + } + } + + def tests = Tests{ + 'hello - { + if (!Util.isWindows){ + val (tmpDir, locks) = init() + def runClient(s: String) = runClientAux(tmpDir, locks)(Map.empty, Array(s)) + + // Make sure the simple "have the client start a server and + exchange one message" workflow works from end to end. + + assert( + locks.clientLock.probe(), + locks.serverLock.probe(), + locks.processLock.probe() + ) + + val (out1, err1) = runClient("world") + + assert( + out1 == "helloworld\n", + err1 == "HELLOworld\n" + ) + + // Give a bit of time for the server to release the lock and + re-acquire it to signal to the client that it's done + Thread.sleep(100) + + assert( + locks.clientLock.probe(), + !locks.serverLock.probe(), + !locks.processLock.probe() + ) + + // A second client in sequence connects to the same server + val (out2, err2) = runClient(" WORLD") + + assert( + out2 == "hello WORLD\n", + err2 == "HELLO WORLD\n" + ) + + // Make sure the server times out if not used for a while + Thread.sleep(2000) + assert( + locks.clientLock.probe(), + locks.serverLock.probe(), + locks.processLock.probe() + ) + + // Have a third client spawn/connect-to a new server at the same path + val (out3, err3) = runClient(" World") + assert( + out3 == "hello World\n", + err3 == "HELLO World\n" + ) + } + + 'envVars - { + if (!Util.isWindows){ + val (tmpDir, locks) = init() + + def runClient(env : Map[String, String]) = runClientAux(tmpDir, locks)(env, Array()) + + // Make sure the simple "have the client start a server and + exchange one message" workflow works from end to end. 
+ + assert( + locks.clientLock.probe(), + locks.serverLock.probe(), + locks.processLock.probe() + ) + + def longString(s : String) = Array.fill(1000)(s).mkString + val b1000 = longString("b") + val c1000 = longString("c") + val a1000 = longString("a") + + val env = Map( + "a" -> a1000, + "b" -> b1000, + "c" -> c1000 + ) + + + val (out1, err1) = runClient(env) + val expected = s"a=$a1000\nb=$b1000\nc=$c1000\n" + + assert( + out1 == expected, + err1 == "" + ) + + // Give a bit of time for the server to release the lock and + // re-acquire it to signal to the client that it's done + Thread.sleep(100) + + assert( + locks.clientLock.probe(), + !locks.serverLock.probe(), + !locks.processLock.probe() + ) + + val path = List( + "/Users/foo/Library/Haskell/bin", + "/usr/local/git/bin", + "/sw/bin/", + "/usr/local/bin", + "/usr/local/", + "/usr/local/sbin", + "/usr/local/mysql/bin", + "/usr/local/bin", + "/usr/bin", + "/bin", + "/usr/sbin", + "/sbin", + "/opt/X11/bin", + "/usr/local/MacGPG2/bin", + "/Library/TeX/texbin", + "/usr/local/bin/", + "/Users/foo/bin", + "/Users/foo/go/bin", + "~/.bloop" + ) + + val pathEnvVar = path.mkString(":") + val (out2, err2) = runClient(Map("PATH" -> pathEnvVar)) + + val expected2 = s"PATH=$pathEnvVar\n" + + assert( + out2 == expected2, + err2 == "" + ) + } + } + } + } +} diff --git a/main/test/src/main/ForeignBuildsTest.scala b/main/test/src/main/ForeignBuildsTest.scala new file mode 100644 index 00000000..cfc8d00c --- /dev/null +++ b/main/test/src/main/ForeignBuildsTest.scala @@ -0,0 +1,30 @@ +package mill.main + +import mill.util.ScriptTestSuite +import utest._ + +object ForeignBuildsTest extends ScriptTestSuite(fork = false) { + def workspaceSlug = "foreign-builds" + def scriptSourcePath = + os.pwd / 'main / 'test / 'resources / 'examples / 'foreign + override def buildPath = os.rel / 'project / "build.sc" + + val tests = Tests { + initWorkspace() + 'test - { + // See https://github.com/lihaoyi/mill/issues/302 + if (!ammonite.util.Util.java9OrAbove) { + assert( + eval("checkProjectPaths"), + eval("checkInnerPaths"), + eval("checkOuterPaths"), + eval("checkOuterInnerPaths"), + eval("checkProjectDests"), + eval("checkInnerDests"), + eval("checkOuterDests"), + eval("checkOuterInnerDests") + ) + } + } + } +} diff --git a/main/test/src/main/ForeignConflictTest.scala b/main/test/src/main/ForeignConflictTest.scala new file mode 100644 index 00000000..a4352bb6 --- /dev/null +++ b/main/test/src/main/ForeignConflictTest.scala @@ -0,0 +1,25 @@ +package mill.main + + +import mill.util.ScriptTestSuite +import utest._ + +object ForeignConflictTest extends ScriptTestSuite(fork = false) { + def workspaceSlug = "foreign-conflict" + def scriptSourcePath = + os.pwd / 'main / 'test / 'resources / 'examples / 'foreign + override def buildPath = os.rel / 'conflict / "build.sc" + + val tests = Tests { + initWorkspace() + 'test - { + // see https://github.com/lihaoyi/mill/issues/302 + if (!ammonite.util.Util.java9OrAbove) { + assert( + eval("checkPaths"), + eval("checkDests") + ) + } + } + } +} diff --git a/main/test/src/main/JavaCompileJarTests.scala b/main/test/src/main/JavaCompileJarTests.scala new file mode 100644 index 00000000..37c64b05 --- /dev/null +++ b/main/test/src/main/JavaCompileJarTests.scala @@ -0,0 +1,67 @@ +package mill.main + +import mill.util.ScriptTestSuite +import utest._ + +object JavaCompileJarTests extends ScriptTestSuite(fork = false) { + def workspaceSlug = "java-compile-jar" + def scriptSourcePath = os.pwd / 'main / 'test / 'resources / 'examples / 'javac + 
val tests = Tests{ + initWorkspace() + 'test - { + if (!ammonite.util.Util.java9OrAbove) { + // Basic target evaluation works + assert(eval("classFiles")) + assert(eval("jar")) + + val classFiles1 = meta("classFiles") + val jar1 = meta("jar") + + assert(eval("classFiles")) + assert(eval("jar")) + + // Repeated evaluation has the same results + val classFiles2 = meta("classFiles") + val jar2 = meta("jar") + + assert( + jar1 == jar2, + classFiles1 == classFiles2 + ) + + // If we update resources, classFiles are unchanged but jar changes + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "txt")){ + os.write.append(scalaFile, "\n") + } + + assert(eval("classFiles")) + assert(eval("jar")) + + val classFiles3 = meta("classFiles") + val jar3 = meta("jar") + + assert( + jar2 != jar3, + classFiles2 == classFiles3 + ) + + // We can intentionally break the code, have the targets break, then + // fix the code and have them recover. + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "java")){ + os.write.append(scalaFile, "\n}") + } + + assert(!eval("classFiles")) + assert(!eval("jar")) + + for(scalaFile <- os.walk(workspacePath).filter(_.ext == "java")){ + os.write.over(scalaFile, os.read(scalaFile).dropRight(2)) + } + + assert(eval("classFiles")) + assert(eval("jar")) + } + } + } +} + diff --git a/main/test/src/main/MainTests.scala b/main/test/src/main/MainTests.scala new file mode 100644 index 00000000..e836099c --- /dev/null +++ b/main/test/src/main/MainTests.scala @@ -0,0 +1,272 @@ +package mill.main + +import mill.define.{Discover, Segment, Task} +import mill.util.TestGraphs._ + +import utest._ +object MainTests extends TestSuite{ + + def check[T <: mill.define.BaseModule](module: T)( + selectorString: String, + expected0: Either[String, Seq[T => Task[_]]])= { + + val expected = expected0.map(_.map(_(module))) + val resolved = for{ + selectors <- mill.util.ParseArgs(Seq(selectorString), multiSelect = false).map(_._1.head) + val crossSelectors = selectors._2.value.map{case Segment.Cross(x) => x.toList.map(_.toString) case _ => Nil} + task <- mill.main.ResolveTasks.resolve( + selectors._2.value.toList, module, module.millDiscover, Nil, crossSelectors.toList, Nil + ) + } yield task + assert(resolved == expected) + } + val tests = Tests{ + val graphs = new mill.util.TestGraphs() + import graphs._ + 'single - { + val check = MainTests.check(singleton) _ + 'pos - check("single", Right(Seq(_.single))) + 'neg1 - check("sngle", Left("Cannot resolve sngle. Did you mean single?")) + 'neg2 - check("snigle", Left("Cannot resolve snigle. Did you mean single?")) + 'neg3 - check("nsiigle", Left("Cannot resolve nsiigle. Did you mean single?")) + 'neg4 - check("ansiigle", Left("Cannot resolve ansiigle. Try `mill resolve _` to see what's available.")) + 'neg5 - check("doesntExist", Left("Cannot resolve doesntExist. Try `mill resolve _` to see what's available.")) + 'neg6 - check("single.doesntExist", Left("Task single is not a module and has no children.")) + 'neg7 - check("", Left("Selector cannot be empty")) + } + 'backtickIdentifiers - { + val check = MainTests.check(bactickIdentifiers) _ + 'pos1 - check("up-target", Right(Seq(_.`up-target`))) + 'pos2 - check("a-down-target", Right(Seq(_.`a-down-target`))) + 'neg1 - check("uptarget", Left("Cannot resolve uptarget. Did you mean up-target?")) + 'neg2 - check("upt-arget", Left("Cannot resolve upt-arget. 
Did you mean up-target?")) + 'neg3 - check("up-target.doesntExist", Left("Task up-target is not a module and has no children.")) + 'neg4 - check("", Left("Selector cannot be empty")) + 'neg5 - check("invisible&", Left("Cannot resolve invisible. Try `mill resolve _` to see what's available.")) + 'nested - { + 'pos - check("nested-module.nested-target", Right(Seq(_.`nested-module`.`nested-target`))) + 'neg - check("nested-module.doesntExist", Left("Cannot resolve nested-module.doesntExist. Try `mill resolve nested-module._` to see what's available.")) + } + } + 'nested - { + val check = MainTests.check(nestedModule) _ + 'pos1 - check("single", Right(Seq(_.single))) + 'pos2 - check("nested.single", Right(Seq(_.nested.single))) + 'pos3 - check("classInstance.single", Right(Seq(_.classInstance.single))) + 'neg1 - check( + "doesntExist", + Left("Cannot resolve doesntExist. Try `mill resolve _` to see what's available.") + ) + 'neg2 - check( + "single.doesntExist", + Left("Task single is not a module and has no children.") + ) + 'neg3 - check( + "nested.doesntExist", + Left("Cannot resolve nested.doesntExist. Try `mill resolve nested._` to see what's available.") + ) + 'neg3 - check( + "nested.singel", + Left("Cannot resolve nested.singel. Did you mean nested.single?") + ) + 'neg4 - check( + "classInstance.doesntExist", + Left("Cannot resolve classInstance.doesntExist. Try `mill resolve classInstance._` to see what's available.") + ) + 'wildcard - check( + "_.single", + Right(Seq( + _.classInstance.single, + _.nested.single + )) + ) + 'wildcardNeg - check( + "_._.single", + Left("Cannot resolve _._.single. Try `mill resolve _` to see what's available") + ) + 'wildcardNeg2 - check( + "_._.__", + Left("Cannot resolve _._.__. Try `mill resolve _` to see what's available") + ) + 'wildcardNeg3 - check( + "nested._.foobar", + Left("Cannot resolve nested._.foobar. Try `mill resolve nested._` to see what's available") + ) + 'wildcard2 - check( + "__.single", + Right(Seq( + _.single, + _.classInstance.single, + _.nested.single + )) + ) + + 'wildcard3 - check( + "_.__.single", + Right(Seq( + _.classInstance.single, + _.nested.single + )) + ) + + } + 'cross - { + 'single - { + val check = MainTests.check(singleCross) _ + 'pos1 - check("cross[210].suffix", Right(Seq(_.cross("210").suffix))) + 'pos2 - check("cross[211].suffix", Right(Seq(_.cross("211").suffix))) + 'neg1 - check( + "cross[210].doesntExist", + Left("Cannot resolve cross[210].doesntExist. Try `mill resolve cross[210]._` to see what's available.") + ) + 'neg2 - check( + "cross[doesntExist].doesntExist", + Left("Cannot resolve cross[doesntExist]. Try `mill resolve cross[__]` to see what's available.") + ) + 'neg3 - check( + "cross[221].doesntExist", + Left("Cannot resolve cross[221]. Did you mean cross[211]?") + ) + 'neg4 - check( + "cross[doesntExist].suffix", + Left("Cannot resolve cross[doesntExist]. 
Try `mill resolve cross[__]` to see what's available.") + ) + 'wildcard - check( + "cross[_].suffix", + Right(Seq( + _.cross("210").suffix, + _.cross("211").suffix, + _.cross("212").suffix + )) + ) + 'wildcard2 - check( + "cross[__].suffix", + Right(Seq( + _.cross("210").suffix, + _.cross("211").suffix, + _.cross("212").suffix + )) + ) + } + 'double - { + val check = MainTests.check(doubleCross) _ + 'pos1 - check( + "cross[210,jvm].suffix", + Right(Seq(_.cross("210", "jvm").suffix)) + ) + 'pos2 - check( + "cross[211,jvm].suffix", + Right(Seq(_.cross("211", "jvm").suffix)) + ) + 'wildcard - { + 'labelNeg - check( + "_.suffix", + Left("Cannot resolve _.suffix. Try `mill resolve _._` to see what's available.") + ) + 'labelPos - check( + "__.suffix", + Right(Seq( + _.cross("210", "jvm").suffix, + _.cross("210", "js").suffix, + + _.cross("211", "jvm").suffix, + _.cross("211", "js").suffix, + + _.cross("212", "jvm").suffix, + _.cross("212", "js").suffix, + _.cross("212", "native").suffix + )) + ) + 'first - check( + "cross[_,jvm].suffix", + Right(Seq( + _.cross("210", "jvm").suffix, + _.cross("211", "jvm").suffix, + _.cross("212", "jvm").suffix + )) + ) + 'second - check( + "cross[210,_].suffix", + Right(Seq( + _.cross("210", "jvm").suffix, + _.cross("210", "js").suffix + )) + ) + 'both - check( + "cross[_,_].suffix", + Right(Seq( + _.cross("210", "jvm").suffix, + _.cross("210", "js").suffix, + + _.cross("211", "jvm").suffix, + _.cross("211", "js").suffix, + + _.cross("212", "jvm").suffix, + _.cross("212", "js").suffix, + _.cross("212", "native").suffix + )) + ) + 'both2 - check( + "cross[__].suffix", + Right(Seq( + _.cross("210", "jvm").suffix, + _.cross("210", "js").suffix, + + _.cross("211", "jvm").suffix, + _.cross("211", "js").suffix, + + _.cross("212", "jvm").suffix, + _.cross("212", "js").suffix, + _.cross("212", "native").suffix + )) + ) + } + } + 'nested - { + val check = MainTests.check(nestedCrosses) _ + 'pos1 - check( + "cross[210].cross2[js].suffix", + Right(Seq(_.cross("210").cross2("js").suffix)) + ) + 'pos2 - check( + "cross[211].cross2[jvm].suffix", + Right(Seq(_.cross("211").cross2("jvm").suffix)) + ) + 'wildcard - { + 'first - check( + "cross[_].cross2[jvm].suffix", + Right(Seq( + _.cross("210").cross2("jvm").suffix, + _.cross("211").cross2("jvm").suffix, + _.cross("212").cross2("jvm").suffix + )) + ) + 'second - check( + "cross[210].cross2[_].suffix", + Right(Seq( + _.cross("210").cross2("jvm").suffix, + _.cross("210").cross2("js").suffix, + _.cross("210").cross2("native").suffix + )) + ) + 'both - check( + "cross[_].cross2[_].suffix", + Right(Seq( + _.cross("210").cross2("jvm").suffix, + _.cross("210").cross2("js").suffix, + _.cross("210").cross2("native").suffix, + + _.cross("211").cross2("jvm").suffix, + _.cross("211").cross2("js").suffix, + _.cross("211").cross2("native").suffix, + + _.cross("212").cross2("jvm").suffix, + _.cross("212").cross2("js").suffix, + _.cross("212").cross2("native").suffix + )) + ) + } + } + } + } +} diff --git a/main/test/src/mill/TestMain.scala b/main/test/src/mill/TestMain.scala deleted file mode 100644 index 80e7e627..00000000 --- a/main/test/src/mill/TestMain.scala +++ /dev/null @@ -1,6 +0,0 @@ -package mill - -object TestMain { - def main(args: Array[String]): Unit = { - } -} diff --git a/main/test/src/mill/UTestFramework.scala b/main/test/src/mill/UTestFramework.scala deleted file mode 100644 index c234151b..00000000 --- a/main/test/src/mill/UTestFramework.scala +++ /dev/null @@ -1,11 +0,0 @@ -package mill - -class UTestFramework extends 
utest.runner.Framework { - override def exceptionStackFrameHighlighter(s: StackTraceElement) = { - s.getClassName.startsWith("mill.") - } - override def setup() = { - - os.remove.all(os.pwd / 'target / 'workspace) - } -} diff --git a/main/test/src/mill/define/ApplicativeTests.scala b/main/test/src/mill/define/ApplicativeTests.scala deleted file mode 100644 index 9dd2132f..00000000 --- a/main/test/src/mill/define/ApplicativeTests.scala +++ /dev/null @@ -1,125 +0,0 @@ -package mill.define - -import mill.api.Ctx.ImplicitStub -import utest._ - -import scala.annotation.compileTimeOnly -import scala.language.experimental.macros - - -object ApplicativeTests extends TestSuite { - implicit def optionToOpt[T](o: Option[T]): Opt[T] = new Opt(o) - class Opt[T](val self: Option[T]) extends Applicative.Applyable[Option, T] - object Opt extends OptGenerated with Applicative.Applyer[Opt, Option, Applicative.Id, String]{ - - val injectedCtx = "helloooo" - def underlying[A](v: Opt[A]) = v.self - def apply[T](t: T): Option[T] = macro Applicative.impl[Option, T, String] - - def mapCtx[A, B](a: Option[A])(f: (A, String) => B): Option[B] = a.map(f(_, injectedCtx)) - def zip() = Some(()) - def zip[A](a: Option[A]) = a.map(Tuple1(_)) - } - class Counter{ - var value = 0 - def apply() = { - value += 1 - value - } - } - @compileTimeOnly("Target.ctx() can only be used with a T{...} block") - @ImplicitStub - implicit def taskCtx: String = ??? - - val tests = Tests{ - - 'selfContained - { - - 'simple - assert(Opt("lol " + 1) == Some("lol 1")) - 'singleSome - assert(Opt("lol " + Some("hello")()) == Some("lol hello")) - 'twoSomes - assert(Opt(Some("lol ")() + Some("hello")()) == Some("lol hello")) - 'singleNone - assert(Opt("lol " + None()) == None) - 'twoNones - assert(Opt("lol " + None() + None()) == None) - } - 'context - { - assert(Opt(Opt.ctx() + Some("World")()) == Some("hellooooWorld")) - } - 'capturing - { - val lol = "lol " - def hell(o: String) = "hell" + o - 'simple - assert(Opt(lol + 1) == Some("lol 1")) - 'singleSome - assert(Opt(lol + Some(hell("o"))()) == Some("lol hello")) - 'twoSomes - assert(Opt(Some(lol)() + Some(hell("o"))()) == Some("lol hello")) - 'singleNone - assert(Opt(lol + None()) == None) - 'twoNones - assert(Opt(lol + None() + None()) == None) - } - 'allowedLocalDef - { - // Although x is defined inside the Opt{...} block, it is also defined - // within the LHS of the Applyable#apply call, so it is safe to life it - // out into the `zipMap` arguments list. 
- val res = Opt{ "lol " + Some("hello").flatMap(x => Some(x)).apply() } - assert(res == Some("lol hello")) - } - 'upstreamAlwaysEvaluated - { - // Whether or not control-flow reaches the Applyable#apply call inside an - // Opt{...} block, we always evaluate the LHS of the Applyable#apply - // because it gets lifted out of any control flow statements - val counter = new Counter() - def up = Opt{ "lol " + counter() } - val down = Opt{ if ("lol".length > 10) up() else "fail" } - assert( - down == Some("fail"), - counter.value == 1 - ) - } - 'upstreamEvaluatedOnlyOnce - { - // Even if control-flow reaches the Applyable#apply call more than once, - // it only gets evaluated once due to its lifting out of the Opt{...} block - val counter = new Counter() - def up = Opt{ "lol " + counter() } - def runTwice[T](t: => T) = (t, t) - val down = Opt{ runTwice(up()) } - assert( - down == Some(("lol 1", "lol 1")), - counter.value == 1 - ) - } - 'evaluationsInsideLambdasWork - { - // This required some fiddling with owner chains inside the macro to get - // working, so ensure it doesn't regress - val counter = new Counter() - def up = Opt{ "hello" + counter() } - val down1 = Opt{ (() => up())() } - val down2 = Opt{ Seq(1, 2, 3).map(n => up() * n) } - assert( - down1 == Some("hello1"), - down2 == Some(Seq("hello2", "hello2hello2", "hello2hello2hello2")) - ) - } - 'appliesEvaluatedOncePerLexicalCallsite - { - // If you have multiple Applyable#apply() lexically in the source code of - // your Opt{...} call, each one gets evaluated once, even if the LHS of each - // apply() call is identical. It's up to the downstream zipMap() - // implementation to decide if it wants to dedup them or do other things. - val counter = new Counter() - def up = Opt{ "hello" + counter() } - val down = Opt{ Seq(1, 2, 3).map(n => n + up() + up()) } - assert(down == Some(Seq("1hello1hello2", "2hello1hello2", "3hello1hello2"))) - } - 'appliesEvaluateBeforehand - { - // Every Applyable#apply() within a Opt{...} block evaluates before any - // other logic within that block, even if they would happen first in the - // normal Scala evaluation order - val counter = new Counter() - def up = Opt{ counter() } - val down = Opt{ - val res = counter() - val one = up() - val two = up() - val three = up() - (res, one, two, three) - } - assert(down == Some((4, 1, 2, 3))) - } - } -} diff --git a/main/test/src/mill/define/BasePathTests.scala b/main/test/src/mill/define/BasePathTests.scala deleted file mode 100644 index b8a653c8..00000000 --- a/main/test/src/mill/define/BasePathTests.scala +++ /dev/null @@ -1,73 +0,0 @@ -package mill.define - -import mill.util.{TestGraphs, TestUtil} -import utest._ -import mill.{Module, T} -object BasePathTests extends TestSuite{ - val testGraphs = new TestGraphs - val tests = Tests{ - def check[T <: Module](m: T)(f: T => Module, segments: String*) = { - val remaining = f(m).millSourcePath.relativeTo(m.millSourcePath).segments - assert(remaining == segments) - } - 'singleton - { - check(testGraphs.singleton)(identity) - } - 'backtickIdentifiers - { - check(testGraphs.bactickIdentifiers)( - _.`nested-module`, - "nested-module" - ) - } - 'separateGroups - { - check(TestGraphs.triangleTask)(identity) - } - 'TraitWithModuleObject - { - check(TestGraphs.TraitWithModuleObject)( - _.TraitModule, - "TraitModule" - ) - } - 'nestedModuleNested - { - check(TestGraphs.nestedModule)(_.nested, "nested") - } - 'nestedModuleInstance - { - check(TestGraphs.nestedModule)(_.classInstance, "classInstance") - } - 'singleCross - { - 
check(TestGraphs.singleCross)(_.cross, "cross") - check(TestGraphs.singleCross)(_.cross("210"), "cross", "210") - check(TestGraphs.singleCross)(_.cross("211"), "cross", "211") - } - 'doubleCross - { - check(TestGraphs.doubleCross)(_.cross, "cross") - check(TestGraphs.doubleCross)(_.cross("210", "jvm"), "cross", "210", "jvm") - check(TestGraphs.doubleCross)(_.cross("212", "js"), "cross", "212", "js") - } - 'nestedCrosses - { - check(TestGraphs.nestedCrosses)(_.cross, "cross") - check(TestGraphs.nestedCrosses)( - _.cross("210").cross2("js"), - "cross", "210", "cross2", "js" - ) - } - 'overriden - { - object overridenBasePath extends TestUtil.BaseModule { - override def millSourcePath = os.pwd / 'overridenBasePathRootValue - object nested extends Module{ - override def millSourcePath = super.millSourcePath / 'overridenBasePathNested - object nested extends Module{ - override def millSourcePath = super.millSourcePath / 'overridenBasePathDoubleNested - } - } - } - assert( - overridenBasePath.millSourcePath == os.pwd / 'overridenBasePathRootValue, - overridenBasePath.nested.millSourcePath == os.pwd / 'overridenBasePathRootValue / 'nested / 'overridenBasePathNested, - overridenBasePath.nested.nested.millSourcePath == os.pwd / 'overridenBasePathRootValue / 'nested / 'overridenBasePathNested / 'nested / 'overridenBasePathDoubleNested - ) - } - - } -} - diff --git a/main/test/src/mill/define/CacherTests.scala b/main/test/src/mill/define/CacherTests.scala deleted file mode 100644 index 59ebf3f6..00000000 --- a/main/test/src/mill/define/CacherTests.scala +++ /dev/null @@ -1,75 +0,0 @@ -package mill.define - -import mill.util.{DummyLogger, TestEvaluator, TestUtil} -import mill.util.Strict.Agg -import mill.T -import mill.api.Result.Success -import utest._ -import utest.framework.TestPath - - -object CacherTests extends TestSuite{ - object Base extends Base - trait Base extends TestUtil.BaseModule{ - def value = T{ 1 } - def result = T{ Success(1) } - } - object Middle extends Middle - trait Middle extends Base{ - override def value = T{ super.value() + 2} - def overriden = T{ super.value()} - } - object Terminal extends Terminal - trait Terminal extends Middle{ - override def value = T{ super.value() + 4} - } - - val tests = Tests{ - def eval[T <: TestUtil.BaseModule, V](mapping: T, v: Task[V]) - (implicit tp: TestPath) = { - val evaluator = new TestEvaluator(mapping) - evaluator(v).right.get._1 - } - def check(x: Any, y: Any) = assert(x == y) - - 'simpleDefIsCached - { - Predef.assert(Base.value eq Base.value) - Predef.assert(eval(Base, Base.value) == 1) - } - - 'resultDefIsCached - { - Predef.assert(Base.result eq Base.result) - Predef.assert(eval(Base, Base.result) == 1) - } - - - 'overridingDefIsAlsoCached - { - Predef.assert(eval(Middle, Middle.value) == 3) - Predef.assert(Middle.value eq Middle.value) - } - - 'overridenDefRemainsAvailable - { - Predef.assert(eval(Middle, Middle.overriden) == 1) - } - - - 'multipleOverridesWork- { - Predef.assert(eval(Terminal, Terminal.value) == 7) - Predef.assert(eval(Terminal, Terminal.overriden) == 1) - } - // Doesn't fail, presumably compileError doesn't go far enough in the - // compilation pipeline to hit the override checks - // - // 'overrideOutsideModuleFails - { - // compileError(""" - // trait Foo{ - // def x = 1 - // } - // object Bar extends Foo{ - // def x = 2 - // } - // """) - // } - } -} - diff --git a/main/test/src/mill/define/DiscoverTests.scala b/main/test/src/mill/define/DiscoverTests.scala deleted file mode 100644 index 248d6afe..00000000 --- 
a/main/test/src/mill/define/DiscoverTests.scala +++ /dev/null @@ -1,63 +0,0 @@ -package mill.define - -import mill.util.TestGraphs -import utest._ - -object DiscoverTests extends TestSuite{ - val testGraphs = new TestGraphs - val tests = Tests{ - def check[T <: Module](m: T)(targets: (T => Target[_])*) = { - val discovered = m.millInternal.targets - val expected = targets.map(_(m)).toSet - assert(discovered == expected) - } - 'singleton - { - check(testGraphs.singleton)(_.single) - } - 'backtickIdentifiers { - check(testGraphs.bactickIdentifiers)(_.`up-target`, _.`a-down-target`, _.`nested-module`.`nested-target`) - } - 'separateGroups - { - check(TestGraphs.triangleTask)(_.left, _.right) - } - 'TraitWithModuleObject - { - check(TestGraphs.TraitWithModuleObject)(_.TraitModule.testFrameworks) - } - 'nestedModule - { - check(TestGraphs.nestedModule)(_.single, _.nested.single, _.classInstance.single) - } - 'singleCross - { - check(TestGraphs.singleCross)( - _.cross("210").suffix, - _.cross("211").suffix, - _.cross("212").suffix - ) - } - 'doubleCross - { - check(TestGraphs.doubleCross)( - _.cross("210", "jvm").suffix, - _.cross("210", "js").suffix, - _.cross("211", "jvm").suffix, - _.cross("211", "js").suffix, - _.cross("212", "jvm").suffix, - _.cross("212", "js").suffix, - _.cross("212", "native").suffix - ) - } - 'nestedCrosses - { - check(TestGraphs.nestedCrosses)( - _.cross("210").cross2("jvm").suffix, - _.cross("210").cross2("js").suffix, - _.cross("210").cross2("native").suffix, - _.cross("211").cross2("jvm").suffix, - _.cross("211").cross2("js").suffix, - _.cross("211").cross2("native").suffix, - _.cross("212").cross2("jvm").suffix, - _.cross("212").cross2("js").suffix, - _.cross("212").cross2("native").suffix - ) - } - - } -} - diff --git a/main/test/src/mill/define/GraphTests.scala b/main/test/src/mill/define/GraphTests.scala deleted file mode 100644 index 224ce59f..00000000 --- a/main/test/src/mill/define/GraphTests.scala +++ /dev/null @@ -1,211 +0,0 @@ -package mill.define - - -import mill.eval.Evaluator -import mill.util.{TestGraphs, TestUtil} -import utest._ -import mill.util.Strict.Agg -object GraphTests extends TestSuite{ - - val tests = Tests{ - - - val graphs = new TestGraphs() - import graphs._ - import TestGraphs._ - - 'topoSortedTransitiveTargets - { - def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { - val result = Graph.topoSorted(Graph.transitiveTargets(targets)).values - TestUtil.checkTopological(result) - assert(result == expected) - } - - 'singleton - check( - targets = Agg(singleton.single), - expected = Agg(singleton.single) - ) - 'backtickIdentifiers - check( - targets = Agg(bactickIdentifiers.`a-down-target`), - expected = Agg(bactickIdentifiers.`up-target`, bactickIdentifiers.`a-down-target`) - ) - 'pair - check( - targets = Agg(pair.down), - expected = Agg(pair.up, pair.down) - ) - 'anonTriple - check( - targets = Agg(anonTriple.down), - expected = Agg(anonTriple.up, anonTriple.down.inputs(0), anonTriple.down) - ) - 'diamond - check( - targets = Agg(diamond.down), - expected = Agg(diamond.up, diamond.left, diamond.right, diamond.down) - ) - 'anonDiamond - check( - targets = Agg(diamond.down), - expected = Agg( - diamond.up, - diamond.down.inputs(0), - diamond.down.inputs(1), - diamond.down - ) - ) - 'defCachedDiamond - check( - targets = Agg(defCachedDiamond.down), - expected = Agg( - defCachedDiamond.up.inputs(0), - defCachedDiamond.up, - defCachedDiamond.down.inputs(0).inputs(0).inputs(0), - defCachedDiamond.down.inputs(0).inputs(0), - 
defCachedDiamond.down.inputs(0).inputs(1).inputs(0), - defCachedDiamond.down.inputs(0).inputs(1), - defCachedDiamond.down.inputs(0), - defCachedDiamond.down - ) - ) - 'bigSingleTerminal - { - val result = Graph.topoSorted(Graph.transitiveTargets(Agg(bigSingleTerminal.j))).values - TestUtil.checkTopological(result) - assert(result.size == 28) - } - } - - 'groupAroundNamedTargets - { - def check[T, R <: Target[Int]](base: T) - (target: T => R, - important0: Agg[T => Target[_]], - expected: Agg[(R, Int)]) = { - - val topoSorted = Graph.topoSorted(Graph.transitiveTargets(Agg(target(base)))) - - val important = important0.map(_ (base)) - val grouped = Graph.groupAroundImportantTargets(topoSorted) { - case t: Target[_] if important.contains(t) => t - } - val flattened = Agg.from(grouped.values().flatMap(_.items)) - - TestUtil.checkTopological(flattened) - for ((terminal, expectedSize) <- expected) { - val grouping = grouped.lookupKey(terminal) - assert( - grouping.size == expectedSize, - grouping.flatMap(_.asTarget: Option[Target[_]]).filter(important.contains) == Agg(terminal) - ) - } - } - - 'singleton - check(singleton)( - _.single, - Agg(_.single), - Agg(singleton.single -> 1) - ) - 'backtickIdentifiers - check(bactickIdentifiers)( - _.`a-down-target`, - Agg(_.`up-target`, _.`a-down-target`), - Agg( - bactickIdentifiers.`up-target` -> 1, - bactickIdentifiers.`a-down-target` -> 1 - ) - ) - 'pair - check(pair)( - _.down, - Agg(_.up, _.down), - Agg(pair.up -> 1, pair.down -> 1) - ) - 'anonTriple - check(anonTriple)( - _.down, - Agg(_.up, _.down), - Agg(anonTriple.up -> 1, anonTriple.down -> 2) - ) - 'diamond - check(diamond)( - _.down, - Agg(_.up, _.left, _.right, _.down), - Agg( - diamond.up -> 1, - diamond.left -> 1, - diamond.right -> 1, - diamond.down -> 1 - ) - ) - - 'defCachedDiamond - check(defCachedDiamond)( - _.down, - Agg(_.up, _.left, _.right, _.down), - Agg( - defCachedDiamond.up -> 2, - defCachedDiamond.left -> 2, - defCachedDiamond.right -> 2, - defCachedDiamond.down -> 2 - ) - ) - - 'anonDiamond - check(anonDiamond)( - _.down, - Agg(_.down, _.up), - Agg( - anonDiamond.up -> 1, - anonDiamond.down -> 3 - ) - ) - 'bigSingleTerminal - check(bigSingleTerminal)( - _.j, - Agg(_.a, _.b, _.e, _.f, _.i, _.j), - Agg( - bigSingleTerminal.a -> 3, - bigSingleTerminal.b -> 2, - bigSingleTerminal.e -> 9, - bigSingleTerminal.i -> 6, - bigSingleTerminal.f -> 4, - bigSingleTerminal.j -> 4 - ) - ) - } - 'multiTerminalGroupCounts - { - def countGroups(goals: Task[_]*) = { - - val topoSorted = Graph.topoSorted( - Graph.transitiveTargets(Agg.from(goals)) - ) - val grouped = Graph.groupAroundImportantTargets(topoSorted) { - case t: NamedTask[Any] => t - case t if goals.contains(t) => t - } - grouped.keyCount - } - - 'separateGroups - { - import separateGroups._ - val groupCount = countGroups(right, left) - assert(groupCount == 3) - } - - 'triangleTask - { - // Make sure the following graph ends up as a single group, since although - // `right` depends on `left`, both of them depend on the un-cached `task` - // which would force them both to re-compute every time `task` changes - import triangleTask._ - val groupCount = countGroups(right, left) - assert(groupCount == 2) - } - - - 'multiTerminalGroup - { - // Make sure the following graph ends up as two groups - import multiTerminalGroup._ - val groupCount = countGroups(right, left) - assert(groupCount == 2) - } - - - 'multiTerminalBoundary - { - // Make sure the following graph ends up as a three groups: one for - // each cached target, and one for the 
downstream task we are running - import multiTerminalBoundary._ - val groupCount = countGroups(task2) - assert(groupCount == 3) - } - } - - - } -} diff --git a/main/test/src/mill/define/MacroErrorTests.scala b/main/test/src/mill/define/MacroErrorTests.scala deleted file mode 100644 index c8b140fa..00000000 --- a/main/test/src/mill/define/MacroErrorTests.scala +++ /dev/null @@ -1,145 +0,0 @@ -package mill.define - -import utest._ -import mill.{T, Module} -import mill.util.TestUtil -object MacroErrorTests extends TestSuite{ - - val tests = Tests{ - - 'errors{ - val expectedMsg = - "T{} members must be defs defined in a Cacher class/trait/object body" - - val err = compileError("object Foo extends TestUtil.BaseModule{ val x = T{1} }") - assert(err.msg == expectedMsg) - } - - 'badParameterSets - { - 'command - { - val e = compileError(""" - object foo extends mill.util.TestUtil.BaseModule{ - def w = T.command{1} - } - mill.define.Discover[foo.type] - """) - assert( - e.msg.contains("`T.command` definitions must have 1 parameter list"), - e.pos.contains("def w = ") - ) - } - 'target - { - val e = compileError(""" - object foo extends mill.util.TestUtil.BaseModule{ - def x() = T{1} - } - mill.define.Discover[foo.type] - """) - assert( - e.msg.contains("`T{...}` definitions must have 0 parameter lists"), - e.pos.contains("def x() = ") - ) - } - 'input - { - val e = compileError(""" - object foo extends mill.util.TestUtil.BaseModule{ - def y() = T.input{1} - } - mill.define.Discover[foo.type] - """) - assert( - e.msg.contains("`T.input` definitions must have 0 parameter lists"), - e.pos.contains("def y() = ") - ) - } - 'sources - { - val e = compileError(""" - object foo extends mill.util.TestUtil.BaseModule{ - def z() = T.sources{ammonite.ops.pwd} - } - mill.define.Discover[foo.type] - """) - assert( - e.msg.contains("`T.sources` definitions must have 0 parameter lists"), - e.pos.contains("def z() = ") - ) - } - 'persistent - { - val e = compileError(""" - object foo extends mill.util.TestUtil.BaseModule{ - def a() = T.persistent{1} - } - mill.define.Discover[foo.type] - """) - assert( - e.msg.contains("`T.persistent` definitions must have 0 parameter lists"), - e.pos.contains("def a() = ") - ) - } - } - 'badTmacro - { - // Make sure we can reference values from outside the T{...} block as part - // of our `Target#apply()` calls, but we cannot reference any values that - // come from inside the T{...} block - 'pos - { - val e = compileError(""" - val a = T{ 1 } - val arr = Array(a) - val b = { - val c = 0 - T{ - arr(c)() - } - } - """) - assert(e.msg.contains( - "Modules, Targets and Commands can only be defined within a mill Module") - ) - } - 'neg - { - - val expectedMsg = - "Target#apply() call cannot use `value n` defined within the T{...} block" - val err = compileError("""new Module{ - def a = T{ 1 } - val arr = Array(a) - def b = { - T{ - val n = 0 - arr(n)() - } - } - }""") - assert(err.msg == expectedMsg) - } - 'neg2 - { - - val expectedMsg = - "Target#apply() call cannot use `value x` defined within the T{...} block" - val err = compileError("""new Module{ - def a = T{ 1 } - val arr = Array(a) - def b = { - T{ - arr.map{x => x()} - } - } - }""") - assert(err.msg == expectedMsg) - } - 'neg3{ - val borkedCachedDiamond1 = utest.compileError(""" - object borkedCachedDiamond1 { - def up = T{ TestUtil.test() } - def left = T{ TestUtil.test(up) } - def right = T{ TestUtil.test(up) } - def down = T{ TestUtil.test(left, right) } - } - """) - assert(borkedCachedDiamond1.msg.contains( - "Modules, 
Targets and Commands can only be defined within a mill Module") - ) - } - } - } -} diff --git a/main/test/src/mill/eval/CrossTests.scala b/main/test/src/mill/eval/CrossTests.scala deleted file mode 100644 index f194924e..00000000 --- a/main/test/src/mill/eval/CrossTests.scala +++ /dev/null @@ -1,56 +0,0 @@ -package mill.eval - - -import mill.define.Discover -import mill.util.TestEvaluator - -import mill.util.TestGraphs.{crossResolved, doubleCross, nestedCrosses, singleCross} -import utest._ -object CrossTests extends TestSuite{ - val tests = Tests{ - 'singleCross - { - val check = new TestEvaluator(singleCross) - - val Right(("210", 1)) = check.apply(singleCross.cross("210").suffix) - val Right(("211", 1)) = check.apply(singleCross.cross("211").suffix) - val Right(("212", 1)) = check.apply(singleCross.cross("212").suffix) - } - - 'crossResolved - { - val check = new TestEvaluator(crossResolved) - - val Right(("2.10", 1)) = check.apply(crossResolved.foo("2.10").suffix) - val Right(("2.11", 1)) = check.apply(crossResolved.foo("2.11").suffix) - val Right(("2.12", 1)) = check.apply(crossResolved.foo("2.12").suffix) - - val Right(("_2.10", 1)) = check.apply(crossResolved.bar("2.10").longSuffix) - val Right(("_2.11", 1)) = check.apply(crossResolved.bar("2.11").longSuffix) - val Right(("_2.12", 1)) = check.apply(crossResolved.bar("2.12").longSuffix) - } - - - 'doubleCross - { - val check = new TestEvaluator(doubleCross) - - val Right(("210_jvm", 1)) = check.apply(doubleCross.cross("210", "jvm").suffix) - val Right(("210_js", 1)) = check.apply(doubleCross.cross("210", "js").suffix) - val Right(("211_jvm", 1)) = check.apply(doubleCross.cross("211", "jvm").suffix) - val Right(("211_js", 1)) = check.apply(doubleCross.cross("211", "js").suffix) - val Right(("212_jvm", 1)) = check.apply(doubleCross.cross("212", "jvm").suffix) - val Right(("212_js", 1)) = check.apply(doubleCross.cross("212", "js").suffix) - val Right(("212_native", 1)) = check.apply(doubleCross.cross("212", "native").suffix) - } - - 'nestedCrosses - { - val check = new TestEvaluator(nestedCrosses) - - val Right(("210_jvm", 1)) = check.apply(nestedCrosses.cross("210").cross2("jvm").suffix) - val Right(("210_js", 1)) = check.apply(nestedCrosses.cross("210").cross2("js").suffix) - val Right(("211_jvm", 1)) = check.apply(nestedCrosses.cross("211").cross2("jvm").suffix) - val Right(("211_js", 1)) = check.apply(nestedCrosses.cross("211").cross2("js").suffix) - val Right(("212_jvm", 1)) = check.apply(nestedCrosses.cross("212").cross2("jvm").suffix) - val Right(("212_js", 1)) = check.apply(nestedCrosses.cross("212").cross2("js").suffix) - val Right(("212_native", 1)) = check.apply(nestedCrosses.cross("212").cross2("native").suffix) - } - } -} diff --git a/main/test/src/mill/eval/EvaluationTests.scala b/main/test/src/mill/eval/EvaluationTests.scala deleted file mode 100644 index 74f9088c..00000000 --- a/main/test/src/mill/eval/EvaluationTests.scala +++ /dev/null @@ -1,354 +0,0 @@ -package mill.eval - - -import mill.util.TestUtil.{Test, test} -import mill.define.{Discover, Graph, Target, Task} -import mill.{Module, T} -import mill.util.{DummyLogger, TestEvaluator, TestGraphs, TestUtil} -import mill.util.Strict.Agg -import utest._ -import utest.framework.TestPath - - - -object EvaluationTests extends TestSuite{ - class Checker[T <: TestUtil.BaseModule](module: T)(implicit tp: TestPath) { - // Make sure data is persisted even if we re-create the evaluator each time - - def evaluator = new TestEvaluator(module).evaluator - - def apply(target: 
Task[_], expValue: Any, - expEvaled: Agg[Task[_]], - // How many "other" tasks were evaluated other than those listed above. - // Pass in -1 to skip the check entirely - extraEvaled: Int = 0, - // Perform a second evaluation of the same tasks, and make sure the - // outputs are the same but nothing was evaluated. Disable this if you - // are directly evaluating tasks which need to re-evaluate every time - secondRunNoOp: Boolean = true) = { - - val evaled = evaluator.evaluate(Agg(target)) - - val (matchingReturnedEvaled, extra) = - evaled.evaluated.indexed.partition(expEvaled.contains) - - assert( - evaled.values == Seq(expValue), - matchingReturnedEvaled.toSet == expEvaled.toSet, - extraEvaled == -1 || extra.length == extraEvaled - ) - - // Second time the value is already cached, so no evaluation needed - if (secondRunNoOp){ - val evaled2 = evaluator.evaluate(Agg(target)) - val expecteSecondRunEvaluated = Agg() - assert( - evaled2.values == evaled.values, - evaled2.evaluated == expecteSecondRunEvaluated - ) - } - } - } - - - val tests = Tests{ - object graphs extends TestGraphs() - import graphs._ - import TestGraphs._ - 'evaluateSingle - { - - 'singleton - { - import singleton._ - val check = new Checker(singleton) - // First time the target is evaluated - check(single, expValue = 0, expEvaled = Agg(single)) - - single.counter += 1 - // After incrementing the counter, it forces re-evaluation - check(single, expValue = 1, expEvaled = Agg(single)) - } - 'backtickIdentifiers - { - import graphs.bactickIdentifiers._ - val check = new Checker(bactickIdentifiers) - - check(`a-down-target`, expValue = 0, expEvaled = Agg(`up-target`, `a-down-target`)) - - `a-down-target`.counter += 1 - check(`a-down-target`, expValue = 1, expEvaled = Agg(`a-down-target`)) - - `up-target`.counter += 1 - check(`a-down-target`, expValue = 2, expEvaled = Agg(`up-target`, `a-down-target`)) - } - 'pair - { - import pair._ - val check = new Checker(pair) - check(down, expValue = 0, expEvaled = Agg(up, down)) - - down.counter += 1 - check(down, expValue = 1, expEvaled = Agg(down)) - - up.counter += 1 - check(down, expValue = 2, expEvaled = Agg(up, down)) - } - 'anonTriple - { - import anonTriple._ - val check = new Checker(anonTriple) - val middle = down.inputs(0) - check(down, expValue = 0, expEvaled = Agg(up, middle, down)) - - down.counter += 1 - check(down, expValue = 1, expEvaled = Agg(middle, down)) - - up.counter += 1 - check(down, expValue = 2, expEvaled = Agg(up, middle, down)) - - middle.asInstanceOf[TestUtil.Test].counter += 1 - - check(down, expValue = 3, expEvaled = Agg(middle, down)) - } - 'diamond - { - import diamond._ - val check = new Checker(diamond) - check(down, expValue = 0, expEvaled = Agg(up, left, right, down)) - - down.counter += 1 - check(down, expValue = 1, expEvaled = Agg(down)) - - up.counter += 1 - // Increment by 2 because up is referenced twice: once by left once by right - check(down, expValue = 3, expEvaled = Agg(up, left, right, down)) - - left.counter += 1 - check(down, expValue = 4, expEvaled = Agg(left, down)) - - right.counter += 1 - check(down, expValue = 5, expEvaled = Agg(right, down)) - } - 'anonDiamond - { - import anonDiamond._ - val check = new Checker(anonDiamond) - val left = down.inputs(0).asInstanceOf[TestUtil.Test] - val right = down.inputs(1).asInstanceOf[TestUtil.Test] - check(down, expValue = 0, expEvaled = Agg(up, left, right, down)) - - down.counter += 1 - check(down, expValue = 1, expEvaled = Agg(left, right, down)) - - up.counter += 1 - // Increment by 2 because 
up is referenced twice: once by left once by right - check(down, expValue = 3, expEvaled = Agg(up, left, right, down)) - - left.counter += 1 - check(down, expValue = 4, expEvaled = Agg(left, right, down)) - - right.counter += 1 - check(down, expValue = 5, expEvaled = Agg(left, right, down)) - } - - 'bigSingleTerminal - { - import bigSingleTerminal._ - val check = new Checker(bigSingleTerminal) - - check(j, expValue = 0, expEvaled = Agg(a, b, e, f, i, j), extraEvaled = 22) - - j.counter += 1 - check(j, expValue = 1, expEvaled = Agg(j), extraEvaled = 3) - - i.counter += 1 - // increment value by 2 because `i` is used twice on the way to `j` - check(j, expValue = 3, expEvaled = Agg(j, i), extraEvaled = 8) - - b.counter += 1 - // increment value by 4 because `b` is used four times on the way to `j` - check(j, expValue = 7, expEvaled = Agg(b, e, f, i, j), extraEvaled = 20) - } - } - - 'evaluateMixed - { - 'separateGroups - { - // Make sure that `left` and `right` are able to recompute separately, - // even though one depends on the other - - import separateGroups._ - val checker = new Checker(separateGroups) - val evaled1 = checker.evaluator.evaluate(Agg(right, left)) - val filtered1 = evaled1.evaluated.filter(_.isInstanceOf[Target[_]]) - assert(filtered1 == Agg(change, left, right)) - val evaled2 = checker.evaluator.evaluate(Agg(right, left)) - val filtered2 = evaled2.evaluated.filter(_.isInstanceOf[Target[_]]) - assert(filtered2 == Agg()) - change.counter += 1 - val evaled3 = checker.evaluator.evaluate(Agg(right, left)) - val filtered3 = evaled3.evaluated.filter(_.isInstanceOf[Target[_]]) - assert(filtered3 == Agg(change, right)) - - - } - 'triangleTask - { - - import triangleTask._ - val checker = new Checker(triangleTask) - checker(right, 3, Agg(left, right), extraEvaled = -1) - checker(left, 1, Agg(), extraEvaled = -1) - - } - 'multiTerminalGroup - { - import multiTerminalGroup._ - - val checker = new Checker(multiTerminalGroup) - checker(right, 1, Agg(right), extraEvaled = -1) - checker(left, 1, Agg(left), extraEvaled = -1) - } - - 'multiTerminalBoundary - { - - import multiTerminalBoundary._ - - val checker = new Checker(multiTerminalBoundary) - checker(task2, 4, Agg(right, left), extraEvaled = -1, secondRunNoOp = false) - checker(task2, 4, Agg(), extraEvaled = -1, secondRunNoOp = false) - } - - 'overrideSuperTask - { - // Make sure you can override targets, call their supers, and have the - // overriden target be allocated a spot within the overriden/ folder of - // the main publically-available target - import canOverrideSuper._ - - val checker = new Checker(canOverrideSuper) - checker(foo, Seq("base", "object"), Agg(foo), extraEvaled = -1) - - - val public = ammonite.ops.read(checker.evaluator.outPath / 'foo / "meta.json") - val overriden = ammonite.ops.read( - checker.evaluator.outPath / 'foo / - 'overriden / "mill" / "util" / "TestGraphs" / "BaseModule" / "foo" / "meta.json" - ) - assert( - public.contains("base"), - public.contains("object"), - overriden.contains("base"), - !overriden.contains("object") - ) - } - 'overrideSuperCommand - { - // Make sure you can override commands, call their supers, and have the - // overriden command be allocated a spot within the overriden/ folder of - // the main publically-available command - import canOverrideSuper._ - - val checker = new Checker(canOverrideSuper) - val runCmd = cmd(1) - checker( - runCmd, - Seq("base1", "object1"), - Agg(runCmd), - extraEvaled = -1, - secondRunNoOp = false - ) - - val public = 
ammonite.ops.read(checker.evaluator.outPath / 'cmd / "meta.json") - val overriden = ammonite.ops.read( - checker.evaluator.outPath / 'cmd / - 'overriden / "mill" / "util" / "TestGraphs" / "BaseModule"/ "cmd" / "meta.json" - ) - assert( - public.contains("base1"), - public.contains("object1"), - overriden.contains("base1"), - !overriden.contains("object1") - ) - } - 'nullTasks - { - import nullTasks._ - val checker = new Checker(nullTasks) - checker(nullTarget1, null, Agg(nullTarget1), extraEvaled = -1) - checker(nullTarget1, null, Agg(), extraEvaled = -1) - checker(nullTarget2, null, Agg(nullTarget2), extraEvaled = -1) - checker(nullTarget2, null, Agg(), extraEvaled = -1) - checker(nullTarget3, null, Agg(nullTarget3), extraEvaled = -1) - checker(nullTarget3, null, Agg(), extraEvaled = -1) - checker(nullTarget4, null, Agg(nullTarget4), extraEvaled = -1) - checker(nullTarget4, null, Agg(), extraEvaled = -1) - - val nc1 = nullCommand1() - val nc2 = nullCommand2() - val nc3 = nullCommand3() - val nc4 = nullCommand4() - - checker(nc1, null, Agg(nc1), extraEvaled = -1, secondRunNoOp = false) - checker(nc1, null, Agg(nc1), extraEvaled = -1, secondRunNoOp = false) - checker(nc2, null, Agg(nc2), extraEvaled = -1, secondRunNoOp = false) - checker(nc2, null, Agg(nc2), extraEvaled = -1, secondRunNoOp = false) - checker(nc3, null, Agg(nc3), extraEvaled = -1, secondRunNoOp = false) - checker(nc3, null, Agg(nc3), extraEvaled = -1, secondRunNoOp = false) - checker(nc4, null, Agg(nc4), extraEvaled = -1, secondRunNoOp = false) - checker(nc4, null, Agg(nc4), extraEvaled = -1, secondRunNoOp = false) - } - - 'tasksAreUncached - { - // Make sure the tasks `left` and `middle` re-compute every time, while - // the target `right` does not - // - // ___ left ___ - // / \ - // up middle -- down - // / - // right - object build extends TestUtil.BaseModule{ - var leftCount = 0 - var rightCount = 0 - var middleCount = 0 - def up = T{ test.anon() } - def left = T.task{ leftCount += 1; up() + 1 } - def middle = T.task{ middleCount += 1; 100 } - def right = T{ rightCount += 1; 10000 } - def down = T{ left() + middle() + right() } - } - - import build._ - - // Ensure task objects themselves are not cached, and recomputed each time - assert( - up eq up, - left ne left, - middle ne middle, - right eq right, - down eq down - ) - - // During the first evaluation, they get computed normally like any - // cached target - val check = new Checker(build) - assert(leftCount == 0, rightCount == 0) - check(down, expValue = 10101, expEvaled = Agg(up, right, down), extraEvaled = 8) - assert(leftCount == 1, middleCount == 1, rightCount == 1) - - // If the upstream `up` doesn't change, the entire block of tasks - // doesn't need to recompute - check(down, expValue = 10101, expEvaled = Agg()) - assert(leftCount == 1, middleCount == 1, rightCount == 1) - - // But if `up` changes, the entire block of downstream tasks needs to - // recompute together, including `middle` which doesn't depend on `up`, - // because tasks have no cached value that can be used. 
`right`, which - // is a cached Target, does not recompute - up.inputs(0).asInstanceOf[Test].counter += 1 - check(down, expValue = 10102, expEvaled = Agg(up, down), extraEvaled = 6) - assert(leftCount == 2, middleCount == 2, rightCount == 1) - - // Running the tasks themselves results in them being recomputed every - // single time, even if nothing changes - check(left, expValue = 2, expEvaled = Agg(), extraEvaled = 1, secondRunNoOp = false) - assert(leftCount == 3, middleCount == 2, rightCount == 1) - check(left, expValue = 2, expEvaled = Agg(), extraEvaled = 1, secondRunNoOp = false) - assert(leftCount == 4, middleCount == 2, rightCount == 1) - - check(middle, expValue = 100, expEvaled = Agg(), extraEvaled = 2, secondRunNoOp = false) - assert(leftCount == 4, middleCount == 3, rightCount == 1) - check(middle, expValue = 100, expEvaled = Agg(), extraEvaled = 2, secondRunNoOp = false) - assert(leftCount == 4, middleCount == 4, rightCount == 1) - } - } - } -} diff --git a/main/test/src/mill/eval/FailureTests.scala b/main/test/src/mill/eval/FailureTests.scala deleted file mode 100644 index dcfbcb60..00000000 --- a/main/test/src/mill/eval/FailureTests.scala +++ /dev/null @@ -1,132 +0,0 @@ -package mill.eval -import mill.T -import mill.util.{TestEvaluator, TestUtil} -import mill.api.Result.OuterStack -import utest._ -import utest.framework.TestPath - - -object FailureTests extends TestSuite{ - - val tests = Tests{ - val graphs = new mill.util.TestGraphs() - import graphs._ - - 'evaluateSingle - { - val check = new TestEvaluator(singleton) - check.fail( - target = singleton.single, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - singleton.single.failure = Some("lols") - - check.fail( - target = singleton.single, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Failure("lols")) - ) - - singleton.single.failure = None - - check.fail( - target = singleton.single, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - - val ex = new IndexOutOfBoundsException() - singleton.single.exception = Some(ex) - - - check.fail( - target = singleton.single, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Exception(ex, new OuterStack(Nil))) - ) - } - 'evaluatePair - { - val check = new TestEvaluator(pair) - check.fail( - pair.down, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - pair.up.failure = Some("lols") - - check.fail( - pair.down, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Skipped) - ) - - pair.up.failure = None - - check.fail( - pair.down, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - pair.up.exception = Some(new IndexOutOfBoundsException()) - - check.fail( - pair.down, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Skipped) - ) - } - 'evaluateBacktickIdentifiers - { - val check = new TestEvaluator(bactickIdentifiers) - import bactickIdentifiers._ - check.fail( - `a-down-target`, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - `up-target`.failure = Some("lols") - - check.fail( - `a-down-target`, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Skipped) - ) - - `up-target`.failure = None - - check.fail( - `a-down-target`, - expectedFailCount = 0, - expectedRawValues = Seq(Result.Success(0)) - ) - - `up-target`.exception = Some(new IndexOutOfBoundsException()) - - check.fail( - `a-down-target`, - expectedFailCount = 1, - expectedRawValues = Seq(Result.Skipped) - ) - } - 'multipleUsesOfDest - { - object build 
extends TestUtil.BaseModule { - // Using `T.ctx( ).dest` twice in a single task is ok - def left = T{ + T.ctx().dest.toString.length + T.ctx().dest.toString.length } - - // Using `T.ctx( ).dest` once in two different tasks is not ok - val task = T.task{ T.ctx().dest.toString.length } - def right = T{ task() + left() + T.ctx().dest.toString().length } - } - - val check = new TestEvaluator(build) - val Right(_) = check(build.left) - val Left(Result.Exception(e, _)) = check(build.right) - assert(e.getMessage.contains("`dest` can only be used in one place")) - } - } -} - diff --git a/main/test/src/mill/eval/JavaCompileJarTests.scala b/main/test/src/mill/eval/JavaCompileJarTests.scala deleted file mode 100644 index 426c6ea6..00000000 --- a/main/test/src/mill/eval/JavaCompileJarTests.scala +++ /dev/null @@ -1,164 +0,0 @@ -package mill.eval - -import mill.define.{Discover, Input, Target, Task} -import mill.modules.Jvm -import mill.api.Ctx.Dest -import mill.{Module, T} -import mill.util.{DummyLogger, Loose, TestEvaluator, TestUtil} -import mill.util.Strict.Agg -import utest._ -import mill._ -object JavaCompileJarTests extends TestSuite{ - def compileAll(sources: mill.util.Loose.Agg[PathRef])(implicit ctx: Dest) = { - os.makeDir.all(ctx.dest) - - os.proc("javac", sources.map(_.path.toString()).toSeq, "-d", ctx.dest).call(ctx.dest) - PathRef(ctx.dest) - } - - val tests = Tests{ - 'javac { - val javacSrcPath = os.pwd / 'main / 'test / 'resources / 'examples / 'javac - val javacDestPath = TestUtil.getOutPath() / 'src - - os.makeDir.all(javacDestPath / os.up) - os.copy(javacSrcPath, javacDestPath) - - object Build extends TestUtil.BaseModule{ - def sourceRootPath = javacDestPath / 'src - def resourceRootPath = javacDestPath / 'resources - - // sourceRoot -> allSources -> classFiles - // | - // v - // resourceRoot ----> jar - def sourceRoot = T.sources{ sourceRootPath } - def resourceRoot = T.sources{ resourceRootPath } - def allSources = T{ sourceRoot().flatMap(p => os.walk(p.path)).map(PathRef(_)) } - def classFiles = T{ compileAll(allSources()) } - def jar = T{ Jvm.createJar(Loose.Agg(classFiles().path) ++ resourceRoot().map(_.path)) } - // Test createJar() with optional file filter. - def filterJar(fileFilter: (os.Path, os.RelPath) => Boolean) = T{ Jvm.createJar(Loose.Agg(classFiles().path) ++ resourceRoot().map(_.path), None, fileFilter) } - - def run(mainClsName: String) = T.command{ - os.proc('java, "-Duser.language=en", "-cp", classFiles().path, mainClsName).call() - } - } - - import Build._ - - var evaluator = new TestEvaluator(Build) - def eval[T](t: Task[T]) = { - evaluator.apply(t) - } - def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { - evaluator.check(targets, expected) - } - - def append(path: os.Path, txt: String) = ammonite.ops.write.append(path, txt) - - - check( - targets = Agg(jar), - expected = Agg(allSources, classFiles, jar) - ) - - // Re-running with no changes results in nothing being evaluated - check(targets = Agg(jar), expected = Agg()) - // Appending an empty string gets ignored due to file-content hashing - append(sourceRootPath / "Foo.java", "") - check(targets = Agg(jar), expected = Agg()) - - // Appending whitespace forces a recompile, but the classfilesend up - // exactly the same so no re-jarring. 
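// Illustrative sketch (editor addition, not from the patch): why appending an empty
// string is invisible to file-content hashing while appending a single space forces a
// recompile. Plain JDK MessageDigest is used here, not Mill's actual PathRef/signature
// logic, and the names are invented for illustration.
import java.security.MessageDigest

object ContentHashSketch {
  def sha1(bytes: Array[Byte]): Seq[Byte] =
    MessageDigest.getInstance("SHA-1").digest(bytes).toSeq

  def main(args: Array[String]): Unit = {
    val original      = "class Foo{}".getBytes("UTF-8")
    val appendedEmpty = original ++ "".getBytes("UTF-8")
    val appendedSpace = original ++ " ".getBytes("UTF-8")

    assert(sha1(original) == sha1(appendedEmpty)) // unchanged hash => target stays cached
    assert(sha1(original) != sha1(appendedSpace)) // changed hash => upstream re-evaluates
  }
}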
- append(sourceRootPath / "Foo.java", " ") - // Note that `sourceRoot` and `resourceRoot` never turn up in the `expected` - // list, because they are `Source`s not `Target`s - check(targets = Agg(jar), expected = Agg(/*sourceRoot, */allSources, classFiles)) - - // Appending a new class changes the classfiles, which forces us to - // re-create the final jar - append(sourceRootPath / "Foo.java", "\nclass FooTwo{}") - check(targets = Agg(jar), expected = Agg(allSources, classFiles, jar)) - - // Tweaking the resources forces rebuild of the final jar, without - // recompiling classfiles - append(resourceRootPath / "hello.txt", " ") - check(targets = Agg(jar), expected = Agg(jar)) - - // You can swap evaluators halfway without any ill effects - evaluator = new TestEvaluator(Build) - - // Asking for an intermediate target forces things to be build up to that - // target only; these are re-used for any downstream targets requested - append(sourceRootPath / "Bar.java", "\nclass BarTwo{}") - append(resourceRootPath / "hello.txt", " ") - check(targets = Agg(classFiles), expected = Agg(allSources, classFiles)) - check(targets = Agg(jar), expected = Agg(jar)) - check(targets = Agg(allSources), expected = Agg()) - - append(sourceRootPath / "Bar.java", "\nclass BarThree{}") - append(resourceRootPath / "hello.txt", " ") - check(targets = Agg(resourceRoot), expected = Agg()) - check(targets = Agg(allSources), expected = Agg(allSources)) - check(targets = Agg(jar), expected = Agg(classFiles, jar)) - - val jarContents = os.proc('jar, "-tf", evaluator.outPath/'jar/'dest/"out.jar").call(evaluator.outPath).out.string - val expectedJarContents = - """META-INF/MANIFEST.MF - |test/Bar.class - |test/BarThree.class - |test/BarTwo.class - |test/Foo.class - |test/FooTwo.class - |hello.txt - |""".stripMargin - assert(jarContents.linesIterator.toSeq == expectedJarContents.linesIterator.toSeq) - - // Create the Jar again, but this time, filter out the Foo files. - def noFoos(s: String) = !s.contains("Foo") - val filterFunc = (p: os.Path, r: os.RelPath) => noFoos(r.last) - eval(filterJar(filterFunc)) - val filteredJarContents = os.proc('jar, "-tf", evaluator.outPath/'filterJar/'dest/"out.jar").call(evaluator.outPath).out.string - assert(filteredJarContents.linesIterator.toSeq == expectedJarContents.linesIterator.filter(noFoos(_)).toSeq) - - val executed = os.proc('java, "-cp", evaluator.outPath/'jar/'dest/"out.jar", "test.Foo").call(evaluator.outPath).out.string - assert(executed == (31337 + 271828) + System.lineSeparator) - - for(i <- 0 until 3){ - // Build.run is not cached, so every time we eval it it has to - // re-evaluate - val Right((runOutput, evalCount)) = eval(Build.run("test.Foo")) - assert( - runOutput.out.string == (31337 + 271828) + System.lineSeparator, - evalCount == 1 - ) - } - - val Left(Result.Exception(ex, _)) = eval(Build.run("test.BarFour")) - - assert(ex.getMessage.contains("Could not find or load main class")) - - append( - sourceRootPath / "Bar.java", - """ - class BarFour{ - public static void main(String[] args){ - System.out.println("New Cls!"); - } - } - """ - ) - val Right((runOutput2, evalCount2)) = eval(Build.run("test.BarFour")) - assert( - runOutput2.out.string == "New Cls!" + System.lineSeparator, - evalCount2 == 3 - ) - val Right((runOutput3, evalCount3)) = eval(Build.run("test.BarFour")) - assert( - runOutput3.out.string == "New Cls!" 
+ System.lineSeparator, - evalCount3 == 1 - ) - } - } -} diff --git a/main/test/src/mill/eval/ModuleTests.scala b/main/test/src/mill/eval/ModuleTests.scala deleted file mode 100644 index f28fc9b6..00000000 --- a/main/test/src/mill/eval/ModuleTests.scala +++ /dev/null @@ -1,45 +0,0 @@ -package mill.eval - - -import mill.util.{TestEvaluator, TestUtil} -import mill.T -import mill.define.Discover - -import utest._ - -object ModuleTests extends TestSuite{ - object ExternalModule extends mill.define.ExternalModule { - def x = T{13} - object inner extends mill.Module{ - def y = T{17} - } - lazy val millDiscover = Discover[this.type] - } - object Build extends TestUtil.BaseModule{ - def z = T{ ExternalModule.x() + ExternalModule.inner.y() } - } - val tests = Tests { - os.remove.all(TestEvaluator.externalOutPath) - 'externalModuleTargetsAreNamespacedByModulePackagePath - { - val check = new TestEvaluator(Build) - val zresult = check.apply(Build.z) - assert( - zresult == Right((30, 1)), - os.read(check.evaluator.outPath / 'z / "meta.json").contains("30"), - os.read(TestEvaluator.externalOutPath / 'mill / 'eval / 'ModuleTests / 'ExternalModule / 'x / "meta.json").contains("13"), - os.read(TestEvaluator.externalOutPath / 'mill / 'eval / 'ModuleTests / 'ExternalModule / 'inner / 'y / "meta.json").contains("17") - ) - } - 'externalModuleMustBeGlobalStatic - { - - - object Build extends mill.define.ExternalModule { - - def z = T{ ExternalModule.x() + ExternalModule.inner.y() } - lazy val millDiscover = Discover[this.type] - } - - intercept[java.lang.AssertionError]{ Build } - } - } -} diff --git a/main/test/src/mill/eval/TarjanTests.scala b/main/test/src/mill/eval/TarjanTests.scala deleted file mode 100644 index 2f9d0a4d..00000000 --- a/main/test/src/mill/eval/TarjanTests.scala +++ /dev/null @@ -1,91 +0,0 @@ -package mill.eval - -import utest._ - -object TarjanTests extends TestSuite{ - def check(input: Seq[Seq[Int]], expected: Seq[Seq[Int]]) = { - val result = Tarjans(input).map(_.sorted) - val sortedExpected = expected.map(_.sorted) - assert(result == sortedExpected) - } - val tests = Tests{ - // - 'empty - check(Seq(), Seq()) - - // (0) - 'singleton - check(Seq(Seq()), Seq(Seq(0))) - - - // (0)-. - // ^._/ - 'selfCycle - check(Seq(Seq(0)), Seq(Seq(0))) - - // (0) <-> (1) - 'simpleCycle- check(Seq(Seq(1), Seq(0)), Seq(Seq(1, 0))) - - // (0) (1) (2) - 'multipleSingletons - check( - Seq(Seq(), Seq(), Seq()), - Seq(Seq(0), Seq(1), Seq(2)) - ) - - // (0) -> (1) -> (2) - 'straightLineNoCycles- check( - Seq(Seq(1), Seq(2), Seq()), - Seq(Seq(2), Seq(1), Seq(0)) - ) - - // (0) <- (1) <- (2) - 'straightLineNoCyclesReversed- check( - Seq(Seq(), Seq(0), Seq(1)), - Seq(Seq(0), Seq(1), Seq(2)) - ) - - // (0) <-> (1) (2) -> (3) -> (4) - // ^.____________/ - 'independentSimpleCycles - check( - Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2)), - Seq(Seq(1, 0), Seq(4, 3, 2)) - ) - - // ___________________ - // v \ - // (0) <-> (1) (2) -> (3) -> (4) - // ^.____________/ - 'independentLinkedCycles - check( - Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2, 1)), - Seq(Seq(1, 0), Seq(4, 3, 2)) - ) - // _____________ - // / v - // (0) <-> (1) (2) -> (3) -> (4) - // ^.____________/ - 'independentLinkedCycles2 - check( - Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2)), - Seq(Seq(4, 3, 2), Seq(1, 0)) - ) - - // _____________ - // / v - // (0) <-> (1) (2) -> (3) -> (4) - // ^. 
^.____________/ - // \________________/ - 'combinedCycles - check( - Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2, 1)), - Seq(Seq(4, 3, 2, 1, 0)) - ) - // - // (0) <-> (1) <- (2) <- (3) <-> (4) <- (5) - // ^.____________/ / / - // / / - // (6) <- (7) <-/ (8) <-' - // / / - // v / - // (9) <--------' - 'combinedCycles - check( - Seq(Seq(1), Seq(0), Seq(0, 1), Seq(2, 4, 7, 9), Seq(3), Seq(4, 8), Seq(9), Seq(6), Seq(), Seq()), - Seq(Seq(0, 1), Seq(2), Seq(9), Seq(6), Seq(7), Seq(3, 4), Seq(8), Seq(5)) - ) - - } -} \ No newline at end of file diff --git a/main/test/src/mill/eval/TaskTests.scala b/main/test/src/mill/eval/TaskTests.scala deleted file mode 100644 index 0bfd8efc..00000000 --- a/main/test/src/mill/eval/TaskTests.scala +++ /dev/null @@ -1,95 +0,0 @@ -package mill.eval - -import utest._ - -import mill.T - -import mill.util.TestEvaluator -object TaskTests extends TestSuite{ - val tests = Tests{ - object build extends mill.util.TestUtil.BaseModule{ - var count = 0 - // Explicitly instantiate `Function1` objects to make sure we get - // different instances each time - def staticWorker = T.worker{ - new Function1[Int, Int] { - def apply(v1: Int) = v1 + 1 - } - } - def noisyWorker = T.worker{ - new Function1[Int, Int] { - def apply(v1: Int) = input() + 1 - } - } - def input = T.input{ - count += 1 - count - } - def task = T.task{ - count += 1 - count - } - def taskInput = T{ input() } - def taskNoInput = T{ task() } - - def persistent = T.persistent{ - input() // force re-computation - os.makeDir.all(T.ctx().dest) - os.write.append(T.ctx().dest/'count, "hello\n") - os.read.lines(T.ctx().dest/'count).length - } - def nonPersistent = T{ - input() // force re-computation - os.makeDir.all(T.ctx().dest) - os.write.append(T.ctx().dest/'count, "hello\n") - os.read.lines(T.ctx().dest/'count).length - } - - def staticWorkerDownstream = T{ - staticWorker().apply(1) - } - def noisyWorkerDownstream = T{ - noisyWorker().apply(1) - } - } - - 'inputs - { - // Inputs always re-evaluate, including forcing downstream cached Targets - // to re-evaluate, but normal Tasks behind a Target run once then are cached - val check = new TestEvaluator(build) - - val Right((1, 1)) = check.apply(build.taskInput) - val Right((2, 1)) = check.apply(build.taskInput) - val Right((3, 1)) = check.apply(build.taskInput) - - val Right((4, 1)) = check.apply(build.taskNoInput) - val Right((4, 0)) = check.apply(build.taskNoInput) - val Right((4, 0)) = check.apply(build.taskNoInput) - } - - 'persistent - { - // Persistent tasks keep the working dir around between runs - val check = new TestEvaluator(build) - val Right((1, 1)) = check.apply(build.persistent) - val Right((2, 1)) = check.apply(build.persistent) - val Right((3, 1)) = check.apply(build.persistent) - - val Right((1, 1)) = check.apply(build.nonPersistent) - val Right((1, 1)) = check.apply(build.nonPersistent) - val Right((1, 1)) = check.apply(build.nonPersistent) - } - - 'worker - { - // Persistent task - def check = new TestEvaluator(build) - - val Right((2, 1)) = check.apply(build.noisyWorkerDownstream) - val Right((3, 1)) = check.apply(build.noisyWorkerDownstream) - val Right((4, 1)) = check.apply(build.noisyWorkerDownstream) - - val Right((2, 1)) = check.apply(build.staticWorkerDownstream) - val Right((2, 0)) = check.apply(build.staticWorkerDownstream) - val Right((2, 0)) = check.apply(build.staticWorkerDownstream) - } - } -} diff --git a/main/test/src/mill/main/ClientServerTests.scala b/main/test/src/mill/main/ClientServerTests.scala deleted file mode 100644 index 
05238a5f..00000000 --- a/main/test/src/mill/main/ClientServerTests.scala +++ /dev/null @@ -1,214 +0,0 @@ -package mill.main -import java.io._ - -import mill.main.client.{Util, Locks} - -import scala.collection.JavaConverters._ -import utest._ -class EchoServer extends MillServerMain[Int]{ - def main0(args: Array[String], - stateCache: Option[Int], - mainInteractive: Boolean, - stdin: InputStream, - stdout: PrintStream, - stderr: PrintStream, - env: Map[String, String], - setIdle: Boolean => Unit) = { - - val reader = new BufferedReader(new InputStreamReader(stdin)) - val str = reader.readLine() - if (args.nonEmpty){ - stdout.println(str + args(0)) - } - env.toSeq.sortBy(_._1).foreach{ - case (key, value) => stdout.println(s"$key=$value") - } - stdout.flush() - if (args.nonEmpty){ - stderr.println(str.toUpperCase + args(0)) - } - stderr.flush() - (true, None) - } -} - -object ClientServerTests extends TestSuite{ - def initStreams() = { - val in = new ByteArrayInputStream("hello\n".getBytes()) - val out = new ByteArrayOutputStream() - val err = new ByteArrayOutputStream() - (in, out, err) - } - def init() = { - val tmpDir = java.nio.file.Files.createTempDirectory("") - val locks = Locks.memory() - - (tmpDir, locks) - } - - def spawnEchoServer(tmpDir : java.nio.file.Path, locks: Locks): Unit = { - new Thread(() => new Server( - tmpDir.toString, - new EchoServer(), - () => (), - 1000, - locks - ).run()).start() - } - - def runClientAux(tmpDir : java.nio.file.Path, locks: Locks) - (env : Map[String, String], args: Array[String]) = { - val (in, out, err) = initStreams() - Server.lockBlock(locks.clientLock){ - mill.main.client.MillClientMain.run( - tmpDir.toString, - () => spawnEchoServer(tmpDir, locks), - locks, - in, - out, - err, - args, - env.asJava - ) - Thread.sleep(100) - (new String(out.toByteArray), new String(err.toByteArray)) - } - } - - def tests = Tests{ - 'hello - { - if (!Util.isWindows){ - val (tmpDir, locks) = init() - def runClient(s: String) = runClientAux(tmpDir, locks)(Map.empty, Array(s)) - - // Make sure the simple "have the client start a server and - // exchange one message" workflow works from end to end. - - assert( - locks.clientLock.probe(), - locks.serverLock.probe(), - locks.processLock.probe() - ) - - val (out1, err1) = runClient("world") - - assert( - out1 == "helloworld\n", - err1 == "HELLOworld\n" - ) - - // Give a bit of time for the server to release the lock and - // re-acquire it to signal to the client that it's done - Thread.sleep(100) - - assert( - locks.clientLock.probe(), - !locks.serverLock.probe(), - !locks.processLock.probe() - ) - - // A seecond client in sequence connect to the same server - val (out2, err2) = runClient(" WORLD") - - assert( - out2 == "hello WORLD\n", - err2 == "HELLO WORLD\n" - ) - - // Make sure the server times out of not used for a while - Thread.sleep(2000) - assert( - locks.clientLock.probe(), - locks.serverLock.probe(), - locks.processLock.probe() - ) - - // Have a third client spawn/connect-to a new server at the same path - val (out3, err3) = runClient(" World") - assert( - out3 == "hello World\n", - err3 == "HELLO World\n" - ) - } - - 'envVars - { - if (!Util.isWindows){ - val (tmpDir, locks) = init() - - def runClient(env : Map[String, String]) = runClientAux(tmpDir, locks)(env, Array()) - - // Make sure the simple "have the client start a server and - // exchange one message" workflow works from end to end. 
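// Illustrative sketch (editor addition, not from the patch): the probe()/lockBlock
// pattern these assertions depend on, modelled with a JDK Semaphore instead of
// mill.main.client.Locks. probe() answers "is this lock currently free?", so a held
// server lock signals that a server process is still alive. Class and object names
// below are invented for illustration.
import java.util.concurrent.Semaphore

object LockProbeSketch {
  final class MemoryLock {
    private val permits = new Semaphore(1)
    def probe(): Boolean = {                 // true iff the lock is currently free
      val free = permits.tryAcquire()
      if (free) permits.release()
      free
    }
    def lockBlock[T](body: => T): T = {
      permits.acquire()
      try body finally permits.release()
    }
  }

  def main(args: Array[String]): Unit = {
    val serverLock = new MemoryLock
    assert(serverLock.probe())               // no server running: lock is free
    serverLock.lockBlock {
      assert(!serverLock.probe())            // while held, probe() reports "busy"
    }
    assert(serverLock.probe())               // released once the block exits
  }
}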
- - assert( - locks.clientLock.probe(), - locks.serverLock.probe(), - locks.processLock.probe() - ) - - def longString(s : String) = Array.fill(1000)(s).mkString - val b1000 = longString("b") - val c1000 = longString("c") - val a1000 = longString("a") - - val env = Map( - "a" -> a1000, - "b" -> b1000, - "c" -> c1000 - ) - - - val (out1, err1) = runClient(env) - val expected = s"a=$a1000\nb=$b1000\nc=$c1000\n" - - assert( - out1 == expected, - err1 == "" - ) - - // Give a bit of time for the server to release the lock and - // re-acquire it to signal to the client that it's done - Thread.sleep(100) - - assert( - locks.clientLock.probe(), - !locks.serverLock.probe(), - !locks.processLock.probe() - ) - - val path = List( - "/Users/foo/Library/Haskell/bin", - "/usr/local/git/bin", - "/sw/bin/", - "/usr/local/bin", - "/usr/local/", - "/usr/local/sbin", - "/usr/local/mysql/bin", - "/usr/local/bin", - "/usr/bin", - "/bin", - "/usr/sbin", - "/sbin", - "/opt/X11/bin", - "/usr/local/MacGPG2/bin", - "/Library/TeX/texbin", - "/usr/local/bin/", - "/Users/foo/bin", - "/Users/foo/go/bin", - "~/.bloop" - ) - - val pathEnvVar = path.mkString(":") - val (out2, err2) = runClient(Map("PATH" -> pathEnvVar)) - - val expected2 = s"PATH=$pathEnvVar\n" - - assert( - out2 == expected2, - err2 == "" - ) - } - } - } - } -} diff --git a/main/test/src/mill/main/ForeignBuildsTest.scala b/main/test/src/mill/main/ForeignBuildsTest.scala deleted file mode 100644 index cfc8d00c..00000000 --- a/main/test/src/mill/main/ForeignBuildsTest.scala +++ /dev/null @@ -1,30 +0,0 @@ -package mill.main - -import mill.util.ScriptTestSuite -import utest._ - -object ForeignBuildsTest extends ScriptTestSuite(fork = false) { - def workspaceSlug = "foreign-builds" - def scriptSourcePath = - os.pwd / 'main / 'test / 'resources / 'examples / 'foreign - override def buildPath = os.rel / 'project / "build.sc" - - val tests = Tests { - initWorkspace() - 'test - { - // See https://github.com/lihaoyi/mill/issues/302 - if (!ammonite.util.Util.java9OrAbove) { - assert( - eval("checkProjectPaths"), - eval("checkInnerPaths"), - eval("checkOuterPaths"), - eval("checkOuterInnerPaths"), - eval("checkProjectDests"), - eval("checkInnerDests"), - eval("checkOuterDests"), - eval("checkOuterInnerDests") - ) - } - } - } -} diff --git a/main/test/src/mill/main/ForeignConflictTest.scala b/main/test/src/mill/main/ForeignConflictTest.scala deleted file mode 100644 index a4352bb6..00000000 --- a/main/test/src/mill/main/ForeignConflictTest.scala +++ /dev/null @@ -1,25 +0,0 @@ -package mill.main - - -import mill.util.ScriptTestSuite -import utest._ - -object ForeignConflictTest extends ScriptTestSuite(fork = false) { - def workspaceSlug = "foreign-conflict" - def scriptSourcePath = - os.pwd / 'main / 'test / 'resources / 'examples / 'foreign - override def buildPath = os.rel / 'conflict / "build.sc" - - val tests = Tests { - initWorkspace() - 'test - { - // see https://github.com/lihaoyi/mill/issues/302 - if (!ammonite.util.Util.java9OrAbove) { - assert( - eval("checkPaths"), - eval("checkDests") - ) - } - } - } -} diff --git a/main/test/src/mill/main/JavaCompileJarTests.scala b/main/test/src/mill/main/JavaCompileJarTests.scala deleted file mode 100644 index 37c64b05..00000000 --- a/main/test/src/mill/main/JavaCompileJarTests.scala +++ /dev/null @@ -1,67 +0,0 @@ -package mill.main - -import mill.util.ScriptTestSuite -import utest._ - -object JavaCompileJarTests extends ScriptTestSuite(fork = false) { - def workspaceSlug = "java-compile-jar" - def scriptSourcePath = 
os.pwd / 'main / 'test / 'resources / 'examples / 'javac - val tests = Tests{ - initWorkspace() - 'test - { - if (!ammonite.util.Util.java9OrAbove) { - // Basic target evaluation works - assert(eval("classFiles")) - assert(eval("jar")) - - val classFiles1 = meta("classFiles") - val jar1 = meta("jar") - - assert(eval("classFiles")) - assert(eval("jar")) - - // Repeated evaluation has the same results - val classFiles2 = meta("classFiles") - val jar2 = meta("jar") - - assert( - jar1 == jar2, - classFiles1 == classFiles2 - ) - - // If we update resources, classFiles are unchanged but jar changes - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "txt")){ - os.write.append(scalaFile, "\n") - } - - assert(eval("classFiles")) - assert(eval("jar")) - - val classFiles3 = meta("classFiles") - val jar3 = meta("jar") - - assert( - jar2 != jar3, - classFiles2 == classFiles3 - ) - - // We can intentionally break the code, have the targets break, then - // fix the code and have them recover. - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "java")){ - os.write.append(scalaFile, "\n}") - } - - assert(!eval("classFiles")) - assert(!eval("jar")) - - for(scalaFile <- os.walk(workspacePath).filter(_.ext == "java")){ - os.write.over(scalaFile, os.read(scalaFile).dropRight(2)) - } - - assert(eval("classFiles")) - assert(eval("jar")) - } - } - } -} - diff --git a/main/test/src/mill/main/MainTests.scala b/main/test/src/mill/main/MainTests.scala deleted file mode 100644 index e836099c..00000000 --- a/main/test/src/mill/main/MainTests.scala +++ /dev/null @@ -1,272 +0,0 @@ -package mill.main - -import mill.define.{Discover, Segment, Task} -import mill.util.TestGraphs._ - -import utest._ -object MainTests extends TestSuite{ - - def check[T <: mill.define.BaseModule](module: T)( - selectorString: String, - expected0: Either[String, Seq[T => Task[_]]])= { - - val expected = expected0.map(_.map(_(module))) - val resolved = for{ - selectors <- mill.util.ParseArgs(Seq(selectorString), multiSelect = false).map(_._1.head) - val crossSelectors = selectors._2.value.map{case Segment.Cross(x) => x.toList.map(_.toString) case _ => Nil} - task <- mill.main.ResolveTasks.resolve( - selectors._2.value.toList, module, module.millDiscover, Nil, crossSelectors.toList, Nil - ) - } yield task - assert(resolved == expected) - } - val tests = Tests{ - val graphs = new mill.util.TestGraphs() - import graphs._ - 'single - { - val check = MainTests.check(singleton) _ - 'pos - check("single", Right(Seq(_.single))) - 'neg1 - check("sngle", Left("Cannot resolve sngle. Did you mean single?")) - 'neg2 - check("snigle", Left("Cannot resolve snigle. Did you mean single?")) - 'neg3 - check("nsiigle", Left("Cannot resolve nsiigle. Did you mean single?")) - 'neg4 - check("ansiigle", Left("Cannot resolve ansiigle. Try `mill resolve _` to see what's available.")) - 'neg5 - check("doesntExist", Left("Cannot resolve doesntExist. Try `mill resolve _` to see what's available.")) - 'neg6 - check("single.doesntExist", Left("Task single is not a module and has no children.")) - 'neg7 - check("", Left("Selector cannot be empty")) - } - 'backtickIdentifiers - { - val check = MainTests.check(bactickIdentifiers) _ - 'pos1 - check("up-target", Right(Seq(_.`up-target`))) - 'pos2 - check("a-down-target", Right(Seq(_.`a-down-target`))) - 'neg1 - check("uptarget", Left("Cannot resolve uptarget. Did you mean up-target?")) - 'neg2 - check("upt-arget", Left("Cannot resolve upt-arget. 
Did you mean up-target?")) - 'neg3 - check("up-target.doesntExist", Left("Task up-target is not a module and has no children.")) - 'neg4 - check("", Left("Selector cannot be empty")) - 'neg5 - check("invisible&", Left("Cannot resolve invisible. Try `mill resolve _` to see what's available.")) - 'nested - { - 'pos - check("nested-module.nested-target", Right(Seq(_.`nested-module`.`nested-target`))) - 'neg - check("nested-module.doesntExist", Left("Cannot resolve nested-module.doesntExist. Try `mill resolve nested-module._` to see what's available.")) - } - } - 'nested - { - val check = MainTests.check(nestedModule) _ - 'pos1 - check("single", Right(Seq(_.single))) - 'pos2 - check("nested.single", Right(Seq(_.nested.single))) - 'pos3 - check("classInstance.single", Right(Seq(_.classInstance.single))) - 'neg1 - check( - "doesntExist", - Left("Cannot resolve doesntExist. Try `mill resolve _` to see what's available.") - ) - 'neg2 - check( - "single.doesntExist", - Left("Task single is not a module and has no children.") - ) - 'neg3 - check( - "nested.doesntExist", - Left("Cannot resolve nested.doesntExist. Try `mill resolve nested._` to see what's available.") - ) - 'neg3 - check( - "nested.singel", - Left("Cannot resolve nested.singel. Did you mean nested.single?") - ) - 'neg4 - check( - "classInstance.doesntExist", - Left("Cannot resolve classInstance.doesntExist. Try `mill resolve classInstance._` to see what's available.") - ) - 'wildcard - check( - "_.single", - Right(Seq( - _.classInstance.single, - _.nested.single - )) - ) - 'wildcardNeg - check( - "_._.single", - Left("Cannot resolve _._.single. Try `mill resolve _` to see what's available") - ) - 'wildcardNeg2 - check( - "_._.__", - Left("Cannot resolve _._.__. Try `mill resolve _` to see what's available") - ) - 'wildcardNeg3 - check( - "nested._.foobar", - Left("Cannot resolve nested._.foobar. Try `mill resolve nested._` to see what's available") - ) - 'wildcard2 - check( - "__.single", - Right(Seq( - _.single, - _.classInstance.single, - _.nested.single - )) - ) - - 'wildcard3 - check( - "_.__.single", - Right(Seq( - _.classInstance.single, - _.nested.single - )) - ) - - } - 'cross - { - 'single - { - val check = MainTests.check(singleCross) _ - 'pos1 - check("cross[210].suffix", Right(Seq(_.cross("210").suffix))) - 'pos2 - check("cross[211].suffix", Right(Seq(_.cross("211").suffix))) - 'neg1 - check( - "cross[210].doesntExist", - Left("Cannot resolve cross[210].doesntExist. Try `mill resolve cross[210]._` to see what's available.") - ) - 'neg2 - check( - "cross[doesntExist].doesntExist", - Left("Cannot resolve cross[doesntExist]. Try `mill resolve cross[__]` to see what's available.") - ) - 'neg3 - check( - "cross[221].doesntExist", - Left("Cannot resolve cross[221]. Did you mean cross[211]?") - ) - 'neg4 - check( - "cross[doesntExist].suffix", - Left("Cannot resolve cross[doesntExist]. 
Try `mill resolve cross[__]` to see what's available.") - ) - 'wildcard - check( - "cross[_].suffix", - Right(Seq( - _.cross("210").suffix, - _.cross("211").suffix, - _.cross("212").suffix - )) - ) - 'wildcard2 - check( - "cross[__].suffix", - Right(Seq( - _.cross("210").suffix, - _.cross("211").suffix, - _.cross("212").suffix - )) - ) - } - 'double - { - val check = MainTests.check(doubleCross) _ - 'pos1 - check( - "cross[210,jvm].suffix", - Right(Seq(_.cross("210", "jvm").suffix)) - ) - 'pos2 - check( - "cross[211,jvm].suffix", - Right(Seq(_.cross("211", "jvm").suffix)) - ) - 'wildcard - { - 'labelNeg - check( - "_.suffix", - Left("Cannot resolve _.suffix. Try `mill resolve _._` to see what's available.") - ) - 'labelPos - check( - "__.suffix", - Right(Seq( - _.cross("210", "jvm").suffix, - _.cross("210", "js").suffix, - - _.cross("211", "jvm").suffix, - _.cross("211", "js").suffix, - - _.cross("212", "jvm").suffix, - _.cross("212", "js").suffix, - _.cross("212", "native").suffix - )) - ) - 'first - check( - "cross[_,jvm].suffix", - Right(Seq( - _.cross("210", "jvm").suffix, - _.cross("211", "jvm").suffix, - _.cross("212", "jvm").suffix - )) - ) - 'second - check( - "cross[210,_].suffix", - Right(Seq( - _.cross("210", "jvm").suffix, - _.cross("210", "js").suffix - )) - ) - 'both - check( - "cross[_,_].suffix", - Right(Seq( - _.cross("210", "jvm").suffix, - _.cross("210", "js").suffix, - - _.cross("211", "jvm").suffix, - _.cross("211", "js").suffix, - - _.cross("212", "jvm").suffix, - _.cross("212", "js").suffix, - _.cross("212", "native").suffix - )) - ) - 'both2 - check( - "cross[__].suffix", - Right(Seq( - _.cross("210", "jvm").suffix, - _.cross("210", "js").suffix, - - _.cross("211", "jvm").suffix, - _.cross("211", "js").suffix, - - _.cross("212", "jvm").suffix, - _.cross("212", "js").suffix, - _.cross("212", "native").suffix - )) - ) - } - } - 'nested - { - val check = MainTests.check(nestedCrosses) _ - 'pos1 - check( - "cross[210].cross2[js].suffix", - Right(Seq(_.cross("210").cross2("js").suffix)) - ) - 'pos2 - check( - "cross[211].cross2[jvm].suffix", - Right(Seq(_.cross("211").cross2("jvm").suffix)) - ) - 'wildcard - { - 'first - check( - "cross[_].cross2[jvm].suffix", - Right(Seq( - _.cross("210").cross2("jvm").suffix, - _.cross("211").cross2("jvm").suffix, - _.cross("212").cross2("jvm").suffix - )) - ) - 'second - check( - "cross[210].cross2[_].suffix", - Right(Seq( - _.cross("210").cross2("jvm").suffix, - _.cross("210").cross2("js").suffix, - _.cross("210").cross2("native").suffix - )) - ) - 'both - check( - "cross[_].cross2[_].suffix", - Right(Seq( - _.cross("210").cross2("jvm").suffix, - _.cross("210").cross2("js").suffix, - _.cross("210").cross2("native").suffix, - - _.cross("211").cross2("jvm").suffix, - _.cross("211").cross2("js").suffix, - _.cross("211").cross2("native").suffix, - - _.cross("212").cross2("jvm").suffix, - _.cross("212").cross2("js").suffix, - _.cross("212").cross2("native").suffix - )) - ) - } - } - } - } -} diff --git a/main/test/src/mill/util/ParseArgsTest.scala b/main/test/src/mill/util/ParseArgsTest.scala deleted file mode 100644 index e31baf4f..00000000 --- a/main/test/src/mill/util/ParseArgsTest.scala +++ /dev/null @@ -1,254 +0,0 @@ -package mill.util - -import mill.define.{Segment, Segments} -import mill.define.Segment.{Cross, Label} -import utest._ - -object ParseArgsTest extends TestSuite { - - val tests = Tests { - 'extractSelsAndArgs - { - def check(input: Seq[String], - expectedSelectors: Seq[String], - expectedArgs: Seq[String], - 
multiSelect: Boolean) = { - val (selectors, args) = ParseArgs.extractSelsAndArgs(input, multiSelect) - - assert( - selectors == expectedSelectors, - args == expectedArgs - ) - } - - 'empty - check(input = Seq.empty, - expectedSelectors = Seq.empty, - expectedArgs = Seq.empty, - multiSelect = false) - 'singleSelector - check( - input = Seq("core.compile"), - expectedSelectors = Seq("core.compile"), - expectedArgs = Seq.empty, - multiSelect = false - ) - 'singleSelectorWithArgs - check( - input = Seq("application.run", "hello", "world"), - expectedSelectors = Seq("application.run"), - expectedArgs = Seq("hello", "world"), - multiSelect = false - ) - 'singleSelectorWithAllInArgs - check( - input = Seq("application.run", "hello", "world", "--all"), - expectedSelectors = Seq("application.run"), - expectedArgs = Seq("hello", "world", "--all"), - multiSelect = false - ) - 'multiSelectors - check( - input = Seq("core.jar", "core.docJar", "core.sourcesJar"), - expectedSelectors = Seq("core.jar", "core.docJar", "core.sourcesJar"), - expectedArgs = Seq.empty, - multiSelect = true - ) - 'multiSelectorsSeq - check( - input = Seq("core.jar", "core.docJar", "core.sourcesJar"), - expectedSelectors = Seq("core.jar", "core.docJar", "core.sourcesJar"), - expectedArgs = Seq.empty, - multiSelect = true - ) - 'multiSelectorsWithArgs - check( - input = Seq("core.compile", - "application.runMain", - "--", - "Main", - "hello", - "world"), - expectedSelectors = Seq("core.compile", "application.runMain"), - expectedArgs = Seq("Main", "hello", "world"), - multiSelect = true - ) - 'multiSelectorsWithArgsWithAllInArgs - check( - input = Seq("core.compile", - "application.runMain", - "--", - "Main", - "--all", - "world"), - expectedSelectors = Seq("core.compile", "application.runMain"), - expectedArgs = Seq("Main", "--all", "world"), - multiSelect = true - ) - } - 'expandBraces - { - def check(input: String, expectedExpansion: List[String]) = { - val Right(expanded) = ParseArgs.expandBraces(input) - - assert(expanded == expectedExpansion) - } - - 'expandLeft - check( - "{application,core}.compile", - List("application.compile", "core.compile") - ) - 'expandRight - check( - "application.{jar,docJar,sourcesJar}", - List("application.jar", "application.docJar", "application.sourcesJar") - ) - 'expandBoth - check( - "{core,application}.{jar,docJar}", - List( - "core.jar", - "core.docJar", - "application.jar", - "application.docJar" - ) - ) - 'expandNested - { - check("{hello,world.{cow,moo}}", - List("hello", "world.cow", "world.moo")) - check("{a,b{c,d}}", List("a", "bc", "bd")) - check("{a,b,{c,d}}", List("a", "b", "c", "d")) - check("{a,b{c,d{e,f}}}", List("a", "bc", "bde", "bdf")) - check("{a{b,c},d}", List("ab", "ac", "d")) - check("{a,{b,c}d}", List("a", "bd", "cd")) - check("{a{b,c},d{e,f}}", List("ab", "ac", "de", "df")) - check("{a,b{c,d},e{f,g}}", List("a", "bc", "bd", "ef", "eg")) - } - 'expandMixed - check( - "{a,b}.{c}.{}.e", - List("a.{c}.{}.e", "b.{c}.{}.e") - ) - 'malformed - { - val malformed = Seq("core.{compile", "core.{compile,test]") - - malformed.foreach { m => - val Left(error) = ParseArgs.expandBraces(m) - assert(error.contains("Parsing exception")) - } - } - 'dontExpand - { - check("core.compile", List("core.compile")) - check("{}.compile", List("{}.compile")) - check("{core}.compile", List("{core}.compile")) - } - 'keepUnknownSymbols - { - check("{a,b}.e<>", List("a.e<>", "b.e<>")) - check("a[99]&&", List("a[99]&&")) - check( - "{a,b}.<%%>.{c,d}", - List("a.<%%>.c", "a.<%%>.d", "b.<%%>.c", 
"b.<%%>.d") - ) - } - } - - 'apply - { - def check(input: Seq[String], - expectedSelectors: List[(Option[List[Segment]], List[Segment])], - expectedArgs: Seq[String], - multiSelect: Boolean) = { - val Right((selectors0, args)) = ParseArgs(input, multiSelect) - - val selectors = selectors0.map{ - case (Some(v1), v2) => (Some(v1.value), v2.value) - case (None, v2) => (None, v2.value) - } - assert( - selectors == expectedSelectors, - args == expectedArgs - ) - } - - 'rejectEmpty { - assert(ParseArgs(Seq.empty, multiSelect = false) == Left("Selector cannot be empty")) - } - 'singleSelector - check( - input = Seq("core.compile"), - expectedSelectors = List( - None -> List(Label("core"), Label("compile")) - ), - expectedArgs = Seq.empty, - multiSelect = false - ) - 'externalSelector - check( - input = Seq("foo.bar/core.compile"), - expectedSelectors = List( - Some(List(Label("foo"), Label("bar"))) -> List(Label("core"), Label("compile")) - ), - expectedArgs = Seq.empty, - multiSelect = false - ) - 'singleSelectorWithArgs - check( - input = Seq("application.run", "hello", "world"), - expectedSelectors = List( - None -> List(Label("application"), Label("run")) - ), - expectedArgs = Seq("hello", "world"), - multiSelect = false - ) - 'singleSelectorWithCross - check( - input = Seq("bridges[2.12.4,jvm].compile"), - expectedSelectors = List( - None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("compile")) - ), - expectedArgs = Seq.empty, - multiSelect = false - ) - 'multiSelectorsBraceExpansion - check( - input = Seq("{core,application}.compile"), - expectedSelectors = List( - None -> List(Label("core"), Label("compile")), - None -> List(Label("application"), Label("compile")) - ), - expectedArgs = Seq.empty, - multiSelect = true - ) - 'multiSelectorsBraceExpansionWithArgs - check( - input = Seq("{core,application}.run", "--", "hello", "world"), - expectedSelectors = List( - None -> List(Label("core"), Label("run")), - None -> List(Label("application"), Label("run")) - ), - expectedArgs = Seq("hello", "world"), - multiSelect = true - ) - 'multiSelectorsBraceExpansionWithCross - check( - input = Seq("bridges[2.12.4,jvm].{test,jar}"), - expectedSelectors = List( - None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("test")), - None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("jar")) - ), - expectedArgs = Seq.empty, - multiSelect = true - ) - 'multiSelectorsBraceExpansionInsideCross - check( - input = Seq("bridges[{2.11.11,2.11.8,2.13.0-M3}].jar"), - expectedSelectors = List( - None -> List(Label("bridges"), Cross(Seq("2.11.11")), Label("jar")), - None -> List(Label("bridges"), Cross(Seq("2.11.8")), Label("jar")), - None -> List(Label("bridges"), Cross(Seq("2.13.0-M3")), Label("jar")) - ), - expectedArgs = Seq.empty, - multiSelect = true - ) - 'multiSelectorsBraceExpansionWithoutAll - { - val res = ParseArgs(Seq("{core,application}.compile"), multiSelect = false) - val expected = Right( - List( - None -> Segments(Label("core"), Label("compile")), - None -> Segments(Label("application"), Label("compile")) - ), - Nil - ) - assert(res == expected) - } - 'multiSelectorsWithoutAllAsSingle - check( - // this is how it works when we pass multiple tasks without --all flag - input = Seq("core.compile", "application.compile"), - expectedSelectors = List( - None -> List(Label("core"), Label("compile")) - ), - expectedArgs = Seq("application.compile"), - multiSelect = false - ) - } - } - -} diff --git a/main/test/src/mill/util/ScriptTestSuite.scala 
b/main/test/src/mill/util/ScriptTestSuite.scala deleted file mode 100644 index f448aaaf..00000000 --- a/main/test/src/mill/util/ScriptTestSuite.scala +++ /dev/null @@ -1,53 +0,0 @@ -package mill.util - -import java.io.{ByteArrayInputStream, ByteArrayOutputStream, PrintStream} - -import utest._ - -abstract class ScriptTestSuite(fork: Boolean) extends TestSuite{ - def workspaceSlug: String - def scriptSourcePath: os.Path - def buildPath: os.RelPath = "build.sc" - - val workspacePath = os.pwd / 'target / 'workspace / workspaceSlug - val wd = workspacePath / buildPath / os.up - val stdOutErr = System.out // new PrintStream(new ByteArrayOutputStream()) - val stdIn = new ByteArrayInputStream(Array()) - val disableTicker = false - val debugLog = false - lazy val runner = new mill.main.MainRunner( - ammonite.main.Cli.Config(wd = wd), disableTicker, - stdOutErr, stdOutErr, stdIn, None, Map.empty, - b => (), debugLog - ) - def eval(s: String*) = { - if (!fork) runner.runScript(workspacePath / buildPath , s.toList) - else{ - try { - os.proc(os.home / "mill-release", "-i", s).call( - wd, - stdin = os.Inherit, - stdout = os.Inherit, - stderr = os.Inherit, - ) - true - }catch{case e: Throwable => false} - } - } - def meta(s: String) = { - val (List(selector), args) = ParseArgs.apply(Seq(s), multiSelect = false).right.get - - os.read(wd / "out" / selector._2.value.flatMap(_.pathSegments) / "meta.json") - } - - - def initWorkspace() = { - os.remove.all(workspacePath) - os.makeDir.all(workspacePath / os.up) - // The unzipped git repo snapshots we get from github come with a - // wrapper-folder inside the zip file, so copy the wrapper folder to the - // destination instead of the folder containing the wrapper. - - os.copy(scriptSourcePath, workspacePath) - } -} diff --git a/main/test/src/mill/util/TestEvaluator.scala b/main/test/src/mill/util/TestEvaluator.scala deleted file mode 100644 index 9a235679..00000000 --- a/main/test/src/mill/util/TestEvaluator.scala +++ /dev/null @@ -1,80 +0,0 @@ -package mill.util - -import mill.define.{Input, Target, Task} -import mill.api.Result.OuterStack -import mill.eval.{Evaluator, Result} -import mill.util.Strict.Agg -import utest.assert -import utest.framework.TestPath - -import language.experimental.macros -object TestEvaluator{ - val externalOutPath = os.pwd / 'target / 'external - - - def static(module: TestUtil.BaseModule)(implicit fullName: sourcecode.FullName) = { - new TestEvaluator(module)(fullName, TestPath(Nil)) - } -} - -class TestEvaluator(module: TestUtil.BaseModule) - (implicit fullName: sourcecode.FullName, - tp: TestPath){ - val outPath = TestUtil.getOutPath() - -// val logger = DummyLogger - val logger = new PrintLogger( - colored = true, disableTicker=false, - ammonite.util.Colors.Default, System.out, System.out, System.err, System.in, debugEnabled = false - ) - val evaluator = new Evaluator(Ctx.defaultHome, outPath, TestEvaluator.externalOutPath, module, logger) - - def apply[T](t: Task[T]): Either[Result.Failing[T], (T, Int)] = { - val evaluated = evaluator.evaluate(Agg(t)) - - if (evaluated.failing.keyCount == 0) { - Right( - Tuple2( - evaluated.rawValues.head.asInstanceOf[Result.Success[T]].value, - evaluated.evaluated.collect { - case t: Target[_] - if module.millInternal.targets.contains(t) - && !t.isInstanceOf[Input[_]] - && !t.ctx.external => t - case t: mill.define.Command[_] => t - }.size - )) - } else { - Left( - evaluated.failing.lookupKey(evaluated.failing.keys().next).items.next() - .asInstanceOf[Result.Failing[T]] - ) - } - } - - def 
fail(target: Target[_], expectedFailCount: Int, expectedRawValues: Seq[Result[_]]) = { - - val res = evaluator.evaluate(Agg(target)) - - val cleaned = res.rawValues.map{ - case Result.Exception(ex, _) => Result.Exception(ex, new OuterStack(Nil)) - case x => x - } - - assert( - cleaned == expectedRawValues, - res.failing.keyCount == expectedFailCount - ) - - } - - def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { - val evaluated = evaluator.evaluate(targets) - .evaluated - .flatMap(_.asTarget) - .filter(module.millInternal.targets.contains) - .filter(!_.isInstanceOf[Input[_]]) - assert(evaluated == expected) - } - -} diff --git a/main/test/src/mill/util/TestGraphs.scala b/main/test/src/mill/util/TestGraphs.scala deleted file mode 100644 index d3b35ddc..00000000 --- a/main/test/src/mill/util/TestGraphs.scala +++ /dev/null @@ -1,271 +0,0 @@ -package mill.util -import TestUtil.test -import mill.define.{Cross, Discover} -import mill.{Module, T} - -/** - * Example dependency graphs for us to use in our test suite. - * - * The graphs using `test()` live in the `class` and need to be instantiated - * every time you use them, because they are mutable (you can poke at the - * `test`'s `counter`/`failure`/`exception` fields to test various graph - * evaluation scenarios. - * - * The immutable graphs, used for testing discovery & target resolution, - * live in the companion object. - */ -class TestGraphs(){ - // single - object singleton extends TestUtil.BaseModule { - val single = test() - } - - object bactickIdentifiers extends TestUtil.BaseModule { - val `up-target` = test() - val `a-down-target` = test(`up-target`) - val `invisible&` = test() - object `nested-module` extends TestUtil.BaseModule { - val `nested-target` = test() - } - } - - // up---down - object pair extends TestUtil.BaseModule{ - val up = test() - val down = test(up) - } - - // up---o---down - object anonTriple extends TestUtil.BaseModule { - val up = test() - val down = test(test.anon(up)) - } - - // left - // / \ - // up down - // \ / - // right - object diamond extends TestUtil.BaseModule { - val up = test() - val left = test(up) - val right = test(up) - val down = test(left, right) - } - - // o - // / \ - // up down - // \ / - // o - object anonDiamond extends TestUtil.BaseModule { - val up = test() - val down = test(test.anon(up), test.anon(up)) - } - - object defCachedDiamond extends TestUtil.BaseModule { - def up = T{ test() } - def left = T{ test(up) } - def right = T{ test(up) } - def down = T{ test(left, right) } - } - - - object borkedCachedDiamond2 extends TestUtil.BaseModule { - def up = test() - def left = test(up) - def right = test(up) - def down = test(left, right) - } - - object borkedCachedDiamond3 extends TestUtil.BaseModule { - def up = test() - def left = test(up) - def right = test(up) - def down = test(left, right) - } - - // o g-----o - // \ \ \ - // o o h-----I---o - // \ / \ / \ / \ \ - // A---c--o E o-o \ \ - // / \ / \ / \ o---J - // o d o--o o / / - // \ / \ / / - // o o---F---o - // / / - // o--B o - object bigSingleTerminal extends TestUtil.BaseModule { - val a = test(test.anon(), test.anon()) - val b = test(test.anon()) - val e = { - val c = test.anon(a) - val d = test.anon(a) - test( - test.anon(test.anon(), test.anon(c)), - test.anon(test.anon(c, test.anon(d, b))) - ) - } - val f = test(test.anon(test.anon(), test.anon(e))) - - val i = { - val g = test.anon() - val h = test.anon(g, e) - test(test.anon(g), test.anon(test.anon(h))) - } - val j = test(test.anon(i), test.anon(i, f), 
test.anon(f)) - } - // _ left _ - // / \ - // task1 -------- right - // _/ - // change - task2 - object separateGroups extends TestUtil.BaseModule { - val task1 = T.task{ 1 } - def left = T{ task1() } - val change = test() - val task2 = T.task{ change() } - def right = T{ task1() + task2() + left() + 1 } - - } -} - - -object TestGraphs{ - // _ left _ - // / \ - // task -------- right - object triangleTask extends TestUtil.BaseModule { - val task = T.task{ 1 } - def left = T{ task() } - def right = T{ task() + left() + 1 } - } - - - // _ left - // / - // task -------- right - object multiTerminalGroup extends TestUtil.BaseModule { - val task = T.task{ 1 } - def left = T{ task() } - def right = T{ task() } - } - - // _ left _____________ - // / \ \ - // task1 -------- right ----- task2 - object multiTerminalBoundary extends TestUtil.BaseModule { - val task1 = T.task{ 1 } - def left = T{ task1() } - def right = T{ task1() + left() + 1 } - val task2 = T.task{ left() + right() } - } - - - trait CanNest extends Module{ - def single = T{ 1 } - def invisible: Any = T{ 2 } - def invisible2: mill.define.Task[Int] = T{ 3 } - def invisible3: mill.define.Task[_] = T{ 4 } - } - object nestedModule extends TestUtil.BaseModule { - def single = T{ 5 } - def invisible: Any = T{ 6 } - object nested extends Module{ - def single = T{ 7 } - def invisible: Any = T{ 8 } - - } - object classInstance extends CanNest - - } - - trait BaseModule extends Module { - def foo = T{ Seq("base") } - def cmd(i: Int) = T.command{ Seq("base" + i) } - } - - object canOverrideSuper extends TestUtil.BaseModule with BaseModule { - override def foo = T{ super.foo() ++ Seq("object") } - override def cmd(i: Int) = T.command{ super.cmd(i)() ++ Seq("object" + i) } - override lazy val millDiscover: Discover[this.type] = Discover[this.type] - } - - trait TraitWithModule extends Module{ outer => - object TraitModule extends Module{ - def testFrameworks = T{ Seq("mill.UTestFramework") } - def test() = T.command{ ()/*donothing*/ } - } - } - - - // Make sure nested objects inherited from traits work - object TraitWithModuleObject extends TestUtil.BaseModule with TraitWithModule{ - override lazy val millDiscover: Discover[this.type] = Discover[this.type] - } - - object nullTasks extends TestUtil.BaseModule{ - val nullString: String = null - def nullTask1 = T.task{ nullString } - def nullTask2 = T.task{ nullTask1() } - - def nullTarget1 = T{ nullString } - def nullTarget2 = T{ nullTarget1() } - def nullTarget3 = T{ nullTask1() } - def nullTarget4 = T{ nullTask2() } - - def nullCommand1() = T.command{ nullString } - def nullCommand2() = T.command{ nullTarget1() } - def nullCommand3() = T.command{ nullTask1() } - def nullCommand4() = T.command{ nullTask2() } - - override lazy val millDiscover: Discover[this.type] = Discover[this.type] - } - - object singleCross extends TestUtil.BaseModule { - object cross extends mill.Cross[Cross]("210", "211", "212") - class Cross(scalaVersion: String) extends Module{ - def suffix = T{ scalaVersion } - } - } - object crossResolved extends TestUtil.BaseModule { - trait MyModule extends Module{ - def crossVersion: String - implicit object resolver extends mill.define.Cross.Resolver[MyModule]{ - def resolve[V <: MyModule](c: Cross[V]): V = c.itemMap(List(crossVersion)) - } - } - - object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12") - class FooModule(val crossVersion: String) extends MyModule{ - def suffix = T{ crossVersion } - } - - object bar extends mill.Cross[BarModule]("2.10", "2.11", "2.12") - 
class BarModule(val crossVersion: String) extends MyModule{ - def longSuffix = T{ "_" + foo().suffix() } - } - } - object doubleCross extends TestUtil.BaseModule { - val crossMatrix = for{ - scalaVersion <- Seq("210", "211", "212") - platform <- Seq("jvm", "js", "native") - if !(platform == "native" && scalaVersion != "212") - } yield (scalaVersion, platform) - object cross extends mill.Cross[Cross](crossMatrix:_*) - class Cross(scalaVersion: String, platform: String) extends Module{ - def suffix = T{ scalaVersion + "_" + platform } - } - } - - object nestedCrosses extends TestUtil.BaseModule { - object cross extends mill.Cross[Cross]("210", "211", "212") - class Cross(scalaVersion: String) extends mill.Module{ - object cross2 extends mill.Cross[Cross]("jvm", "js", "native") - class Cross(platform: String) extends mill.Module{ - def suffix = T{ scalaVersion + "_" + platform } - } - } - } -} diff --git a/main/test/src/mill/util/TestUtil.scala b/main/test/src/mill/util/TestUtil.scala deleted file mode 100644 index baab2992..00000000 --- a/main/test/src/mill/util/TestUtil.scala +++ /dev/null @@ -1,86 +0,0 @@ -package mill.util - -import mill.util.Router.Overrides -import mill.define._ -import mill.api.Result -import mill.api.Result.OuterStack -import utest.assert -import mill.util.Strict.Agg -import utest.framework.TestPath - -import scala.collection.mutable - -object TestUtil { - def getOutPath()(implicit fullName: sourcecode.FullName, - tp: TestPath) = { - os.pwd / 'target / 'workspace / (fullName.value.split('.') ++ tp.value) - } - def getOutPathStatic()(implicit fullName: sourcecode.FullName) = { - os.pwd / 'target / 'workspace / fullName.value.split('.') - } - - def getSrcPathStatic()(implicit fullName: sourcecode.FullName) = { - os.pwd / 'target / 'worksources / fullName.value.split('.') - } - def getSrcPathBase() = { - os.pwd / 'target / 'worksources - } - - class BaseModule(implicit millModuleEnclosing0: sourcecode.Enclosing, - millModuleLine0: sourcecode.Line, - millName0: sourcecode.Name, - overrides: Overrides) - extends mill.define.BaseModule(getSrcPathBase() / millModuleEnclosing0.value.split("\\.| |#"))( - implicitly, implicitly, implicitly, implicitly, implicitly){ - lazy val millDiscover: Discover[this.type] = Discover[this.type] - } - - object test{ - - def anon(inputs: Task[Int]*) = new Test(inputs) - def apply(inputs: Task[Int]*) - (implicit ctx: mill.define.Ctx)= { - new TestTarget(inputs, pure = inputs.nonEmpty) - } - } - - class Test(val inputs: Seq[Task[Int]]) extends Task[Int]{ - var counter = 0 - var failure = Option.empty[String] - var exception = Option.empty[Throwable] - override def evaluate(args: Ctx) = { - failure.map(Result.Failure(_)) orElse - exception.map(Result.Exception(_, new OuterStack(Nil))) getOrElse - Result.Success(counter + args.args.map(_.asInstanceOf[Int]).sum) - } - override def sideHash = counter + failure.hashCode() + exception.hashCode() - } - /** - * A dummy target that takes any number of inputs, and whose output can be - * controlled externally, so you can construct arbitrary dataflow graphs and - * test how changes propagate. 
- */ - class TestTarget(inputs: Seq[Task[Int]], - val pure: Boolean) - (implicit ctx0: mill.define.Ctx) - extends Test(inputs) with Target[Int]{ - val ctx = ctx0.copy(segments = ctx0.segments ++ Seq(ctx0.segment)) - val readWrite = upickle.default.readwriter[Int] - - - } - def checkTopological(targets: Agg[Task[_]]) = { - val seen = mutable.Set.empty[Task[_]] - for(t <- targets.indexed.reverseIterator){ - seen.add(t) - for(upstream <- t.inputs){ - assert(!seen(upstream)) - } - } - } - def disableInJava9OrAbove(f: => Any): Unit = { - if (!ammonite.util.Util.java9OrAbove) { - f - } - } -} diff --git a/main/test/src/util/ParseArgsTest.scala b/main/test/src/util/ParseArgsTest.scala new file mode 100644 index 00000000..e31baf4f --- /dev/null +++ b/main/test/src/util/ParseArgsTest.scala @@ -0,0 +1,254 @@ +package mill.util + +import mill.define.{Segment, Segments} +import mill.define.Segment.{Cross, Label} +import utest._ + +object ParseArgsTest extends TestSuite { + + val tests = Tests { + 'extractSelsAndArgs - { + def check(input: Seq[String], + expectedSelectors: Seq[String], + expectedArgs: Seq[String], + multiSelect: Boolean) = { + val (selectors, args) = ParseArgs.extractSelsAndArgs(input, multiSelect) + + assert( + selectors == expectedSelectors, + args == expectedArgs + ) + } + + 'empty - check(input = Seq.empty, + expectedSelectors = Seq.empty, + expectedArgs = Seq.empty, + multiSelect = false) + 'singleSelector - check( + input = Seq("core.compile"), + expectedSelectors = Seq("core.compile"), + expectedArgs = Seq.empty, + multiSelect = false + ) + 'singleSelectorWithArgs - check( + input = Seq("application.run", "hello", "world"), + expectedSelectors = Seq("application.run"), + expectedArgs = Seq("hello", "world"), + multiSelect = false + ) + 'singleSelectorWithAllInArgs - check( + input = Seq("application.run", "hello", "world", "--all"), + expectedSelectors = Seq("application.run"), + expectedArgs = Seq("hello", "world", "--all"), + multiSelect = false + ) + 'multiSelectors - check( + input = Seq("core.jar", "core.docJar", "core.sourcesJar"), + expectedSelectors = Seq("core.jar", "core.docJar", "core.sourcesJar"), + expectedArgs = Seq.empty, + multiSelect = true + ) + 'multiSelectorsSeq - check( + input = Seq("core.jar", "core.docJar", "core.sourcesJar"), + expectedSelectors = Seq("core.jar", "core.docJar", "core.sourcesJar"), + expectedArgs = Seq.empty, + multiSelect = true + ) + 'multiSelectorsWithArgs - check( + input = Seq("core.compile", + "application.runMain", + "--", + "Main", + "hello", + "world"), + expectedSelectors = Seq("core.compile", "application.runMain"), + expectedArgs = Seq("Main", "hello", "world"), + multiSelect = true + ) + 'multiSelectorsWithArgsWithAllInArgs - check( + input = Seq("core.compile", + "application.runMain", + "--", + "Main", + "--all", + "world"), + expectedSelectors = Seq("core.compile", "application.runMain"), + expectedArgs = Seq("Main", "--all", "world"), + multiSelect = true + ) + } + 'expandBraces - { + def check(input: String, expectedExpansion: List[String]) = { + val Right(expanded) = ParseArgs.expandBraces(input) + + assert(expanded == expectedExpansion) + } + + 'expandLeft - check( + "{application,core}.compile", + List("application.compile", "core.compile") + ) + 'expandRight - check( + "application.{jar,docJar,sourcesJar}", + List("application.jar", "application.docJar", "application.sourcesJar") + ) + 'expandBoth - check( + "{core,application}.{jar,docJar}", + List( + "core.jar", + "core.docJar", + "application.jar", + 
"application.docJar" + ) + ) + 'expandNested - { + check("{hello,world.{cow,moo}}", + List("hello", "world.cow", "world.moo")) + check("{a,b{c,d}}", List("a", "bc", "bd")) + check("{a,b,{c,d}}", List("a", "b", "c", "d")) + check("{a,b{c,d{e,f}}}", List("a", "bc", "bde", "bdf")) + check("{a{b,c},d}", List("ab", "ac", "d")) + check("{a,{b,c}d}", List("a", "bd", "cd")) + check("{a{b,c},d{e,f}}", List("ab", "ac", "de", "df")) + check("{a,b{c,d},e{f,g}}", List("a", "bc", "bd", "ef", "eg")) + } + 'expandMixed - check( + "{a,b}.{c}.{}.e", + List("a.{c}.{}.e", "b.{c}.{}.e") + ) + 'malformed - { + val malformed = Seq("core.{compile", "core.{compile,test]") + + malformed.foreach { m => + val Left(error) = ParseArgs.expandBraces(m) + assert(error.contains("Parsing exception")) + } + } + 'dontExpand - { + check("core.compile", List("core.compile")) + check("{}.compile", List("{}.compile")) + check("{core}.compile", List("{core}.compile")) + } + 'keepUnknownSymbols - { + check("{a,b}.e<>", List("a.e<>", "b.e<>")) + check("a[99]&&", List("a[99]&&")) + check( + "{a,b}.<%%>.{c,d}", + List("a.<%%>.c", "a.<%%>.d", "b.<%%>.c", "b.<%%>.d") + ) + } + } + + 'apply - { + def check(input: Seq[String], + expectedSelectors: List[(Option[List[Segment]], List[Segment])], + expectedArgs: Seq[String], + multiSelect: Boolean) = { + val Right((selectors0, args)) = ParseArgs(input, multiSelect) + + val selectors = selectors0.map{ + case (Some(v1), v2) => (Some(v1.value), v2.value) + case (None, v2) => (None, v2.value) + } + assert( + selectors == expectedSelectors, + args == expectedArgs + ) + } + + 'rejectEmpty { + assert(ParseArgs(Seq.empty, multiSelect = false) == Left("Selector cannot be empty")) + } + 'singleSelector - check( + input = Seq("core.compile"), + expectedSelectors = List( + None -> List(Label("core"), Label("compile")) + ), + expectedArgs = Seq.empty, + multiSelect = false + ) + 'externalSelector - check( + input = Seq("foo.bar/core.compile"), + expectedSelectors = List( + Some(List(Label("foo"), Label("bar"))) -> List(Label("core"), Label("compile")) + ), + expectedArgs = Seq.empty, + multiSelect = false + ) + 'singleSelectorWithArgs - check( + input = Seq("application.run", "hello", "world"), + expectedSelectors = List( + None -> List(Label("application"), Label("run")) + ), + expectedArgs = Seq("hello", "world"), + multiSelect = false + ) + 'singleSelectorWithCross - check( + input = Seq("bridges[2.12.4,jvm].compile"), + expectedSelectors = List( + None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("compile")) + ), + expectedArgs = Seq.empty, + multiSelect = false + ) + 'multiSelectorsBraceExpansion - check( + input = Seq("{core,application}.compile"), + expectedSelectors = List( + None -> List(Label("core"), Label("compile")), + None -> List(Label("application"), Label("compile")) + ), + expectedArgs = Seq.empty, + multiSelect = true + ) + 'multiSelectorsBraceExpansionWithArgs - check( + input = Seq("{core,application}.run", "--", "hello", "world"), + expectedSelectors = List( + None -> List(Label("core"), Label("run")), + None -> List(Label("application"), Label("run")) + ), + expectedArgs = Seq("hello", "world"), + multiSelect = true + ) + 'multiSelectorsBraceExpansionWithCross - check( + input = Seq("bridges[2.12.4,jvm].{test,jar}"), + expectedSelectors = List( + None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("test")), + None -> List(Label("bridges"), Cross(Seq("2.12.4", "jvm")), Label("jar")) + ), + expectedArgs = Seq.empty, + multiSelect = true + ) + 
'multiSelectorsBraceExpansionInsideCross - check( + input = Seq("bridges[{2.11.11,2.11.8,2.13.0-M3}].jar"), + expectedSelectors = List( + None -> List(Label("bridges"), Cross(Seq("2.11.11")), Label("jar")), + None -> List(Label("bridges"), Cross(Seq("2.11.8")), Label("jar")), + None -> List(Label("bridges"), Cross(Seq("2.13.0-M3")), Label("jar")) + ), + expectedArgs = Seq.empty, + multiSelect = true + ) + 'multiSelectorsBraceExpansionWithoutAll - { + val res = ParseArgs(Seq("{core,application}.compile"), multiSelect = false) + val expected = Right( + List( + None -> Segments(Label("core"), Label("compile")), + None -> Segments(Label("application"), Label("compile")) + ), + Nil + ) + assert(res == expected) + } + 'multiSelectorsWithoutAllAsSingle - check( + // this is how it works when we pass multiple tasks without --all flag + input = Seq("core.compile", "application.compile"), + expectedSelectors = List( + None -> List(Label("core"), Label("compile")) + ), + expectedArgs = Seq("application.compile"), + multiSelect = false + ) + } + } + +} diff --git a/main/test/src/util/ScriptTestSuite.scala b/main/test/src/util/ScriptTestSuite.scala new file mode 100644 index 00000000..f448aaaf --- /dev/null +++ b/main/test/src/util/ScriptTestSuite.scala @@ -0,0 +1,53 @@ +package mill.util + +import java.io.{ByteArrayInputStream, ByteArrayOutputStream, PrintStream} + +import utest._ + +abstract class ScriptTestSuite(fork: Boolean) extends TestSuite{ + def workspaceSlug: String + def scriptSourcePath: os.Path + def buildPath: os.RelPath = "build.sc" + + val workspacePath = os.pwd / 'target / 'workspace / workspaceSlug + val wd = workspacePath / buildPath / os.up + val stdOutErr = System.out // new PrintStream(new ByteArrayOutputStream()) + val stdIn = new ByteArrayInputStream(Array()) + val disableTicker = false + val debugLog = false + lazy val runner = new mill.main.MainRunner( + ammonite.main.Cli.Config(wd = wd), disableTicker, + stdOutErr, stdOutErr, stdIn, None, Map.empty, + b => (), debugLog + ) + def eval(s: String*) = { + if (!fork) runner.runScript(workspacePath / buildPath , s.toList) + else{ + try { + os.proc(os.home / "mill-release", "-i", s).call( + wd, + stdin = os.Inherit, + stdout = os.Inherit, + stderr = os.Inherit, + ) + true + }catch{case e: Throwable => false} + } + } + def meta(s: String) = { + val (List(selector), args) = ParseArgs.apply(Seq(s), multiSelect = false).right.get + + os.read(wd / "out" / selector._2.value.flatMap(_.pathSegments) / "meta.json") + } + + + def initWorkspace() = { + os.remove.all(workspacePath) + os.makeDir.all(workspacePath / os.up) + // The unzipped git repo snapshots we get from github come with a + // wrapper-folder inside the zip file, so copy the wrapper folder to the + // destination instead of the folder containing the wrapper. 
+ + os.copy(scriptSourcePath, workspacePath) + } +} diff --git a/main/test/src/util/TestEvaluator.scala b/main/test/src/util/TestEvaluator.scala new file mode 100644 index 00000000..9a235679 --- /dev/null +++ b/main/test/src/util/TestEvaluator.scala @@ -0,0 +1,80 @@ +package mill.util + +import mill.define.{Input, Target, Task} +import mill.api.Result.OuterStack +import mill.eval.{Evaluator, Result} +import mill.util.Strict.Agg +import utest.assert +import utest.framework.TestPath + +import language.experimental.macros +object TestEvaluator{ + val externalOutPath = os.pwd / 'target / 'external + + + def static(module: TestUtil.BaseModule)(implicit fullName: sourcecode.FullName) = { + new TestEvaluator(module)(fullName, TestPath(Nil)) + } +} + +class TestEvaluator(module: TestUtil.BaseModule) + (implicit fullName: sourcecode.FullName, + tp: TestPath){ + val outPath = TestUtil.getOutPath() + +// val logger = DummyLogger + val logger = new PrintLogger( + colored = true, disableTicker=false, + ammonite.util.Colors.Default, System.out, System.out, System.err, System.in, debugEnabled = false + ) + val evaluator = new Evaluator(Ctx.defaultHome, outPath, TestEvaluator.externalOutPath, module, logger) + + def apply[T](t: Task[T]): Either[Result.Failing[T], (T, Int)] = { + val evaluated = evaluator.evaluate(Agg(t)) + + if (evaluated.failing.keyCount == 0) { + Right( + Tuple2( + evaluated.rawValues.head.asInstanceOf[Result.Success[T]].value, + evaluated.evaluated.collect { + case t: Target[_] + if module.millInternal.targets.contains(t) + && !t.isInstanceOf[Input[_]] + && !t.ctx.external => t + case t: mill.define.Command[_] => t + }.size + )) + } else { + Left( + evaluated.failing.lookupKey(evaluated.failing.keys().next).items.next() + .asInstanceOf[Result.Failing[T]] + ) + } + } + + def fail(target: Target[_], expectedFailCount: Int, expectedRawValues: Seq[Result[_]]) = { + + val res = evaluator.evaluate(Agg(target)) + + val cleaned = res.rawValues.map{ + case Result.Exception(ex, _) => Result.Exception(ex, new OuterStack(Nil)) + case x => x + } + + assert( + cleaned == expectedRawValues, + res.failing.keyCount == expectedFailCount + ) + + } + + def check(targets: Agg[Task[_]], expected: Agg[Task[_]]) = { + val evaluated = evaluator.evaluate(targets) + .evaluated + .flatMap(_.asTarget) + .filter(module.millInternal.targets.contains) + .filter(!_.isInstanceOf[Input[_]]) + assert(evaluated == expected) + } + +} diff --git a/main/test/src/util/TestGraphs.scala b/main/test/src/util/TestGraphs.scala new file mode 100644 index 00000000..d3b35ddc --- /dev/null +++ b/main/test/src/util/TestGraphs.scala @@ -0,0 +1,271 @@ +package mill.util +import TestUtil.test +import mill.define.{Cross, Discover} +import mill.{Module, T} + +/** + * Example dependency graphs for us to use in our test suite. + * + * The graphs using `test()` live in the `class` and need to be instantiated + * every time you use them, because they are mutable (you can poke at the + * `test`'s `counter`/`failure`/`exception` fields to test various graph + * evaluation scenarios. + * + * The immutable graphs, used for testing discovery & target resolution, + * live in the companion object. 
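+ *
+ * For example (usage sketch, mirroring MainTests): evaluation-style tests
+ * instantiate the mutable graphs via `val graphs = new TestGraphs(); import graphs._`,
+ * while resolution-style tests import the immutable graphs from the companion
+ * object via `import TestGraphs._` and reference e.g. `singleCross.cross("210").suffix`.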
+ */ +class TestGraphs(){ + // single + object singleton extends TestUtil.BaseModule { + val single = test() + } + + object bactickIdentifiers extends TestUtil.BaseModule { + val `up-target` = test() + val `a-down-target` = test(`up-target`) + val `invisible&` = test() + object `nested-module` extends TestUtil.BaseModule { + val `nested-target` = test() + } + } + + // up---down + object pair extends TestUtil.BaseModule{ + val up = test() + val down = test(up) + } + + // up---o---down + object anonTriple extends TestUtil.BaseModule { + val up = test() + val down = test(test.anon(up)) + } + + // left + // / \ + // up down + // \ / + // right + object diamond extends TestUtil.BaseModule { + val up = test() + val left = test(up) + val right = test(up) + val down = test(left, right) + } + + // o + // / \ + // up down + // \ / + // o + object anonDiamond extends TestUtil.BaseModule { + val up = test() + val down = test(test.anon(up), test.anon(up)) + } + + object defCachedDiamond extends TestUtil.BaseModule { + def up = T{ test() } + def left = T{ test(up) } + def right = T{ test(up) } + def down = T{ test(left, right) } + } + + + object borkedCachedDiamond2 extends TestUtil.BaseModule { + def up = test() + def left = test(up) + def right = test(up) + def down = test(left, right) + } + + object borkedCachedDiamond3 extends TestUtil.BaseModule { + def up = test() + def left = test(up) + def right = test(up) + def down = test(left, right) + } + + // o g-----o + // \ \ \ + // o o h-----I---o + // \ / \ / \ / \ \ + // A---c--o E o-o \ \ + // / \ / \ / \ o---J + // o d o--o o / / + // \ / \ / / + // o o---F---o + // / / + // o--B o + object bigSingleTerminal extends TestUtil.BaseModule { + val a = test(test.anon(), test.anon()) + val b = test(test.anon()) + val e = { + val c = test.anon(a) + val d = test.anon(a) + test( + test.anon(test.anon(), test.anon(c)), + test.anon(test.anon(c, test.anon(d, b))) + ) + } + val f = test(test.anon(test.anon(), test.anon(e))) + + val i = { + val g = test.anon() + val h = test.anon(g, e) + test(test.anon(g), test.anon(test.anon(h))) + } + val j = test(test.anon(i), test.anon(i, f), test.anon(f)) + } + // _ left _ + // / \ + // task1 -------- right + // _/ + // change - task2 + object separateGroups extends TestUtil.BaseModule { + val task1 = T.task{ 1 } + def left = T{ task1() } + val change = test() + val task2 = T.task{ change() } + def right = T{ task1() + task2() + left() + 1 } + + } +} + + +object TestGraphs{ + // _ left _ + // / \ + // task -------- right + object triangleTask extends TestUtil.BaseModule { + val task = T.task{ 1 } + def left = T{ task() } + def right = T{ task() + left() + 1 } + } + + + // _ left + // / + // task -------- right + object multiTerminalGroup extends TestUtil.BaseModule { + val task = T.task{ 1 } + def left = T{ task() } + def right = T{ task() } + } + + // _ left _____________ + // / \ \ + // task1 -------- right ----- task2 + object multiTerminalBoundary extends TestUtil.BaseModule { + val task1 = T.task{ 1 } + def left = T{ task1() } + def right = T{ task1() + left() + 1 } + val task2 = T.task{ left() + right() } + } + + + trait CanNest extends Module{ + def single = T{ 1 } + def invisible: Any = T{ 2 } + def invisible2: mill.define.Task[Int] = T{ 3 } + def invisible3: mill.define.Task[_] = T{ 4 } + } + object nestedModule extends TestUtil.BaseModule { + def single = T{ 5 } + def invisible: Any = T{ 6 } + object nested extends Module{ + def single = T{ 7 } + def invisible: Any = T{ 8 } + + } + object classInstance extends 
CanNest + + } + + trait BaseModule extends Module { + def foo = T{ Seq("base") } + def cmd(i: Int) = T.command{ Seq("base" + i) } + } + + object canOverrideSuper extends TestUtil.BaseModule with BaseModule { + override def foo = T{ super.foo() ++ Seq("object") } + override def cmd(i: Int) = T.command{ super.cmd(i)() ++ Seq("object" + i) } + override lazy val millDiscover: Discover[this.type] = Discover[this.type] + } + + trait TraitWithModule extends Module{ outer => + object TraitModule extends Module{ + def testFrameworks = T{ Seq("mill.UTestFramework") } + def test() = T.command{ ()/*donothing*/ } + } + } + + + // Make sure nested objects inherited from traits work + object TraitWithModuleObject extends TestUtil.BaseModule with TraitWithModule{ + override lazy val millDiscover: Discover[this.type] = Discover[this.type] + } + + object nullTasks extends TestUtil.BaseModule{ + val nullString: String = null + def nullTask1 = T.task{ nullString } + def nullTask2 = T.task{ nullTask1() } + + def nullTarget1 = T{ nullString } + def nullTarget2 = T{ nullTarget1() } + def nullTarget3 = T{ nullTask1() } + def nullTarget4 = T{ nullTask2() } + + def nullCommand1() = T.command{ nullString } + def nullCommand2() = T.command{ nullTarget1() } + def nullCommand3() = T.command{ nullTask1() } + def nullCommand4() = T.command{ nullTask2() } + + override lazy val millDiscover: Discover[this.type] = Discover[this.type] + } + + object singleCross extends TestUtil.BaseModule { + object cross extends mill.Cross[Cross]("210", "211", "212") + class Cross(scalaVersion: String) extends Module{ + def suffix = T{ scalaVersion } + } + } + object crossResolved extends TestUtil.BaseModule { + trait MyModule extends Module{ + def crossVersion: String + implicit object resolver extends mill.define.Cross.Resolver[MyModule]{ + def resolve[V <: MyModule](c: Cross[V]): V = c.itemMap(List(crossVersion)) + } + } + + object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12") + class FooModule(val crossVersion: String) extends MyModule{ + def suffix = T{ crossVersion } + } + + object bar extends mill.Cross[BarModule]("2.10", "2.11", "2.12") + class BarModule(val crossVersion: String) extends MyModule{ + def longSuffix = T{ "_" + foo().suffix() } + } + } + object doubleCross extends TestUtil.BaseModule { + val crossMatrix = for{ + scalaVersion <- Seq("210", "211", "212") + platform <- Seq("jvm", "js", "native") + if !(platform == "native" && scalaVersion != "212") + } yield (scalaVersion, platform) + object cross extends mill.Cross[Cross](crossMatrix:_*) + class Cross(scalaVersion: String, platform: String) extends Module{ + def suffix = T{ scalaVersion + "_" + platform } + } + } + + object nestedCrosses extends TestUtil.BaseModule { + object cross extends mill.Cross[Cross]("210", "211", "212") + class Cross(scalaVersion: String) extends mill.Module{ + object cross2 extends mill.Cross[Cross]("jvm", "js", "native") + class Cross(platform: String) extends mill.Module{ + def suffix = T{ scalaVersion + "_" + platform } + } + } + } +} diff --git a/main/test/src/util/TestUtil.scala b/main/test/src/util/TestUtil.scala new file mode 100644 index 00000000..baab2992 --- /dev/null +++ b/main/test/src/util/TestUtil.scala @@ -0,0 +1,86 @@ +package mill.util + +import mill.util.Router.Overrides +import mill.define._ +import mill.api.Result +import mill.api.Result.OuterStack +import utest.assert +import mill.util.Strict.Agg +import utest.framework.TestPath + +import scala.collection.mutable + +object TestUtil { + def getOutPath()(implicit 
fullName: sourcecode.FullName, + tp: TestPath) = { + os.pwd / 'target / 'workspace / (fullName.value.split('.') ++ tp.value) + } + def getOutPathStatic()(implicit fullName: sourcecode.FullName) = { + os.pwd / 'target / 'workspace / fullName.value.split('.') + } + + def getSrcPathStatic()(implicit fullName: sourcecode.FullName) = { + os.pwd / 'target / 'worksources / fullName.value.split('.') + } + def getSrcPathBase() = { + os.pwd / 'target / 'worksources + } + + class BaseModule(implicit millModuleEnclosing0: sourcecode.Enclosing, + millModuleLine0: sourcecode.Line, + millName0: sourcecode.Name, + overrides: Overrides) + extends mill.define.BaseModule(getSrcPathBase() / millModuleEnclosing0.value.split("\\.| |#"))( + implicitly, implicitly, implicitly, implicitly, implicitly){ + lazy val millDiscover: Discover[this.type] = Discover[this.type] + } + + object test{ + + def anon(inputs: Task[Int]*) = new Test(inputs) + def apply(inputs: Task[Int]*) + (implicit ctx: mill.define.Ctx)= { + new TestTarget(inputs, pure = inputs.nonEmpty) + } + } + + class Test(val inputs: Seq[Task[Int]]) extends Task[Int]{ + var counter = 0 + var failure = Option.empty[String] + var exception = Option.empty[Throwable] + override def evaluate(args: Ctx) = { + failure.map(Result.Failure(_)) orElse + exception.map(Result.Exception(_, new OuterStack(Nil))) getOrElse + Result.Success(counter + args.args.map(_.asInstanceOf[Int]).sum) + } + override def sideHash = counter + failure.hashCode() + exception.hashCode() + } + /** + * A dummy target that takes any number of inputs, and whose output can be + * controlled externally, so you can construct arbitrary dataflow graphs and + * test how changes propagate. + */ + class TestTarget(inputs: Seq[Task[Int]], + val pure: Boolean) + (implicit ctx0: mill.define.Ctx) + extends Test(inputs) with Target[Int]{ + val ctx = ctx0.copy(segments = ctx0.segments ++ Seq(ctx0.segment)) + val readWrite = upickle.default.readwriter[Int] + + + } + def checkTopological(targets: Agg[Task[_]]) = { + val seen = mutable.Set.empty[Task[_]] + for(t <- targets.indexed.reverseIterator){ + seen.add(t) + for(upstream <- t.inputs){ + assert(!seen(upstream)) + } + } + } + def disableInJava9OrAbove(f: => Any): Unit = { + if (!ammonite.util.Util.java9OrAbove) { + f + } + } +} diff --git a/scalajslib/api/src/ScalaJSWorkerApi.scala b/scalajslib/api/src/ScalaJSWorkerApi.scala new file mode 100644 index 00000000..4e44a8df --- /dev/null +++ b/scalajslib/api/src/ScalaJSWorkerApi.scala @@ -0,0 +1,41 @@ +package mill.scalajslib.api +import java.io.File +import mill.api.Result +trait ScalaJSWorkerApi { + def link(sources: Array[File], + libraries: Array[File], + dest: File, + main: String, + fullOpt: Boolean, + moduleKind: ModuleKind): Result[File] + + def run(config: NodeJSConfig, linkedFile: File): Unit + + def getFramework(config: NodeJSConfig, + frameworkName: String, + linkedFile: File): (() => Unit, sbt.testing.Framework) + +} + + +sealed trait OptimizeMode + +object FastOpt extends OptimizeMode +object FullOpt extends OptimizeMode + +sealed trait ModuleKind +object ModuleKind{ + object NoModule extends ModuleKind + object CommonJSModule extends ModuleKind +} + + +object NodeJSConfig { + import upickle.default.{ReadWriter => RW, macroRW} + implicit def rw: RW[NodeJSConfig] = macroRW +} + +final case class NodeJSConfig(executable: String = "node", + args: List[String] = Nil, + env: Map[String, String] = Map.empty, + sourceMap: Boolean = true) diff --git 
a/scalajslib/api/src/mill/scalajslib/api/ScalaJSWorkerApi.scala b/scalajslib/api/src/mill/scalajslib/api/ScalaJSWorkerApi.scala deleted file mode 100644 index 4e44a8df..00000000 --- a/scalajslib/api/src/mill/scalajslib/api/ScalaJSWorkerApi.scala +++ /dev/null @@ -1,41 +0,0 @@ -package mill.scalajslib.api -import java.io.File -import mill.api.Result -trait ScalaJSWorkerApi { - def link(sources: Array[File], - libraries: Array[File], - dest: File, - main: String, - fullOpt: Boolean, - moduleKind: ModuleKind): Result[File] - - def run(config: NodeJSConfig, linkedFile: File): Unit - - def getFramework(config: NodeJSConfig, - frameworkName: String, - linkedFile: File): (() => Unit, sbt.testing.Framework) - -} - - -sealed trait OptimizeMode - -object FastOpt extends OptimizeMode -object FullOpt extends OptimizeMode - -sealed trait ModuleKind -object ModuleKind{ - object NoModule extends ModuleKind - object CommonJSModule extends ModuleKind -} - - -object NodeJSConfig { - import upickle.default.{ReadWriter => RW, macroRW} - implicit def rw: RW[NodeJSConfig] = macroRW -} - -final case class NodeJSConfig(executable: String = "node", - args: List[String] = Nil, - env: Map[String, String] = Map.empty, - sourceMap: Boolean = true) diff --git a/scalajslib/src/ScalaJSModule.scala b/scalajslib/src/ScalaJSModule.scala new file mode 100644 index 00000000..8568c39b --- /dev/null +++ b/scalajslib/src/ScalaJSModule.scala @@ -0,0 +1,200 @@ +package mill +package scalajslib + +import coursier.Cache +import coursier.maven.MavenRepository +import mill.eval.{PathRef, Result} +import mill.api.Result.Success +import mill.scalalib.Lib.resolveDependencies +import mill.scalalib.{DepSyntax, Lib, TestModule, TestRunner} +import mill.util.{Ctx, Loose} +import mill.scalajslib.api._ +trait ScalaJSModule extends scalalib.ScalaModule { outer => + + def scalaJSVersion: T[String] + + trait Tests extends TestScalaJSModule { + override def zincWorker = outer.zincWorker + override def scalaOrganization = outer.scalaOrganization() + override def scalaVersion = outer.scalaVersion() + override def scalaJSVersion = outer.scalaJSVersion() + override def moduleDeps = Seq(outer) + } + + def scalaJSBinaryVersion = T { mill.scalalib.api.Util.scalaBinaryVersion(scalaJSVersion()) } + + def scalaJSWorkerVersion = T{ scalaJSVersion().split('.').dropRight(1).mkString(".") } + + def scalaJSWorkerClasspath = T { + val workerKey = "MILL_SCALAJS_WORKER_" + scalaJSWorkerVersion().replace('.', '_') + mill.modules.Util.millProjectModule( + workerKey, + s"mill-scalajslib-worker-${scalaJSWorkerVersion()}", + repositories, + resolveFilter = _.toString.contains("mill-scalajslib-worker") + ) + } + + def scalaJSLinkerClasspath: T[Loose.Agg[PathRef]] = T{ + val commonDeps = Seq( + ivy"org.scala-js::scalajs-tools:${scalaJSVersion()}", + ivy"org.scala-js::scalajs-sbt-test-adapter:${scalaJSVersion()}" + ) + val envDep = scalaJSBinaryVersion() match { + case v if v.startsWith("0.6") => ivy"org.scala-js::scalajs-js-envs:${scalaJSVersion()}" + case v if v.startsWith("1.0") => ivy"org.scala-js::scalajs-env-nodejs:${scalaJSVersion()}" + } + resolveDependencies( + repositories, + Lib.depToDependency(_, "2.12.4", ""), + commonDeps :+ envDep + ) + } + + def toolsClasspath = T { scalaJSWorkerClasspath() ++ scalaJSLinkerClasspath() } + + def fastOpt = T { + link( + ScalaJSWorkerApi.scalaJSWorker(), + toolsClasspath(), + runClasspath(), + finalMainClassOpt().toOption, + FastOpt, + moduleKind() + ) + } + + def fullOpt = T { + link( + ScalaJSWorkerApi.scalaJSWorker(), + 
toolsClasspath(), + runClasspath(), + finalMainClassOpt().toOption, + FullOpt, + moduleKind() + ) + } + + override def runLocal(args: String*) = T.command { run(args:_*) } + + override def run(args: String*) = T.command { + finalMainClassOpt() match{ + case Left(err) => Result.Failure(err) + case Right(_) => + ScalaJSWorkerApi.scalaJSWorker().run( + toolsClasspath().map(_.path), + nodeJSConfig(), + fastOpt().path.toIO + ) + Result.Success(()) + } + + } + + override def runMainLocal(mainClass: String, args: String*) = T.command[Unit] { + mill.api.Result.Failure("runMain is not supported in Scala.js") + } + + override def runMain(mainClass: String, args: String*) = T.command[Unit] { + mill.api.Result.Failure("runMain is not supported in Scala.js") + } + + def link(worker: ScalaJSWorker, + toolsClasspath: Agg[PathRef], + runClasspath: Agg[PathRef], + mainClass: Option[String], + mode: OptimizeMode, + moduleKind: ModuleKind)(implicit ctx: Ctx): Result[PathRef] = { + val outputPath = ctx.dest / "out.js" + + os.makeDir.all(ctx.dest) + os.remove.all(outputPath) + + val classpath = runClasspath.map(_.path) + val sjsirFiles = classpath + .filter(path => os.exists(path) && os.isDir(path)) + .flatMap(os.walk(_)) + .filter(_.ext == "sjsir") + val libraries = classpath.filter(_.ext == "jar") + worker.link( + toolsClasspath.map(_.path), + sjsirFiles, + libraries, + outputPath.toIO, + mainClass, + mode == FullOpt, + moduleKind + ).map(PathRef(_)) + } + + override def scalacPluginIvyDeps = T{ + super.scalacPluginIvyDeps() ++ + Seq(ivy"org.scala-js:::scalajs-compiler:${scalaJSVersion()}") + } + override def scalaLibraryIvyDeps = T{ + Seq(ivy"org.scala-js::scalajs-library:${scalaJSVersion()}") + } + + // publish artifact with name "mill_sjs0.6.4_2.12" instead of "mill_sjs0.6_2.12" + def crossFullScalaJSVersion: T[Boolean] = false + def artifactScalaJSVersion: T[String] = T { + if (crossFullScalaJSVersion()) scalaJSVersion() + else scalaJSBinaryVersion() + } + + override def artifactSuffix: T[String] = s"${platformSuffix()}_${artifactScalaVersion()}" + + override def platformSuffix = s"_sjs${artifactScalaJSVersion()}" + + def nodeJSConfig = T { NodeJSConfig() } + + def moduleKind: T[ModuleKind] = T { ModuleKind.NoModule } +} + +trait TestScalaJSModule extends ScalaJSModule with TestModule { + def scalaJSTestDeps = T { + resolveDeps(T.task { + Loose.Agg( + ivy"org.scala-js::scalajs-library:${scalaJSVersion()}", + ivy"org.scala-js::scalajs-test-interface:${scalaJSVersion()}" + ) + }) + } + + def fastOptTest = T { + link( + ScalaJSWorkerApi.scalaJSWorker(), + toolsClasspath(), + scalaJSTestDeps() ++ runClasspath(), + None, + FastOpt, + moduleKind() + ) + } + + override def testLocal(args: String*) = T.command { test(args:_*) } + + override def test(args: String*) = T.command { + val (close, framework) = mill.scalajslib.ScalaJSWorkerApi.scalaJSWorker().getFramework( + toolsClasspath().map(_.path), + nodeJSConfig(), + testFrameworks().head, + fastOptTest().path.toIO + ) + + val (doneMsg, results) = TestRunner.runTests( + _ => Seq(framework), + runClasspath().map(_.path), + Agg(compile().classes.path), + args + ) + val res = TestModule.handleResults(doneMsg, results) + // Hack to try and let the Node.js subprocess finish streaming it's stdout + // to the JVM. Without this, the stdout can still be streaming when `close()` + // is called, and some of the output is dropped onto the floor. 
+ Thread.sleep(100) + close() + res + } + +} diff --git a/scalajslib/src/ScalaJSWorkerApi.scala b/scalajslib/src/ScalaJSWorkerApi.scala new file mode 100644 index 00000000..bcaeb2d3 --- /dev/null +++ b/scalajslib/src/ScalaJSWorkerApi.scala @@ -0,0 +1,72 @@ +package mill.scalajslib + +import java.io.File +import java.net.URLClassLoader + +import mill.define.Discover +import mill.api.Result +import mill.api.Ctx +import mill.{Agg, T} +import mill.scalajslib.api._ +class ScalaJSWorker { + private var scalaInstanceCache = Option.empty[(Long, ScalaJSWorkerApi)] + + private def bridge(toolsClasspath: Agg[os.Path]) + (implicit ctx: Ctx.Home) = { + val classloaderSig = + toolsClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum + scalaInstanceCache match { + case Some((sig, bridge)) if sig == classloaderSig => bridge + case _ => + val cl = mill.api.ClassLoader.create( + toolsClasspath.map(_.toIO.toURI.toURL).toVector, + getClass.getClassLoader + ) + val bridge = cl + .loadClass("mill.scalajslib.worker.ScalaJSWorkerImpl") + .getDeclaredConstructor() + .newInstance() + .asInstanceOf[ScalaJSWorkerApi] + scalaInstanceCache = Some((classloaderSig, bridge)) + bridge + } + } + + def link(toolsClasspath: Agg[os.Path], + sources: Agg[os.Path], + libraries: Agg[os.Path], + dest: File, + main: Option[String], + fullOpt: Boolean, + moduleKind: ModuleKind) + (implicit ctx: Ctx.Home): Result[os.Path] = { + bridge(toolsClasspath).link( + sources.items.map(_.toIO).toArray, + libraries.items.map(_.toIO).toArray, + dest, + main.orNull, + fullOpt, + moduleKind + ).map(os.Path(_)) + } + + def run(toolsClasspath: Agg[os.Path], config: NodeJSConfig, linkedFile: File) + (implicit ctx: Ctx.Home): Unit = { + bridge(toolsClasspath).run(config, linkedFile) + } + + def getFramework(toolsClasspath: Agg[os.Path], + config: NodeJSConfig, + frameworkName: String, + linkedFile: File) + (implicit ctx: Ctx.Home): (() => Unit, sbt.testing.Framework) = { + bridge(toolsClasspath).getFramework(config, frameworkName, linkedFile) + } + +} + +object ScalaJSWorkerApi extends mill.define.ExternalModule { + + def scalaJSWorker = T.worker { new ScalaJSWorker() } + lazy val millDiscover = Discover[this.type] +} diff --git a/scalajslib/src/mill/scalajslib/ScalaJSModule.scala b/scalajslib/src/mill/scalajslib/ScalaJSModule.scala deleted file mode 100644 index 8568c39b..00000000 --- a/scalajslib/src/mill/scalajslib/ScalaJSModule.scala +++ /dev/null @@ -1,200 +0,0 @@ -package mill -package scalajslib - -import coursier.Cache -import coursier.maven.MavenRepository -import mill.eval.{PathRef, Result} -import mill.api.Result.Success -import mill.scalalib.Lib.resolveDependencies -import mill.scalalib.{DepSyntax, Lib, TestModule, TestRunner} -import mill.util.{Ctx, Loose} -import mill.scalajslib.api._ -trait ScalaJSModule extends scalalib.ScalaModule { outer => - - def scalaJSVersion: T[String] - - trait Tests extends TestScalaJSModule { - override def zincWorker = outer.zincWorker - override def scalaOrganization = outer.scalaOrganization() - override def scalaVersion = outer.scalaVersion() - override def scalaJSVersion = outer.scalaJSVersion() - override def moduleDeps = Seq(outer) - } - - def scalaJSBinaryVersion = T { mill.scalalib.api.Util.scalaBinaryVersion(scalaJSVersion()) } - - def scalaJSWorkerVersion = T{ scalaJSVersion().split('.').dropRight(1).mkString(".") } - - def scalaJSWorkerClasspath = T { - val workerKey = "MILL_SCALAJS_WORKER_" + scalaJSWorkerVersion().replace('.', '_') - mill.modules.Util.millProjectModule( - workerKey, - 
s"mill-scalajslib-worker-${scalaJSWorkerVersion()}", - repositories, - resolveFilter = _.toString.contains("mill-scalajslib-worker") - ) - } - - def scalaJSLinkerClasspath: T[Loose.Agg[PathRef]] = T{ - val commonDeps = Seq( - ivy"org.scala-js::scalajs-tools:${scalaJSVersion()}", - ivy"org.scala-js::scalajs-sbt-test-adapter:${scalaJSVersion()}" - ) - val envDep = scalaJSBinaryVersion() match { - case v if v.startsWith("0.6") => ivy"org.scala-js::scalajs-js-envs:${scalaJSVersion()}" - case v if v.startsWith("1.0") => ivy"org.scala-js::scalajs-env-nodejs:${scalaJSVersion()}" - } - resolveDependencies( - repositories, - Lib.depToDependency(_, "2.12.4", ""), - commonDeps :+ envDep - ) - } - - def toolsClasspath = T { scalaJSWorkerClasspath() ++ scalaJSLinkerClasspath() } - - def fastOpt = T { - link( - ScalaJSWorkerApi.scalaJSWorker(), - toolsClasspath(), - runClasspath(), - finalMainClassOpt().toOption, - FastOpt, - moduleKind() - ) - } - - def fullOpt = T { - link( - ScalaJSWorkerApi.scalaJSWorker(), - toolsClasspath(), - runClasspath(), - finalMainClassOpt().toOption, - FullOpt, - moduleKind() - ) - } - - override def runLocal(args: String*) = T.command { run(args:_*) } - - override def run(args: String*) = T.command { - finalMainClassOpt() match{ - case Left(err) => Result.Failure(err) - case Right(_) => - ScalaJSWorkerApi.scalaJSWorker().run( - toolsClasspath().map(_.path), - nodeJSConfig(), - fastOpt().path.toIO - ) - Result.Success(()) - } - - } - - override def runMainLocal(mainClass: String, args: String*) = T.command[Unit] { - mill.api.Result.Failure("runMain is not supported in Scala.js") - } - - override def runMain(mainClass: String, args: String*) = T.command[Unit] { - mill.api.Result.Failure("runMain is not supported in Scala.js") - } - - def link(worker: ScalaJSWorker, - toolsClasspath: Agg[PathRef], - runClasspath: Agg[PathRef], - mainClass: Option[String], - mode: OptimizeMode, - moduleKind: ModuleKind)(implicit ctx: Ctx): Result[PathRef] = { - val outputPath = ctx.dest / "out.js" - - os.makeDir.all(ctx.dest) - os.remove.all(outputPath) - - val classpath = runClasspath.map(_.path) - val sjsirFiles = classpath - .filter(path => os.exists(path) && os.isDir(path)) - .flatMap(os.walk(_)) - .filter(_.ext == "sjsir") - val libraries = classpath.filter(_.ext == "jar") - worker.link( - toolsClasspath.map(_.path), - sjsirFiles, - libraries, - outputPath.toIO, - mainClass, - mode == FullOpt, - moduleKind - ).map(PathRef(_)) - } - - override def scalacPluginIvyDeps = T{ - super.scalacPluginIvyDeps() ++ - Seq(ivy"org.scala-js:::scalajs-compiler:${scalaJSVersion()}") - } - override def scalaLibraryIvyDeps = T{ - Seq(ivy"org.scala-js::scalajs-library:${scalaJSVersion()}") - } - - // publish artifact with name "mill_sjs0.6.4_2.12" instead of "mill_sjs0.6_2.12" - def crossFullScalaJSVersion: T[Boolean] = false - def artifactScalaJSVersion: T[String] = T { - if (crossFullScalaJSVersion()) scalaJSVersion() - else scalaJSBinaryVersion() - } - - override def artifactSuffix: T[String] = s"${platformSuffix()}_${artifactScalaVersion()}" - - override def platformSuffix = s"_sjs${artifactScalaJSVersion()}" - - def nodeJSConfig = T { NodeJSConfig() } - - def moduleKind: T[ModuleKind] = T { ModuleKind.NoModule } -} - -trait TestScalaJSModule extends ScalaJSModule with TestModule { - def scalaJSTestDeps = T { - resolveDeps(T.task { - Loose.Agg( - ivy"org.scala-js::scalajs-library:${scalaJSVersion()}", - ivy"org.scala-js::scalajs-test-interface:${scalaJSVersion()}" - ) - }) - } - - def fastOptTest = T { - 
link( - ScalaJSWorkerApi.scalaJSWorker(), - toolsClasspath(), - scalaJSTestDeps() ++ runClasspath(), - None, - FastOpt, - moduleKind() - ) - } - - override def testLocal(args: String*) = T.command { test(args:_*) } - - override def test(args: String*) = T.command { - val (close, framework) = mill.scalajslib.ScalaJSWorkerApi.scalaJSWorker().getFramework( - toolsClasspath().map(_.path), - nodeJSConfig(), - testFrameworks().head, - fastOptTest().path.toIO - ) - - val (doneMsg, results) = TestRunner.runTests( - _ => Seq(framework), - runClasspath().map(_.path), - Agg(compile().classes.path), - args - ) - val res = TestModule.handleResults(doneMsg, results) - // Hack to try and let the Node.js subprocess finish streaming it's stdout - // to the JVM. Without this, the stdout can still be streaming when `close()` - // is called, and some of the output is dropped onto the floor. - Thread.sleep(100) - close() - res - } - -} diff --git a/scalajslib/src/mill/scalajslib/ScalaJSWorkerApi.scala b/scalajslib/src/mill/scalajslib/ScalaJSWorkerApi.scala deleted file mode 100644 index bcaeb2d3..00000000 --- a/scalajslib/src/mill/scalajslib/ScalaJSWorkerApi.scala +++ /dev/null @@ -1,72 +0,0 @@ -package mill.scalajslib - -import java.io.File -import java.net.URLClassLoader - -import mill.define.Discover -import mill.api.Result -import mill.api.Ctx -import mill.{Agg, T} -import mill.scalajslib.api._ -class ScalaJSWorker { - private var scalaInstanceCache = Option.empty[(Long, ScalaJSWorkerApi)] - - private def bridge(toolsClasspath: Agg[os.Path]) - (implicit ctx: Ctx.Home) = { - val classloaderSig = - toolsClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum - scalaInstanceCache match { - case Some((sig, bridge)) if sig == classloaderSig => bridge - case _ => - val cl = mill.api.ClassLoader.create( - toolsClasspath.map(_.toIO.toURI.toURL).toVector, - getClass.getClassLoader - ) - val bridge = cl - .loadClass("mill.scalajslib.worker.ScalaJSWorkerImpl") - .getDeclaredConstructor() - .newInstance() - .asInstanceOf[ScalaJSWorkerApi] - scalaInstanceCache = Some((classloaderSig, bridge)) - bridge - } - } - - def link(toolsClasspath: Agg[os.Path], - sources: Agg[os.Path], - libraries: Agg[os.Path], - dest: File, - main: Option[String], - fullOpt: Boolean, - moduleKind: ModuleKind) - (implicit ctx: Ctx.Home): Result[os.Path] = { - bridge(toolsClasspath).link( - sources.items.map(_.toIO).toArray, - libraries.items.map(_.toIO).toArray, - dest, - main.orNull, - fullOpt, - moduleKind - ).map(os.Path(_)) - } - - def run(toolsClasspath: Agg[os.Path], config: NodeJSConfig, linkedFile: File) - (implicit ctx: Ctx.Home): Unit = { - bridge(toolsClasspath).run(config, linkedFile) - } - - def getFramework(toolsClasspath: Agg[os.Path], - config: NodeJSConfig, - frameworkName: String, - linkedFile: File) - (implicit ctx: Ctx.Home): (() => Unit, sbt.testing.Framework) = { - bridge(toolsClasspath).getFramework(config, frameworkName, linkedFile) - } - -} - -object ScalaJSWorkerApi extends mill.define.ExternalModule { - - def scalaJSWorker = T.worker { new ScalaJSWorker() } - lazy val millDiscover = Discover[this.type] -} diff --git a/scalajslib/test/resources/multi-module/shared/src/Utils.scala b/scalajslib/test/resources/multi-module/shared/src/Utils.scala new file mode 100644 index 00000000..fad61a31 --- /dev/null +++ b/scalajslib/test/resources/multi-module/shared/src/Utils.scala @@ -0,0 +1,5 @@ +package shared + +object Utils { + def add(a: Int, b: Int) = a + b +} diff --git 
a/scalajslib/test/resources/multi-module/shared/src/shared/Utils.scala b/scalajslib/test/resources/multi-module/shared/src/shared/Utils.scala deleted file mode 100644 index fad61a31..00000000 --- a/scalajslib/test/resources/multi-module/shared/src/shared/Utils.scala +++ /dev/null @@ -1,5 +0,0 @@ -package shared - -object Utils { - def add(a: Int, b: Int) = a + b -} diff --git a/scalajslib/test/src/HelloJSWorldTests.scala b/scalajslib/test/src/HelloJSWorldTests.scala new file mode 100644 index 00000000..1b3e9bb3 --- /dev/null +++ b/scalajslib/test/src/HelloJSWorldTests.scala @@ -0,0 +1,247 @@ +package mill.scalajslib + +import java.util.jar.JarFile +import mill._ +import mill.define.Discover +import mill.eval.{Evaluator, Result} +import mill.scalalib.{CrossScalaModule, DepSyntax, Lib, PublishModule, TestRunner} +import mill.scalalib.publish.{Developer, License, PomSettings, VersionControl} +import mill.util.{TestEvaluator, TestUtil} +import utest._ + + +import scala.collection.JavaConverters._ +import mill.scalajslib.api._ + +object HelloJSWorldTests extends TestSuite { + val workspacePath = TestUtil.getOutPathStatic() / "hello-js-world" + + trait HelloJSWorldModule extends CrossScalaModule with ScalaJSModule with PublishModule { + override def millSourcePath = workspacePath + def publishVersion = "0.0.1-SNAPSHOT" + override def mainClass = Some("Main") + } + + object HelloJSWorld extends TestUtil.BaseModule { + val matrix = for { + scala <- Seq("2.11.8", "2.12.3", "2.12.4") + scalaJS <- Seq("0.6.22", "1.0.0-M2") + } yield (scala, scalaJS) + + object helloJsWorld extends Cross[BuildModule](matrix:_*) + class BuildModule(val crossScalaVersion: String, sjsVersion0: String) extends HelloJSWorldModule { + override def artifactName = "hello-js-world" + def scalaJSVersion = sjsVersion0 + def pomSettings = PomSettings( + organization = "com.lihaoyi", + description = "hello js world ready for real world publishing", + url = "https://github.com/lihaoyi/hello-world-publish", + licenses = Seq(License.Common.Apache2), + versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), + developers = + Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) + ) + } + + object buildUTest extends Cross[BuildModuleUtest](matrix:_*) + class BuildModuleUtest(crossScalaVersion: String, sjsVersion0: String) + extends BuildModule(crossScalaVersion, sjsVersion0) { + object test extends super.Tests { + override def sources = T.sources{ millSourcePath / 'src / 'utest } + def testFrameworks = Seq("utest.runner.Framework") + override def ivyDeps = Agg( + ivy"com.lihaoyi::utest::0.6.3" + ) + } + } + + object buildScalaTest extends Cross[BuildModuleScalaTest](matrix:_*) + class BuildModuleScalaTest(crossScalaVersion: String, sjsVersion0: String) + extends BuildModule(crossScalaVersion, sjsVersion0) { + object test extends super.Tests { + override def sources = T.sources{ millSourcePath / 'src / 'scalatest } + def testFrameworks = Seq("org.scalatest.tools.Framework") + override def ivyDeps = Agg( + ivy"org.scalatest::scalatest::3.0.4" + ) + } + } + override lazy val millDiscover = Discover[this.type] + } + + val millSourcePath = os.pwd / 'scalajslib / 'test / 'resources / "hello-js-world" + + val helloWorldEvaluator = TestEvaluator.static(HelloJSWorld) + + + val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" + + def tests: Tests = Tests { + prepareWorkspace() + 'compile - { + def testCompileFromScratch(scalaVersion: String, + scalaJSVersion: String): Unit = { + val Right((result, 
evalCount)) = + helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).compile) + + val outPath = result.classes.path + val outputFiles = os.walk(outPath) + val expectedClassfiles = compileClassfiles(outPath) + assert( + outputFiles.toSet == expectedClassfiles, + evalCount > 0 + ) + + // don't recompile if nothing changed + val Right((_, unchangedEvalCount)) = + helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).compile) + assert(unchangedEvalCount == 0) + } + + 'fromScratch_2124_0622 - testCompileFromScratch("2.12.4", "0.6.22") + 'fromScratch_2123_0622 - testCompileFromScratch("2.12.3", "0.6.22") + 'fromScratch_2118_0622 - TestUtil.disableInJava9OrAbove(testCompileFromScratch("2.11.8", "0.6.22")) + 'fromScratch_2124_100M2 - testCompileFromScratch("2.12.4", "1.0.0-M2") + } + + def testRun(scalaVersion: String, + scalaJSVersion: String, + mode: OptimizeMode): Unit = { + val task = mode match { + case FullOpt => HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).fullOpt + case FastOpt => HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).fastOpt + } + val Right((result, evalCount)) = helloWorldEvaluator(task) + val output = ScalaJsUtils.runJS(result.path) + assert(output == "Hello Scala.js") + } + + 'fullOpt - { + 'run_2124_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "0.6.22", FullOpt)) + 'run_2123_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.3", "0.6.22", FullOpt)) + 'run_2118_0622 - TestUtil.disableInJava9OrAbove(testRun("2.11.8", "0.6.22", FullOpt)) + 'run_2124_100M2 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "1.0.0-M2", FullOpt)) + } + 'fastOpt - { + 'run_2124_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "0.6.22", FastOpt)) + 'run_2123_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.3", "0.6.22", FastOpt)) + 'run_2118_0622 - TestUtil.disableInJava9OrAbove(testRun("2.11.8", "0.6.22", FastOpt)) + 'run_2124_100M2 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "1.0.0-M2", FastOpt)) + } + 'jar - { + 'containsSJSIRs - { + val Right((result, evalCount)) = helloWorldEvaluator(HelloJSWorld.helloJsWorld("2.12.4", "0.6.22").jar) + val jar = result.path + val entries = new JarFile(jar.toIO).entries().asScala.map(_.getName) + assert(entries.contains("Main$.sjsir")) + } + } + 'publish - { + def testArtifactId(scalaVersion: String, + scalaJSVersion: String, + artifactId: String): Unit = { + val Right((result, evalCount)) = helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).artifactMetadata) + assert(result.id == artifactId) + } + 'artifactId_0622 - testArtifactId("2.12.4", "0.6.22", "hello-js-world_sjs0.6_2.12") + 'artifactId_100M2 - testArtifactId("2.12.4", "1.0.0-M2", "hello-js-world_sjs1.0.0-M2_2.12") + } + 'test - { + def runTests(testTask: define.Command[(String, Seq[TestRunner.Result])]): Map[String, Map[String, TestRunner.Result]] = { + val Left(Result.Failure(_, Some(res))) = helloWorldEvaluator(testTask) + + val (doneMsg, testResults) = res + testResults + .groupBy(_.fullyQualifiedName) + .mapValues(_.map(e => e.selector -> e).toMap) + } + + def checkUtest(scalaVersion: String, scalaJSVersion: String) = { + val resultMap = runTests(HelloJSWorld.buildUTest(scalaVersion, scalaJSVersion).test.test()) + + val mainTests = resultMap("MainTests") + val argParserTests = resultMap("ArgsParserTests") + + assert( + mainTests.size == 2, + mainTests("MainTests.vmName.containJs").status == "Success", + mainTests("MainTests.vmName.containScala").status == "Success", + + 
argParserTests.size == 2, + argParserTests("ArgsParserTests.one").status == "Success", + argParserTests("ArgsParserTests.two").status == "Failure" + ) + } + + def checkScalaTest(scalaVersion: String, scalaJSVersion: String) = { + val resultMap = runTests(HelloJSWorld.buildScalaTest(scalaVersion, scalaJSVersion).test.test()) + + val mainSpec = resultMap("MainSpec") + val argParserSpec = resultMap("ArgsParserSpec") + + assert( + mainSpec.size == 2, + mainSpec("vmName should contain js").status == "Success", + mainSpec("vmName should contain Scala").status == "Success", + + argParserSpec.size == 2, + argParserSpec("parse should one").status == "Success", + argParserSpec("parse should two").status == "Failure" + ) + } + + 'utest_2118_0622 - TestUtil.disableInJava9OrAbove(checkUtest("2.11.8", "0.6.22")) + 'utest_2124_0622 - checkUtest("2.12.4", "0.6.22") + 'utest_2118_100M2 - TestUtil.disableInJava9OrAbove(checkUtest("2.11.8", "1.0.0-M2")) + 'utest_2124_100M2 - checkUtest("2.12.4", "1.0.0-M2") + + 'scalaTest_2118_0622 - TestUtil.disableInJava9OrAbove(checkScalaTest("2.11.8", "0.6.22")) + 'scalaTest_2124_0622 - checkScalaTest("2.12.4", "0.6.22") +// No scalatest artifact for scala.js 1.0.0-M2 published yet +// 'scalaTest_2118_100M2 - checkScalaTest("2.11.8", "1.0.0-M2") +// 'scalaTest_2124_100M2 - checkScalaTest("2.12.4", "1.0.0-M2") + } + + def checkRun(scalaVersion: String, scalaJSVersion: String): Unit = { + val task = HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).run() + + val Right((_, evalCount)) = helloWorldEvaluator(task) + + val paths = Evaluator.resolveDestPaths( + helloWorldEvaluator.outPath, + task.ctx.segments + ) + val log = os.read(paths.log) + assert( + evalCount > 0, + log.contains("node"), + log.contains("Scala.js") + ) + } + + 'run - { + 'run_2118_0622 - TestUtil.disableInJava9OrAbove(checkRun("2.11.8", "0.6.22")) + 'run_2124_0622 - checkRun("2.12.4", "0.6.22") + 'run_2118_100M2 - TestUtil.disableInJava9OrAbove(checkRun("2.11.8", "1.0.0-M2")) + 'run_2124_100M2 - checkRun("2.12.4", "1.0.0-M2") + } + } + + def compileClassfiles(parentDir: os.Path) = Set( + parentDir / "ArgsParser$.class", + parentDir / "ArgsParser$.sjsir", + parentDir / "ArgsParser.class", + parentDir / "Main.class", + parentDir / "Main$.class", + parentDir / "Main$delayedInit$body.class", + parentDir / "Main$.sjsir", + parentDir / "Main$delayedInit$body.sjsir" + ) + + def prepareWorkspace(): Unit = { + os.remove.all(workspacePath) + os.makeDir.all(workspacePath / os.up) + os.copy(millSourcePath, workspacePath) + } + +} diff --git a/scalajslib/test/src/MultiModuleTests.scala b/scalajslib/test/src/MultiModuleTests.scala new file mode 100644 index 00000000..2ffea2cf --- /dev/null +++ b/scalajslib/test/src/MultiModuleTests.scala @@ -0,0 +1,93 @@ +package mill.scalajslib + +import mill._ +import mill.define.Discover +import mill.eval.Evaluator +import mill.util._ +import mill.scalalib._ +import utest._ +import mill.scalajslib.api._ +object MultiModuleTests extends TestSuite { + val workspacePath = TestUtil.getOutPathStatic() / "multi-module" + val sourcePath = os.pwd / 'scalajslib / 'test / 'resources / "multi-module" + + object MultiModule extends TestUtil.BaseModule { + trait BaseModule extends ScalaJSModule { + def scalaVersion = "2.12.4" + def scalaJSVersion = "0.6.22" + } + + object client extends BaseModule { + override def millSourcePath = workspacePath / 'client + override def moduleDeps = Seq(shared) + override def mainClass = Some("Main") + object test extends Tests { + def testFrameworks = 
Seq("utest.runner.Framework") + override def ivyDeps = Agg(ivy"com.lihaoyi::utest::0.6.3") + } + } + + object shared extends BaseModule { + override def millSourcePath = workspacePath / 'shared + } + + override lazy val millDiscover = Discover[this.type] + } + + val evaluator = TestEvaluator.static(MultiModule) + + def tests: Tests = Tests { + prepareWorkspace() + + def checkOpt(mode: OptimizeMode) = { + val task = mode match { + case FullOpt => MultiModule.client.fullOpt + case FastOpt => MultiModule.client.fastOpt + } + val Right((linked, evalCount)) = evaluator(task) + + val runOutput = ScalaJsUtils.runJS(linked.path) + assert( + evalCount > 0, + runOutput == "Hello from Scala.js, result is: 3" + ) + } + + 'fastOpt - TestUtil.disableInJava9OrAbove(checkOpt(FastOpt)) + 'fullOpt - TestUtil.disableInJava9OrAbove(checkOpt(FullOpt)) + + 'test - { + val Right(((_, testResults), evalCount)) = evaluator(MultiModule.client.test.test()) + + assert( + evalCount > 0, + testResults.size == 3, + testResults.forall(_.status == "Success") + ) + } + + 'run - { + val command = MultiModule.client.run() + + val Right((_, evalCount)) = evaluator(command) + + val paths = Evaluator.resolveDestPaths( + evaluator.outPath, + command.ctx.segments + ) + val log = os.read(paths.log) + assert( + evalCount > 0, + log.contains("node"), + log.contains("Hello from Scala.js, result is: 3") + ) + } + } + + def prepareWorkspace(): Unit = { + os.remove.all(workspacePath) + os.makeDir.all(workspacePath / os.up) + os.copy(sourcePath, workspacePath) + } + +} diff --git a/scalajslib/test/src/NodeJSConfigTests.scala b/scalajslib/test/src/NodeJSConfigTests.scala new file mode 100644 index 00000000..d9e6b45b --- /dev/null +++ b/scalajslib/test/src/NodeJSConfigTests.scala @@ -0,0 +1,103 @@ +package mill.scalajslib + +import mill._ +import mill.define.Discover +import mill.eval.Evaluator +import mill.scalalib.{CrossScalaModule, DepSyntax} +import mill.util.{TestEvaluator, TestUtil} +import utest._ +import mill.scalajslib.api._ + +object NodeJSConfigTests extends TestSuite { + val workspacePath = TestUtil.getOutPathStatic() / "hello-js-world" + val scalaVersion = "2.12.4" + val scalaJSVersion = "0.6.22" + val utestVersion = "0.6.3" + val nodeArgsEmpty = List() + val nodeArgs2G = List("--max-old-space-size=2048") + val nodeArgs4G = List("--max-old-space-size=4096") + + trait HelloJSWorldModule extends CrossScalaModule with ScalaJSModule { + override def millSourcePath = workspacePath + def publishVersion = "0.0.1-SNAPSHOT" + override def mainClass = Some("Main") + } + + object HelloJSWorld extends TestUtil.BaseModule { + val matrix = for { + scala <- Seq(scalaVersion) + nodeArgs <- Seq(nodeArgsEmpty, nodeArgs2G) + } yield (scala, nodeArgs) + + object helloJsWorld extends Cross[BuildModule](matrix:_*) + class BuildModule(val crossScalaVersion: String, nodeArgs: List[String]) extends HelloJSWorldModule { + override def artifactName = "hello-js-world" + def scalaJSVersion = NodeJSConfigTests.scalaJSVersion + override def nodeJSConfig = T { NodeJSConfig(args = nodeArgs) } + } + + object buildUTest extends Cross[BuildModuleUtest](matrix:_*) + class BuildModuleUtest(crossScalaVersion: String, nodeArgs: List[String]) + extends BuildModule(crossScalaVersion, nodeArgs) { + object test extends super.Tests { + override def sources = T.sources{ millSourcePath / 'src / 'utest } + def testFrameworks = Seq("utest.runner.Framework") + override def ivyDeps = Agg( + ivy"com.lihaoyi::utest::$utestVersion" + ) + override def nodeJSConfig = T { 
NodeJSConfig(args = nodeArgs) } + } + } + + override lazy val millDiscover = Discover[this.type] + } + + val millSourcePath = os.pwd / 'scalajslib / 'test / 'resources / "hello-js-world" + + val helloWorldEvaluator = TestEvaluator.static(HelloJSWorld) + + val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" + + def tests: Tests = Tests { + prepareWorkspace() + + def checkLog(command: define.Command[_], nodeArgs: List[String], notNodeArgs: List[String]) = { + helloWorldEvaluator(command) + val paths = Evaluator.resolveDestPaths( + helloWorldEvaluator.outPath, + command.ctx.segments + ) + val log = os.read(paths.log) + assert( + nodeArgs.forall(log.contains), + notNodeArgs.forall(!log.contains(_)) + ) + } + + 'test - { + + def checkUtest(nodeArgs: List[String], notNodeArgs: List[String]) = { + checkLog(HelloJSWorld.buildUTest(scalaVersion, nodeArgs).test.test(), nodeArgs, notNodeArgs) + } + + 'test - checkUtest(nodeArgsEmpty, nodeArgs2G) + 'test2G - checkUtest(nodeArgs2G, nodeArgs4G) + } + + def checkRun(nodeArgs: List[String], notNodeArgs: List[String]): Unit = { + checkLog(HelloJSWorld.helloJsWorld(scalaVersion, nodeArgs).run(), nodeArgs, notNodeArgs) + } + + 'run - { + 'run - checkRun(nodeArgsEmpty, nodeArgs2G) + 'run2G - checkRun(nodeArgs2G, nodeArgs4G) + } + } + + def prepareWorkspace(): Unit = { + os.remove.all(workspacePath) + os.makeDir.all(workspacePath / os.up) + os.copy(millSourcePath, workspacePath) + } + +} diff --git a/scalajslib/test/src/ScalaJsUtils.scala b/scalajslib/test/src/ScalaJsUtils.scala new file mode 100644 index 00000000..68f2f7ee --- /dev/null +++ b/scalajslib/test/src/ScalaJsUtils.scala @@ -0,0 +1,21 @@ +package mill.scalajslib + +import java.io.{FileReader, StringWriter} +import javax.script.{ScriptContext, ScriptEngineManager} + +object ScalaJsUtils { + def runJS(path: os.Path): String = { + val engineManager = new ScriptEngineManager(null) + val engine = engineManager.getEngineByName("nashorn") + val console = new Console + val bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE) + bindings.put("console", console) + engine.eval(new FileReader(path.toIO)) + console.out.toString + } +} + +class Console { + val out = new StringWriter() + def log(s: String): Unit = out.append(s) +} diff --git a/scalajslib/test/src/mill/scalajslib/HelloJSWorldTests.scala b/scalajslib/test/src/mill/scalajslib/HelloJSWorldTests.scala deleted file mode 100644 index 1b3e9bb3..00000000 --- a/scalajslib/test/src/mill/scalajslib/HelloJSWorldTests.scala +++ /dev/null @@ -1,247 +0,0 @@ -package mill.scalajslib - -import java.util.jar.JarFile -import mill._ -import mill.define.Discover -import mill.eval.{Evaluator, Result} -import mill.scalalib.{CrossScalaModule, DepSyntax, Lib, PublishModule, TestRunner} -import mill.scalalib.publish.{Developer, License, PomSettings, VersionControl} -import mill.util.{TestEvaluator, TestUtil} -import utest._ - - -import scala.collection.JavaConverters._ -import mill.scalajslib.api._ - -object HelloJSWorldTests extends TestSuite { - val workspacePath = TestUtil.getOutPathStatic() / "hello-js-world" - - trait HelloJSWorldModule extends CrossScalaModule with ScalaJSModule with PublishModule { - override def millSourcePath = workspacePath - def publishVersion = "0.0.1-SNAPSHOT" - override def mainClass = Some("Main") - } - - object HelloJSWorld extends TestUtil.BaseModule { - val matrix = for { - scala <- Seq("2.11.8", "2.12.3", "2.12.4") - scalaJS <- Seq("0.6.22", "1.0.0-M2") - } yield (scala, scalaJS) - - object helloJsWorld extends 
Cross[BuildModule](matrix:_*) - class BuildModule(val crossScalaVersion: String, sjsVersion0: String) extends HelloJSWorldModule { - override def artifactName = "hello-js-world" - def scalaJSVersion = sjsVersion0 - def pomSettings = PomSettings( - organization = "com.lihaoyi", - description = "hello js world ready for real world publishing", - url = "https://github.com/lihaoyi/hello-world-publish", - licenses = Seq(License.Common.Apache2), - versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), - developers = - Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) - ) - } - - object buildUTest extends Cross[BuildModuleUtest](matrix:_*) - class BuildModuleUtest(crossScalaVersion: String, sjsVersion0: String) - extends BuildModule(crossScalaVersion, sjsVersion0) { - object test extends super.Tests { - override def sources = T.sources{ millSourcePath / 'src / 'utest } - def testFrameworks = Seq("utest.runner.Framework") - override def ivyDeps = Agg( - ivy"com.lihaoyi::utest::0.6.3" - ) - } - } - - object buildScalaTest extends Cross[BuildModuleScalaTest](matrix:_*) - class BuildModuleScalaTest(crossScalaVersion: String, sjsVersion0: String) - extends BuildModule(crossScalaVersion, sjsVersion0) { - object test extends super.Tests { - override def sources = T.sources{ millSourcePath / 'src / 'scalatest } - def testFrameworks = Seq("org.scalatest.tools.Framework") - override def ivyDeps = Agg( - ivy"org.scalatest::scalatest::3.0.4" - ) - } - } - override lazy val millDiscover = Discover[this.type] - } - - val millSourcePath = os.pwd / 'scalajslib / 'test / 'resources / "hello-js-world" - - val helloWorldEvaluator = TestEvaluator.static(HelloJSWorld) - - - val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" - - def tests: Tests = Tests { - prepareWorkspace() - 'compile - { - def testCompileFromScratch(scalaVersion: String, - scalaJSVersion: String): Unit = { - val Right((result, evalCount)) = - helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).compile) - - val outPath = result.classes.path - val outputFiles = os.walk(outPath) - val expectedClassfiles = compileClassfiles(outPath) - assert( - outputFiles.toSet == expectedClassfiles, - evalCount > 0 - ) - - // don't recompile if nothing changed - val Right((_, unchangedEvalCount)) = - helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).compile) - assert(unchangedEvalCount == 0) - } - - 'fromScratch_2124_0622 - testCompileFromScratch("2.12.4", "0.6.22") - 'fromScratch_2123_0622 - testCompileFromScratch("2.12.3", "0.6.22") - 'fromScratch_2118_0622 - TestUtil.disableInJava9OrAbove(testCompileFromScratch("2.11.8", "0.6.22")) - 'fromScratch_2124_100M2 - testCompileFromScratch("2.12.4", "1.0.0-M2") - } - - def testRun(scalaVersion: String, - scalaJSVersion: String, - mode: OptimizeMode): Unit = { - val task = mode match { - case FullOpt => HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).fullOpt - case FastOpt => HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).fastOpt - } - val Right((result, evalCount)) = helloWorldEvaluator(task) - val output = ScalaJsUtils.runJS(result.path) - assert(output == "Hello Scala.js") - } - - 'fullOpt - { - 'run_2124_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "0.6.22", FullOpt)) - 'run_2123_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.3", "0.6.22", FullOpt)) - 'run_2118_0622 - TestUtil.disableInJava9OrAbove(testRun("2.11.8", "0.6.22", FullOpt)) - 'run_2124_100M2 - 
TestUtil.disableInJava9OrAbove(testRun("2.12.4", "1.0.0-M2", FullOpt)) - } - 'fastOpt - { - 'run_2124_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "0.6.22", FastOpt)) - 'run_2123_0622 - TestUtil.disableInJava9OrAbove(testRun("2.12.3", "0.6.22", FastOpt)) - 'run_2118_0622 - TestUtil.disableInJava9OrAbove(testRun("2.11.8", "0.6.22", FastOpt)) - 'run_2124_100M2 - TestUtil.disableInJava9OrAbove(testRun("2.12.4", "1.0.0-M2", FastOpt)) - } - 'jar - { - 'containsSJSIRs - { - val Right((result, evalCount)) = helloWorldEvaluator(HelloJSWorld.helloJsWorld("2.12.4", "0.6.22").jar) - val jar = result.path - val entries = new JarFile(jar.toIO).entries().asScala.map(_.getName) - assert(entries.contains("Main$.sjsir")) - } - } - 'publish - { - def testArtifactId(scalaVersion: String, - scalaJSVersion: String, - artifactId: String): Unit = { - val Right((result, evalCount)) = helloWorldEvaluator(HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).artifactMetadata) - assert(result.id == artifactId) - } - 'artifactId_0622 - testArtifactId("2.12.4", "0.6.22", "hello-js-world_sjs0.6_2.12") - 'artifactId_100M2 - testArtifactId("2.12.4", "1.0.0-M2", "hello-js-world_sjs1.0.0-M2_2.12") - } - 'test - { - def runTests(testTask: define.Command[(String, Seq[TestRunner.Result])]): Map[String, Map[String, TestRunner.Result]] = { - val Left(Result.Failure(_, Some(res))) = helloWorldEvaluator(testTask) - - val (doneMsg, testResults) = res - testResults - .groupBy(_.fullyQualifiedName) - .mapValues(_.map(e => e.selector -> e).toMap) - } - - def checkUtest(scalaVersion: String, scalaJSVersion: String) = { - val resultMap = runTests(HelloJSWorld.buildUTest(scalaVersion, scalaJSVersion).test.test()) - - val mainTests = resultMap("MainTests") - val argParserTests = resultMap("ArgsParserTests") - - assert( - mainTests.size == 2, - mainTests("MainTests.vmName.containJs").status == "Success", - mainTests("MainTests.vmName.containScala").status == "Success", - - argParserTests.size == 2, - argParserTests("ArgsParserTests.one").status == "Success", - argParserTests("ArgsParserTests.two").status == "Failure" - ) - } - - def checkScalaTest(scalaVersion: String, scalaJSVersion: String) = { - val resultMap = runTests(HelloJSWorld.buildScalaTest(scalaVersion, scalaJSVersion).test.test()) - - val mainSpec = resultMap("MainSpec") - val argParserSpec = resultMap("ArgsParserSpec") - - assert( - mainSpec.size == 2, - mainSpec("vmName should contain js").status == "Success", - mainSpec("vmName should contain Scala").status == "Success", - - argParserSpec.size == 2, - argParserSpec("parse should one").status == "Success", - argParserSpec("parse should two").status == "Failure" - ) - } - - 'utest_2118_0622 - TestUtil.disableInJava9OrAbove(checkUtest("2.11.8", "0.6.22")) - 'utest_2124_0622 - checkUtest("2.12.4", "0.6.22") - 'utest_2118_100M2 - TestUtil.disableInJava9OrAbove(checkUtest("2.11.8", "1.0.0-M2")) - 'utest_2124_100M2 - checkUtest("2.12.4", "1.0.0-M2") - - 'scalaTest_2118_0622 - TestUtil.disableInJava9OrAbove(checkScalaTest("2.11.8", "0.6.22")) - 'scalaTest_2124_0622 - checkScalaTest("2.12.4", "0.6.22") -// No scalatest artifact for scala.js 1.0.0-M2 published yet -// 'scalaTest_2118_100M2 - checkScalaTest("2.11.8", "1.0.0-M2") -// 'scalaTest_2124_100M2 - checkScalaTest("2.12.4", "1.0.0-M2") - } - - def checkRun(scalaVersion: String, scalaJSVersion: String): Unit = { - val task = HelloJSWorld.helloJsWorld(scalaVersion, scalaJSVersion).run() - - val Right((_, evalCount)) = helloWorldEvaluator(task) - - val paths = 
Evaluator.resolveDestPaths( - helloWorldEvaluator.outPath, - task.ctx.segments - ) - val log = os.read(paths.log) - assert( - evalCount > 0, - log.contains("node"), - log.contains("Scala.js") - ) - } - - 'run - { - 'run_2118_0622 - TestUtil.disableInJava9OrAbove(checkRun("2.11.8", "0.6.22")) - 'run_2124_0622 - checkRun("2.12.4", "0.6.22") - 'run_2118_100M2 - TestUtil.disableInJava9OrAbove(checkRun("2.11.8", "1.0.0-M2")) - 'run_2124_100M2 - checkRun("2.12.4", "1.0.0-M2") - } - } - - def compileClassfiles(parentDir: os.Path) = Set( - parentDir / "ArgsParser$.class", - parentDir / "ArgsParser$.sjsir", - parentDir / "ArgsParser.class", - parentDir / "Main.class", - parentDir / "Main$.class", - parentDir / "Main$delayedInit$body.class", - parentDir / "Main$.sjsir", - parentDir / "Main$delayedInit$body.sjsir" - ) - - def prepareWorkspace(): Unit = { - os.remove.all(workspacePath) - os.makeDir.all(workspacePath / os.up) - os.copy(millSourcePath, workspacePath) - } - -} diff --git a/scalajslib/test/src/mill/scalajslib/MultiModuleTests.scala b/scalajslib/test/src/mill/scalajslib/MultiModuleTests.scala deleted file mode 100644 index 2ffea2cf..00000000 --- a/scalajslib/test/src/mill/scalajslib/MultiModuleTests.scala +++ /dev/null @@ -1,93 +0,0 @@ -package mill.scalajslib - -import mill._ -import mill.define.Discover -import mill.eval.Evaluator -import mill.util._ -import mill.scalalib._ -import utest._ -import mill.scalajslib.api._ -object MultiModuleTests extends TestSuite { - val workspacePath = TestUtil.getOutPathStatic() / "multi-module" - val sourcePath = os.pwd / 'scalajslib / 'test / 'resources / "multi-module" - - object MultiModule extends TestUtil.BaseModule { - trait BaseModule extends ScalaJSModule { - def scalaVersion = "2.12.4" - def scalaJSVersion = "0.6.22" - } - - object client extends BaseModule { - override def millSourcePath = workspacePath / 'client - override def moduleDeps = Seq(shared) - override def mainClass = Some("Main") - object test extends Tests { - def testFrameworks = Seq("utest.runner.Framework") - override def ivyDeps = Agg(ivy"com.lihaoyi::utest::0.6.3") - } - } - - object shared extends BaseModule { - override def millSourcePath = workspacePath / 'shared - } - - override lazy val millDiscover = Discover[this.type] - } - - val evaluator = TestEvaluator.static(MultiModule) - - def tests: Tests = Tests { - prepareWorkspace() - - def checkOpt(mode: OptimizeMode) = { - val task = mode match { - case FullOpt => MultiModule.client.fullOpt - case FastOpt => MultiModule.client.fastOpt - } - val Right((linked, evalCount)) = evaluator(task) - - val runOutput = ScalaJsUtils.runJS(linked.path) - assert( - evalCount > 0, - runOutput == "Hello from Scala.js, result is: 3" - ) - } - - 'fastOpt - TestUtil.disableInJava9OrAbove(checkOpt(FastOpt)) - 'fullOpt - TestUtil.disableInJava9OrAbove(checkOpt(FullOpt)) - - 'test - { - val Right(((_, testResults), evalCount)) = evaluator(MultiModule.client.test.test()) - - assert( - evalCount > 0, - testResults.size == 3, - testResults.forall(_.status == "Success") - ) - } - - 'run - { - val command = MultiModule.client.run() - - val Right((_, evalCount)) = evaluator(command) - - val paths = Evaluator.resolveDestPaths( - evaluator.outPath, - command.ctx.segments - ) - val log = os.read(paths.log) - assert( - evalCount > 0, - log.contains("node"), - log.contains("Hello from Scala.js, result is: 3") - ) - } - } - - def prepareWorkspace(): Unit = { - os.remove.all(workspacePath) - os.makeDir.all(workspacePath / os.up) - os.copy(sourcePath, 
workspacePath) - } - -} diff --git a/scalajslib/test/src/mill/scalajslib/NodeJSConfigTests.scala b/scalajslib/test/src/mill/scalajslib/NodeJSConfigTests.scala deleted file mode 100644 index d9e6b45b..00000000 --- a/scalajslib/test/src/mill/scalajslib/NodeJSConfigTests.scala +++ /dev/null @@ -1,103 +0,0 @@ -package mill.scalajslib - -import mill._ -import mill.define.Discover -import mill.eval.Evaluator -import mill.scalalib.{CrossScalaModule, DepSyntax} -import mill.util.{TestEvaluator, TestUtil} -import utest._ -import mill.scalajslib.api._ - -object NodeJSConfigTests extends TestSuite { - val workspacePath = TestUtil.getOutPathStatic() / "hello-js-world" - val scalaVersion = "2.12.4" - val scalaJSVersion = "0.6.22" - val utestVersion = "0.6.3" - val nodeArgsEmpty = List() - val nodeArgs2G = List("--max-old-space-size=2048") - val nodeArgs4G = List("--max-old-space-size=4096") - - trait HelloJSWorldModule extends CrossScalaModule with ScalaJSModule { - override def millSourcePath = workspacePath - def publishVersion = "0.0.1-SNAPSHOT" - override def mainClass = Some("Main") - } - - object HelloJSWorld extends TestUtil.BaseModule { - val matrix = for { - scala <- Seq(scalaVersion) - nodeArgs <- Seq(nodeArgsEmpty, nodeArgs2G) - } yield (scala, nodeArgs) - - object helloJsWorld extends Cross[BuildModule](matrix:_*) - class BuildModule(val crossScalaVersion: String, nodeArgs: List[String]) extends HelloJSWorldModule { - override def artifactName = "hello-js-world" - def scalaJSVersion = NodeJSConfigTests.scalaJSVersion - override def nodeJSConfig = T { NodeJSConfig(args = nodeArgs) } - } - - object buildUTest extends Cross[BuildModuleUtest](matrix:_*) - class BuildModuleUtest(crossScalaVersion: String, nodeArgs: List[String]) - extends BuildModule(crossScalaVersion, nodeArgs) { - object test extends super.Tests { - override def sources = T.sources{ millSourcePath / 'src / 'utest } - def testFrameworks = Seq("utest.runner.Framework") - override def ivyDeps = Agg( - ivy"com.lihaoyi::utest::$utestVersion" - ) - override def nodeJSConfig = T { NodeJSConfig(args = nodeArgs) } - } - } - - override lazy val millDiscover = Discover[this.type] - } - - val millSourcePath = os.pwd / 'scalajslib / 'test / 'resources / "hello-js-world" - - val helloWorldEvaluator = TestEvaluator.static(HelloJSWorld) - - val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" - - def tests: Tests = Tests { - prepareWorkspace() - - def checkLog(command: define.Command[_], nodeArgs: List[String], notNodeArgs: List[String]) = { - helloWorldEvaluator(command) - val paths = Evaluator.resolveDestPaths( - helloWorldEvaluator.outPath, - command.ctx.segments - ) - val log = os.read(paths.log) - assert( - nodeArgs.forall(log.contains), - notNodeArgs.forall(!log.contains(_)) - ) - } - - 'test - { - - def checkUtest(nodeArgs: List[String], notNodeArgs: List[String]) = { - checkLog(HelloJSWorld.buildUTest(scalaVersion, nodeArgs).test.test(), nodeArgs, notNodeArgs) - } - - 'test - checkUtest(nodeArgsEmpty, nodeArgs2G) - 'test2G - checkUtest(nodeArgs2G, nodeArgs4G) - } - - def checkRun(nodeArgs: List[String], notNodeArgs: List[String]): Unit = { - checkLog(HelloJSWorld.helloJsWorld(scalaVersion, nodeArgs).run(), nodeArgs, notNodeArgs) - } - - 'run - { - 'run - checkRun(nodeArgsEmpty, nodeArgs2G) - 'run2G - checkRun(nodeArgs2G, nodeArgs4G) - } - } - - def prepareWorkspace(): Unit = { - os.remove.all(workspacePath) - os.makeDir.all(workspacePath / os.up) - os.copy(millSourcePath, workspacePath) - } - -} diff --git 
a/scalajslib/test/src/mill/scalajslib/ScalaJsUtils.scala b/scalajslib/test/src/mill/scalajslib/ScalaJsUtils.scala deleted file mode 100644 index 68f2f7ee..00000000 --- a/scalajslib/test/src/mill/scalajslib/ScalaJsUtils.scala +++ /dev/null @@ -1,21 +0,0 @@ -package mill.scalajslib - -import java.io.{FileReader, StringWriter} -import javax.script.{ScriptContext, ScriptEngineManager} - -object ScalaJsUtils { - def runJS(path: os.Path): String = { - val engineManager = new ScriptEngineManager(null) - val engine = engineManager.getEngineByName("nashorn") - val console = new Console - val bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE) - bindings.put("console", console) - engine.eval(new FileReader(path.toIO)) - console.out.toString - } -} - -class Console { - val out = new StringWriter() - def log(s: String): Unit = out.append(s) -} diff --git a/scalajslib/worker/0.6/src/ScalaJSWorkerImpl.scala b/scalajslib/worker/0.6/src/ScalaJSWorkerImpl.scala new file mode 100644 index 00000000..64b668e5 --- /dev/null +++ b/scalajslib/worker/0.6/src/ScalaJSWorkerImpl.scala @@ -0,0 +1,87 @@ +package mill +package scalajslib +package worker + +import java.io.File + +import mill.api.Result +import org.scalajs.core.tools.io.IRFileCache.IRContainer +import org.scalajs.core.tools.io._ +import org.scalajs.core.tools.jsdep.ResolvedJSDependency +import org.scalajs.core.tools.linker.{ModuleInitializer, StandardLinker, Semantics, ModuleKind => ScalaJSModuleKind} +import org.scalajs.core.tools.logging.ScalaConsoleLogger +import org.scalajs.jsenv._ +import org.scalajs.jsenv.nodejs._ +import org.scalajs.testadapter.TestAdapter +import mill.scalajslib.api.{ModuleKind, NodeJSConfig} +class ScalaJSWorkerImpl extends mill.scalajslib.api.ScalaJSWorkerApi { + def link(sources: Array[File], + libraries: Array[File], + dest: File, + main: String, + fullOpt: Boolean, + moduleKind: ModuleKind) = { + val semantics = fullOpt match { + case true => Semantics.Defaults.optimized + case false => Semantics.Defaults + } + val scalaJSModuleKind = moduleKind match { + case ModuleKind.NoModule => ScalaJSModuleKind.NoModule + case ModuleKind.CommonJSModule => ScalaJSModuleKind.CommonJSModule + } + val config = StandardLinker.Config() + .withOptimizer(fullOpt) + .withClosureCompilerIfAvailable(fullOpt) + .withSemantics(semantics) + .withModuleKind(scalaJSModuleKind) + val linker = StandardLinker(config) + val sourceSJSIRs = sources.map(new FileVirtualScalaJSIRFile(_)) + val jars = libraries.map(jar => IRContainer.Jar(new FileVirtualBinaryFile(jar) with VirtualJarFile)) + val jarSJSIRs = jars.flatMap(_.jar.sjsirFiles) + val destFile = AtomicWritableFileVirtualJSFile(dest) + val logger = new ScalaConsoleLogger + val initializer = Option(main).map { cls => ModuleInitializer.mainMethodWithArgs(cls, "main") } + try { + linker.link(sourceSJSIRs ++ jarSJSIRs, initializer.toSeq, destFile, logger) + Result.Success(dest) + }catch {case e: org.scalajs.core.tools.linker.LinkingException => + Result.Failure(e.getMessage) + } + } + + def run(config: NodeJSConfig, linkedFile: File): Unit = { + nodeJSEnv(config) + .jsRunner(FileVirtualJSFile(linkedFile)) + .run(new ScalaConsoleLogger, ConsoleJSConsole) + } + + def getFramework(config: NodeJSConfig, + frameworkName: String, + linkedFile: File): (() => Unit, sbt.testing.Framework) = { + val env = nodeJSEnv(config).loadLibs( + Seq(ResolvedJSDependency.minimal(new FileVirtualJSFile(linkedFile))) + ) + + val tconfig = TestAdapter.Config().withLogger(new ScalaConsoleLogger) + val adapter = + new 
TestAdapter(env, tconfig) + + ( + () => adapter.close(), + adapter + .loadFrameworks(List(List(frameworkName))) + .flatten + .headOption + .getOrElse(throw new RuntimeException("Failed to get framework")) + ) + } + + def nodeJSEnv(config: NodeJSConfig): NodeJSEnv = { + new NodeJSEnv( + NodeJSEnv.Config() + .withExecutable(config.executable) + .withArgs(config.args) + .withEnv(config.env) + .withSourceMap(config.sourceMap)) + } +} diff --git a/scalajslib/worker/0.6/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala b/scalajslib/worker/0.6/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala deleted file mode 100644 index 64b668e5..00000000 --- a/scalajslib/worker/0.6/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala +++ /dev/null @@ -1,87 +0,0 @@ -package mill -package scalajslib -package worker - -import java.io.File - -import mill.api.Result -import org.scalajs.core.tools.io.IRFileCache.IRContainer -import org.scalajs.core.tools.io._ -import org.scalajs.core.tools.jsdep.ResolvedJSDependency -import org.scalajs.core.tools.linker.{ModuleInitializer, StandardLinker, Semantics, ModuleKind => ScalaJSModuleKind} -import org.scalajs.core.tools.logging.ScalaConsoleLogger -import org.scalajs.jsenv._ -import org.scalajs.jsenv.nodejs._ -import org.scalajs.testadapter.TestAdapter -import mill.scalajslib.api.{ModuleKind, NodeJSConfig} -class ScalaJSWorkerImpl extends mill.scalajslib.api.ScalaJSWorkerApi { - def link(sources: Array[File], - libraries: Array[File], - dest: File, - main: String, - fullOpt: Boolean, - moduleKind: ModuleKind) = { - val semantics = fullOpt match { - case true => Semantics.Defaults.optimized - case false => Semantics.Defaults - } - val scalaJSModuleKind = moduleKind match { - case ModuleKind.NoModule => ScalaJSModuleKind.NoModule - case ModuleKind.CommonJSModule => ScalaJSModuleKind.CommonJSModule - } - val config = StandardLinker.Config() - .withOptimizer(fullOpt) - .withClosureCompilerIfAvailable(fullOpt) - .withSemantics(semantics) - .withModuleKind(scalaJSModuleKind) - val linker = StandardLinker(config) - val sourceSJSIRs = sources.map(new FileVirtualScalaJSIRFile(_)) - val jars = libraries.map(jar => IRContainer.Jar(new FileVirtualBinaryFile(jar) with VirtualJarFile)) - val jarSJSIRs = jars.flatMap(_.jar.sjsirFiles) - val destFile = AtomicWritableFileVirtualJSFile(dest) - val logger = new ScalaConsoleLogger - val initializer = Option(main).map { cls => ModuleInitializer.mainMethodWithArgs(cls, "main") } - try { - linker.link(sourceSJSIRs ++ jarSJSIRs, initializer.toSeq, destFile, logger) - Result.Success(dest) - }catch {case e: org.scalajs.core.tools.linker.LinkingException => - Result.Failure(e.getMessage) - } - } - - def run(config: NodeJSConfig, linkedFile: File): Unit = { - nodeJSEnv(config) - .jsRunner(FileVirtualJSFile(linkedFile)) - .run(new ScalaConsoleLogger, ConsoleJSConsole) - } - - def getFramework(config: NodeJSConfig, - frameworkName: String, - linkedFile: File): (() => Unit, sbt.testing.Framework) = { - val env = nodeJSEnv(config).loadLibs( - Seq(ResolvedJSDependency.minimal(new FileVirtualJSFile(linkedFile))) - ) - - val tconfig = TestAdapter.Config().withLogger(new ScalaConsoleLogger) - val adapter = - new TestAdapter(env, tconfig) - - ( - () => adapter.close(), - adapter - .loadFrameworks(List(List(frameworkName))) - .flatten - .headOption - .getOrElse(throw new RuntimeException("Failed to get framework")) - ) - } - - def nodeJSEnv(config: NodeJSConfig): NodeJSEnv = { - new NodeJSEnv( - NodeJSEnv.Config() - .withExecutable(config.executable) - 
.withArgs(config.args) - .withEnv(config.env) - .withSourceMap(config.sourceMap)) - } -} diff --git a/scalajslib/worker/1.0/src/ScalaJSWorkerImpl.scala b/scalajslib/worker/1.0/src/ScalaJSWorkerImpl.scala new file mode 100644 index 00000000..f40f7a6b --- /dev/null +++ b/scalajslib/worker/1.0/src/ScalaJSWorkerImpl.scala @@ -0,0 +1,85 @@ +package mill +package scalajslib +package worker + +import java.io.File +import mill.scalajslib.api.{ModuleKind, NodeJSConfig} +import mill.api.Result +import org.scalajs.core.tools.io._ +import org.scalajs.core.tools.linker.{ModuleInitializer, Semantics, StandardLinker, ModuleKind => ScalaJSModuleKind} +import org.scalajs.core.tools.logging.ScalaConsoleLogger +import org.scalajs.jsenv.ConsoleJSConsole +import org.scalajs.jsenv.nodejs._ +import org.scalajs.testadapter.TestAdapter + +class ScalaJSWorkerImpl extends mill.scalajslib.api.ScalaJSWorkerApi { + def link(sources: Array[File], + libraries: Array[File], + dest: File, + main: String, + fullOpt: Boolean, + moduleKind: ModuleKind) = { + val semantics = fullOpt match { + case true => Semantics.Defaults.optimized + case false => Semantics.Defaults + } + val scalaJSModuleKind = moduleKind match { + case ModuleKind.NoModule => ScalaJSModuleKind.NoModule + case ModuleKind.CommonJSModule => ScalaJSModuleKind.CommonJSModule + } + val config = StandardLinker.Config() + .withOptimizer(fullOpt) + .withClosureCompilerIfAvailable(fullOpt) + .withSemantics(semantics) + .withModuleKind(scalaJSModuleKind) + val linker = StandardLinker(config) + val cache = new IRFileCache().newCache + val sourceIRs = sources.map(FileVirtualScalaJSIRFile) + val irContainers = FileScalaJSIRContainer.fromClasspath(libraries) + val libraryIRs = cache.cached(irContainers) + val destFile = AtomicWritableFileVirtualJSFile(dest) + val logger = new ScalaConsoleLogger + val initializer = Option(main).map { cls => ModuleInitializer.mainMethodWithArgs(cls, "main") } + + try { + linker.link(sourceIRs ++ libraryIRs, initializer.toSeq, destFile, logger) + Result.Success(dest) + }catch {case e: org.scalajs.core.tools.linker.LinkingException => + Result.Failure(e.getMessage) + } + } + + def run(config: NodeJSConfig, linkedFile: File): Unit = { + nodeJSEnv(config) + .jsRunner(Seq(FileVirtualJSFile(linkedFile))) + .run(new ScalaConsoleLogger, ConsoleJSConsole) + } + + def getFramework(config: NodeJSConfig, + frameworkName: String, + linkedFile: File): (() => Unit, sbt.testing.Framework) = { + val env = nodeJSEnv(config) + val tconfig = TestAdapter.Config().withLogger(new ScalaConsoleLogger) + + val adapter = + new TestAdapter(env, Seq(FileVirtualJSFile(linkedFile)), tconfig) + + ( + () => adapter.close(), + adapter + .loadFrameworks(List(List(frameworkName))) + .flatten + .headOption + .getOrElse(throw new RuntimeException("Failed to get framework")) + ) + } + + def nodeJSEnv(config: NodeJSConfig): NodeJSEnv = { + new NodeJSEnv( + NodeJSEnv.Config() + .withExecutable(config.executable) + .withArgs(config.args) + .withEnv(config.env) + .withSourceMap(config.sourceMap)) + } +} diff --git a/scalajslib/worker/1.0/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala b/scalajslib/worker/1.0/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala deleted file mode 100644 index f40f7a6b..00000000 --- a/scalajslib/worker/1.0/src/mill/scalajslib/worker/ScalaJSWorkerImpl.scala +++ /dev/null @@ -1,85 +0,0 @@ -package mill -package scalajslib -package worker - -import java.io.File -import mill.scalajslib.api.{ModuleKind, NodeJSConfig} -import mill.api.Result -import 
org.scalajs.core.tools.io._ -import org.scalajs.core.tools.linker.{ModuleInitializer, Semantics, StandardLinker, ModuleKind => ScalaJSModuleKind} -import org.scalajs.core.tools.logging.ScalaConsoleLogger -import org.scalajs.jsenv.ConsoleJSConsole -import org.scalajs.jsenv.nodejs._ -import org.scalajs.testadapter.TestAdapter - -class ScalaJSWorkerImpl extends mill.scalajslib.api.ScalaJSWorkerApi { - def link(sources: Array[File], - libraries: Array[File], - dest: File, - main: String, - fullOpt: Boolean, - moduleKind: ModuleKind) = { - val semantics = fullOpt match { - case true => Semantics.Defaults.optimized - case false => Semantics.Defaults - } - val scalaJSModuleKind = moduleKind match { - case ModuleKind.NoModule => ScalaJSModuleKind.NoModule - case ModuleKind.CommonJSModule => ScalaJSModuleKind.CommonJSModule - } - val config = StandardLinker.Config() - .withOptimizer(fullOpt) - .withClosureCompilerIfAvailable(fullOpt) - .withSemantics(semantics) - .withModuleKind(scalaJSModuleKind) - val linker = StandardLinker(config) - val cache = new IRFileCache().newCache - val sourceIRs = sources.map(FileVirtualScalaJSIRFile) - val irContainers = FileScalaJSIRContainer.fromClasspath(libraries) - val libraryIRs = cache.cached(irContainers) - val destFile = AtomicWritableFileVirtualJSFile(dest) - val logger = new ScalaConsoleLogger - val initializer = Option(main).map { cls => ModuleInitializer.mainMethodWithArgs(cls, "main") } - - try { - linker.link(sourceIRs ++ libraryIRs, initializer.toSeq, destFile, logger) - Result.Success(dest) - }catch {case e: org.scalajs.core.tools.linker.LinkingException => - Result.Failure(e.getMessage) - } - } - - def run(config: NodeJSConfig, linkedFile: File): Unit = { - nodeJSEnv(config) - .jsRunner(Seq(FileVirtualJSFile(linkedFile))) - .run(new ScalaConsoleLogger, ConsoleJSConsole) - } - - def getFramework(config: NodeJSConfig, - frameworkName: String, - linkedFile: File): (() => Unit, sbt.testing.Framework) = { - val env = nodeJSEnv(config) - val tconfig = TestAdapter.Config().withLogger(new ScalaConsoleLogger) - - val adapter = - new TestAdapter(env, Seq(FileVirtualJSFile(linkedFile)), tconfig) - - ( - () => adapter.close(), - adapter - .loadFrameworks(List(List(frameworkName))) - .flatten - .headOption - .getOrElse(throw new RuntimeException("Failed to get framework")) - ) - } - - def nodeJSEnv(config: NodeJSConfig): NodeJSEnv = { - new NodeJSEnv( - NodeJSEnv.Config() - .withExecutable(config.executable) - .withArgs(config.args) - .withEnv(config.env) - .withSourceMap(config.sourceMap)) - } -} diff --git a/scalalib/api/src/ZincWorkerApi.scala b/scalalib/api/src/ZincWorkerApi.scala new file mode 100644 index 00000000..c5230ec5 --- /dev/null +++ b/scalalib/api/src/ZincWorkerApi.scala @@ -0,0 +1,76 @@ +package mill.scalalib.api + +import mill.api.Loose.Agg +import mill.api.PathRef +import mill.api.JsonFormatters._ + +trait ZincWorkerApi { + /** Compile a Java-only project */ + def compileJava(upstreamCompileOutput: Seq[CompilationResult], + sources: Agg[os.Path], + compileClasspath: Agg[os.Path], + javacOptions: Seq[String]) + (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] + + /** Compile a mixed Scala/Java or Scala-only project */ + def compileMixed(upstreamCompileOutput: Seq[CompilationResult], + sources: Agg[os.Path], + compileClasspath: Agg[os.Path], + javacOptions: Seq[String], + scalaVersion: String, + scalacOptions: Seq[String], + compilerBridgeSources: os.Path, + compilerClasspath: Agg[os.Path], + scalacPluginClasspath: Agg[os.Path]) + 
(implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] + + def discoverMainClasses(compilationResult: CompilationResult) + (implicit ctx: mill.api.Ctx): Seq[String] + + def docJar(scalaVersion: String, + compilerBridgeSources: os.Path, + compilerClasspath: Agg[os.Path], + scalacPluginClasspath: Agg[os.Path], + args: Seq[String]) + (implicit ctx: mill.api.Ctx): Boolean +} + + +object CompilationResult { + implicit val jsonFormatter: upickle.default.ReadWriter[CompilationResult] = upickle.default.macroRW +} + +// analysisFile is represented by os.Path, so we won't break caches after file changes +case class CompilationResult(analysisFile: os.Path, classes: PathRef) + +object Util{ + def isDotty(scalaVersion: String) = + scalaVersion.startsWith("0.") + + + def grepJar(classPath: Agg[os.Path], name: String, version: String, sources: Boolean = false) = { + val suffix = if (sources) "-sources" else "" + val mavenStylePath = s"$name-$version$suffix.jar" + val ivyStylePath = { + val dir = if (sources) "srcs" else "jars" + s"$version/$dir/$name$suffix.jar" + } + + classPath + .find(p => p.toString.endsWith(mavenStylePath) || p.toString.endsWith(ivyStylePath)) + .getOrElse(throw new Exception(s"Cannot find $mavenStylePath or $ivyStylePath")) + } + + private val ReleaseVersion = raw"""(\d+)\.(\d+)\.(\d+)""".r + private val MinorSnapshotVersion = raw"""(\d+)\.(\d+)\.([1-9]\d*)-SNAPSHOT""".r + private val DottyVersion = raw"""0\.(\d+)\.(\d+).*""".r + + def scalaBinaryVersion(scalaVersion: String) = { + scalaVersion match { + case ReleaseVersion(major, minor, _) => s"$major.$minor" + case MinorSnapshotVersion(major, minor, _) => s"$major.$minor" + case DottyVersion(minor, _) => s"0.$minor" + case _ => scalaVersion + } + } +} \ No newline at end of file diff --git a/scalalib/api/src/mill/scalalib/api/ZincWorkerApi.scala b/scalalib/api/src/mill/scalalib/api/ZincWorkerApi.scala deleted file mode 100644 index c5230ec5..00000000 --- a/scalalib/api/src/mill/scalalib/api/ZincWorkerApi.scala +++ /dev/null @@ -1,76 +0,0 @@ -package mill.scalalib.api - -import mill.api.Loose.Agg -import mill.api.PathRef -import mill.api.JsonFormatters._ - -trait ZincWorkerApi { - /** Compile a Java-only project */ - def compileJava(upstreamCompileOutput: Seq[CompilationResult], - sources: Agg[os.Path], - compileClasspath: Agg[os.Path], - javacOptions: Seq[String]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] - - /** Compile a mixed Scala/Java or Scala-only project */ - def compileMixed(upstreamCompileOutput: Seq[CompilationResult], - sources: Agg[os.Path], - compileClasspath: Agg[os.Path], - javacOptions: Seq[String], - scalaVersion: String, - scalacOptions: Seq[String], - compilerBridgeSources: os.Path, - compilerClasspath: Agg[os.Path], - scalacPluginClasspath: Agg[os.Path]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] - - def discoverMainClasses(compilationResult: CompilationResult) - (implicit ctx: mill.api.Ctx): Seq[String] - - def docJar(scalaVersion: String, - compilerBridgeSources: os.Path, - compilerClasspath: Agg[os.Path], - scalacPluginClasspath: Agg[os.Path], - args: Seq[String]) - (implicit ctx: mill.api.Ctx): Boolean -} - - -object CompilationResult { - implicit val jsonFormatter: upickle.default.ReadWriter[CompilationResult] = upickle.default.macroRW -} - -// analysisFile is represented by os.Path, so we won't break caches after file changes -case class CompilationResult(analysisFile: os.Path, classes: PathRef) - -object Util{ - def isDotty(scalaVersion: String) 
= - scalaVersion.startsWith("0.") - - - def grepJar(classPath: Agg[os.Path], name: String, version: String, sources: Boolean = false) = { - val suffix = if (sources) "-sources" else "" - val mavenStylePath = s"$name-$version$suffix.jar" - val ivyStylePath = { - val dir = if (sources) "srcs" else "jars" - s"$version/$dir/$name$suffix.jar" - } - - classPath - .find(p => p.toString.endsWith(mavenStylePath) || p.toString.endsWith(ivyStylePath)) - .getOrElse(throw new Exception(s"Cannot find $mavenStylePath or $ivyStylePath")) - } - - private val ReleaseVersion = raw"""(\d+)\.(\d+)\.(\d+)""".r - private val MinorSnapshotVersion = raw"""(\d+)\.(\d+)\.([1-9]\d*)-SNAPSHOT""".r - private val DottyVersion = raw"""0\.(\d+)\.(\d+).*""".r - - def scalaBinaryVersion(scalaVersion: String) = { - scalaVersion match { - case ReleaseVersion(major, minor, _) => s"$major.$minor" - case MinorSnapshotVersion(major, minor, _) => s"$major.$minor" - case DottyVersion(minor, _) => s"0.$minor" - case _ => scalaVersion - } - } -} \ No newline at end of file diff --git a/scalalib/backgroundwrapper/src/BackgroundWrapper.java b/scalalib/backgroundwrapper/src/BackgroundWrapper.java new file mode 100644 index 00000000..02ee23eb --- /dev/null +++ b/scalalib/backgroundwrapper/src/BackgroundWrapper.java @@ -0,0 +1,40 @@ +package mill.scalalib.backgroundwrapper; + +public class BackgroundWrapper { + public static void main(String[] args) throws Exception{ + String watched = args[0]; + String tombstone = args[1]; + String expected = args[2]; + Thread watcher = new Thread(new Runnable() { + @Override + public void run() { + while (true) { + try{ + Thread.sleep(50); + String token = new String( + java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(watched)) + ); + if (!token.equals(expected)) { + new java.io.File(tombstone).createNewFile(); + System.exit(0); + } + }catch(Exception e){ + try { + new java.io.File(tombstone).createNewFile(); + }catch(Exception e2){} + System.exit(0); + } + + } + } + }); + watcher.setDaemon(true); + watcher.start(); + String realMain = args[3]; + String[] realArgs = new String[args.length - 4]; + for(int i = 0; i < args.length-4; i++){ + realArgs[i] = args[i+4]; + } + Class.forName(realMain).getMethod("main", String[].class).invoke(null, (Object)realArgs); + } +} diff --git a/scalalib/backgroundwrapper/src/mill/scalalib/backgroundwrapper/BackgroundWrapper.java b/scalalib/backgroundwrapper/src/mill/scalalib/backgroundwrapper/BackgroundWrapper.java deleted file mode 100644 index 02ee23eb..00000000 --- a/scalalib/backgroundwrapper/src/mill/scalalib/backgroundwrapper/BackgroundWrapper.java +++ /dev/null @@ -1,40 +0,0 @@ -package mill.scalalib.backgroundwrapper; - -public class BackgroundWrapper { - public static void main(String[] args) throws Exception{ - String watched = args[0]; - String tombstone = args[1]; - String expected = args[2]; - Thread watcher = new Thread(new Runnable() { - @Override - public void run() { - while (true) { - try{ - Thread.sleep(50); - String token = new String( - java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(watched)) - ); - if (!token.equals(expected)) { - new java.io.File(tombstone).createNewFile(); - System.exit(0); - } - }catch(Exception e){ - try { - new java.io.File(tombstone).createNewFile(); - }catch(Exception e2){} - System.exit(0); - } - - } - } - }); - watcher.setDaemon(true); - watcher.start(); - String realMain = args[3]; - String[] realArgs = new String[args.length - 4]; - for(int i = 0; i < args.length-4; i++){ - realArgs[i] = args[i+4]; - } - 
Class.forName(realMain).getMethod("main", String[].class).invoke(null, (Object)realArgs); - } -} diff --git a/scalalib/src/Dep.scala b/scalalib/src/Dep.scala new file mode 100644 index 00000000..714fa21e --- /dev/null +++ b/scalalib/src/Dep.scala @@ -0,0 +1,121 @@ +package mill.scalalib +import mill.util.JsonFormatters._ +import upickle.default.{macroRW, ReadWriter => RW} + +import CrossVersion._ + +case class Dep(dep: coursier.Dependency, cross: CrossVersion, force: Boolean) { + import mill.scalalib.api.Util.isDotty + + def artifactName(binaryVersion: String, fullVersion: String, platformSuffix: String) = { + val suffix = cross.suffixString(binaryVersion, fullVersion, platformSuffix) + dep.module.name + suffix + } + def configure(attributes: coursier.Attributes): Dep = copy(dep = dep.copy(attributes = attributes)) + def forceVersion(): Dep = copy(force = true) + def exclude(exclusions: (String, String)*) = copy(dep = dep.copy(exclusions = dep.exclusions ++ exclusions)) + def excludeOrg(organizations: String*): Dep = exclude(organizations.map(_ -> "*"): _*) + def excludeName(names: String*): Dep = exclude(names.map("*" -> _): _*) + def toDependency(binaryVersion: String, fullVersion: String, platformSuffix: String) = + dep.copy(module = dep.module.copy(name = artifactName(binaryVersion, fullVersion, platformSuffix))) + def withConfiguration(configuration: String): Dep = copy(dep = dep.copy(configuration = configuration)) + + /** + * If scalaVersion is a Dotty version, replace the cross-version suffix + * by the Scala 2.x version that the Dotty version is retro-compatible with, + * otherwise do nothing. + * + * This setting is useful when your build contains dependencies that have only + * been published with Scala 2.x, if you have: + * {{{ + * def ivyDeps = Agg(ivy"a::b:c") + * }}} + * you can replace it by: + * {{{ + * def ivyDeps = Agg(ivy"a::b:c".withDottyCompat(scalaVersion())) + * }}} + * This will have no effect when compiling with Scala 2.x, but when compiling + * with Dotty this will change the cross-version to a Scala 2.x one. This + * works because Dotty is currently retro-compatible with Scala 2.x. 
+ */ + def withDottyCompat(scalaVersion: String): Dep = + cross match { + case cross: Binary if isDotty(scalaVersion) => + copy(cross = Constant(value = "_2.12", platformed = cross.platformed)) + case _ => + this + } +} + +object Dep { + + val DefaultConfiguration = "default(compile)" + + implicit def parse(signature: String): Dep = { + val parts = signature.split(';') + val module = parts.head + val attributes = parts.tail.foldLeft(coursier.Attributes()) { (as, s) => + s.split('=') match { + case Array("classifier", v) => as.copy(classifier = v) + case Array(k, v) => throw new Exception(s"Unrecognized attribute: [$s]") + case _ => throw new Exception(s"Unable to parse attribute specifier: [$s]") + } + } + (module.split(':') match { + case Array(a, b, c) => Dep(a, b, c, cross = empty(platformed = false)) + case Array(a, b, "", c) => Dep(a, b, c, cross = empty(platformed = true)) + case Array(a, "", b, c) => Dep(a, b, c, cross = Binary(platformed = false)) + case Array(a, "", b, "", c) => Dep(a, b, c, cross = Binary(platformed = true)) + case Array(a, "", "", b, c) => Dep(a, b, c, cross = Full(platformed = false)) + case Array(a, "", "", b, "", c) => Dep(a, b, c, cross = Full(platformed = true)) + case _ => throw new Exception(s"Unable to parse signature: [$signature]") + }).configure(attributes = attributes) + } + def apply(org: String, name: String, version: String, cross: CrossVersion, force: Boolean = false): Dep = { + apply(coursier.Dependency(coursier.Module(org, name), version, DefaultConfiguration), cross, force) + } + implicit def rw: RW[Dep] = macroRW +} + +sealed trait CrossVersion { + /** If true, the cross-version suffix should start with a platform suffix if it exists */ + def platformed: Boolean + + def isBinary: Boolean = + this.isInstanceOf[Binary] + def isConstant: Boolean = + this.isInstanceOf[Constant] + def isFull: Boolean = + this.isInstanceOf[Full] + + /** The string that should be appended to the module name to get the artifact name */ + def suffixString(binaryVersion: String, fullVersion: String, platformSuffix: String): String = { + val firstSuffix = if (platformed) platformSuffix else "" + this match { + case cross: Constant => + s"${firstSuffix}${cross.value}" + case cross: Binary => + s"${firstSuffix}_${binaryVersion}" + case cross: Full => + s"${firstSuffix}_${fullVersion}" + } + } +} +object CrossVersion { + case class Constant(value: String, platformed: Boolean) extends CrossVersion + object Constant { + implicit def rw: RW[Constant] = macroRW + } + case class Binary(platformed: Boolean) extends CrossVersion + object Binary { + implicit def rw: RW[Binary] = macroRW + } + case class Full(platformed: Boolean) extends CrossVersion + object Full { + implicit def rw: RW[Full] = macroRW + } + + def empty(platformed: Boolean) = Constant(value = "", platformed) + + implicit def rw: RW[CrossVersion] = RW.merge(Constant.rw, Binary.rw, Full.rw) +} diff --git a/scalalib/src/Dependency.scala b/scalalib/src/Dependency.scala new file mode 100644 index 00000000..0c589663 --- /dev/null +++ b/scalalib/src/Dependency.scala @@ -0,0 +1,22 @@ +package mill.scalalib + +import mill.T +import mill.define.{Discover, ExternalModule} +import mill.eval.Evaluator +import mill.main.EvaluatorScopt +import mill.scalalib.dependency.DependencyUpdatesImpl + +object Dependency extends ExternalModule { + + def updates(ev: Evaluator, allowPreRelease: Boolean = false) = + T.command { + DependencyUpdatesImpl(implicitly, + ev.rootModule, + ev.rootModule.millDiscover, + allowPreRelease) + } + + 
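// A minimal sketch (not part of this patch) of how the colon syntax handled by
// Dep.parse above maps onto CrossVersion and the final artifact name; "2.12" is
// what scalaBinaryVersion("2.12.4"), defined earlier in this diff, would return.
object DepSyntaxExamples {
  import mill.scalalib.Dep
  def demo(): Unit = {
    val plainJava   = Dep.parse("org.slf4j:slf4j-api:1.7.25")       // CrossVersion.Constant("")
    val binaryCross = Dep.parse("com.lihaoyi::upickle:0.7.1")       // CrossVersion.Binary
    val fullCross   = Dep.parse("org.scalamacros:::paradise:2.1.0") // CrossVersion.Full
    assert(plainJava.artifactName("2.12", "2.12.4", "") == "slf4j-api")
    assert(binaryCross.artifactName("2.12", "2.12.4", "") == "upickle_2.12")
    assert(fullCross.artifactName("2.12", "2.12.4", "") == "paradise_2.12.4")
  }
}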
implicit def millScoptEvaluatorReads[T]: EvaluatorScopt[T] = + new mill.main.EvaluatorScopt[T]() + lazy val millDiscover: Discover[Dependency.this.type] = Discover[this.type] +} diff --git a/scalalib/src/GenIdeaImpl.scala b/scalalib/src/GenIdeaImpl.scala new file mode 100644 index 00000000..2d76d804 --- /dev/null +++ b/scalalib/src/GenIdeaImpl.scala @@ -0,0 +1,474 @@ +package mill.scalalib + +import ammonite.runtime.SpecialClassLoader +import coursier.{Cache, CoursierPaths, Repository} +import mill.define._ +import mill.eval.{Evaluator, PathRef, Result} +import mill.api.Ctx.{Home, Log} +import mill.util.Strict.Agg +import mill.util.{Loose, Strict} +import mill.{T, scalalib} + +import scala.util.Try + + +object GenIdea extends ExternalModule { + + def idea(ev: Evaluator) = T.command{ + mill.scalalib.GenIdeaImpl( + implicitly, + ev.rootModule, + ev.rootModule.millDiscover + ) + } + + implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() + lazy val millDiscover = Discover[this.type] +} + +object GenIdeaImpl { + + def apply(ctx: Log with Home, + rootModule: BaseModule, + discover: Discover[_]): Unit = { + val pp = new scala.xml.PrettyPrinter(999, 4) + + val jdkInfo = extractCurrentJdk(os.pwd / ".idea" / "misc.xml").getOrElse(("JDK_1_8", "1.8 (1)")) + + os.remove.all(os.pwd/".idea"/"libraries") + os.remove.all(os.pwd/".idea"/"scala_compiler.xml") + os.remove.all(os.pwd/".idea_modules") + + + val evaluator = new Evaluator(ctx.home, os.pwd / 'out, os.pwd / 'out, rootModule, ctx.log) + + for((relPath, xml) <- xmlFileLayout(evaluator, rootModule, jdkInfo)){ + os.write.over(os.pwd/relPath, pp.format(xml)) + } + } + + def extractCurrentJdk(ideaPath: os.Path): Option[(String,String)] = { + import scala.xml.XML + Try { + val xml = XML.loadFile(ideaPath.toString) + (xml \\ "component") + .filter(x => x.attribute("project-jdk-type").map(_.text).contains("JavaSDK")) + .map { n => (n.attribute("languageLevel"), n.attribute("project-jdk-name")) } + .collectFirst{ case (Some(lang), Some(jdk)) => (lang.text, jdk.text) } + }.getOrElse(None) + } + + def xmlFileLayout(evaluator: Evaluator, + rootModule: mill.Module, + jdkInfo: (String,String), + fetchMillModules: Boolean = true): Seq[(os.RelPath, scala.xml.Node)] = { + + val modules = rootModule.millInternal.segmentsToModules.values + .collect{ case x: scalalib.JavaModule => (x.millModuleSegments, x)} + .toSeq + + val buildLibraryPaths = + if (!fetchMillModules) Nil + else sys.props.get("MILL_BUILD_LIBRARIES") match { + case Some(found) => found.split(',').map(os.Path(_)).distinct.toList + case None => + val repos = modules.foldLeft(Set.empty[Repository]) { _ ++ _._2.repositories } + val artifactNames = Seq("main-moduledefs", "main-core", "scalalib", "scalajslib") + val Result.Success(res) = scalalib.Lib.resolveDependencies( + repos.toList, + Lib.depToDependency(_, "2.12.4", ""), + for(name <- artifactNames) + yield ivy"com.lihaoyi::mill-$name:${sys.props("MILL_VERSION")}" + ) + res.items.toList.map(_.path) + } + + val buildDepsPaths = Try(evaluator + .rootModule + .getClass + .getClassLoader + .asInstanceOf[SpecialClassLoader] + ).map { + _.allJars + .map(url => os.Path(url.getFile)) + .filter(_.toIO.exists) + }.getOrElse(Seq()) + + val resolved = for((path, mod) <- modules) yield { + val scalaLibraryIvyDeps = mod match{ + case x: ScalaModule => x.scalaLibraryIvyDeps + case _ => T.task{Nil} + } + val allIvyDeps = T.task{mod.transitiveIvyDeps() ++ scalaLibraryIvyDeps() ++ mod.compileIvyDeps()} + val externalDependencies = T.task{ + 
mod.resolveDeps(allIvyDeps)() ++ + Task.traverse(mod.transitiveModuleDeps)(_.unmanagedClasspath)().flatten + } + + val externalSources = T.task{ + mod.resolveDeps(allIvyDeps, sources = true)() + } + + val (scalacPluginsIvyDeps, scalacOptions) = mod match{ + case mod: ScalaModule => T.task{mod.scalacPluginIvyDeps()} -> T.task{mod.scalacOptions()} + case _ => T.task(Loose.Agg[Dep]()) -> T.task(Seq()) + } + val scalacPluginDependencies = T.task{ + mod.resolveDeps(scalacPluginsIvyDeps)() + } + + val resolvedCp: Loose.Agg[PathRef] = evalOrElse(evaluator, externalDependencies, Loose.Agg.empty) + val resolvedSrcs: Loose.Agg[PathRef] = evalOrElse(evaluator, externalSources, Loose.Agg.empty) + val resolvedSp: Loose.Agg[PathRef] = evalOrElse(evaluator, scalacPluginDependencies, Loose.Agg.empty) + val scalacOpts: Seq[String] = evalOrElse(evaluator, scalacOptions, Seq()) + + ( + path, + resolvedCp.map(_.path).filter(_.ext == "jar") ++ resolvedSrcs.map(_.path), + mod, + resolvedSp.map(_.path).filter(_.ext == "jar"), + scalacOpts + ) + } + val moduleLabels = modules.map(_.swap).toMap + + val allResolved = resolved.flatMap(_._2) ++ buildLibraryPaths ++ buildDepsPaths + + val commonPrefix = + if (allResolved.isEmpty) 0 + else { + val minResolvedLength = allResolved.map(_.segmentCount).min + allResolved.map(_.segments.take(minResolvedLength).toList) + .transpose + .takeWhile(_.distinct.length == 1) + .length + } + + // only resort to full long path names if the jar name is a duplicate + val pathShortLibNameDuplicate = allResolved + .distinct + .map{p => p.last -> p} + .groupBy(_._1) + .filter(_._2.size > 1) + .keySet + + val pathToLibName = allResolved + .map{p => + if (pathShortLibNameDuplicate(p.last)) + (p, p.segments.drop(commonPrefix).mkString("_")) + else + (p, p.last) + } + .toMap + + sealed trait ResolvedLibrary { def path : os.Path } + case class CoursierResolved(path : os.Path, pom : os.Path, sources : Option[os.Path]) + extends ResolvedLibrary + case class OtherResolved(path : os.Path) extends ResolvedLibrary + + // Tries to group jars with their poms and sources. 
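// For example (hypothetical paths): a jar inside the coursier cache such as
//   .../https/repo1.maven.org/maven2/com/lihaoyi/upickle_2.12/0.7.1/upickle_2.12-0.7.1.jar
// becomes CoursierResolved(jar, <same dir>/upickle_2.12-0.7.1.pom,
// Some(<same dir>/upickle_2.12-0.7.1-sources.jar)) when the sources jar exists on disk;
// cached *.pom and *-sources.jar entries themselves are dropped, since they are
// re-derived from the jar path, and anything outside the cache is kept as OtherResolved.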
+ def toResolvedJar(path : os.Path) : Option[ResolvedLibrary] = { + val inCoursierCache = path.startsWith(os.Path(CoursierPaths.cacheDirectory())) + val isSource = path.last.endsWith("sources.jar") + val isPom = path.ext == "pom" + if (inCoursierCache && (isSource || isPom)) { + // Remove sources and pom as they'll be recovered from the jar path + None + } else if (inCoursierCache && path.ext == "jar") { + val withoutExt = path.last.dropRight(path.ext.length + 1) + val pom = path / os.up / s"$withoutExt.pom" + val sources = Some(path / os.up / s"$withoutExt-sources.jar") + .filter(_.toIO.exists()) + Some(CoursierResolved(path, pom, sources)) + } else Some(OtherResolved(path)) + } + + // Hack so that Intellij does not complain about unresolved magic + // imports in build.sc when in fact they are resolved + def sbtLibraryNameFromPom(pom : os.Path) : String = { + val xml = scala.xml.XML.loadFile(pom.toIO) + + val groupId = (xml \ "groupId").text + val artifactId = (xml \ "artifactId").text + val version = (xml \ "version").text + + // The scala version here is non incidental + s"SBT: $groupId:$artifactId:$version:jar" + } + + def libraryName(resolvedJar: ResolvedLibrary) : String = resolvedJar match { + case CoursierResolved(path, pom, _) if buildDepsPaths.contains(path) => + sbtLibraryNameFromPom(pom) + case CoursierResolved(path, _, _) => + pathToLibName(path) + case OtherResolved(path) => + pathToLibName(path) + } + + def resolvedLibraries(resolved : Seq[os.Path]) : Seq[ResolvedLibrary] = resolved + .map(toResolvedJar) + .collect { case Some(r) => r} + + val compilerSettings = resolved + .foldLeft(Map[(Loose.Agg[os.Path], Seq[String]), Vector[JavaModule]]()) { + (r, q) => + val key = (q._4, q._5) + r + (key -> (r.getOrElse(key, Vector()) :+ q._3)) + } + + val allBuildLibraries : Set[ResolvedLibrary] = + resolvedLibraries(buildLibraryPaths ++ buildDepsPaths).toSet + + val fixedFiles = Seq( + Tuple2(os.rel/".idea"/"misc.xml", miscXmlTemplate(jdkInfo)), + Tuple2(os.rel/".idea"/"scala_settings.xml", scalaSettingsTemplate()), + Tuple2( + os.rel/".idea"/"modules.xml", + allModulesXmlTemplate( + modules + .filter(!_._2.skipIdea) + .map { case (path, mod) => moduleName(path) } + ) + ), + Tuple2( + os.rel/".idea_modules"/"mill-build.iml", + rootXmlTemplate( + for(lib <- allBuildLibraries) + yield libraryName(lib) + ) + ), + Tuple2( + os.rel/".idea"/"scala_compiler.xml", + scalaCompilerTemplate(compilerSettings) + ) + ) + + val libraries = resolvedLibraries(allResolved).map{ resolved => + import resolved.path + val url = "jar://" + path + "!/" + val name = libraryName(resolved) + val sources = resolved match { + case CoursierResolved(_, _, s) => s.map(p => "jar://" + p + "!/") + case OtherResolved(_) => None + } + Tuple2(os.rel/".idea"/'libraries/s"$name.xml", libraryXmlTemplate(name, url, sources)) + } + + val moduleFiles = resolved.map{ case (path, resolvedDeps, mod, _, _) => + val Seq( + resourcesPathRefs: Seq[PathRef], + sourcesPathRef: Seq[PathRef], + generatedSourcePathRefs: Seq[PathRef], + allSourcesPathRefs: Seq[PathRef] + ) = evaluator.evaluate(Agg(mod.resources, mod.sources, mod.generatedSources, mod.allSources)).values + + val generatedSourcePaths = generatedSourcePathRefs.map(_.path) + val normalSourcePaths = (allSourcesPathRefs.map(_.path).toSet -- generatedSourcePaths.toSet).toSeq + + val paths = Evaluator.resolveDestPaths( + evaluator.outPath, + mod.compile.ctx.segments + ) + val scalaVersionOpt = mod match { + case x: ScalaModule => 
Some(evaluator.evaluate(Agg(x.scalaVersion)).values.head.asInstanceOf[String]) + case _ => None + } + val generatedSourceOutPath = Evaluator.resolveDestPaths( + evaluator.outPath, + mod.generatedSources.ctx.segments + ) + + val isTest = mod.isInstanceOf[TestModule] + + val elem = moduleXmlTemplate( + mod.intellijModulePath, + scalaVersionOpt, + Strict.Agg.from(resourcesPathRefs.map(_.path)), + Strict.Agg.from(normalSourcePaths), + Strict.Agg.from(generatedSourcePaths), + paths.out, + generatedSourceOutPath.dest, + Strict.Agg.from(resolvedDeps.map(pathToLibName)), + Strict.Agg.from(mod.moduleDeps.map{ m => moduleName(moduleLabels(m))}.distinct), + isTest + ) + Tuple2(os.rel/".idea_modules"/s"${moduleName(path)}.iml", elem) + } + + fixedFiles ++ libraries ++ moduleFiles + } + + def evalOrElse[T](evaluator: Evaluator, e: Task[T], default: => T): T = { + evaluator.evaluate(Agg(e)).values match { + case Seq() => default + case Seq(e: T) => e + } + } + + def relify(p: os.Path) = { + val r = p.relativeTo(os.pwd/".idea_modules") + (Seq.fill(r.ups)("..") ++ r.segments).mkString("/") + } + + def moduleName(p: Segments) = p.value.foldLeft(StringBuilder.newBuilder) { + case (sb, Segment.Label(s)) if sb.isEmpty => sb.append(s) + case (sb, Segment.Cross(s)) if sb.isEmpty => sb.append(s.mkString("-")) + case (sb, Segment.Label(s)) => sb.append(".").append(s) + case (sb, Segment.Cross(s)) => sb.append("-").append(s.mkString("-")) + }.mkString.toLowerCase() + + def scalaSettingsTemplate() = { + + + + + + } + def miscXmlTemplate(jdkInfo: (String,String)) = { + + + + + + } + + def allModulesXmlTemplate(selectors: Seq[String]) = { + + + + + { + for(selector <- selectors) + yield { + val filepath = "$PROJECT_DIR$/.idea_modules/" + selector + ".iml" + val fileurl = "file://" + filepath + + } + } + + + + } + def rootXmlTemplate(libNames: Strict.Agg[String]) = { + + + + + + + + + + + { + for(name <- libNames.toSeq.sorted) + yield + } + + + } + def libraryXmlTemplate(name: String, url: String, sources: Option[String]) = { + + + + + + { if (sources.isDefined) { + + + + } + } + + + } + def moduleXmlTemplate(basePath: os.Path, + scalaVersionOpt: Option[String], + resourcePaths: Strict.Agg[os.Path], + normalSourcePaths: Strict.Agg[os.Path], + generatedSourcePaths: Strict.Agg[os.Path], + compileOutputPath: os.Path, + generatedSourceOutputPath: os.Path, + libNames: Strict.Agg[String], + depNames: Strict.Agg[String], + isTest: Boolean + ) = { + + + { + val outputUrl = "file://$MODULE_DIR$/" + relify(compileOutputPath) + "/dest/classes" + if (isTest) + + else + + } + + + + { + for (normalSourcePath <- normalSourcePaths.toSeq.sorted) + yield + + } + { + for (generatedSourcePath <- generatedSourcePaths.toSeq.sorted) + yield + + } + { + val resourceType = if (isTest) "java-test-resource" else "java-resource" + for (resourcePath <- resourcePaths.toSeq.sorted) + yield + + } + + + + + { + for(scalaVersion <- scalaVersionOpt.toSeq) + yield + } + + { + for(name <- libNames.toSeq.sorted) + yield + + } + { + for(depName <- depNames.toSeq.sorted) + yield + } + + + } + def scalaCompilerTemplate(settings: Map[(Loose.Agg[os.Path], Seq[String]), Seq[JavaModule]]) = { + + + + { + for((((plugins, params), mods), i) <- settings.toSeq.zip(1 to settings.size)) + yield + moduleName(m.millModuleSegments)).mkString(",")}> + + { + for(param <- params) + yield + } + + + { + for(plugin <- plugins.toSeq) + yield + } + + + } + + + } +} diff --git a/scalalib/src/JavaModule.scala b/scalalib/src/JavaModule.scala new file mode 100644 index 
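// An illustrative aside on the moduleName helper above (the module path is made up):
// a cross module addressed as foo[2.12.4].test folds its segments into
// "foo-2.12.4.test", lower-cased, so its project file is written out as
// .idea_modules/foo-2.12.4.test.iml.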
00000000..78be8893 --- /dev/null +++ b/scalalib/src/JavaModule.scala @@ -0,0 +1,608 @@ +package mill +package scalalib + +import coursier.Repository +import mill.define.Task +import mill.define.TaskModule +import mill.eval.{PathRef, Result} +import mill.modules.{Assembly, Jvm} +import mill.modules.Jvm.{createAssembly, createJar} +import Lib._ +import mill.scalalib.publish.{Artifact, Scope} +import mill.util.Loose.Agg + +/** + * Core configuration required to compile a single Scala compilation target + */ +trait JavaModule extends mill.Module with TaskModule { outer => + def zincWorker: ZincWorkerModule = mill.scalalib.ZincWorkerModule + + trait Tests extends TestModule{ + override def moduleDeps = Seq(outer) + override def repositories = outer.repositories + override def javacOptions = outer.javacOptions + override def zincWorker = outer.zincWorker + } + def defaultCommandName() = "run" + + def resolvePublishDependency: Task[Dep => publish.Dependency] = T.task{ + Artifact.fromDepJava(_: Dep) + } + def resolveCoursierDependency: Task[Dep => coursier.Dependency] = T.task{ + Lib.depToDependencyJava(_: Dep) + } + + /** + * Allows you to specify an explicit main class to use for the `run` command. + * If none is specified, the classpath is searched for an appropriate main + * class to use if one exists + */ + def mainClass: T[Option[String]] = None + + def finalMainClassOpt: T[Either[String, String]] = T{ + mainClass() match{ + case Some(m) => Right(m) + case None => + zincWorker.worker().discoverMainClasses(compile())match { + case Seq() => Left("No main class specified or found") + case Seq(main) => Right(main) + case mains => + Left( + s"Multiple main classes found (${mains.mkString(",")}) " + + "please explicitly specify which one to use by overriding mainClass" + ) + } + } + } + + def finalMainClass: T[String] = T{ + finalMainClassOpt() match { + case Right(main) => Result.Success(main) + case Left(msg) => Result.Failure(msg) + } + } + + /** + * Any ivy dependencies you want to add to this Module, in the format + * ivy"org::name:version" for Scala dependencies or ivy"org:name:version" + * for Java dependencies + */ + def ivyDeps = T{ Agg.empty[Dep] } + + /** + * Same as `ivyDeps`, but only present at compile time. Useful for e.g. + * macro-related dependencies like `scala-reflect` that doesn't need to be + * present at runtime + */ + def compileIvyDeps = T{ Agg.empty[Dep] } + /** + * Same as `ivyDeps`, but only present at runtime. Useful for e.g. 
+ * selecting different versions of a dependency to use at runtime after your + * code has already been compiled + */ + def runIvyDeps = T{ Agg.empty[Dep] } + + /** + * Options to pass to the java compiler + */ + def javacOptions = T{ Seq.empty[String] } + + /** The direct dependencies of this module */ + def moduleDeps = Seq.empty[JavaModule] + + /** The direct and indirect dependencies of this module */ + def recursiveModuleDeps: Seq[JavaModule] = { + moduleDeps.flatMap(_.transitiveModuleDeps).distinct + } + + /** Like `recursiveModuleDeps` but also include the module itself */ + def transitiveModuleDeps: Seq[JavaModule] = { + Seq(this) ++ recursiveModuleDeps + } + + /** + * Additional jars, classfiles or resources to add to the classpath directly + * from disk rather than being downloaded from Maven Central or other package + * repositories + */ + def unmanagedClasspath = T{ Agg.empty[PathRef] } + + /** + * The transitive ivy dependencies of this module and all it's upstream modules + */ + def transitiveIvyDeps: T[Agg[Dep]] = T{ + ivyDeps() ++ Task.traverse(moduleDeps)(_.transitiveIvyDeps)().flatten + } + + /** + * The upstream compilation output of all this module's upstream modules + */ + def upstreamCompileOutput = T{ + Task.traverse(recursiveModuleDeps)(_.compile) + } + + /** + * The transitive version of `localClasspath` + */ + def transitiveLocalClasspath: T[Agg[PathRef]] = T{ + Task.traverse(moduleDeps)(m => + T.task{m.localClasspath() ++ m.transitiveLocalClasspath()} + )().flatten + } + + def mapDependencies = T.task{ d: coursier.Dependency => d } + + def resolveDeps(deps: Task[Agg[Dep]], sources: Boolean = false) = T.task{ + resolveDependencies( + repositories, + resolveCoursierDependency().apply(_), + deps(), + sources, + mapDependencies = Some(mapDependencies()) + ) + } + + + def repositories: Seq[Repository] = zincWorker.repositories + + /** + * What platform suffix to use for publishing, e.g. `_sjs` for Scala.js + * projects + */ + def platformSuffix = T{ "" } + + private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r + + /** + * What shell script to use to launch the executable generated by `assembly`. 
+ * Defaults to a generic "universal" launcher that should work for Windows, + * OS-X and Linux + */ + def prependShellScript: T[String] = T{ + finalMainClassOpt().toOption match{ + case None => "" + case Some(cls) => + val isWin = scala.util.Properties.isWin + mill.modules.Jvm.launcherUniversalScript( + cls, + Agg("$0"), Agg("%~dpnx0"), + forkArgs() + ) + } + } + + def assemblyRules: Seq[Assembly.Rule] = Assembly.defaultRules + + /** + * The folders where the source files for this module live + */ + def sources = T.sources{ millSourcePath / 'src } + /** + * The folders where the resource files for this module live + */ + def resources = T.sources{ millSourcePath / 'resources } + /** + * Folders containing source files that are generated rather than + * hand-written; these files can be generated in this target itself, + * or can refer to files generated from other targets + */ + def generatedSources = T{ Seq.empty[PathRef] } + + /** + * The folders containing all source files fed into the compiler + */ + def allSources = T{ sources() ++ generatedSources() } + + /** + * All individual source files fed into the compiler + */ + def allSourceFiles = T{ + def isHiddenFile(path: os.Path) = path.last.startsWith(".") + for { + root <- allSources() + if os.exists(root.path) + path <- (if (os.isDir(root.path)) os.walk(root.path) else Seq(root.path)) + if os.isFile(path) && ((path.ext == "scala" || path.ext == "java") && !isHiddenFile(path)) + } yield PathRef(path) + } + + /** + * Compiles the current module to generate compiled classfiles/bytecode + */ + def compile: T[mill.scalalib.api.CompilationResult] = T.persistent{ + zincWorker.worker().compileJava( + upstreamCompileOutput(), + allSourceFiles().map(_.path), + compileClasspath().map(_.path), + javacOptions() + ) + } + + /** + * The output classfiles/resources from this module, excluding upstream + * modules and third-party dependencies + */ + def localClasspath = T{ + resources() ++ Agg(compile().classes) + } + + /** + * All classfiles and resources from upstream modules and dependencies + * necessary to compile this module + */ + def compileClasspath = T{ + transitiveLocalClasspath() ++ + resources() ++ + unmanagedClasspath() ++ + resolveDeps(T.task{compileIvyDeps() ++ transitiveIvyDeps()})() + } + + /** + * All upstream classfiles and resources necessary to build and executable + * assembly, but without this module's contribution + */ + def upstreamAssemblyClasspath = T{ + transitiveLocalClasspath() ++ + unmanagedClasspath() ++ + resolveDeps(T.task{runIvyDeps() ++ transitiveIvyDeps()})() + } + + /** + * All classfiles and resources from upstream modules and dependencies + * necessary to run this module's code after compilation + */ + def runClasspath = T{ + localClasspath() ++ + upstreamAssemblyClasspath() + } + + /** + * Build the assembly for upstream dependencies separate from the current + * classpath + * + * This should allow much faster assembly creation in the common case where + * upstream dependencies do not change + */ + def upstreamAssembly = T{ + createAssembly( + upstreamAssemblyClasspath().map(_.path), + mainClass(), + assemblyRules = assemblyRules + ) + } + + /** + * An executable uber-jar/assembly containing all the resources and compiled + * classfiles from this module and all it's upstream modules and dependencies + */ + def assembly = T{ + createAssembly( + Agg.from(localClasspath().map(_.path)), + finalMainClassOpt().toOption, + prependShellScript(), + Some(upstreamAssembly().path), + assemblyRules + ) + } + + /** + * A 
jar containing only this module's resources and compiled classfiles, + * without those from upstream modules and dependencies + */ + def jar = T{ + createJar( + localClasspath().map(_.path).filter(os.exists), + mainClass() + ) + } + + /** + * The documentation jar, containing all the Javadoc/Scaladoc HTML files, for + * publishing to Maven Central + */ + def docJar = T[PathRef] { + val outDir = T.ctx().dest + + val javadocDir = outDir / 'javadoc + os.makeDir.all(javadocDir) + + val files = for{ + ref <- allSources() + if os.exists(ref.path) + p <- (if (os.isDir(ref.path)) os.walk(ref.path) else Seq(ref.path)) + if os.isFile(p) && (p.ext == "java") + } yield p.toNIO.toString + + val options = Seq("-d", javadocDir.toNIO.toString) + + if (files.nonEmpty) Jvm.baseInteractiveSubprocess( + commandArgs = Seq( + "javadoc" + ) ++ options ++ + Seq( + "-classpath", + compileClasspath() + .map(_.path) + .filter(_.ext != "pom") + .mkString(java.io.File.pathSeparator) + ) ++ + files.map(_.toString), + envArgs = Map(), + workingDir = T.ctx().dest + ) + + createJar(Agg(javadocDir))(outDir) + } + + /** + * The source jar, containing only source code for publishing to Maven Central + */ + def sourceJar = T { + createJar((allSources() ++ resources()).map(_.path).filter(os.exists)) + } + + /** + * Any command-line parameters you want to pass to the forked JVM under `run`, + * `test` or `repl` + */ + def forkArgs = T{ Seq.empty[String] } + + /** + * Any environment variables you want to pass to the forked JVM under `run`, + * `test` or `repl` + */ + def forkEnv = T{ sys.env.toMap } + + /** + * Builds a command-line "launcher" file that can be used to run this module's + * code, without the Mill process. Useful for deployment & other places where + * you do not want a build tool running + */ + def launcher = T{ + Result.Success( + Jvm.createLauncher( + finalMainClass(), + runClasspath().map(_.path), + forkArgs() + ) + ) + } + + def ivyDepsTree(inverse: Boolean = false) = T.command { + val (flattened, resolution) = Lib.resolveDependenciesMetadata( + repositories, + resolveCoursierDependency().apply(_), + transitiveIvyDeps(), + Some(mapDependencies()) + ) + + println(coursier.util.Print.dependencyTree(flattened, resolution, + printExclusions = false, reverse = inverse)) + + Result.Success() + } + + /** + * Runs this module's code in-process within an isolated classloader. This is + * faster than `run`, but in exchange you have less isolation between runs + * since the code can dirty the parent Mill process and potentially leave it + * in a bad state. + */ + def runLocal(args: String*) = T.command { + Jvm.runLocal( + finalMainClass(), + runClasspath().map(_.path), + args + ) + } + + /** + * Runs this module's code in a subprocess and waits for it to finish + */ + def run(args: String*) = T.command{ + try Result.Success(Jvm.runSubprocess( + finalMainClass(), + runClasspath().map(_.path), + forkArgs(), + forkEnv(), + args, + workingDir = forkWorkingDir() + )) catch { case e: Exception => + Result.Failure("subprocess failed") + } + } + + private[this] def backgroundSetup(dest: os.Path) = { + val token = java.util.UUID.randomUUID().toString + val procId = dest / ".mill-background-process-id" + val procTombstone = dest / ".mill-background-process-tombstone" + // The backgrounded subprocesses poll the procId file, and kill themselves + // when the procId file is deleted. This deletion happens immediately before + // the body of these commands run, but we cannot be sure the subprocess has + // had time to notice. 
+ // + // To make sure we wait for the previous subprocess to + // die, we make the subprocess write a tombstone file out when it kills + // itself due to procId being deleted, and we wait a short time on task-start + // to see if such a tombstone appears. If a tombstone appears, we can be sure + // the subprocess has killed itself, and can continue. If a tombstone doesn't + // appear in a short amount of time, we assume the subprocess exited or was + // killed via some other means, and continue anyway. + val start = System.currentTimeMillis() + while({ + if (os.exists(procTombstone)) { + Thread.sleep(10) + os.remove.all(procTombstone) + true + } else { + Thread.sleep(10) + System.currentTimeMillis() - start < 100 + } + })() + + os.write(procId, token) + os.write(procTombstone, token) + (procId, procTombstone, token) + } + + /** + * Runs this module's code in a background process, until it dies or + * `runBackground` is used again. This lets you continue using Mill while + * the process is running in the background: editing files, compiling, and + * only re-starting the background process when you're ready. + * + * You can also use `-w foo.runBackground` to make Mill watch for changes + * and automatically recompile your code & restart the background process + * when ready. This is useful when working on long-running server processes + * that would otherwise run forever + */ + def runBackground(args: String*) = T.command{ + val (procId, procTombstone, token) = backgroundSetup(T.ctx().dest) + try Result.Success(Jvm.runSubprocess( + "mill.scalalib.backgroundwrapper.BackgroundWrapper", + (runClasspath() ++ zincWorker.backgroundWrapperClasspath()).map(_.path), + forkArgs(), + forkEnv(), + Seq(procId.toString, procTombstone.toString, token, finalMainClass()) ++ args, + workingDir = forkWorkingDir(), + background = true + )) catch { case e: Exception => + Result.Failure("subprocess failed") + } + } + + /** + * Same as `runBackground`, but lets you specify a main class to run + */ + def runMainBackground(mainClass: String, args: String*) = T.command{ + val (procId, procTombstone, token) = backgroundSetup(T.ctx().dest) + try Result.Success(Jvm.runSubprocess( + "mill.scalalib.backgroundwrapper.BackgroundWrapper", + (runClasspath() ++ zincWorker.backgroundWrapperClasspath()).map(_.path), + forkArgs(), + forkEnv(), + Seq(procId.toString, procTombstone.toString, token, mainClass) ++ args, + workingDir = forkWorkingDir(), + background = true + )) catch { case e: Exception => + Result.Failure("subprocess failed") + } + } + + /** + * Same as `runLocal`, but lets you specify a main class to run + */ + def runMainLocal(mainClass: String, args: String*) = T.command { + Jvm.runLocal( + mainClass, + runClasspath().map(_.path), + args + ) + } + + /** + * Same as `run`, but lets you specify a main class to run + */ + def runMain(mainClass: String, args: String*) = T.command{ + try Result.Success(Jvm.runSubprocess( + mainClass, + runClasspath().map(_.path), + forkArgs(), + forkEnv(), + args, + workingDir = forkWorkingDir() + )) catch { case e: Exception => + Result.Failure("subprocess failed") + } + } + + // publish artifact with name "mill_2.12.4" instead of "mill_2.12" + + def artifactName: T[String] = millModuleSegments.parts.mkString("-") + + def artifactId: T[String] = artifactName() + + def intellijModulePath: os.Path = millSourcePath + + def forkWorkingDir = T{ ammonite.ops.pwd } + + /** + * Skip Idea project file generation. 
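// A hedged build.sc sketch (module names and dependencies are made up) of how the
// targets defined in this trait compose across modules:
import mill._, mill.scalalib._
object foo extends JavaModule {
  def ivyDeps = Agg(ivy"com.google.guava:guava:27.0-jre")
}
object bar extends JavaModule {
  def moduleDeps = Seq(foo)
  def ivyDeps = Agg(ivy"org.slf4j:slf4j-api:1.7.25")
  // bar.transitiveIvyDeps aggregates foo's ivyDeps alongside its own, and
  // bar.compileClasspath additionally pulls in foo's compiled classes and resources.
}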
+ */ + def skipIdea: Boolean = false +} + +trait TestModule extends JavaModule with TaskModule { + override def defaultCommandName() = "test" + /** + * What test frameworks to use. + */ + def testFrameworks: T[Seq[String]] + /** + * Discovers and runs the module's tests in a subprocess, reporting the + * results to the console + */ + def test(args: String*) = T.command{ + val outputPath = T.ctx().dest/"out.json" + + Jvm.runSubprocess( + mainClass = "mill.scalalib.TestRunner", + classPath = zincWorker.scalalibClasspath().map(_.path), + jvmArgs = forkArgs(), + envArgs = forkEnv(), + mainArgs = + Seq(testFrameworks().length.toString) ++ + testFrameworks() ++ + Seq(runClasspath().length.toString) ++ + runClasspath().map(_.path.toString) ++ + Seq(args.length.toString) ++ + args ++ + Seq(outputPath.toString, T.ctx().log.colored.toString, compile().classes.path.toString, T.ctx().home.toString), + workingDir = forkWorkingDir() + ) + + try { + val jsonOutput = ujson.read(outputPath.toIO) + val (doneMsg, results) = upickle.default.read[(String, Seq[TestRunner.Result])](jsonOutput) + TestModule.handleResults(doneMsg, results) + }catch{case e: Throwable => + Result.Failure("Test reporting failed: " + e) + } + + } + + /** + * Discovers and runs the module's tests in-process in an isolated classloader, + * reporting the results to the console + */ + def testLocal(args: String*) = T.command{ + val outputPath = T.ctx().dest/"out.json" + + val (doneMsg, results) = TestRunner.runTests( + TestRunner.frameworks(testFrameworks()), + runClasspath().map(_.path), + Agg(compile().classes.path), + args + ) + + TestModule.handleResults(doneMsg, results) + + } +} + +object TestModule{ + def handleResults(doneMsg: String, results: Seq[TestRunner.Result]) = { + + val badTests = results.filter(x => Set("Error", "Failure").contains(x.status)) + if (badTests.isEmpty) Result.Success((doneMsg, results)) + else { + val suffix = if (badTests.length == 1) "" else " and " + (badTests.length-1) + " more" + + Result.Failure( + badTests.head.fullyQualifiedName + " " + badTests.head.selector + suffix, + Some((doneMsg, results)) + ) + } + } +} + diff --git a/scalalib/src/Lib.scala b/scalalib/src/Lib.scala new file mode 100644 index 00000000..b8b253bd --- /dev/null +++ b/scalalib/src/Lib.scala @@ -0,0 +1,133 @@ +package mill +package scalalib + +import java.io.{File, FileInputStream} +import java.lang.annotation.Annotation +import java.lang.reflect.Modifier +import java.util.zip.ZipInputStream +import javax.tools.ToolProvider + +import ammonite.util.Util +import coursier.{Cache, Dependency, Fetch, Repository, Resolution} +import mill.scalalib.api.Util.isDotty +import mill.Agg +import mill.eval.{PathRef, Result} +import mill.modules.Jvm +import mill.api.Ctx +import sbt.testing._ + +import scala.collection.mutable + + +object Lib{ + def depToDependencyJava(dep: Dep, platformSuffix: String = ""): Dependency = { + assert(dep.cross.isConstant, s"Not a Java dependency: $dep") + depToDependency(dep, "", platformSuffix) + } + + def depToDependency(dep: Dep, scalaVersion: String, platformSuffix: String = ""): Dependency = + dep.toDependency( + binaryVersion = mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion), + fullVersion = scalaVersion, + platformSuffix = platformSuffix + ) + + def resolveDependenciesMetadata(repositories: Seq[Repository], + depToDependency: Dep => coursier.Dependency, + deps: TraversableOnce[Dep], + mapDependencies: Option[Dependency => Dependency] = None) = { + val depSeq = deps.toSeq + 
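// Dependencies marked force (via Dep.forceVersion above) are handed to the Jvm helper
// as a second, separate list below; presumably this is what lets coursier pin those
// versions even when other dependencies in the graph request something newer.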
mill.modules.Jvm.resolveDependenciesMetadata( + repositories, + depSeq.map(depToDependency), + depSeq.filter(_.force).map(depToDependency), + mapDependencies + ) + } + /** + * Resolve dependencies using Coursier. + * + * We do not bother breaking this out into the separate ZincWorker classpath, + * because Coursier is already bundled with mill/Ammonite to support the + * `import $ivy` syntax. + */ + def resolveDependencies(repositories: Seq[Repository], + depToDependency: Dep => coursier.Dependency, + deps: TraversableOnce[Dep], + sources: Boolean = false, + mapDependencies: Option[Dependency => Dependency] = None): Result[Agg[PathRef]] = { + val depSeq = deps.toSeq + mill.modules.Jvm.resolveDependencies( + repositories, + depSeq.map(depToDependency), + depSeq.filter(_.force).map(depToDependency), + sources, + mapDependencies + ) + } + def scalaCompilerIvyDeps(scalaOrganization: String, scalaVersion: String) = + if (mill.scalalib.api.Util.isDotty(scalaVersion)) + Agg(ivy"$scalaOrganization::dotty-compiler:$scalaVersion".forceVersion()) + else + Agg( + ivy"$scalaOrganization:scala-compiler:$scalaVersion".forceVersion(), + ivy"$scalaOrganization:scala-reflect:$scalaVersion".forceVersion() + ) + + def scalaRuntimeIvyDeps(scalaOrganization: String, scalaVersion: String) = Agg[Dep]( + ivy"$scalaOrganization:scala-library:$scalaVersion".forceVersion() + ) + + def listClassFiles(base: os.Path): Iterator[String] = { + if (os.isDir(base)) os.walk(base).toIterator.filter(_.ext == "class").map(_.relativeTo(base).toString) + else { + val zip = new ZipInputStream(new FileInputStream(base.toIO)) + Iterator.continually(zip.getNextEntry).takeWhile(_ != null).map(_.getName).filter(_.endsWith(".class")) + } + } + + def discoverTests(cl: ClassLoader, framework: Framework, classpath: Agg[os.Path]) = { + + val fingerprints = framework.fingerprints() + + val testClasses = classpath.flatMap { base => + // Don't blow up if there are no classfiles representing + // the tests to run Instead just don't run anything + if (!os.exists(base)) Nil + else listClassFiles(base).flatMap { path => + val cls = cl.loadClass(path.stripSuffix(".class").replace('/', '.')) + val publicConstructorCount = + cls.getConstructors.count(c => c.getParameterCount == 0 && Modifier.isPublic(c.getModifiers)) + + if (Modifier.isAbstract(cls.getModifiers) || cls.isInterface || publicConstructorCount > 1) { + None + } else { + (cls.getName.endsWith("$"), publicConstructorCount == 0) match{ + case (true, true) => matchFingerprints(cl, cls, fingerprints, isModule = true) + case (false, false) => matchFingerprints(cl, cls, fingerprints, isModule = false) + case _ => None + } + } + } + } + + testClasses + } + def matchFingerprints(cl: ClassLoader, cls: Class[_], fingerprints: Array[Fingerprint], isModule: Boolean) = { + fingerprints.find { + case f: SubclassFingerprint => + f.isModule == isModule && + cl.loadClass(f.superclassName()).isAssignableFrom(cls) + + case f: AnnotatedFingerprint => + val annotationCls = cl.loadClass(f.annotationName()).asInstanceOf[Class[Annotation]] + f.isModule == isModule && + ( + cls.isAnnotationPresent(annotationCls) || + cls.getDeclaredMethods.exists(_.isAnnotationPresent(annotationCls)) + ) + + }.map { f => (cls, f) } + } + +} diff --git a/scalalib/src/MiscModule.scala b/scalalib/src/MiscModule.scala new file mode 100644 index 00000000..c6449d6e --- /dev/null +++ b/scalalib/src/MiscModule.scala @@ -0,0 +1,101 @@ +package mill +package scalalib + +import mill.define.Cross.Resolver +import mill.define.{Cross, Task} 
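// The module flavours defined below differ mainly in where they look for sources
// (directory names shown for an assumed Scala 2.12.4 cross version):
//   MavenModule      -> src/main/java and src/main/scala, resources in src/main/resources
//   SbtModule        -> the MavenModule layout plus a Scala toolchain
//   CrossSbtModule   -> additionally src/main/scala-2.12.4, scala-2.12 and scala-2
//   CrossScalaModule -> the plain src/ layout plus src-2.12.4, src-2.12 and src-2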
+import mill.eval.{PathRef, Result} +import mill.util.Loose.Agg +object CrossModuleBase{ + def scalaVersionPaths(scalaVersion: String, f: String => os.Path) = { + for(segments <- scalaVersion.split('.').inits.filter(_.nonEmpty)) + yield PathRef(f(segments.mkString("."))) + } +} +trait CrossModuleBase extends ScalaModule { + def crossScalaVersion: String + def scalaVersion = T{ crossScalaVersion } + + override def millSourcePath = super.millSourcePath / ammonite.ops.up + implicit def crossSbtModuleResolver: Resolver[CrossModuleBase] = new Resolver[CrossModuleBase]{ + def resolve[V <: CrossModuleBase](c: Cross[V]): V = { + crossScalaVersion.split('.') + .inits + .takeWhile(_.length > 1) + .flatMap( prefix => + c.items.map(_._2).find(_.crossScalaVersion.split('.').startsWith(prefix)) + ) + .collectFirst{case x => x} + .getOrElse( + throw new Exception( + s"Unable to find compatible cross version between $crossScalaVersion and "+ + c.items.map(_._2.crossScalaVersion).mkString(",") + ) + ) + } + } +} + +trait CrossScalaModule extends ScalaModule with CrossModuleBase{ outer => + override def sources = T.sources{ + super.sources() ++ + CrossModuleBase.scalaVersionPaths(crossScalaVersion, s => millSourcePath / s"src-$s" ) + } + + trait Tests extends super.Tests { + override def sources = T.sources{ + super.sources() ++ + CrossModuleBase.scalaVersionPaths(crossScalaVersion, s => millSourcePath / s"src-$s" ) + } + } +} + +trait MavenTests extends TestModule{ + override def sources = T.sources( + millSourcePath / 'src / 'test / 'scala, + millSourcePath / 'src / 'test / 'java + ) + override def resources = T.sources{ millSourcePath / 'src / 'test / 'resources } +} +trait MavenModule extends JavaModule{outer => + + override def sources = T.sources( + millSourcePath / 'src / 'main / 'scala, + millSourcePath / 'src / 'main / 'java + ) + override def resources = T.sources{ millSourcePath / 'src / 'main / 'resources } + trait Tests extends super.Tests with MavenTests { + override def millSourcePath = outer.millSourcePath + override def intellijModulePath = outer.millSourcePath / 'src / 'test + } +} + +trait SbtModule extends MavenModule with ScalaModule{ outer => + trait Tests extends super.Tests with MavenTests { + override def millSourcePath = outer.millSourcePath + override def intellijModulePath = outer.millSourcePath / 'src / 'test + } +} + +trait CrossSbtModule extends SbtModule with CrossModuleBase{ outer => + + override def sources = T.sources{ + super.sources() ++ + CrossModuleBase.scalaVersionPaths( + crossScalaVersion, + s => millSourcePath / 'src / 'main / s"scala-$s" + ) + + } + trait Tests extends super.Tests { + override def millSourcePath = outer.millSourcePath + override def sources = T.sources{ + super.sources() ++ + CrossModuleBase.scalaVersionPaths( + crossScalaVersion, + s => millSourcePath / 'src / 'test / s"scala-$s" + ) + } + } +} + + diff --git a/scalalib/src/PublishModule.scala b/scalalib/src/PublishModule.scala new file mode 100644 index 00000000..588781f4 --- /dev/null +++ b/scalalib/src/PublishModule.scala @@ -0,0 +1,124 @@ +package mill +package scalalib + +import mill.define.{ExternalModule, Task} +import mill.api.PathRef +import mill.scalalib.publish.{Artifact, SonatypePublisher} + +/** + * Configuration necessary for publishing a Scala module to Maven Central or similar + */ +trait PublishModule extends JavaModule { outer => + import mill.scalalib.publish._ + + override def moduleDeps = Seq.empty[PublishModule] + + def pomSettings: T[PomSettings] + def publishVersion: 
T[String] + + def publishSelfDependency = T { + Artifact(pomSettings().organization, artifactId(), publishVersion()) + } + + def publishXmlDeps = T.task { + val ivyPomDeps = ivyDeps().map(resolvePublishDependency().apply(_)) + val modulePomDeps = Task.sequence(moduleDeps.map(_.publishSelfDependency))() + ivyPomDeps ++ modulePomDeps.map(Dependency(_, Scope.Compile)) + } + def pom = T { + val pom = Pom(artifactMetadata(), publishXmlDeps(), artifactId(), pomSettings()) + val pomPath = T.ctx().dest / s"${artifactId()}-${publishVersion()}.pom" + os.write.over(pomPath, pom) + PathRef(pomPath) + } + + def ivy = T { + val ivy = Ivy(artifactMetadata(), publishXmlDeps()) + val ivyPath = T.ctx().dest / "ivy.xml" + os.write.over(ivyPath, ivy) + PathRef(ivyPath) + } + + def artifactMetadata: T[Artifact] = T { + Artifact(pomSettings().organization, artifactId(), publishVersion()) + } + + def publishLocal(): define.Command[Unit] = T.command { + LocalPublisher.publish( + jar = jar().path, + sourcesJar = sourceJar().path, + docJar = docJar().path, + pom = pom().path, + ivy = ivy().path, + artifact = artifactMetadata() + ) + } + + def sonatypeUri: String = "https://oss.sonatype.org/service/local" + + def sonatypeSnapshotUri: String = "https://oss.sonatype.org/content/repositories/snapshots" + + def publishArtifacts = T { + val baseName = s"${artifactId()}-${publishVersion()}" + PublishModule.PublishData( + artifactMetadata(), + Seq( + jar() -> s"$baseName.jar", + sourceJar() -> s"$baseName-sources.jar", + docJar() -> s"$baseName-javadoc.jar", + pom() -> s"$baseName.pom" + ) + ) + } + + def publish(sonatypeCreds: String, + gpgPassphrase: String = null, + signed: Boolean = true, + release: Boolean): define.Command[Unit] = T.command { + val PublishModule.PublishData(artifactInfo, artifacts) = publishArtifacts() + new SonatypePublisher( + sonatypeUri, + sonatypeSnapshotUri, + sonatypeCreds, + Option(gpgPassphrase), + signed, + T.ctx().log + ).publish(artifacts.map{case (a, b) => (a.path, b)}, artifactInfo, release) + } +} + +object PublishModule extends ExternalModule { + case class PublishData(meta: Artifact, payload: Seq[(PathRef, String)]) + + object PublishData{ + implicit def jsonify: upickle.default.ReadWriter[PublishData] = upickle.default.macroRW + } + + def publishAll(sonatypeCreds: String, + gpgPassphrase: String = null, + publishArtifacts: mill.main.Tasks[PublishModule.PublishData], + release: Boolean = false, + sonatypeUri: String = "https://oss.sonatype.org/service/local", + sonatypeSnapshotUri: String = "https://oss.sonatype.org/content/repositories/snapshots", + signed: Boolean = true) = T.command { + + val x: Seq[(Seq[(os.Path, String)], Artifact)] = Task.sequence(publishArtifacts.value)().map{ + case PublishModule.PublishData(a, s) => (s.map{case (p, f) => (p.path, f)}, a) + } + new SonatypePublisher( + sonatypeUri, + sonatypeSnapshotUri, + sonatypeCreds, + Option(gpgPassphrase), + signed, + T.ctx().log + ).publishAll( + release, + x:_* + ) + } + + implicit def millScoptTargetReads[T] = new mill.main.Tasks.Scopt[T]() + + lazy val millDiscover: mill.define.Discover[this.type] = mill.define.Discover[this.type] +} diff --git a/scalalib/src/ScalaModule.scala b/scalalib/src/ScalaModule.scala new file mode 100644 index 00000000..9d669bf4 --- /dev/null +++ b/scalalib/src/ScalaModule.scala @@ -0,0 +1,275 @@ +package mill +package scalalib + +import coursier.Repository +import mill.define.{Target, Task, TaskModule} +import mill.eval.{PathRef, Result} +import mill.modules.Jvm +import 
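// A hedged build.sc sketch of the PublishModule trait above; the module name,
// version and command lines are illustrative, and pomSettings is elided because
// its fields are defined elsewhere in the codebase.
import mill._, mill.scalalib._
object mylib extends ScalaModule with PublishModule {
  def scalaVersion = "2.12.4"
  def publishVersion = "0.1.0"
  def pomSettings = ???  // description, organization, url, licenses, developers, ...
}
// Publishing locally then goes through `mill mylib.publishLocal`, and a Sonatype release
// through something like `mill mylib.publish --sonatypeCreds user:password --release true`.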
mill.modules.Jvm.createJar +import mill.scalalib.api.Util.isDotty +import Lib._ +import mill.util.Loose.Agg +import mill.api.DummyInputStream + +/** + * Core configuration required to compile a single Scala compilation target + */ +trait ScalaModule extends JavaModule { outer => + trait Tests extends TestModule with ScalaModule{ + override def scalaOrganization = outer.scalaOrganization() + def scalaVersion = outer.scalaVersion() + override def repositories = outer.repositories + override def scalacPluginIvyDeps = outer.scalacPluginIvyDeps + override def scalacOptions = outer.scalacOptions + override def javacOptions = outer.javacOptions + override def zincWorker = outer.zincWorker + override def moduleDeps: Seq[JavaModule] = Seq(outer) + } + + /** + * What Scala organization to use + * @return + */ + def scalaOrganization: T[String] = T { + if (isDotty(scalaVersion())) + "ch.epfl.lamp" + else + "org.scala-lang" + } + + /** + * What version of Scala to use + */ + def scalaVersion: T[String] + + override def mapDependencies = T.task{ d: coursier.Dependency => + val artifacts = + if (isDotty(scalaVersion())) + Set("dotty-library", "dotty-compiler") + else + Set("scala-library", "scala-compiler", "scala-reflect") + if (!artifacts(d.module.name)) d + else d.copy(module = d.module.copy(organization = scalaOrganization()), version = scalaVersion()) + } + + override def resolveCoursierDependency: Task[Dep => coursier.Dependency] = T.task{ + Lib.depToDependency(_: Dep, scalaVersion(), platformSuffix()) + } + + override def resolvePublishDependency: Task[Dep => publish.Dependency] = T.task{ + publish.Artifact.fromDep( + _: Dep, + scalaVersion(), + mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion()), + platformSuffix() + ) + } + + /** + * Allows you to make use of Scala compiler plugins from maven central + */ + def scalacPluginIvyDeps = T{ Agg.empty[Dep] } + + def scalaDocPluginIvyDeps = T{ scalacPluginIvyDeps() } + + /** + * Command-line options to pass to the Scala compiler + */ + def scalacOptions = T{ Seq.empty[String] } + + def scalaDocOptions = T{ scalacOptions() } + + private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r + + def scalaCompilerBridgeSources = T { + val (scalaVersion0, scalaBinaryVersion0) = scalaVersion() match { + case Milestone213(_, _) => ("2.13.0-M2", "2.13.0-M2") + case _ => (scalaVersion(), mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion())) + } + + val (bridgeDep, bridgeName, bridgeVersion) = + if (isDotty(scalaVersion0)) { + val org = scalaOrganization() + val name = "dotty-sbt-bridge" + val version = scalaVersion() + (ivy"$org:$name:$version", name, version) + } else { + val org = "org.scala-sbt" + val name = "compiler-bridge" + val version = Versions.zinc + (ivy"$org::$name:$version", s"${name}_$scalaBinaryVersion0", version) + } + + resolveDependencies( + repositories, + Lib.depToDependency(_, scalaVersion0, platformSuffix()), + Seq(bridgeDep), + sources = true + ).map(deps => + mill.scalalib.api.Util.grepJar(deps.map(_.path), bridgeName, bridgeVersion, sources = true) + ) + } + + /** + * The local classpath of Scala compiler plugins on-disk; you can add + * additional jars here if you have some copiler plugin that isn't present + * on maven central + */ + def scalacPluginClasspath: T[Agg[PathRef]] = T { + resolveDeps(scalacPluginIvyDeps)() + } + + /** + * The ivy coordinates of Scala's own standard library + */ + def scalaDocPluginClasspath: T[Agg[PathRef]] = T { + resolveDeps(scalaDocPluginIvyDeps)() + } + + def scalaLibraryIvyDeps = T{ 
scalaRuntimeIvyDeps(scalaOrganization(), scalaVersion()) } + + /** + * Classpath of the Scala Compiler & any compiler plugins + */ + def scalaCompilerClasspath: T[Agg[PathRef]] = T{ + resolveDeps( + T.task{ + scalaCompilerIvyDeps(scalaOrganization(), scalaVersion()) ++ + scalaRuntimeIvyDeps(scalaOrganization(), scalaVersion()) + } + )() + } + override def compileClasspath = T{ + transitiveLocalClasspath() ++ + resources() ++ + unmanagedClasspath() ++ + resolveDeps(T.task{compileIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps()})() + } + + override def upstreamAssemblyClasspath = T{ + transitiveLocalClasspath() ++ + unmanagedClasspath() ++ + resolveDeps(T.task{runIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps()})() + } + + override def compile: T[mill.scalalib.api.CompilationResult] = T.persistent{ + zincWorker.worker().compileMixed( + upstreamCompileOutput(), + allSourceFiles().map(_.path), + compileClasspath().map(_.path), + javacOptions(), + scalaVersion(), + scalacOptions(), + scalaCompilerBridgeSources(), + scalaCompilerClasspath().map(_.path), + scalacPluginClasspath().map(_.path), + ) + } + + override def docJar = T { + val outDir = T.ctx().dest + + val javadocDir = outDir / 'javadoc + os.makeDir.all(javadocDir) + + val files = allSourceFiles().map(_.path.toString) + + val pluginOptions = scalaDocPluginClasspath().map(pluginPathRef => s"-Xplugin:${pluginPathRef.path}") + val compileCp = compileClasspath().filter(_.path.ext != "pom").map(_.path) + val options = Seq( + "-d", javadocDir.toNIO.toString, + "-classpath", compileCp.mkString(":") + ) ++ + pluginOptions ++ + scalaDocOptions() + + if (files.isEmpty) Result.Success(createJar(Agg(javadocDir))(outDir)) + else { + zincWorker.worker().docJar( + scalaVersion(), + scalaCompilerBridgeSources(), + scalaCompilerClasspath().map(_.path), + scalacPluginClasspath().map(_.path), + files ++ options + ) match{ + case true => Result.Success(createJar(Agg(javadocDir))(outDir)) + case false => Result.Failure("docJar generation failed") + } + } + } + + /** + * Opens up a Scala console with your module and all dependencies present, + * for you to test and operate your code interactively + */ + def console() = T.command{ + if (T.ctx().log.inStream == DummyInputStream){ + Result.Failure("repl needs to be run with the -i/--interactive flag") + }else{ + Jvm.runSubprocess( + mainClass = + if (isDotty(scalaVersion())) + "dotty.tools.repl.Main" + else + "scala.tools.nsc.MainGenericRunner", + classPath = runClasspath().map(_.path) ++ scalaCompilerClasspath().map(_.path), + mainArgs = Seq("-usejavacp"), + workingDir = os.pwd + ) + Result.Success() + } + } + + /** + * Dependencies that are necessary to run the Ammonite Scala REPL + */ + def ammoniteReplClasspath = T{ + localClasspath() ++ + transitiveLocalClasspath() ++ + unmanagedClasspath() ++ + resolveDeps(T.task{ + runIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps() ++ + Agg(ivy"com.lihaoyi:::ammonite:${Versions.ammonite}") + })() + } + + /** + * Opens up an Ammonite Scala REPL with your module and all dependencies present, + * for you to test and operate your code interactively + */ + def repl(replOptions: String*) = T.command{ + if (T.ctx().log.inStream == DummyInputStream){ + Result.Failure("repl needs to be run with the -i/--interactive flag") + }else{ + Jvm.runSubprocess( + mainClass = "ammonite.Main", + classPath = ammoniteReplClasspath().map(_.path), + mainArgs = replOptions, + workingDir = os.pwd + ) + Result.Success() + } + + } + + /** + * Whether to publish artifacts 
with name "mill_2.12.4" instead of "mill_2.12" + */ + def crossFullScalaVersion: T[Boolean] = false + + /** + * What Scala version string to use when publishing + */ + def artifactScalaVersion: T[String] = T { + if (crossFullScalaVersion()) scalaVersion() + else mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion()) + } + + /** + * The suffix appended to the artifact IDs during publishing + */ + def artifactSuffix: T[String] = s"_${artifactScalaVersion()}" + + override def artifactId: T[String] = artifactName() + artifactSuffix() + +} diff --git a/scalalib/src/TestRunner.scala b/scalalib/src/TestRunner.scala new file mode 100644 index 00000000..42e65d63 --- /dev/null +++ b/scalalib/src/TestRunner.scala @@ -0,0 +1,153 @@ +package mill.scalalib +import ammonite.util.Colors +import mill.Agg +import mill.modules.Jvm +import mill.scalalib.Lib.discoverTests +import mill.util.{Ctx, PrintLogger} +import mill.util.JsonFormatters._ +import sbt.testing._ + +import scala.collection.mutable +object TestRunner { + + + def main(args: Array[String]): Unit = { + try{ + var i = 0 + def readArray() = { + val count = args(i).toInt + val slice = args.slice(i + 1, i + count + 1) + i = i + count + 1 + slice + } + val frameworks = readArray() + val classpath = readArray() + val arguments = readArray() + val outputPath = args(i + 0) + val colored = args(i + 1) + val testCp = args(i + 2) + val homeStr = args(i + 3) + val ctx = new Ctx.Log with Ctx.Home { + val log = PrintLogger( + colored == "true", + true, + if(colored == "true") Colors.Default + else Colors.BlackWhite, + System.out, + System.err, + System.err, + System.in, + debugEnabled = false + ) + val home = os.Path(homeStr) + } + val result = runTests( + frameworkInstances = TestRunner.frameworks(frameworks), + entireClasspath = Agg.from(classpath.map(os.Path(_))), + testClassfilePath = Agg(os.Path(testCp)), + args = arguments + )(ctx) + + // Clear interrupted state in case some badly-behaved test suite + // dirtied the thread-interrupted flag and forgot to clean up. Otherwise + // that flag causes writing the results to disk to fail + Thread.interrupted() + ammonite.ops.write(os.Path(outputPath), upickle.default.write(result)) + }catch{case e: Throwable => + println(e) + e.printStackTrace() + } + // Tests are over, kill the JVM whether or not anyone's threads are still running + // Always return 0, even if tests fail. 
The caller can pick up the detailed test + // results from the outputPath + System.exit(0) + } + + def runTests(frameworkInstances: ClassLoader => Seq[sbt.testing.Framework], + entireClasspath: Agg[os.Path], + testClassfilePath: Agg[os.Path], + args: Seq[String]) + (implicit ctx: Ctx.Log with Ctx.Home): (String, Seq[mill.scalalib.TestRunner.Result]) = { + //Leave the context class loader set and open so that shutdown hooks can access it + Jvm.inprocess(entireClasspath, classLoaderOverrideSbtTesting = true, isolated = true, closeContextClassLoaderWhenDone = false, cl => { + val frameworks = frameworkInstances(cl) + + val events = mutable.Buffer.empty[Event] + + val doneMessages = frameworks.map{ framework => + val runner = framework.runner(args.toArray, Array[String](), cl) + + val testClasses = discoverTests(cl, framework, testClassfilePath) + + val tasks = runner.tasks( + for ((cls, fingerprint) <- testClasses.toArray) + yield new TaskDef(cls.getName.stripSuffix("$"), fingerprint, true, Array(new SuiteSelector)) + ) + + val taskQueue = tasks.to[mutable.Queue] + while (taskQueue.nonEmpty){ + val next = taskQueue.dequeue().execute( + new EventHandler { + def handle(event: Event) = events.append(event) + }, + Array( + new Logger { + def debug(msg: String) = ctx.log.outputStream.println(msg) + + def error(msg: String) = ctx.log.outputStream.println(msg) + + def ansiCodesSupported() = true + + def warn(msg: String) = ctx.log.outputStream.println(msg) + + def trace(t: Throwable) = t.printStackTrace(ctx.log.outputStream) + + def info(msg: String) = ctx.log.outputStream.println(msg) + }) + ) + taskQueue.enqueue(next:_*) + } + runner.done() + } + + val results = for(e <- events) yield { + val ex = if (e.throwable().isDefined) Some(e.throwable().get) else None + mill.scalalib.TestRunner.Result( + e.fullyQualifiedName(), + e.selector() match{ + case s: NestedSuiteSelector => s.suiteId() + case s: NestedTestSelector => s.suiteId() + "." 
+ s.testName() + case s: SuiteSelector => s.toString + case s: TestSelector => s.testName() + case s: TestWildcardSelector => s.testWildcard() + }, + e.duration(), + e.status().toString, + ex.map(_.getClass.getName), + ex.map(_.getMessage), + ex.map(_.getStackTrace) + ) + } + + (doneMessages.mkString("\n"), results) + }) + } + + def frameworks(frameworkNames: Seq[String])(cl: ClassLoader): Seq[sbt.testing.Framework] = { + frameworkNames.map { name => + cl.loadClass(name).newInstance().asInstanceOf[sbt.testing.Framework] + } + } + + case class Result(fullyQualifiedName: String, + selector: String, + duration: Long, + status: String, + exceptionName: Option[String] = None, + exceptionMsg: Option[String] = None, + exceptionTrace: Option[Seq[StackTraceElement]] = None) + + object Result{ + implicit def resultRW: upickle.default.ReadWriter[Result] = upickle.default.macroRW[Result] + } +} diff --git a/scalalib/src/Versions.scala b/scalalib/src/Versions.scala new file mode 100644 index 00000000..e7eaf847 --- /dev/null +++ b/scalalib/src/Versions.scala @@ -0,0 +1,8 @@ +package mill.scalalib + +object Versions { + // Keep synchronized with ammonite dependency in core in build.sc + val ammonite = "1.5.0" + // Keep synchronized with zinc dependency in scalalib.worker in build.sc + val zinc = "1.2.1" +} diff --git a/scalalib/src/ZincWorkerModule.scala b/scalalib/src/ZincWorkerModule.scala new file mode 100644 index 00000000..5ca824ce --- /dev/null +++ b/scalalib/src/ZincWorkerModule.scala @@ -0,0 +1,56 @@ +package mill.scalalib + +import coursier.Cache +import coursier.maven.MavenRepository +import mill.Agg +import mill.T +import mill.define.{Discover, Worker} +import mill.scalalib.Lib.resolveDependencies +import mill.util.Loose +import mill.util.JsonFormatters._ + +object ZincWorkerModule extends mill.define.ExternalModule with ZincWorkerModule{ + lazy val millDiscover = Discover[this.type] +} +trait ZincWorkerModule extends mill.Module{ + def repositories = Seq( + Cache.ivy2Local, + MavenRepository("https://repo1.maven.org/maven2"), + MavenRepository("https://oss.sonatype.org/content/repositories/releases") + ) + + def classpath = T{ + mill.modules.Util.millProjectModule("MILL_SCALA_WORKER", "mill-scalalib-worker", repositories) + } + + def scalalibClasspath = T{ + mill.modules.Util.millProjectModule("MILL_SCALA_LIB", "mill-scalalib", repositories) + } + + def backgroundWrapperClasspath = T{ + mill.modules.Util.millProjectModule( + "MILL_BACKGROUNDWRAPPER", "mill-scalalib-backgroundwrapper", + repositories, artifactSuffix = "" + ) + } + + def worker: Worker[mill.scalalib.api.ZincWorkerApi] = T.worker{ + val cl = mill.api.ClassLoader.create( + classpath().map(_.path.toNIO.toUri.toURL).toVector, + getClass.getClassLoader + ) + val cls = cl.loadClass("mill.scalalib.worker.ZincWorkerImpl") + val instance = cls.getConstructor(classOf[mill.api.Ctx], classOf[Array[String]]) + .newInstance(T.ctx(), compilerInterfaceClasspath().map(_.path.toString).toArray[String]) + instance.asInstanceOf[mill.scalalib.api.ZincWorkerApi] + } + + def compilerInterfaceClasspath = T{ + resolveDependencies( + repositories, + Lib.depToDependency(_, "2.12.4", ""), + Seq(ivy"org.scala-sbt:compiler-interface:${Versions.zinc}") + ) + } + +} diff --git a/scalalib/src/dependency/DependencyUpdatesImpl.scala b/scalalib/src/dependency/DependencyUpdatesImpl.scala new file mode 100644 index 00000000..3bb94202 --- /dev/null +++ b/scalalib/src/dependency/DependencyUpdatesImpl.scala @@ -0,0 +1,52 @@ +package mill.scalalib.dependency + +import 
mill.define._ +import mill.scalalib.dependency.updates.{ + DependencyUpdates, + ModuleDependenciesUpdates, + UpdatesFinder +} +import mill.scalalib.dependency.versions.VersionsFinder +import mill.api.Ctx.{Home, Log} + +object DependencyUpdatesImpl { + + def apply(ctx: Log with Home, + rootModule: BaseModule, + discover: Discover[_], + allowPreRelease: Boolean): Unit = { + + // 1. Find all available versions for each dependency + val allDependencyVersions = VersionsFinder.findVersions(ctx, rootModule) + + // 2. Extract updated versions from all available versions + val allUpdates = allDependencyVersions.map { dependencyVersions => + UpdatesFinder.findUpdates(dependencyVersions, allowPreRelease) + } + + // 3. Print the results + showAllUpdates(allUpdates) + } + + private def showAllUpdates(updates: Seq[ModuleDependenciesUpdates]): Unit = + updates.foreach { dependencyUpdates => + val module = dependencyUpdates.module.toString + val actualUpdates = + dependencyUpdates.dependencies.filter(_.updates.nonEmpty) + if (actualUpdates.isEmpty) { + println(s"No dependency updates found for $module") + } else { + println(s"Found ${actualUpdates.length} dependency update for $module") + showUpdates(actualUpdates) + } + } + + private def showUpdates(updates: Seq[DependencyUpdates]): Unit = + updates.foreach { dependencyUpdate => + val module = s"${dependencyUpdate.dependency.module}" + val allVersions = + (dependencyUpdate.currentVersion +: dependencyUpdate.updates.toList) + .mkString(" -> ") + println(s" $module : $allVersions") + } +} diff --git a/scalalib/src/dependency/metadata/MavenMetadataLoader.scala b/scalalib/src/dependency/metadata/MavenMetadataLoader.scala new file mode 100644 index 00000000..491911bf --- /dev/null +++ b/scalalib/src/dependency/metadata/MavenMetadataLoader.scala @@ -0,0 +1,21 @@ +package mill.scalalib.dependency.metadata + +import coursier.Cache +import coursier.maven.MavenRepository +import coursier.util.Task +import mill.scalalib.dependency.versions.Version + +private[dependency] final case class MavenMetadataLoader(mavenRepo: MavenRepository) + extends MetadataLoader { + + private val fetch = Cache.fetch[Task]() + + override def getVersions(module: coursier.Module): List[Version] = { + import scala.concurrent.ExecutionContext.Implicits.global + // TODO fallback to 'versionsFromListing' if 'versions' doesn't work? 
(needs to be made public in coursier first) + val allVersions = mavenRepo.versions(module, fetch).run.unsafeRun + allVersions + .map(_.available.map(Version(_))) + .getOrElse(List.empty) + } +} diff --git a/scalalib/src/dependency/metadata/MetadataLoader.scala b/scalalib/src/dependency/metadata/MetadataLoader.scala new file mode 100644 index 00000000..20271f0e --- /dev/null +++ b/scalalib/src/dependency/metadata/MetadataLoader.scala @@ -0,0 +1,7 @@ +package mill.scalalib.dependency.metadata + +import mill.scalalib.dependency.versions.Version + +private[dependency] trait MetadataLoader { + def getVersions(module: coursier.Module): Seq[Version] +} diff --git a/scalalib/src/dependency/metadata/MetadataLoaderFactory.scala b/scalalib/src/dependency/metadata/MetadataLoaderFactory.scala new file mode 100644 index 00000000..4495d6b0 --- /dev/null +++ b/scalalib/src/dependency/metadata/MetadataLoaderFactory.scala @@ -0,0 +1,11 @@ +package mill.scalalib.dependency.metadata + +import coursier.Repository +import coursier.maven.MavenRepository + +private[dependency] object MetadataLoaderFactory { + def apply(repo: Repository): Option[MetadataLoader] = repo match { + case mavenRepo: MavenRepository => Some(MavenMetadataLoader(mavenRepo)) + case _ => None + } +} diff --git a/scalalib/src/dependency/updates/ModuleDependenciesUpdates.scala b/scalalib/src/dependency/updates/ModuleDependenciesUpdates.scala new file mode 100644 index 00000000..a989cd31 --- /dev/null +++ b/scalalib/src/dependency/updates/ModuleDependenciesUpdates.scala @@ -0,0 +1,15 @@ +package mill.scalalib.dependency.updates + +import mill.scalalib.JavaModule +import mill.scalalib.dependency.versions.Version + +import scala.collection.SortedSet + +private[dependency] final case class ModuleDependenciesUpdates( + module: JavaModule, + dependencies: Seq[DependencyUpdates]) + +private[dependency] final case class DependencyUpdates( + dependency: coursier.Dependency, + currentVersion: Version, + updates: SortedSet[Version]) diff --git a/scalalib/src/dependency/updates/UpdatesFinder.scala b/scalalib/src/dependency/updates/UpdatesFinder.scala new file mode 100644 index 00000000..3430592f --- /dev/null +++ b/scalalib/src/dependency/updates/UpdatesFinder.scala @@ -0,0 +1,75 @@ +/* + * This file contains code originally published under the following license: + * + * Copyright (c) 2012, Roman Timushev + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package mill.scalalib.dependency.updates + +import mill.scalalib.dependency.versions._ + +import scala.collection.SortedSet + +private[dependency] object UpdatesFinder { + + import scala.Ordered._ + + def findUpdates(dependencyVersions: ModuleDependenciesVersions, + allowPreRelease: Boolean): ModuleDependenciesUpdates = { + val dependencies = + dependencyVersions.dependencies.map { dependencyVersion => + findUpdates(dependencyVersion, allowPreRelease) + } + ModuleDependenciesUpdates(dependencyVersions.module, dependencies) + } + + def findUpdates(dependencyVersion: DependencyVersions, + allowPreRelease: Boolean): DependencyUpdates = { + val current = dependencyVersion.currentVersion + val versions = dependencyVersion.allversions.to[SortedSet] + + val updates = versions + .filter(isUpdate(current)) + .filterNot(lessStable(current, allowPreRelease)) + + DependencyUpdates(dependencyVersion.dependency, + dependencyVersion.currentVersion, + updates) + } + + private def lessStable(current: Version, allowPreRelease: Boolean)( + another: Version): Boolean = (current, another) match { + case (ReleaseVersion(_), ReleaseVersion(_)) => false + case (SnapshotVersion(_, _, _), _) => false + case (_, SnapshotVersion(_, _, _)) => true + case (ReleaseVersion(_), PreReleaseVersion(_, _)) => !allowPreRelease + case (ReleaseVersion(_), PreReleaseBuildVersion(_, _, _)) => + !allowPreRelease + case (ReleaseVersion(_), _) => true + case (_, _) => false + } + + private def isUpdate(current: Version) = current < _ +} diff --git a/scalalib/src/dependency/versions/ModuleDependenciesVersions.scala b/scalalib/src/dependency/versions/ModuleDependenciesVersions.scala new file mode 100644 index 00000000..12d57059 --- /dev/null +++ b/scalalib/src/dependency/versions/ModuleDependenciesVersions.scala @@ -0,0 +1,12 @@ +package mill.scalalib.dependency.versions + +import mill.scalalib.JavaModule + +private[dependency] final case class ModuleDependenciesVersions( + module: JavaModule, + dependencies: Seq[DependencyVersions]) + +private[dependency] final case class DependencyVersions( + dependency: coursier.Dependency, + currentVersion: Version, + allversions: Set[Version]) diff --git a/scalalib/src/dependency/versions/Version.scala b/scalalib/src/dependency/versions/Version.scala new file mode 100644 index 00000000..a2719023 --- /dev/null +++ b/scalalib/src/dependency/versions/Version.scala @@ -0,0 +1,227 @@ +/* + * This file contains code originally published under the following license: + * + * Copyright (c) 2012, Roman Timushev + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package mill.scalalib.dependency.versions + +import scala.util.matching.Regex +import scala.util.matching.Regex.Groups + +private[dependency] sealed trait Version { + def major: Long + + def minor: Long + + def patch: Long +} + +private[dependency] case class ValidVersion(text: String, + releasePart: List[Long], + preReleasePart: List[String], + buildPart: List[String]) + extends Version { + def major: Long = releasePart.headOption getOrElse 0 + + def minor: Long = releasePart.drop(1).headOption getOrElse 1 + + def patch: Long = releasePart.drop(2).headOption getOrElse 1 + + override def toString: String = text +} + +private[dependency] case class InvalidVersion(text: String) extends Version { + def major: Long = -1 + + def minor: Long = -1 + + def patch: Long = -1 +} + +private[dependency] object ReleaseVersion { + private val releaseKeyword: Regex = "(?i)final|release".r + + def unapply(v: Version): Option[List[Long]] = v match { + case ValidVersion(_, releasePart, Nil, Nil) => Some(releasePart) + case ValidVersion(_, releasePart, releaseKeyword() :: Nil, Nil) => + Some(releasePart) + case _ => None + } +} + +private[dependency] object PreReleaseVersion { + def unapply(v: Version): Option[(List[Long], List[String])] = v match { + case ValidVersion(_, releasePart, preReleasePart, Nil) + if preReleasePart.nonEmpty => + Some(releasePart, preReleasePart) + case _ => None + } +} + +private[dependency] object PreReleaseBuildVersion { + def unapply(v: Version): Option[(List[Long], List[String], List[String])] = + v match { + case ValidVersion(_, releasePart, preReleasePart, buildPart) + if preReleasePart.nonEmpty && buildPart.nonEmpty => + Some(releasePart, preReleasePart, buildPart) + case _ => None + } +} + +private[dependency] object SnapshotVersion { + def unapply(v: Version): Option[(List[Long], List[String], List[String])] = + v match { + case ValidVersion(_, releasePart, preReleasePart, buildPart) + if preReleasePart.lastOption.contains("SNAPSHOT") => + Some(releasePart, preReleasePart, buildPart) + case _ => None + } +} + +private[dependency] object BuildVersion { + def unapply(v: Version): Option[(List[Long], List[String])] = v match { + case ValidVersion(_, releasePart, Nil, buildPart) if buildPart.nonEmpty => + Some(releasePart, buildPart) + case _ => None + } +} + +private[dependency] object Version { + def apply(text: String): Version = synchronized { + 
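// Editor's note (clarifying comment, not in the original patch):
// VersionParser.parse returns (numeric release segments, pre-release tokens,
// build tokens); e.g. "1.2.3-RC1+42" parses to (Seq(1, 2, 3), Seq("RC1"),
// Seq("42")), and anything unparseable falls back to an InvalidVersion.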
VersionParser + .parse(text) + .fold( + (_, _, _) => InvalidVersion(text), + { case ((a, b, c), _) => ValidVersion(text, a.toList, b.toList, c.toList)} + ) + } + + implicit def versionOrdering: Ordering[Version] = VersionOrdering +} + +private[dependency] object VersionOrdering extends Ordering[Version] { + + private val subParts = "(\\d+)?(\\D+)?".r + + private def parsePart(s: String): Seq[Either[Int, String]] = + try { + subParts + .findAllIn(s) + .matchData + .flatMap { + case Groups(num, str) => + Seq(Option(num).map(_.toInt).map(Left.apply), + Option(str).map(Right.apply)) + } + .flatten + .toList + } catch { + case _: NumberFormatException => List(Right(s)) + } + + private def toOpt(x: Int): Option[Int] = if (x == 0) None else Some(x) + + private def comparePart(a: String, b: String) = { + if (a == b) None + else + (parsePart(a) zip parsePart(b)) map { + case (Left(x), Left(y)) => x compareTo y + case (Left(_), Right(_)) => -1 + case (Right(_), Left(_)) => 1 + case (Right(x), Right(y)) => x compareTo y + } find (0 != _) orElse Some(a compareTo b) + } + + private def compareNumericParts(a: List[Long], b: List[Long]): Option[Int] = + (a, b) match { + case (ah :: at, bh :: bt) => + toOpt(ah compareTo bh) orElse compareNumericParts(at, bt) + case (ah :: at, Nil) => + toOpt(ah compareTo 0L) orElse compareNumericParts(at, Nil) + case (Nil, bh :: bt) => + toOpt(0L compareTo bh) orElse compareNumericParts(Nil, bt) + case (Nil, Nil) => + None + } + + private def compareParts(a: List[String], b: List[String]): Option[Int] = + (a, b) match { + case (ah :: at, bh :: bt) => + comparePart(ah, bh) orElse compareParts(at, bt) + case (_ :: _, Nil) => + Some(1) + case (Nil, _ :: _) => + Some(-1) + case (Nil, Nil) => + None + } + + def compare(x: Version, y: Version): Int = (x, y) match { + case (InvalidVersion(a), InvalidVersion(b)) => + a compareTo b + case (InvalidVersion(_), _) => + -1 + case (_, InvalidVersion(_)) => + 1 + case (ReleaseVersion(r1), ReleaseVersion(r2)) => + compareNumericParts(r1, r2) getOrElse 0 + case (ReleaseVersion(r1), PreReleaseVersion(r2, p2)) => + compareNumericParts(r1, r2) getOrElse 1 + case (ReleaseVersion(r1), PreReleaseBuildVersion(r2, p2, b2)) => + compareNumericParts(r1, r2) getOrElse 1 + case (ReleaseVersion(r1), BuildVersion(r2, b2)) => + compareNumericParts(r1, r2) getOrElse -1 + case (PreReleaseVersion(r1, p1), ReleaseVersion(r2)) => + compareNumericParts(r1, r2) getOrElse -1 + case (PreReleaseVersion(r1, p1), PreReleaseVersion(r2, p2)) => + compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse 0 + case (PreReleaseVersion(r1, p1), PreReleaseBuildVersion(r2, p2, b2)) => + compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse -1 + case (PreReleaseVersion(r1, p1), BuildVersion(r2, b2)) => + compareNumericParts(r1, r2) getOrElse -1 + case (PreReleaseBuildVersion(r1, p1, b1), ReleaseVersion(r2)) => + compareNumericParts(r1, r2) getOrElse -1 + case (PreReleaseBuildVersion(r1, p1, b1), PreReleaseVersion(r2, p2)) => + compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse 1 + case (PreReleaseBuildVersion(r1, p1, b1), + PreReleaseBuildVersion(r2, p2, b2)) => + compareNumericParts(r1, r2) orElse + compareParts(p1, p2) orElse + compareParts(b1, b2) getOrElse + 0 + case (PreReleaseBuildVersion(r1, p1, b1), BuildVersion(r2, b2)) => + compareNumericParts(r1, r2) getOrElse -1 + case (BuildVersion(r1, b1), ReleaseVersion(r2)) => + compareNumericParts(r1, r2) getOrElse 1 + case (BuildVersion(r1, b1), PreReleaseVersion(r2, p2)) => + 
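// Editor's note (clarifying comment, not in the original patch): when the
// numeric parts tie, the getOrElse default breaks the tie -- here a build
// version ranks above a pre-release with the same release numbers, following
// the surrounding cases' convention of build > release > pre-release on ties.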
compareNumericParts(r1, r2) getOrElse 1 + case (BuildVersion(r1, b1), PreReleaseBuildVersion(r2, p2, b2)) => + compareNumericParts(r1, r2) getOrElse 1 + case (BuildVersion(r1, b1), BuildVersion(r2, b2)) => + compareNumericParts(r1, r2) orElse compareParts(b1, b2) getOrElse 0 + } + +} diff --git a/scalalib/src/dependency/versions/VersionParser.scala b/scalalib/src/dependency/versions/VersionParser.scala new file mode 100644 index 00000000..10aebd73 --- /dev/null +++ b/scalalib/src/dependency/versions/VersionParser.scala @@ -0,0 +1,30 @@ +package mill.scalalib.dependency.versions + +import fastparse._, NoWhitespace._ + +private[dependency] object VersionParser { + + private def numberParser[_: P] = + P(CharIn("0-9").rep(1).!.map(_.toLong)) + private def numericPartParser[_: P] = + P(numberParser ~ &(CharIn(".\\-+") | End)).rep(min = 1, sep = ".") + + private def tokenParser[_: P] = + CharPred(c => c != '.' && c != '-' && c != '+').rep(1).! + private def tokenPartParser[_: P] = + tokenParser.rep(sep = CharIn(".\\-")) + + private def firstPartParser[_: P] = + P(CharIn(".\\-") ~ tokenPartParser).? + + private def secondPartParser[_: P] = + P("+" ~ tokenPartParser).? + + private def versionParser[_: P] = + P(numericPartParser ~ firstPartParser ~ secondPartParser).map { + case (a, b, c) => (a, b.getOrElse(Seq.empty), c.getOrElse(Seq.empty)) + } + + def parse(text: String): Parsed[(Seq[Long], Seq[String], Seq[String])] = + fastparse.parse(text, versionParser(_)) +} diff --git a/scalalib/src/dependency/versions/VersionsFinder.scala b/scalalib/src/dependency/versions/VersionsFinder.scala new file mode 100644 index 00000000..a831ffc3 --- /dev/null +++ b/scalalib/src/dependency/versions/VersionsFinder.scala @@ -0,0 +1,73 @@ +package mill.scalalib.dependency.versions + +import mill.define.{BaseModule, Task} +import mill.eval.Evaluator +import mill.scalalib.dependency.metadata.MetadataLoaderFactory +import mill.scalalib.{Dep, JavaModule, Lib} +import mill.api.Ctx.{Home, Log} +import mill.util.{Loose, Strict} + +private[dependency] object VersionsFinder { + + def findVersions(ctx: Log with Home, + rootModule: BaseModule): Seq[ModuleDependenciesVersions] = { + val evaluator = + new Evaluator(ctx.home, os.pwd / 'out, os.pwd / 'out, rootModule, ctx.log) + + val javaModules = rootModule.millInternal.modules.collect { + case javaModule: JavaModule => javaModule + } + + val resolvedDependencies = resolveDependencies(evaluator, javaModules) + resolveVersions(resolvedDependencies) + } + + private def resolveDependencies(evaluator: Evaluator, + javaModules: Seq[JavaModule]) = + javaModules.map { javaModule => + val depToDependency = + eval(evaluator, javaModule.resolveCoursierDependency) + val deps = evalOrElse(evaluator, javaModule.ivyDeps, Loose.Agg.empty[Dep]) + + val (dependencies, _) = + Lib.resolveDependenciesMetadata(javaModule.repositories, + depToDependency, + deps) + + (javaModule, dependencies) + } + + private def resolveVersions(resolvedDependencies: Seq[ResolvedDependencies]) = + resolvedDependencies.map { + case (javaModule, dependencies) => + val metadataLoaders = + javaModule.repositories.flatMap(MetadataLoaderFactory(_)) + + val versions = dependencies.map { dependency => + val currentVersion = Version(dependency.version) + val allVersions = + metadataLoaders + .flatMap(_.getVersions(dependency.module)) + .toSet + DependencyVersions(dependency, currentVersion, allVersions) + } + + ModuleDependenciesVersions(javaModule, versions) + } + + private def eval[T](evaluator: Evaluator, e: Task[T]): T = + 
evaluator.evaluate(Strict.Agg(e)).values match { + case Seq() => throw new NoSuchElementException + case Seq(e: T) => e + } + + private def evalOrElse[T](evaluator: Evaluator, + e: Task[T], + default: => T): T = + evaluator.evaluate(Strict.Agg(e)).values match { + case Seq() => default + case Seq(e: T) => e + } + + private type ResolvedDependencies = (JavaModule, Seq[coursier.Dependency]) +} diff --git a/scalalib/src/mill/scalalib/Dep.scala b/scalalib/src/mill/scalalib/Dep.scala deleted file mode 100644 index 714fa21e..00000000 --- a/scalalib/src/mill/scalalib/Dep.scala +++ /dev/null @@ -1,121 +0,0 @@ -package mill.scalalib -import mill.util.JsonFormatters._ -import upickle.default.{macroRW, ReadWriter => RW} - -import CrossVersion._ - -case class Dep(dep: coursier.Dependency, cross: CrossVersion, force: Boolean) { - import mill.scalalib.api.Util.isDotty - - def artifactName(binaryVersion: String, fullVersion: String, platformSuffix: String) = { - val suffix = cross.suffixString(binaryVersion, fullVersion, platformSuffix) - dep.module.name + suffix - } - def configure(attributes: coursier.Attributes): Dep = copy(dep = dep.copy(attributes = attributes)) - def forceVersion(): Dep = copy(force = true) - def exclude(exclusions: (String, String)*) = copy(dep = dep.copy(exclusions = dep.exclusions ++ exclusions)) - def excludeOrg(organizations: String*): Dep = exclude(organizations.map(_ -> "*"): _*) - def excludeName(names: String*): Dep = exclude(names.map("*" -> _): _*) - def toDependency(binaryVersion: String, fullVersion: String, platformSuffix: String) = - dep.copy(module = dep.module.copy(name = artifactName(binaryVersion, fullVersion, platformSuffix))) - def withConfiguration(configuration: String): Dep = copy(dep = dep.copy(configuration = configuration)) - - /** - * If scalaVersion is a Dotty version, replace the cross-version suffix - * by the Scala 2.x version that the Dotty version is retro-compatible with, - * otherwise do nothing. - * - * This setting is useful when your build contains dependencies that have only - * been published with Scala 2.x, if you have: - * {{{ - * def ivyDeps = Agg(ivy"a::b:c") - * }}} - * you can replace it by: - * {{{ - * def ivyDeps = Agg(ivy"a::b:c".withDottyCompat(scalaVersion())) - * }}} - * This will have no effect when compiling with Scala 2.x, but when compiling - * with Dotty this will change the cross-version to a Scala 2.x one. This - * works because Dotty is currently retro-compatible with Scala 2.x. 
- */ - def withDottyCompat(scalaVersion: String): Dep = - cross match { - case cross: Binary if isDotty(scalaVersion) => - copy(cross = Constant(value = "_2.12", platformed = cross.platformed)) - case _ => - this - } -} - -object Dep { - - val DefaultConfiguration = "default(compile)" - - implicit def parse(signature: String): Dep = { - val parts = signature.split(';') - val module = parts.head - val attributes = parts.tail.foldLeft(coursier.Attributes()) { (as, s) => - s.split('=') match { - case Array("classifier", v) => as.copy(classifier = v) - case Array(k, v) => throw new Exception(s"Unrecognized attribute: [$s]") - case _ => throw new Exception(s"Unable to parse attribute specifier: [$s]") - } - } - (module.split(':') match { - case Array(a, b, c) => Dep(a, b, c, cross = empty(platformed = false)) - case Array(a, b, "", c) => Dep(a, b, c, cross = empty(platformed = true)) - case Array(a, "", b, c) => Dep(a, b, c, cross = Binary(platformed = false)) - case Array(a, "", b, "", c) => Dep(a, b, c, cross = Binary(platformed = true)) - case Array(a, "", "", b, c) => Dep(a, b, c, cross = Full(platformed = false)) - case Array(a, "", "", b, "", c) => Dep(a, b, c, cross = Full(platformed = true)) - case _ => throw new Exception(s"Unable to parse signature: [$signature]") - }).configure(attributes = attributes) - } - def apply(org: String, name: String, version: String, cross: CrossVersion, force: Boolean = false): Dep = { - apply(coursier.Dependency(coursier.Module(org, name), version, DefaultConfiguration), cross, force) - } - implicit def rw: RW[Dep] = macroRW -} - -sealed trait CrossVersion { - /** If true, the cross-version suffix should start with a platform suffix if it exists */ - def platformed: Boolean - - def isBinary: Boolean = - this.isInstanceOf[Binary] - def isConstant: Boolean = - this.isInstanceOf[Constant] - def isFull: Boolean = - this.isInstanceOf[Full] - - /** The string that should be appended to the module name to get the artifact name */ - def suffixString(binaryVersion: String, fullVersion: String, platformSuffix: String): String = { - val firstSuffix = if (platformed) platformSuffix else "" - this match { - case cross: Constant => - s"${firstSuffix}${cross.value}" - case cross: Binary => - s"${firstSuffix}_${binaryVersion}" - case cross: Full => - s"${firstSuffix}_${fullVersion}" - } - } -} -object CrossVersion { - case class Constant(value: String, platformed: Boolean) extends CrossVersion - object Constant { - implicit def rw: RW[Constant] = macroRW - } - case class Binary(platformed: Boolean) extends CrossVersion - object Binary { - implicit def rw: RW[Binary] = macroRW - } - case class Full(platformed: Boolean) extends CrossVersion - object Full { - implicit def rw: RW[Full] = macroRW - } - - def empty(platformed: Boolean) = Constant(value = "", platformed) - - implicit def rw: RW[CrossVersion] = RW.merge(Constant.rw, Binary.rw, Full.rw) -} diff --git a/scalalib/src/mill/scalalib/Dependency.scala b/scalalib/src/mill/scalalib/Dependency.scala deleted file mode 100644 index 0c589663..00000000 --- a/scalalib/src/mill/scalalib/Dependency.scala +++ /dev/null @@ -1,22 +0,0 @@ -package mill.scalalib - -import mill.T -import mill.define.{Discover, ExternalModule} -import mill.eval.Evaluator -import mill.main.EvaluatorScopt -import mill.scalalib.dependency.DependencyUpdatesImpl - -object Dependency extends ExternalModule { - - def updates(ev: Evaluator, allowPreRelease: Boolean = false) = - T.command { - DependencyUpdatesImpl(implicitly, - ev.rootModule, - 
ev.rootModule.millDiscover, - allowPreRelease) - } - - implicit def millScoptEvaluatorReads[T]: EvaluatorScopt[T] = - new mill.main.EvaluatorScopt[T]() - lazy val millDiscover: Discover[Dependency.this.type] = Discover[this.type] -} diff --git a/scalalib/src/mill/scalalib/GenIdeaImpl.scala b/scalalib/src/mill/scalalib/GenIdeaImpl.scala deleted file mode 100644 index 2d76d804..00000000 --- a/scalalib/src/mill/scalalib/GenIdeaImpl.scala +++ /dev/null @@ -1,474 +0,0 @@ -package mill.scalalib - -import ammonite.runtime.SpecialClassLoader -import coursier.{Cache, CoursierPaths, Repository} -import mill.define._ -import mill.eval.{Evaluator, PathRef, Result} -import mill.api.Ctx.{Home, Log} -import mill.util.Strict.Agg -import mill.util.{Loose, Strict} -import mill.{T, scalalib} - -import scala.util.Try - - -object GenIdea extends ExternalModule { - - def idea(ev: Evaluator) = T.command{ - mill.scalalib.GenIdeaImpl( - implicitly, - ev.rootModule, - ev.rootModule.millDiscover - ) - } - - implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() - lazy val millDiscover = Discover[this.type] -} - -object GenIdeaImpl { - - def apply(ctx: Log with Home, - rootModule: BaseModule, - discover: Discover[_]): Unit = { - val pp = new scala.xml.PrettyPrinter(999, 4) - - val jdkInfo = extractCurrentJdk(os.pwd / ".idea" / "misc.xml").getOrElse(("JDK_1_8", "1.8 (1)")) - - os.remove.all(os.pwd/".idea"/"libraries") - os.remove.all(os.pwd/".idea"/"scala_compiler.xml") - os.remove.all(os.pwd/".idea_modules") - - - val evaluator = new Evaluator(ctx.home, os.pwd / 'out, os.pwd / 'out, rootModule, ctx.log) - - for((relPath, xml) <- xmlFileLayout(evaluator, rootModule, jdkInfo)){ - os.write.over(os.pwd/relPath, pp.format(xml)) - } - } - - def extractCurrentJdk(ideaPath: os.Path): Option[(String,String)] = { - import scala.xml.XML - Try { - val xml = XML.loadFile(ideaPath.toString) - (xml \\ "component") - .filter(x => x.attribute("project-jdk-type").map(_.text).contains("JavaSDK")) - .map { n => (n.attribute("languageLevel"), n.attribute("project-jdk-name")) } - .collectFirst{ case (Some(lang), Some(jdk)) => (lang.text, jdk.text) } - }.getOrElse(None) - } - - def xmlFileLayout(evaluator: Evaluator, - rootModule: mill.Module, - jdkInfo: (String,String), - fetchMillModules: Boolean = true): Seq[(os.RelPath, scala.xml.Node)] = { - - val modules = rootModule.millInternal.segmentsToModules.values - .collect{ case x: scalalib.JavaModule => (x.millModuleSegments, x)} - .toSeq - - val buildLibraryPaths = - if (!fetchMillModules) Nil - else sys.props.get("MILL_BUILD_LIBRARIES") match { - case Some(found) => found.split(',').map(os.Path(_)).distinct.toList - case None => - val repos = modules.foldLeft(Set.empty[Repository]) { _ ++ _._2.repositories } - val artifactNames = Seq("main-moduledefs", "main-core", "scalalib", "scalajslib") - val Result.Success(res) = scalalib.Lib.resolveDependencies( - repos.toList, - Lib.depToDependency(_, "2.12.4", ""), - for(name <- artifactNames) - yield ivy"com.lihaoyi::mill-$name:${sys.props("MILL_VERSION")}" - ) - res.items.toList.map(_.path) - } - - val buildDepsPaths = Try(evaluator - .rootModule - .getClass - .getClassLoader - .asInstanceOf[SpecialClassLoader] - ).map { - _.allJars - .map(url => os.Path(url.getFile)) - .filter(_.toIO.exists) - }.getOrElse(Seq()) - - val resolved = for((path, mod) <- modules) yield { - val scalaLibraryIvyDeps = mod match{ - case x: ScalaModule => x.scalaLibraryIvyDeps - case _ => T.task{Nil} - } - val allIvyDeps = 
T.task{mod.transitiveIvyDeps() ++ scalaLibraryIvyDeps() ++ mod.compileIvyDeps()} - val externalDependencies = T.task{ - mod.resolveDeps(allIvyDeps)() ++ - Task.traverse(mod.transitiveModuleDeps)(_.unmanagedClasspath)().flatten - } - - val externalSources = T.task{ - mod.resolveDeps(allIvyDeps, sources = true)() - } - - val (scalacPluginsIvyDeps, scalacOptions) = mod match{ - case mod: ScalaModule => T.task{mod.scalacPluginIvyDeps()} -> T.task{mod.scalacOptions()} - case _ => T.task(Loose.Agg[Dep]()) -> T.task(Seq()) - } - val scalacPluginDependencies = T.task{ - mod.resolveDeps(scalacPluginsIvyDeps)() - } - - val resolvedCp: Loose.Agg[PathRef] = evalOrElse(evaluator, externalDependencies, Loose.Agg.empty) - val resolvedSrcs: Loose.Agg[PathRef] = evalOrElse(evaluator, externalSources, Loose.Agg.empty) - val resolvedSp: Loose.Agg[PathRef] = evalOrElse(evaluator, scalacPluginDependencies, Loose.Agg.empty) - val scalacOpts: Seq[String] = evalOrElse(evaluator, scalacOptions, Seq()) - - ( - path, - resolvedCp.map(_.path).filter(_.ext == "jar") ++ resolvedSrcs.map(_.path), - mod, - resolvedSp.map(_.path).filter(_.ext == "jar"), - scalacOpts - ) - } - val moduleLabels = modules.map(_.swap).toMap - - val allResolved = resolved.flatMap(_._2) ++ buildLibraryPaths ++ buildDepsPaths - - val commonPrefix = - if (allResolved.isEmpty) 0 - else { - val minResolvedLength = allResolved.map(_.segmentCount).min - allResolved.map(_.segments.take(minResolvedLength).toList) - .transpose - .takeWhile(_.distinct.length == 1) - .length - } - - // only resort to full long path names if the jar name is a duplicate - val pathShortLibNameDuplicate = allResolved - .distinct - .map{p => p.last -> p} - .groupBy(_._1) - .filter(_._2.size > 1) - .keySet - - val pathToLibName = allResolved - .map{p => - if (pathShortLibNameDuplicate(p.last)) - (p, p.segments.drop(commonPrefix).mkString("_")) - else - (p, p.last) - } - .toMap - - sealed trait ResolvedLibrary { def path : os.Path } - case class CoursierResolved(path : os.Path, pom : os.Path, sources : Option[os.Path]) - extends ResolvedLibrary - case class OtherResolved(path : os.Path) extends ResolvedLibrary - - // Tries to group jars with their poms and sources. 
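// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of this patch. It spells out
// the sibling-file convention inside the coursier cache that toResolvedJar
// (below) relies on: a jar's pom and sources sit next to it, differing only in
// the extension / "-sources" suffix. The cache path used here is a made-up
// example.
object CoursierSiblingsSketch {
  def siblings(jarPath: String): (String, String) = {
    val withoutExt = jarPath.stripSuffix(".jar")
    (s"$withoutExt.pom", s"$withoutExt-sources.jar")
  }
  def main(args: Array[String]): Unit = {
    val jar = "/home/user/.coursier/cache/v1/https/repo1.maven.org/maven2/" +
      "com/lihaoyi/os-lib_2.12/0.2.6/os-lib_2.12-0.2.6.jar"
    val (pom, sources) = siblings(jar)
    assert(pom.endsWith("/os-lib_2.12-0.2.6.pom"))
    assert(sources.endsWith("/os-lib_2.12-0.2.6-sources.jar"))
  }
}
// ---------------------------------------------------------------------------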
- def toResolvedJar(path : os.Path) : Option[ResolvedLibrary] = { - val inCoursierCache = path.startsWith(os.Path(CoursierPaths.cacheDirectory())) - val isSource = path.last.endsWith("sources.jar") - val isPom = path.ext == "pom" - if (inCoursierCache && (isSource || isPom)) { - // Remove sources and pom as they'll be recovered from the jar path - None - } else if (inCoursierCache && path.ext == "jar") { - val withoutExt = path.last.dropRight(path.ext.length + 1) - val pom = path / os.up / s"$withoutExt.pom" - val sources = Some(path / os.up / s"$withoutExt-sources.jar") - .filter(_.toIO.exists()) - Some(CoursierResolved(path, pom, sources)) - } else Some(OtherResolved(path)) - } - - // Hack so that Intellij does not complain about unresolved magic - // imports in build.sc when in fact they are resolved - def sbtLibraryNameFromPom(pom : os.Path) : String = { - val xml = scala.xml.XML.loadFile(pom.toIO) - - val groupId = (xml \ "groupId").text - val artifactId = (xml \ "artifactId").text - val version = (xml \ "version").text - - // The scala version here is non incidental - s"SBT: $groupId:$artifactId:$version:jar" - } - - def libraryName(resolvedJar: ResolvedLibrary) : String = resolvedJar match { - case CoursierResolved(path, pom, _) if buildDepsPaths.contains(path) => - sbtLibraryNameFromPom(pom) - case CoursierResolved(path, _, _) => - pathToLibName(path) - case OtherResolved(path) => - pathToLibName(path) - } - - def resolvedLibraries(resolved : Seq[os.Path]) : Seq[ResolvedLibrary] = resolved - .map(toResolvedJar) - .collect { case Some(r) => r} - - val compilerSettings = resolved - .foldLeft(Map[(Loose.Agg[os.Path], Seq[String]), Vector[JavaModule]]()) { - (r, q) => - val key = (q._4, q._5) - r + (key -> (r.getOrElse(key, Vector()) :+ q._3)) - } - - val allBuildLibraries : Set[ResolvedLibrary] = - resolvedLibraries(buildLibraryPaths ++ buildDepsPaths).toSet - - val fixedFiles = Seq( - Tuple2(os.rel/".idea"/"misc.xml", miscXmlTemplate(jdkInfo)), - Tuple2(os.rel/".idea"/"scala_settings.xml", scalaSettingsTemplate()), - Tuple2( - os.rel/".idea"/"modules.xml", - allModulesXmlTemplate( - modules - .filter(!_._2.skipIdea) - .map { case (path, mod) => moduleName(path) } - ) - ), - Tuple2( - os.rel/".idea_modules"/"mill-build.iml", - rootXmlTemplate( - for(lib <- allBuildLibraries) - yield libraryName(lib) - ) - ), - Tuple2( - os.rel/".idea"/"scala_compiler.xml", - scalaCompilerTemplate(compilerSettings) - ) - ) - - val libraries = resolvedLibraries(allResolved).map{ resolved => - import resolved.path - val url = "jar://" + path + "!/" - val name = libraryName(resolved) - val sources = resolved match { - case CoursierResolved(_, _, s) => s.map(p => "jar://" + p + "!/") - case OtherResolved(_) => None - } - Tuple2(os.rel/".idea"/'libraries/s"$name.xml", libraryXmlTemplate(name, url, sources)) - } - - val moduleFiles = resolved.map{ case (path, resolvedDeps, mod, _, _) => - val Seq( - resourcesPathRefs: Seq[PathRef], - sourcesPathRef: Seq[PathRef], - generatedSourcePathRefs: Seq[PathRef], - allSourcesPathRefs: Seq[PathRef] - ) = evaluator.evaluate(Agg(mod.resources, mod.sources, mod.generatedSources, mod.allSources)).values - - val generatedSourcePaths = generatedSourcePathRefs.map(_.path) - val normalSourcePaths = (allSourcesPathRefs.map(_.path).toSet -- generatedSourcePaths.toSet).toSeq - - val paths = Evaluator.resolveDestPaths( - evaluator.outPath, - mod.compile.ctx.segments - ) - val scalaVersionOpt = mod match { - case x: ScalaModule => 
Some(evaluator.evaluate(Agg(x.scalaVersion)).values.head.asInstanceOf[String]) - case _ => None - } - val generatedSourceOutPath = Evaluator.resolveDestPaths( - evaluator.outPath, - mod.generatedSources.ctx.segments - ) - - val isTest = mod.isInstanceOf[TestModule] - - val elem = moduleXmlTemplate( - mod.intellijModulePath, - scalaVersionOpt, - Strict.Agg.from(resourcesPathRefs.map(_.path)), - Strict.Agg.from(normalSourcePaths), - Strict.Agg.from(generatedSourcePaths), - paths.out, - generatedSourceOutPath.dest, - Strict.Agg.from(resolvedDeps.map(pathToLibName)), - Strict.Agg.from(mod.moduleDeps.map{ m => moduleName(moduleLabels(m))}.distinct), - isTest - ) - Tuple2(os.rel/".idea_modules"/s"${moduleName(path)}.iml", elem) - } - - fixedFiles ++ libraries ++ moduleFiles - } - - def evalOrElse[T](evaluator: Evaluator, e: Task[T], default: => T): T = { - evaluator.evaluate(Agg(e)).values match { - case Seq() => default - case Seq(e: T) => e - } - } - - def relify(p: os.Path) = { - val r = p.relativeTo(os.pwd/".idea_modules") - (Seq.fill(r.ups)("..") ++ r.segments).mkString("/") - } - - def moduleName(p: Segments) = p.value.foldLeft(StringBuilder.newBuilder) { - case (sb, Segment.Label(s)) if sb.isEmpty => sb.append(s) - case (sb, Segment.Cross(s)) if sb.isEmpty => sb.append(s.mkString("-")) - case (sb, Segment.Label(s)) => sb.append(".").append(s) - case (sb, Segment.Cross(s)) => sb.append("-").append(s.mkString("-")) - }.mkString.toLowerCase() - - def scalaSettingsTemplate() = { - - - - - - } - def miscXmlTemplate(jdkInfo: (String,String)) = { - - - - - - } - - def allModulesXmlTemplate(selectors: Seq[String]) = { - - - - - { - for(selector <- selectors) - yield { - val filepath = "$PROJECT_DIR$/.idea_modules/" + selector + ".iml" - val fileurl = "file://" + filepath - - } - } - - - - } - def rootXmlTemplate(libNames: Strict.Agg[String]) = { - - - - - - - - - - - { - for(name <- libNames.toSeq.sorted) - yield - } - - - } - def libraryXmlTemplate(name: String, url: String, sources: Option[String]) = { - - - - - - { if (sources.isDefined) { - - - - } - } - - - } - def moduleXmlTemplate(basePath: os.Path, - scalaVersionOpt: Option[String], - resourcePaths: Strict.Agg[os.Path], - normalSourcePaths: Strict.Agg[os.Path], - generatedSourcePaths: Strict.Agg[os.Path], - compileOutputPath: os.Path, - generatedSourceOutputPath: os.Path, - libNames: Strict.Agg[String], - depNames: Strict.Agg[String], - isTest: Boolean - ) = { - - - { - val outputUrl = "file://$MODULE_DIR$/" + relify(compileOutputPath) + "/dest/classes" - if (isTest) - - else - - } - - - - { - for (normalSourcePath <- normalSourcePaths.toSeq.sorted) - yield - - } - { - for (generatedSourcePath <- generatedSourcePaths.toSeq.sorted) - yield - - } - { - val resourceType = if (isTest) "java-test-resource" else "java-resource" - for (resourcePath <- resourcePaths.toSeq.sorted) - yield - - } - - - - - { - for(scalaVersion <- scalaVersionOpt.toSeq) - yield - } - - { - for(name <- libNames.toSeq.sorted) - yield - - } - { - for(depName <- depNames.toSeq.sorted) - yield - } - - - } - def scalaCompilerTemplate(settings: Map[(Loose.Agg[os.Path], Seq[String]), Seq[JavaModule]]) = { - - - - { - for((((plugins, params), mods), i) <- settings.toSeq.zip(1 to settings.size)) - yield - moduleName(m.millModuleSegments)).mkString(",")}> - - { - for(param <- params) - yield - } - - - { - for(plugin <- plugins.toSeq) - yield - } - - - } - - - } -} diff --git a/scalalib/src/mill/scalalib/JavaModule.scala b/scalalib/src/mill/scalalib/JavaModule.scala deleted file 
mode 100644 index 78be8893..00000000 --- a/scalalib/src/mill/scalalib/JavaModule.scala +++ /dev/null @@ -1,608 +0,0 @@ -package mill -package scalalib - -import coursier.Repository -import mill.define.Task -import mill.define.TaskModule -import mill.eval.{PathRef, Result} -import mill.modules.{Assembly, Jvm} -import mill.modules.Jvm.{createAssembly, createJar} -import Lib._ -import mill.scalalib.publish.{Artifact, Scope} -import mill.util.Loose.Agg - -/** - * Core configuration required to compile a single Scala compilation target - */ -trait JavaModule extends mill.Module with TaskModule { outer => - def zincWorker: ZincWorkerModule = mill.scalalib.ZincWorkerModule - - trait Tests extends TestModule{ - override def moduleDeps = Seq(outer) - override def repositories = outer.repositories - override def javacOptions = outer.javacOptions - override def zincWorker = outer.zincWorker - } - def defaultCommandName() = "run" - - def resolvePublishDependency: Task[Dep => publish.Dependency] = T.task{ - Artifact.fromDepJava(_: Dep) - } - def resolveCoursierDependency: Task[Dep => coursier.Dependency] = T.task{ - Lib.depToDependencyJava(_: Dep) - } - - /** - * Allows you to specify an explicit main class to use for the `run` command. - * If none is specified, the classpath is searched for an appropriate main - * class to use if one exists - */ - def mainClass: T[Option[String]] = None - - def finalMainClassOpt: T[Either[String, String]] = T{ - mainClass() match{ - case Some(m) => Right(m) - case None => - zincWorker.worker().discoverMainClasses(compile())match { - case Seq() => Left("No main class specified or found") - case Seq(main) => Right(main) - case mains => - Left( - s"Multiple main classes found (${mains.mkString(",")}) " + - "please explicitly specify which one to use by overriding mainClass" - ) - } - } - } - - def finalMainClass: T[String] = T{ - finalMainClassOpt() match { - case Right(main) => Result.Success(main) - case Left(msg) => Result.Failure(msg) - } - } - - /** - * Any ivy dependencies you want to add to this Module, in the format - * ivy"org::name:version" for Scala dependencies or ivy"org:name:version" - * for Java dependencies - */ - def ivyDeps = T{ Agg.empty[Dep] } - - /** - * Same as `ivyDeps`, but only present at compile time. Useful for e.g. - * macro-related dependencies like `scala-reflect` that doesn't need to be - * present at runtime - */ - def compileIvyDeps = T{ Agg.empty[Dep] } - /** - * Same as `ivyDeps`, but only present at runtime. Useful for e.g. 
- * selecting different versions of a dependency to use at runtime after your - * code has already been compiled - */ - def runIvyDeps = T{ Agg.empty[Dep] } - - /** - * Options to pass to the java compiler - */ - def javacOptions = T{ Seq.empty[String] } - - /** The direct dependencies of this module */ - def moduleDeps = Seq.empty[JavaModule] - - /** The direct and indirect dependencies of this module */ - def recursiveModuleDeps: Seq[JavaModule] = { - moduleDeps.flatMap(_.transitiveModuleDeps).distinct - } - - /** Like `recursiveModuleDeps` but also include the module itself */ - def transitiveModuleDeps: Seq[JavaModule] = { - Seq(this) ++ recursiveModuleDeps - } - - /** - * Additional jars, classfiles or resources to add to the classpath directly - * from disk rather than being downloaded from Maven Central or other package - * repositories - */ - def unmanagedClasspath = T{ Agg.empty[PathRef] } - - /** - * The transitive ivy dependencies of this module and all it's upstream modules - */ - def transitiveIvyDeps: T[Agg[Dep]] = T{ - ivyDeps() ++ Task.traverse(moduleDeps)(_.transitiveIvyDeps)().flatten - } - - /** - * The upstream compilation output of all this module's upstream modules - */ - def upstreamCompileOutput = T{ - Task.traverse(recursiveModuleDeps)(_.compile) - } - - /** - * The transitive version of `localClasspath` - */ - def transitiveLocalClasspath: T[Agg[PathRef]] = T{ - Task.traverse(moduleDeps)(m => - T.task{m.localClasspath() ++ m.transitiveLocalClasspath()} - )().flatten - } - - def mapDependencies = T.task{ d: coursier.Dependency => d } - - def resolveDeps(deps: Task[Agg[Dep]], sources: Boolean = false) = T.task{ - resolveDependencies( - repositories, - resolveCoursierDependency().apply(_), - deps(), - sources, - mapDependencies = Some(mapDependencies()) - ) - } - - - def repositories: Seq[Repository] = zincWorker.repositories - - /** - * What platform suffix to use for publishing, e.g. `_sjs` for Scala.js - * projects - */ - def platformSuffix = T{ "" } - - private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r - - /** - * What shell script to use to launch the executable generated by `assembly`. 
- * Defaults to a generic "universal" launcher that should work for Windows, - * OS-X and Linux - */ - def prependShellScript: T[String] = T{ - finalMainClassOpt().toOption match{ - case None => "" - case Some(cls) => - val isWin = scala.util.Properties.isWin - mill.modules.Jvm.launcherUniversalScript( - cls, - Agg("$0"), Agg("%~dpnx0"), - forkArgs() - ) - } - } - - def assemblyRules: Seq[Assembly.Rule] = Assembly.defaultRules - - /** - * The folders where the source files for this module live - */ - def sources = T.sources{ millSourcePath / 'src } - /** - * The folders where the resource files for this module live - */ - def resources = T.sources{ millSourcePath / 'resources } - /** - * Folders containing source files that are generated rather than - * hand-written; these files can be generated in this target itself, - * or can refer to files generated from other targets - */ - def generatedSources = T{ Seq.empty[PathRef] } - - /** - * The folders containing all source files fed into the compiler - */ - def allSources = T{ sources() ++ generatedSources() } - - /** - * All individual source files fed into the compiler - */ - def allSourceFiles = T{ - def isHiddenFile(path: os.Path) = path.last.startsWith(".") - for { - root <- allSources() - if os.exists(root.path) - path <- (if (os.isDir(root.path)) os.walk(root.path) else Seq(root.path)) - if os.isFile(path) && ((path.ext == "scala" || path.ext == "java") && !isHiddenFile(path)) - } yield PathRef(path) - } - - /** - * Compiles the current module to generate compiled classfiles/bytecode - */ - def compile: T[mill.scalalib.api.CompilationResult] = T.persistent{ - zincWorker.worker().compileJava( - upstreamCompileOutput(), - allSourceFiles().map(_.path), - compileClasspath().map(_.path), - javacOptions() - ) - } - - /** - * The output classfiles/resources from this module, excluding upstream - * modules and third-party dependencies - */ - def localClasspath = T{ - resources() ++ Agg(compile().classes) - } - - /** - * All classfiles and resources from upstream modules and dependencies - * necessary to compile this module - */ - def compileClasspath = T{ - transitiveLocalClasspath() ++ - resources() ++ - unmanagedClasspath() ++ - resolveDeps(T.task{compileIvyDeps() ++ transitiveIvyDeps()})() - } - - /** - * All upstream classfiles and resources necessary to build and executable - * assembly, but without this module's contribution - */ - def upstreamAssemblyClasspath = T{ - transitiveLocalClasspath() ++ - unmanagedClasspath() ++ - resolveDeps(T.task{runIvyDeps() ++ transitiveIvyDeps()})() - } - - /** - * All classfiles and resources from upstream modules and dependencies - * necessary to run this module's code after compilation - */ - def runClasspath = T{ - localClasspath() ++ - upstreamAssemblyClasspath() - } - - /** - * Build the assembly for upstream dependencies separate from the current - * classpath - * - * This should allow much faster assembly creation in the common case where - * upstream dependencies do not change - */ - def upstreamAssembly = T{ - createAssembly( - upstreamAssemblyClasspath().map(_.path), - mainClass(), - assemblyRules = assemblyRules - ) - } - - /** - * An executable uber-jar/assembly containing all the resources and compiled - * classfiles from this module and all it's upstream modules and dependencies - */ - def assembly = T{ - createAssembly( - Agg.from(localClasspath().map(_.path)), - finalMainClassOpt().toOption, - prependShellScript(), - Some(upstreamAssembly().path), - assemblyRules - ) - } - - /** - * A 
jar containing only this module's resources and compiled classfiles, - * without those from upstream modules and dependencies - */ - def jar = T{ - createJar( - localClasspath().map(_.path).filter(os.exists), - mainClass() - ) - } - - /** - * The documentation jar, containing all the Javadoc/Scaladoc HTML files, for - * publishing to Maven Central - */ - def docJar = T[PathRef] { - val outDir = T.ctx().dest - - val javadocDir = outDir / 'javadoc - os.makeDir.all(javadocDir) - - val files = for{ - ref <- allSources() - if os.exists(ref.path) - p <- (if (os.isDir(ref.path)) os.walk(ref.path) else Seq(ref.path)) - if os.isFile(p) && (p.ext == "java") - } yield p.toNIO.toString - - val options = Seq("-d", javadocDir.toNIO.toString) - - if (files.nonEmpty) Jvm.baseInteractiveSubprocess( - commandArgs = Seq( - "javadoc" - ) ++ options ++ - Seq( - "-classpath", - compileClasspath() - .map(_.path) - .filter(_.ext != "pom") - .mkString(java.io.File.pathSeparator) - ) ++ - files.map(_.toString), - envArgs = Map(), - workingDir = T.ctx().dest - ) - - createJar(Agg(javadocDir))(outDir) - } - - /** - * The source jar, containing only source code for publishing to Maven Central - */ - def sourceJar = T { - createJar((allSources() ++ resources()).map(_.path).filter(os.exists)) - } - - /** - * Any command-line parameters you want to pass to the forked JVM under `run`, - * `test` or `repl` - */ - def forkArgs = T{ Seq.empty[String] } - - /** - * Any environment variables you want to pass to the forked JVM under `run`, - * `test` or `repl` - */ - def forkEnv = T{ sys.env.toMap } - - /** - * Builds a command-line "launcher" file that can be used to run this module's - * code, without the Mill process. Useful for deployment & other places where - * you do not want a build tool running - */ - def launcher = T{ - Result.Success( - Jvm.createLauncher( - finalMainClass(), - runClasspath().map(_.path), - forkArgs() - ) - ) - } - - def ivyDepsTree(inverse: Boolean = false) = T.command { - val (flattened, resolution) = Lib.resolveDependenciesMetadata( - repositories, - resolveCoursierDependency().apply(_), - transitiveIvyDeps(), - Some(mapDependencies()) - ) - - println(coursier.util.Print.dependencyTree(flattened, resolution, - printExclusions = false, reverse = inverse)) - - Result.Success() - } - - /** - * Runs this module's code in-process within an isolated classloader. This is - * faster than `run`, but in exchange you have less isolation between runs - * since the code can dirty the parent Mill process and potentially leave it - * in a bad state. - */ - def runLocal(args: String*) = T.command { - Jvm.runLocal( - finalMainClass(), - runClasspath().map(_.path), - args - ) - } - - /** - * Runs this module's code in a subprocess and waits for it to finish - */ - def run(args: String*) = T.command{ - try Result.Success(Jvm.runSubprocess( - finalMainClass(), - runClasspath().map(_.path), - forkArgs(), - forkEnv(), - args, - workingDir = forkWorkingDir() - )) catch { case e: Exception => - Result.Failure("subprocess failed") - } - } - - private[this] def backgroundSetup(dest: os.Path) = { - val token = java.util.UUID.randomUUID().toString - val procId = dest / ".mill-background-process-id" - val procTombstone = dest / ".mill-background-process-tombstone" - // The backgrounded subprocesses poll the procId file, and kill themselves - // when the procId file is deleted. This deletion happens immediately before - // the body of these commands run, but we cannot be sure the subprocess has - // had time to notice. 
- // - // To make sure we wait for the previous subprocess to - // die, we make the subprocess write a tombstone file out when it kills - // itself due to procId being deleted, and we wait a short time on task-start - // to see if such a tombstone appears. If a tombstone appears, we can be sure - // the subprocess has killed itself, and can continue. If a tombstone doesn't - // appear in a short amount of time, we assume the subprocess exited or was - // killed via some other means, and continue anyway. - val start = System.currentTimeMillis() - while({ - if (os.exists(procTombstone)) { - Thread.sleep(10) - os.remove.all(procTombstone) - true - } else { - Thread.sleep(10) - System.currentTimeMillis() - start < 100 - } - })() - - os.write(procId, token) - os.write(procTombstone, token) - (procId, procTombstone, token) - } - - /** - * Runs this module's code in a background process, until it dies or - * `runBackground` is used again. This lets you continue using Mill while - * the process is running in the background: editing files, compiling, and - * only re-starting the background process when you're ready. - * - * You can also use `-w foo.runBackground` to make Mill watch for changes - * and automatically recompile your code & restart the background process - * when ready. This is useful when working on long-running server processes - * that would otherwise run forever - */ - def runBackground(args: String*) = T.command{ - val (procId, procTombstone, token) = backgroundSetup(T.ctx().dest) - try Result.Success(Jvm.runSubprocess( - "mill.scalalib.backgroundwrapper.BackgroundWrapper", - (runClasspath() ++ zincWorker.backgroundWrapperClasspath()).map(_.path), - forkArgs(), - forkEnv(), - Seq(procId.toString, procTombstone.toString, token, finalMainClass()) ++ args, - workingDir = forkWorkingDir(), - background = true - )) catch { case e: Exception => - Result.Failure("subprocess failed") - } - } - - /** - * Same as `runBackground`, but lets you specify a main class to run - */ - def runMainBackground(mainClass: String, args: String*) = T.command{ - val (procId, procTombstone, token) = backgroundSetup(T.ctx().dest) - try Result.Success(Jvm.runSubprocess( - "mill.scalalib.backgroundwrapper.BackgroundWrapper", - (runClasspath() ++ zincWorker.backgroundWrapperClasspath()).map(_.path), - forkArgs(), - forkEnv(), - Seq(procId.toString, procTombstone.toString, token, mainClass) ++ args, - workingDir = forkWorkingDir(), - background = true - )) catch { case e: Exception => - Result.Failure("subprocess failed") - } - } - - /** - * Same as `runLocal`, but lets you specify a main class to run - */ - def runMainLocal(mainClass: String, args: String*) = T.command { - Jvm.runLocal( - mainClass, - runClasspath().map(_.path), - args - ) - } - - /** - * Same as `run`, but lets you specify a main class to run - */ - def runMain(mainClass: String, args: String*) = T.command{ - try Result.Success(Jvm.runSubprocess( - mainClass, - runClasspath().map(_.path), - forkArgs(), - forkEnv(), - args, - workingDir = forkWorkingDir() - )) catch { case e: Exception => - Result.Failure("subprocess failed") - } - } - - // publish artifact with name "mill_2.12.4" instead of "mill_2.12" - - def artifactName: T[String] = millModuleSegments.parts.mkString("-") - - def artifactId: T[String] = artifactName() - - def intellijModulePath: os.Path = millSourcePath - - def forkWorkingDir = T{ ammonite.ops.pwd } - - /** - * Skip Idea project file generation. 
- */ - def skipIdea: Boolean = false -} - -trait TestModule extends JavaModule with TaskModule { - override def defaultCommandName() = "test" - /** - * What test frameworks to use. - */ - def testFrameworks: T[Seq[String]] - /** - * Discovers and runs the module's tests in a subprocess, reporting the - * results to the console - */ - def test(args: String*) = T.command{ - val outputPath = T.ctx().dest/"out.json" - - Jvm.runSubprocess( - mainClass = "mill.scalalib.TestRunner", - classPath = zincWorker.scalalibClasspath().map(_.path), - jvmArgs = forkArgs(), - envArgs = forkEnv(), - mainArgs = - Seq(testFrameworks().length.toString) ++ - testFrameworks() ++ - Seq(runClasspath().length.toString) ++ - runClasspath().map(_.path.toString) ++ - Seq(args.length.toString) ++ - args ++ - Seq(outputPath.toString, T.ctx().log.colored.toString, compile().classes.path.toString, T.ctx().home.toString), - workingDir = forkWorkingDir() - ) - - try { - val jsonOutput = ujson.read(outputPath.toIO) - val (doneMsg, results) = upickle.default.read[(String, Seq[TestRunner.Result])](jsonOutput) - TestModule.handleResults(doneMsg, results) - }catch{case e: Throwable => - Result.Failure("Test reporting failed: " + e) - } - - } - - /** - * Discovers and runs the module's tests in-process in an isolated classloader, - * reporting the results to the console - */ - def testLocal(args: String*) = T.command{ - val outputPath = T.ctx().dest/"out.json" - - val (doneMsg, results) = TestRunner.runTests( - TestRunner.frameworks(testFrameworks()), - runClasspath().map(_.path), - Agg(compile().classes.path), - args - ) - - TestModule.handleResults(doneMsg, results) - - } -} - -object TestModule{ - def handleResults(doneMsg: String, results: Seq[TestRunner.Result]) = { - - val badTests = results.filter(x => Set("Error", "Failure").contains(x.status)) - if (badTests.isEmpty) Result.Success((doneMsg, results)) - else { - val suffix = if (badTests.length == 1) "" else " and " + (badTests.length-1) + " more" - - Result.Failure( - badTests.head.fullyQualifiedName + " " + badTests.head.selector + suffix, - Some((doneMsg, results)) - ) - } - } -} - diff --git a/scalalib/src/mill/scalalib/Lib.scala b/scalalib/src/mill/scalalib/Lib.scala deleted file mode 100644 index b8b253bd..00000000 --- a/scalalib/src/mill/scalalib/Lib.scala +++ /dev/null @@ -1,133 +0,0 @@ -package mill -package scalalib - -import java.io.{File, FileInputStream} -import java.lang.annotation.Annotation -import java.lang.reflect.Modifier -import java.util.zip.ZipInputStream -import javax.tools.ToolProvider - -import ammonite.util.Util -import coursier.{Cache, Dependency, Fetch, Repository, Resolution} -import mill.scalalib.api.Util.isDotty -import mill.Agg -import mill.eval.{PathRef, Result} -import mill.modules.Jvm -import mill.api.Ctx -import sbt.testing._ - -import scala.collection.mutable - - -object Lib{ - def depToDependencyJava(dep: Dep, platformSuffix: String = ""): Dependency = { - assert(dep.cross.isConstant, s"Not a Java dependency: $dep") - depToDependency(dep, "", platformSuffix) - } - - def depToDependency(dep: Dep, scalaVersion: String, platformSuffix: String = ""): Dependency = - dep.toDependency( - binaryVersion = mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion), - fullVersion = scalaVersion, - platformSuffix = platformSuffix - ) - - def resolveDependenciesMetadata(repositories: Seq[Repository], - depToDependency: Dep => coursier.Dependency, - deps: TraversableOnce[Dep], - mapDependencies: Option[Dependency => Dependency] = None) = { - val 
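`TestModule` above is usually instantiated as a nested `test` module inside a `ScalaModule` in `build.sc`. A minimal sketch, assuming uTest (the dependency coordinates and versions are illustrative):

    import mill._, mill.scalalib._

    object foo extends ScalaModule {
      def scalaVersion = "2.12.8"

      object test extends Tests {
        def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.6.6")
        def testFrameworks = Seq("utest.runner.Framework")
      }
    }

`mill foo.test` then runs the suites in a forked JVM via `mill.scalalib.TestRunner`, while `mill foo.test.testLocal` runs them in-process in an isolated classloader.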
depSeq = deps.toSeq - mill.modules.Jvm.resolveDependenciesMetadata( - repositories, - depSeq.map(depToDependency), - depSeq.filter(_.force).map(depToDependency), - mapDependencies - ) - } - /** - * Resolve dependencies using Coursier. - * - * We do not bother breaking this out into the separate ZincWorker classpath, - * because Coursier is already bundled with mill/Ammonite to support the - * `import $ivy` syntax. - */ - def resolveDependencies(repositories: Seq[Repository], - depToDependency: Dep => coursier.Dependency, - deps: TraversableOnce[Dep], - sources: Boolean = false, - mapDependencies: Option[Dependency => Dependency] = None): Result[Agg[PathRef]] = { - val depSeq = deps.toSeq - mill.modules.Jvm.resolveDependencies( - repositories, - depSeq.map(depToDependency), - depSeq.filter(_.force).map(depToDependency), - sources, - mapDependencies - ) - } - def scalaCompilerIvyDeps(scalaOrganization: String, scalaVersion: String) = - if (mill.scalalib.api.Util.isDotty(scalaVersion)) - Agg(ivy"$scalaOrganization::dotty-compiler:$scalaVersion".forceVersion()) - else - Agg( - ivy"$scalaOrganization:scala-compiler:$scalaVersion".forceVersion(), - ivy"$scalaOrganization:scala-reflect:$scalaVersion".forceVersion() - ) - - def scalaRuntimeIvyDeps(scalaOrganization: String, scalaVersion: String) = Agg[Dep]( - ivy"$scalaOrganization:scala-library:$scalaVersion".forceVersion() - ) - - def listClassFiles(base: os.Path): Iterator[String] = { - if (os.isDir(base)) os.walk(base).toIterator.filter(_.ext == "class").map(_.relativeTo(base).toString) - else { - val zip = new ZipInputStream(new FileInputStream(base.toIO)) - Iterator.continually(zip.getNextEntry).takeWhile(_ != null).map(_.getName).filter(_.endsWith(".class")) - } - } - - def discoverTests(cl: ClassLoader, framework: Framework, classpath: Agg[os.Path]) = { - - val fingerprints = framework.fingerprints() - - val testClasses = classpath.flatMap { base => - // Don't blow up if there are no classfiles representing - // the tests to run Instead just don't run anything - if (!os.exists(base)) Nil - else listClassFiles(base).flatMap { path => - val cls = cl.loadClass(path.stripSuffix(".class").replace('/', '.')) - val publicConstructorCount = - cls.getConstructors.count(c => c.getParameterCount == 0 && Modifier.isPublic(c.getModifiers)) - - if (Modifier.isAbstract(cls.getModifiers) || cls.isInterface || publicConstructorCount > 1) { - None - } else { - (cls.getName.endsWith("$"), publicConstructorCount == 0) match{ - case (true, true) => matchFingerprints(cl, cls, fingerprints, isModule = true) - case (false, false) => matchFingerprints(cl, cls, fingerprints, isModule = false) - case _ => None - } - } - } - } - - testClasses - } - def matchFingerprints(cl: ClassLoader, cls: Class[_], fingerprints: Array[Fingerprint], isModule: Boolean) = { - fingerprints.find { - case f: SubclassFingerprint => - f.isModule == isModule && - cl.loadClass(f.superclassName()).isAssignableFrom(cls) - - case f: AnnotatedFingerprint => - val annotationCls = cl.loadClass(f.annotationName()).asInstanceOf[Class[Annotation]] - f.isModule == isModule && - ( - cls.isAnnotationPresent(annotationCls) || - cls.getDeclaredMethods.exists(_.isAnnotationPresent(annotationCls)) - ) - - }.map { f => (cls, f) } - } - -} diff --git a/scalalib/src/mill/scalalib/MiscModule.scala b/scalalib/src/mill/scalalib/MiscModule.scala deleted file mode 100644 index c6449d6e..00000000 --- a/scalalib/src/mill/scalalib/MiscModule.scala +++ /dev/null @@ -1,101 +0,0 @@ -package mill -package scalalib - 
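To make the fingerprint matching above concrete: a uTest suite declared as `object MyTests extends TestSuite` compiles to a class named `MyTests$` with no public constructor, so `discoverTests` takes the `(true, true)` branch and matches it as a module, whereas a JUnit-style `class MyTests` with a public no-argument constructor takes the `(false, false)` branch and is matched as a plain class; abstract classes, interfaces and classes with more than one public constructor are skipped.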
-import mill.define.Cross.Resolver -import mill.define.{Cross, Task} -import mill.eval.{PathRef, Result} -import mill.util.Loose.Agg -object CrossModuleBase{ - def scalaVersionPaths(scalaVersion: String, f: String => os.Path) = { - for(segments <- scalaVersion.split('.').inits.filter(_.nonEmpty)) - yield PathRef(f(segments.mkString("."))) - } -} -trait CrossModuleBase extends ScalaModule { - def crossScalaVersion: String - def scalaVersion = T{ crossScalaVersion } - - override def millSourcePath = super.millSourcePath / ammonite.ops.up - implicit def crossSbtModuleResolver: Resolver[CrossModuleBase] = new Resolver[CrossModuleBase]{ - def resolve[V <: CrossModuleBase](c: Cross[V]): V = { - crossScalaVersion.split('.') - .inits - .takeWhile(_.length > 1) - .flatMap( prefix => - c.items.map(_._2).find(_.crossScalaVersion.split('.').startsWith(prefix)) - ) - .collectFirst{case x => x} - .getOrElse( - throw new Exception( - s"Unable to find compatible cross version between $crossScalaVersion and "+ - c.items.map(_._2.crossScalaVersion).mkString(",") - ) - ) - } - } -} - -trait CrossScalaModule extends ScalaModule with CrossModuleBase{ outer => - override def sources = T.sources{ - super.sources() ++ - CrossModuleBase.scalaVersionPaths(crossScalaVersion, s => millSourcePath / s"src-$s" ) - } - - trait Tests extends super.Tests { - override def sources = T.sources{ - super.sources() ++ - CrossModuleBase.scalaVersionPaths(crossScalaVersion, s => millSourcePath / s"src-$s" ) - } - } -} - -trait MavenTests extends TestModule{ - override def sources = T.sources( - millSourcePath / 'src / 'test / 'scala, - millSourcePath / 'src / 'test / 'java - ) - override def resources = T.sources{ millSourcePath / 'src / 'test / 'resources } -} -trait MavenModule extends JavaModule{outer => - - override def sources = T.sources( - millSourcePath / 'src / 'main / 'scala, - millSourcePath / 'src / 'main / 'java - ) - override def resources = T.sources{ millSourcePath / 'src / 'main / 'resources } - trait Tests extends super.Tests with MavenTests { - override def millSourcePath = outer.millSourcePath - override def intellijModulePath = outer.millSourcePath / 'src / 'test - } -} - -trait SbtModule extends MavenModule with ScalaModule{ outer => - trait Tests extends super.Tests with MavenTests { - override def millSourcePath = outer.millSourcePath - override def intellijModulePath = outer.millSourcePath / 'src / 'test - } -} - -trait CrossSbtModule extends SbtModule with CrossModuleBase{ outer => - - override def sources = T.sources{ - super.sources() ++ - CrossModuleBase.scalaVersionPaths( - crossScalaVersion, - s => millSourcePath / 'src / 'main / s"scala-$s" - ) - - } - trait Tests extends super.Tests { - override def millSourcePath = outer.millSourcePath - override def sources = T.sources{ - super.sources() ++ - CrossModuleBase.scalaVersionPaths( - crossScalaVersion, - s => millSourcePath / 'src / 'test / s"scala-$s" - ) - } - } -} - - diff --git a/scalalib/src/mill/scalalib/PublishModule.scala b/scalalib/src/mill/scalalib/PublishModule.scala deleted file mode 100644 index 588781f4..00000000 --- a/scalalib/src/mill/scalalib/PublishModule.scala +++ /dev/null @@ -1,124 +0,0 @@ -package mill -package scalalib - -import mill.define.{ExternalModule, Task} -import mill.api.PathRef -import mill.scalalib.publish.{Artifact, SonatypePublisher} - -/** - * Configuration necessary for publishing a Scala module to Maven Central or similar - */ -trait PublishModule extends JavaModule { outer => - import mill.scalalib.publish._ - - 
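The cross-version and sbt-layout traits above are typically used from `build.sc` like this; module names and Scala versions are illustrative:

    import mill._, mill.scalalib._

    // one module instance per cross Scala version, picking up extra src-2.11/src-2.12 folders
    object bar extends Cross[BarModule]("2.11.12", "2.12.8")
    class BarModule(val crossScalaVersion: String) extends CrossScalaModule

    // expects the sbt/Maven layout: baz/src/main/scala, baz/src/test/scala, ...
    object baz extends SbtModule {
      def scalaVersion = "2.12.8"
    }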
override def moduleDeps = Seq.empty[PublishModule] - - def pomSettings: T[PomSettings] - def publishVersion: T[String] - - def publishSelfDependency = T { - Artifact(pomSettings().organization, artifactId(), publishVersion()) - } - - def publishXmlDeps = T.task { - val ivyPomDeps = ivyDeps().map(resolvePublishDependency().apply(_)) - val modulePomDeps = Task.sequence(moduleDeps.map(_.publishSelfDependency))() - ivyPomDeps ++ modulePomDeps.map(Dependency(_, Scope.Compile)) - } - def pom = T { - val pom = Pom(artifactMetadata(), publishXmlDeps(), artifactId(), pomSettings()) - val pomPath = T.ctx().dest / s"${artifactId()}-${publishVersion()}.pom" - os.write.over(pomPath, pom) - PathRef(pomPath) - } - - def ivy = T { - val ivy = Ivy(artifactMetadata(), publishXmlDeps()) - val ivyPath = T.ctx().dest / "ivy.xml" - os.write.over(ivyPath, ivy) - PathRef(ivyPath) - } - - def artifactMetadata: T[Artifact] = T { - Artifact(pomSettings().organization, artifactId(), publishVersion()) - } - - def publishLocal(): define.Command[Unit] = T.command { - LocalPublisher.publish( - jar = jar().path, - sourcesJar = sourceJar().path, - docJar = docJar().path, - pom = pom().path, - ivy = ivy().path, - artifact = artifactMetadata() - ) - } - - def sonatypeUri: String = "https://oss.sonatype.org/service/local" - - def sonatypeSnapshotUri: String = "https://oss.sonatype.org/content/repositories/snapshots" - - def publishArtifacts = T { - val baseName = s"${artifactId()}-${publishVersion()}" - PublishModule.PublishData( - artifactMetadata(), - Seq( - jar() -> s"$baseName.jar", - sourceJar() -> s"$baseName-sources.jar", - docJar() -> s"$baseName-javadoc.jar", - pom() -> s"$baseName.pom" - ) - ) - } - - def publish(sonatypeCreds: String, - gpgPassphrase: String = null, - signed: Boolean = true, - release: Boolean): define.Command[Unit] = T.command { - val PublishModule.PublishData(artifactInfo, artifacts) = publishArtifacts() - new SonatypePublisher( - sonatypeUri, - sonatypeSnapshotUri, - sonatypeCreds, - Option(gpgPassphrase), - signed, - T.ctx().log - ).publish(artifacts.map{case (a, b) => (a.path, b)}, artifactInfo, release) - } -} - -object PublishModule extends ExternalModule { - case class PublishData(meta: Artifact, payload: Seq[(PathRef, String)]) - - object PublishData{ - implicit def jsonify: upickle.default.ReadWriter[PublishData] = upickle.default.macroRW - } - - def publishAll(sonatypeCreds: String, - gpgPassphrase: String = null, - publishArtifacts: mill.main.Tasks[PublishModule.PublishData], - release: Boolean = false, - sonatypeUri: String = "https://oss.sonatype.org/service/local", - sonatypeSnapshotUri: String = "https://oss.sonatype.org/content/repositories/snapshots", - signed: Boolean = true) = T.command { - - val x: Seq[(Seq[(os.Path, String)], Artifact)] = Task.sequence(publishArtifacts.value)().map{ - case PublishModule.PublishData(a, s) => (s.map{case (p, f) => (p.path, f)}, a) - } - new SonatypePublisher( - sonatypeUri, - sonatypeSnapshotUri, - sonatypeCreds, - Option(gpgPassphrase), - signed, - T.ctx().log - ).publishAll( - release, - x:_* - ) - } - - implicit def millScoptTargetReads[T] = new mill.main.Tasks.Scopt[T]() - - lazy val millDiscover: mill.define.Discover[this.type] = mill.define.Discover[this.type] -} diff --git a/scalalib/src/mill/scalalib/ScalaModule.scala b/scalalib/src/mill/scalalib/ScalaModule.scala deleted file mode 100644 index 9d669bf4..00000000 --- a/scalalib/src/mill/scalalib/ScalaModule.scala +++ /dev/null @@ -1,275 +0,0 @@ -package mill -package scalalib - -import 
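`PublishModule` above is mixed into a module together with `publishVersion` and `pomSettings`. A minimal sketch, assuming the usual `PomSettings` fields and illustrative organization, URLs and versions:

    import mill._, mill.scalalib._, mill.scalalib.publish._

    object mylib extends ScalaModule with PublishModule {
      def scalaVersion = "2.12.8"
      def publishVersion = "0.1.0"
      def pomSettings = PomSettings(
        description = "An example library",
        organization = "com.example",
        url = "https://github.com/example/mylib",
        licenses = Seq(License.`Apache-2.0`),
        versionControl = VersionControl.github("example", "mylib"),
        developers = Seq(Developer("jdoe", "Jane Doe", "https://github.com/jdoe"))
      )
    }

`mill mylib.publishLocal` then publishes the jar, sources jar, docs jar, pom and ivy.xml to the local ivy repository, while `publish`/`publishAll` push the same artifacts to Sonatype given credentials.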
coursier.Repository -import mill.define.{Target, Task, TaskModule} -import mill.eval.{PathRef, Result} -import mill.modules.Jvm -import mill.modules.Jvm.createJar -import mill.scalalib.api.Util.isDotty -import Lib._ -import mill.util.Loose.Agg -import mill.api.DummyInputStream - -/** - * Core configuration required to compile a single Scala compilation target - */ -trait ScalaModule extends JavaModule { outer => - trait Tests extends TestModule with ScalaModule{ - override def scalaOrganization = outer.scalaOrganization() - def scalaVersion = outer.scalaVersion() - override def repositories = outer.repositories - override def scalacPluginIvyDeps = outer.scalacPluginIvyDeps - override def scalacOptions = outer.scalacOptions - override def javacOptions = outer.javacOptions - override def zincWorker = outer.zincWorker - override def moduleDeps: Seq[JavaModule] = Seq(outer) - } - - /** - * What Scala organization to use - * @return - */ - def scalaOrganization: T[String] = T { - if (isDotty(scalaVersion())) - "ch.epfl.lamp" - else - "org.scala-lang" - } - - /** - * What version of Scala to use - */ - def scalaVersion: T[String] - - override def mapDependencies = T.task{ d: coursier.Dependency => - val artifacts = - if (isDotty(scalaVersion())) - Set("dotty-library", "dotty-compiler") - else - Set("scala-library", "scala-compiler", "scala-reflect") - if (!artifacts(d.module.name)) d - else d.copy(module = d.module.copy(organization = scalaOrganization()), version = scalaVersion()) - } - - override def resolveCoursierDependency: Task[Dep => coursier.Dependency] = T.task{ - Lib.depToDependency(_: Dep, scalaVersion(), platformSuffix()) - } - - override def resolvePublishDependency: Task[Dep => publish.Dependency] = T.task{ - publish.Artifact.fromDep( - _: Dep, - scalaVersion(), - mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion()), - platformSuffix() - ) - } - - /** - * Allows you to make use of Scala compiler plugins from maven central - */ - def scalacPluginIvyDeps = T{ Agg.empty[Dep] } - - def scalaDocPluginIvyDeps = T{ scalacPluginIvyDeps() } - - /** - * Command-line options to pass to the Scala compiler - */ - def scalacOptions = T{ Seq.empty[String] } - - def scalaDocOptions = T{ scalacOptions() } - - private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r - - def scalaCompilerBridgeSources = T { - val (scalaVersion0, scalaBinaryVersion0) = scalaVersion() match { - case Milestone213(_, _) => ("2.13.0-M2", "2.13.0-M2") - case _ => (scalaVersion(), mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion())) - } - - val (bridgeDep, bridgeName, bridgeVersion) = - if (isDotty(scalaVersion0)) { - val org = scalaOrganization() - val name = "dotty-sbt-bridge" - val version = scalaVersion() - (ivy"$org:$name:$version", name, version) - } else { - val org = "org.scala-sbt" - val name = "compiler-bridge" - val version = Versions.zinc - (ivy"$org::$name:$version", s"${name}_$scalaBinaryVersion0", version) - } - - resolveDependencies( - repositories, - Lib.depToDependency(_, scalaVersion0, platformSuffix()), - Seq(bridgeDep), - sources = true - ).map(deps => - mill.scalalib.api.Util.grepJar(deps.map(_.path), bridgeName, bridgeVersion, sources = true) - ) - } - - /** - * The local classpath of Scala compiler plugins on-disk; you can add - * additional jars here if you have some copiler plugin that isn't present - * on maven central - */ - def scalacPluginClasspath: T[Agg[PathRef]] = T { - resolveDeps(scalacPluginIvyDeps)() - } - - /** - * The ivy coordinates of Scala's own standard library - */ - def 
scalaDocPluginClasspath: T[Agg[PathRef]] = T { - resolveDeps(scalaDocPluginIvyDeps)() - } - - def scalaLibraryIvyDeps = T{ scalaRuntimeIvyDeps(scalaOrganization(), scalaVersion()) } - - /** - * Classpath of the Scala Compiler & any compiler plugins - */ - def scalaCompilerClasspath: T[Agg[PathRef]] = T{ - resolveDeps( - T.task{ - scalaCompilerIvyDeps(scalaOrganization(), scalaVersion()) ++ - scalaRuntimeIvyDeps(scalaOrganization(), scalaVersion()) - } - )() - } - override def compileClasspath = T{ - transitiveLocalClasspath() ++ - resources() ++ - unmanagedClasspath() ++ - resolveDeps(T.task{compileIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps()})() - } - - override def upstreamAssemblyClasspath = T{ - transitiveLocalClasspath() ++ - unmanagedClasspath() ++ - resolveDeps(T.task{runIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps()})() - } - - override def compile: T[mill.scalalib.api.CompilationResult] = T.persistent{ - zincWorker.worker().compileMixed( - upstreamCompileOutput(), - allSourceFiles().map(_.path), - compileClasspath().map(_.path), - javacOptions(), - scalaVersion(), - scalacOptions(), - scalaCompilerBridgeSources(), - scalaCompilerClasspath().map(_.path), - scalacPluginClasspath().map(_.path), - ) - } - - override def docJar = T { - val outDir = T.ctx().dest - - val javadocDir = outDir / 'javadoc - os.makeDir.all(javadocDir) - - val files = allSourceFiles().map(_.path.toString) - - val pluginOptions = scalaDocPluginClasspath().map(pluginPathRef => s"-Xplugin:${pluginPathRef.path}") - val compileCp = compileClasspath().filter(_.path.ext != "pom").map(_.path) - val options = Seq( - "-d", javadocDir.toNIO.toString, - "-classpath", compileCp.mkString(":") - ) ++ - pluginOptions ++ - scalaDocOptions() - - if (files.isEmpty) Result.Success(createJar(Agg(javadocDir))(outDir)) - else { - zincWorker.worker().docJar( - scalaVersion(), - scalaCompilerBridgeSources(), - scalaCompilerClasspath().map(_.path), - scalacPluginClasspath().map(_.path), - files ++ options - ) match{ - case true => Result.Success(createJar(Agg(javadocDir))(outDir)) - case false => Result.Failure("docJar generation failed") - } - } - } - - /** - * Opens up a Scala console with your module and all dependencies present, - * for you to test and operate your code interactively - */ - def console() = T.command{ - if (T.ctx().log.inStream == DummyInputStream){ - Result.Failure("repl needs to be run with the -i/--interactive flag") - }else{ - Jvm.runSubprocess( - mainClass = - if (isDotty(scalaVersion())) - "dotty.tools.repl.Main" - else - "scala.tools.nsc.MainGenericRunner", - classPath = runClasspath().map(_.path) ++ scalaCompilerClasspath().map(_.path), - mainArgs = Seq("-usejavacp"), - workingDir = os.pwd - ) - Result.Success() - } - } - - /** - * Dependencies that are necessary to run the Ammonite Scala REPL - */ - def ammoniteReplClasspath = T{ - localClasspath() ++ - transitiveLocalClasspath() ++ - unmanagedClasspath() ++ - resolveDeps(T.task{ - runIvyDeps() ++ scalaLibraryIvyDeps() ++ transitiveIvyDeps() ++ - Agg(ivy"com.lihaoyi:::ammonite:${Versions.ammonite}") - })() - } - - /** - * Opens up an Ammonite Scala REPL with your module and all dependencies present, - * for you to test and operate your code interactively - */ - def repl(replOptions: String*) = T.command{ - if (T.ctx().log.inStream == DummyInputStream){ - Result.Failure("repl needs to be run with the -i/--interactive flag") - }else{ - Jvm.runSubprocess( - mainClass = "ammonite.Main", - classPath = ammoniteReplClasspath().map(_.path), 
- mainArgs = replOptions, - workingDir = os.pwd - ) - Result.Success() - } - - } - - /** - * Whether to publish artifacts with name "mill_2.12.4" instead of "mill_2.12" - */ - def crossFullScalaVersion: T[Boolean] = false - - /** - * What Scala version string to use when publishing - */ - def artifactScalaVersion: T[String] = T { - if (crossFullScalaVersion()) scalaVersion() - else mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion()) - } - - /** - * The suffix appended to the artifact IDs during publishing - */ - def artifactSuffix: T[String] = s"_${artifactScalaVersion()}" - - override def artifactId: T[String] = artifactName() + artifactSuffix() - -} diff --git a/scalalib/src/mill/scalalib/TestRunner.scala b/scalalib/src/mill/scalalib/TestRunner.scala deleted file mode 100644 index 42e65d63..00000000 --- a/scalalib/src/mill/scalalib/TestRunner.scala +++ /dev/null @@ -1,153 +0,0 @@ -package mill.scalalib -import ammonite.util.Colors -import mill.Agg -import mill.modules.Jvm -import mill.scalalib.Lib.discoverTests -import mill.util.{Ctx, PrintLogger} -import mill.util.JsonFormatters._ -import sbt.testing._ - -import scala.collection.mutable -object TestRunner { - - - def main(args: Array[String]): Unit = { - try{ - var i = 0 - def readArray() = { - val count = args(i).toInt - val slice = args.slice(i + 1, i + count + 1) - i = i + count + 1 - slice - } - val frameworks = readArray() - val classpath = readArray() - val arguments = readArray() - val outputPath = args(i + 0) - val colored = args(i + 1) - val testCp = args(i + 2) - val homeStr = args(i + 3) - val ctx = new Ctx.Log with Ctx.Home { - val log = PrintLogger( - colored == "true", - true, - if(colored == "true") Colors.Default - else Colors.BlackWhite, - System.out, - System.err, - System.err, - System.in, - debugEnabled = false - ) - val home = os.Path(homeStr) - } - val result = runTests( - frameworkInstances = TestRunner.frameworks(frameworks), - entireClasspath = Agg.from(classpath.map(os.Path(_))), - testClassfilePath = Agg(os.Path(testCp)), - args = arguments - )(ctx) - - // Clear interrupted state in case some badly-behaved test suite - // dirtied the thread-interrupted flag and forgot to clean up. Otherwise - // that flag causes writing the results to disk to fail - Thread.interrupted() - ammonite.ops.write(os.Path(outputPath), upickle.default.write(result)) - }catch{case e: Throwable => - println(e) - e.printStackTrace() - } - // Tests are over, kill the JVM whether or not anyone's threads are still running - // Always return 0, even if tests fail. 
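As a worked example of the publishing names above: with `artifactName` "mylib", `scalaVersion` "2.12.8" and the default `crossFullScalaVersion = false`, `artifactScalaVersion` is "2.12", `artifactSuffix` is "_2.12" and `artifactId` becomes "mylib_2.12"; setting `crossFullScalaVersion = true` yields "mylib_2.12.8" instead. Note also that `console` and `repl` above refuse to run without Mill's `-i`/`--interactive` flag, e.g. `mill -i mylib.console`.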
The caller can pick up the detailed test - // results from the outputPath - System.exit(0) - } - - def runTests(frameworkInstances: ClassLoader => Seq[sbt.testing.Framework], - entireClasspath: Agg[os.Path], - testClassfilePath: Agg[os.Path], - args: Seq[String]) - (implicit ctx: Ctx.Log with Ctx.Home): (String, Seq[mill.scalalib.TestRunner.Result]) = { - //Leave the context class loader set and open so that shutdown hooks can access it - Jvm.inprocess(entireClasspath, classLoaderOverrideSbtTesting = true, isolated = true, closeContextClassLoaderWhenDone = false, cl => { - val frameworks = frameworkInstances(cl) - - val events = mutable.Buffer.empty[Event] - - val doneMessages = frameworks.map{ framework => - val runner = framework.runner(args.toArray, Array[String](), cl) - - val testClasses = discoverTests(cl, framework, testClassfilePath) - - val tasks = runner.tasks( - for ((cls, fingerprint) <- testClasses.toArray) - yield new TaskDef(cls.getName.stripSuffix("$"), fingerprint, true, Array(new SuiteSelector)) - ) - - val taskQueue = tasks.to[mutable.Queue] - while (taskQueue.nonEmpty){ - val next = taskQueue.dequeue().execute( - new EventHandler { - def handle(event: Event) = events.append(event) - }, - Array( - new Logger { - def debug(msg: String) = ctx.log.outputStream.println(msg) - - def error(msg: String) = ctx.log.outputStream.println(msg) - - def ansiCodesSupported() = true - - def warn(msg: String) = ctx.log.outputStream.println(msg) - - def trace(t: Throwable) = t.printStackTrace(ctx.log.outputStream) - - def info(msg: String) = ctx.log.outputStream.println(msg) - }) - ) - taskQueue.enqueue(next:_*) - } - runner.done() - } - - val results = for(e <- events) yield { - val ex = if (e.throwable().isDefined) Some(e.throwable().get) else None - mill.scalalib.TestRunner.Result( - e.fullyQualifiedName(), - e.selector() match{ - case s: NestedSuiteSelector => s.suiteId() - case s: NestedTestSelector => s.suiteId() + "." 
+ s.testName() - case s: SuiteSelector => s.toString - case s: TestSelector => s.testName() - case s: TestWildcardSelector => s.testWildcard() - }, - e.duration(), - e.status().toString, - ex.map(_.getClass.getName), - ex.map(_.getMessage), - ex.map(_.getStackTrace) - ) - } - - (doneMessages.mkString("\n"), results) - }) - } - - def frameworks(frameworkNames: Seq[String])(cl: ClassLoader): Seq[sbt.testing.Framework] = { - frameworkNames.map { name => - cl.loadClass(name).newInstance().asInstanceOf[sbt.testing.Framework] - } - } - - case class Result(fullyQualifiedName: String, - selector: String, - duration: Long, - status: String, - exceptionName: Option[String] = None, - exceptionMsg: Option[String] = None, - exceptionTrace: Option[Seq[StackTraceElement]] = None) - - object Result{ - implicit def resultRW: upickle.default.ReadWriter[Result] = upickle.default.macroRW[Result] - } -} diff --git a/scalalib/src/mill/scalalib/Versions.scala b/scalalib/src/mill/scalalib/Versions.scala deleted file mode 100644 index e7eaf847..00000000 --- a/scalalib/src/mill/scalalib/Versions.scala +++ /dev/null @@ -1,8 +0,0 @@ -package mill.scalalib - -object Versions { - // Keep synchronized with ammonite dependency in core in build.sc - val ammonite = "1.5.0" - // Keep synchronized with zinc dependency in scalalib.worker in build.sc - val zinc = "1.2.1" -} diff --git a/scalalib/src/mill/scalalib/ZincWorkerModule.scala b/scalalib/src/mill/scalalib/ZincWorkerModule.scala deleted file mode 100644 index 5ca824ce..00000000 --- a/scalalib/src/mill/scalalib/ZincWorkerModule.scala +++ /dev/null @@ -1,56 +0,0 @@ -package mill.scalalib - -import coursier.Cache -import coursier.maven.MavenRepository -import mill.Agg -import mill.T -import mill.define.{Discover, Worker} -import mill.scalalib.Lib.resolveDependencies -import mill.util.Loose -import mill.util.JsonFormatters._ - -object ZincWorkerModule extends mill.define.ExternalModule with ZincWorkerModule{ - lazy val millDiscover = Discover[this.type] -} -trait ZincWorkerModule extends mill.Module{ - def repositories = Seq( - Cache.ivy2Local, - MavenRepository("https://repo1.maven.org/maven2"), - MavenRepository("https://oss.sonatype.org/content/repositories/releases") - ) - - def classpath = T{ - mill.modules.Util.millProjectModule("MILL_SCALA_WORKER", "mill-scalalib-worker", repositories) - } - - def scalalibClasspath = T{ - mill.modules.Util.millProjectModule("MILL_SCALA_LIB", "mill-scalalib", repositories) - } - - def backgroundWrapperClasspath = T{ - mill.modules.Util.millProjectModule( - "MILL_BACKGROUNDWRAPPER", "mill-scalalib-backgroundwrapper", - repositories, artifactSuffix = "" - ) - } - - def worker: Worker[mill.scalalib.api.ZincWorkerApi] = T.worker{ - val cl = mill.api.ClassLoader.create( - classpath().map(_.path.toNIO.toUri.toURL).toVector, - getClass.getClassLoader - ) - val cls = cl.loadClass("mill.scalalib.worker.ZincWorkerImpl") - val instance = cls.getConstructor(classOf[mill.api.Ctx], classOf[Array[String]]) - .newInstance(T.ctx(), compilerInterfaceClasspath().map(_.path.toString).toArray[String]) - instance.asInstanceOf[mill.scalalib.api.ZincWorkerApi] - } - - def compilerInterfaceClasspath = T{ - resolveDependencies( - repositories, - Lib.depToDependency(_, "2.12.4", ""), - Seq(ivy"org.scala-sbt:compiler-interface:${Versions.zinc}") - ) - } - -} diff --git a/scalalib/src/mill/scalalib/dependency/DependencyUpdatesImpl.scala b/scalalib/src/mill/scalalib/dependency/DependencyUpdatesImpl.scala deleted file mode 100644 index 3bb94202..00000000 --- 
a/scalalib/src/mill/scalalib/dependency/DependencyUpdatesImpl.scala +++ /dev/null @@ -1,52 +0,0 @@ -package mill.scalalib.dependency - -import mill.define._ -import mill.scalalib.dependency.updates.{ - DependencyUpdates, - ModuleDependenciesUpdates, - UpdatesFinder -} -import mill.scalalib.dependency.versions.VersionsFinder -import mill.api.Ctx.{Home, Log} - -object DependencyUpdatesImpl { - - def apply(ctx: Log with Home, - rootModule: BaseModule, - discover: Discover[_], - allowPreRelease: Boolean): Unit = { - - // 1. Find all available versions for each dependency - val allDependencyVersions = VersionsFinder.findVersions(ctx, rootModule) - - // 2. Extract updated versions from all available versions - val allUpdates = allDependencyVersions.map { dependencyVersions => - UpdatesFinder.findUpdates(dependencyVersions, allowPreRelease) - } - - // 3. Print the results - showAllUpdates(allUpdates) - } - - private def showAllUpdates(updates: Seq[ModuleDependenciesUpdates]): Unit = - updates.foreach { dependencyUpdates => - val module = dependencyUpdates.module.toString - val actualUpdates = - dependencyUpdates.dependencies.filter(_.updates.nonEmpty) - if (actualUpdates.isEmpty) { - println(s"No dependency updates found for $module") - } else { - println(s"Found ${actualUpdates.length} dependency update for $module") - showUpdates(actualUpdates) - } - } - - private def showUpdates(updates: Seq[DependencyUpdates]): Unit = - updates.foreach { dependencyUpdate => - val module = s"${dependencyUpdate.dependency.module}" - val allVersions = - (dependencyUpdate.currentVersion +: dependencyUpdate.updates.toList) - .mkString(" -> ") - println(s" $module : $allVersions") - } -} diff --git a/scalalib/src/mill/scalalib/dependency/metadata/MavenMetadataLoader.scala b/scalalib/src/mill/scalalib/dependency/metadata/MavenMetadataLoader.scala deleted file mode 100644 index 491911bf..00000000 --- a/scalalib/src/mill/scalalib/dependency/metadata/MavenMetadataLoader.scala +++ /dev/null @@ -1,21 +0,0 @@ -package mill.scalalib.dependency.metadata - -import coursier.Cache -import coursier.maven.MavenRepository -import coursier.util.Task -import mill.scalalib.dependency.versions.Version - -private[dependency] final case class MavenMetadataLoader(mavenRepo: MavenRepository) - extends MetadataLoader { - - private val fetch = Cache.fetch[Task]() - - override def getVersions(module: coursier.Module): List[Version] = { - import scala.concurrent.ExecutionContext.Implicits.global - // TODO fallback to 'versionsFromListing' if 'versions' doesn't work? 
(needs to be made public in coursier first) - val allVersions = mavenRepo.versions(module, fetch).run.unsafeRun - allVersions - .map(_.available.map(Version(_))) - .getOrElse(List.empty) - } -} diff --git a/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoader.scala b/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoader.scala deleted file mode 100644 index 20271f0e..00000000 --- a/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoader.scala +++ /dev/null @@ -1,7 +0,0 @@ -package mill.scalalib.dependency.metadata - -import mill.scalalib.dependency.versions.Version - -private[dependency] trait MetadataLoader { - def getVersions(module: coursier.Module): Seq[Version] -} diff --git a/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoaderFactory.scala b/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoaderFactory.scala deleted file mode 100644 index 4495d6b0..00000000 --- a/scalalib/src/mill/scalalib/dependency/metadata/MetadataLoaderFactory.scala +++ /dev/null @@ -1,11 +0,0 @@ -package mill.scalalib.dependency.metadata - -import coursier.Repository -import coursier.maven.MavenRepository - -private[dependency] object MetadataLoaderFactory { - def apply(repo: Repository): Option[MetadataLoader] = repo match { - case mavenRepo: MavenRepository => Some(MavenMetadataLoader(mavenRepo)) - case _ => None - } -} diff --git a/scalalib/src/mill/scalalib/dependency/updates/ModuleDependenciesUpdates.scala b/scalalib/src/mill/scalalib/dependency/updates/ModuleDependenciesUpdates.scala deleted file mode 100644 index a989cd31..00000000 --- a/scalalib/src/mill/scalalib/dependency/updates/ModuleDependenciesUpdates.scala +++ /dev/null @@ -1,15 +0,0 @@ -package mill.scalalib.dependency.updates - -import mill.scalalib.JavaModule -import mill.scalalib.dependency.versions.Version - -import scala.collection.SortedSet - -private[dependency] final case class ModuleDependenciesUpdates( - module: JavaModule, - dependencies: Seq[DependencyUpdates]) - -private[dependency] final case class DependencyUpdates( - dependency: coursier.Dependency, - currentVersion: Version, - updates: SortedSet[Version]) diff --git a/scalalib/src/mill/scalalib/dependency/updates/UpdatesFinder.scala b/scalalib/src/mill/scalalib/dependency/updates/UpdatesFinder.scala deleted file mode 100644 index 3430592f..00000000 --- a/scalalib/src/mill/scalalib/dependency/updates/UpdatesFinder.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* - * This file contains code originally published under the following license: - * - * Copyright (c) 2012, Roman Timushev - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -package mill.scalalib.dependency.updates - -import mill.scalalib.dependency.versions._ - -import scala.collection.SortedSet - -private[dependency] object UpdatesFinder { - - import scala.Ordered._ - - def findUpdates(dependencyVersions: ModuleDependenciesVersions, - allowPreRelease: Boolean): ModuleDependenciesUpdates = { - val dependencies = - dependencyVersions.dependencies.map { dependencyVersion => - findUpdates(dependencyVersion, allowPreRelease) - } - ModuleDependenciesUpdates(dependencyVersions.module, dependencies) - } - - def findUpdates(dependencyVersion: DependencyVersions, - allowPreRelease: Boolean): DependencyUpdates = { - val current = dependencyVersion.currentVersion - val versions = dependencyVersion.allversions.to[SortedSet] - - val updates = versions - .filter(isUpdate(current)) - .filterNot(lessStable(current, allowPreRelease)) - - DependencyUpdates(dependencyVersion.dependency, - dependencyVersion.currentVersion, - updates) - } - - private def lessStable(current: Version, allowPreRelease: Boolean)( - another: Version): Boolean = (current, another) match { - case (ReleaseVersion(_), ReleaseVersion(_)) => false - case (SnapshotVersion(_, _, _), _) => false - case (_, SnapshotVersion(_, _, _)) => true - case (ReleaseVersion(_), PreReleaseVersion(_, _)) => !allowPreRelease - case (ReleaseVersion(_), PreReleaseBuildVersion(_, _, _)) => - !allowPreRelease - case (ReleaseVersion(_), _) => true - case (_, _) => false - } - - private def isUpdate(current: Version) = current < _ -} diff --git a/scalalib/src/mill/scalalib/dependency/versions/ModuleDependenciesVersions.scala b/scalalib/src/mill/scalalib/dependency/versions/ModuleDependenciesVersions.scala deleted file mode 100644 index 12d57059..00000000 --- a/scalalib/src/mill/scalalib/dependency/versions/ModuleDependenciesVersions.scala +++ /dev/null @@ -1,12 +0,0 @@ -package mill.scalalib.dependency.versions - -import mill.scalalib.JavaModule - -private[dependency] final case class ModuleDependenciesVersions( - module: JavaModule, - dependencies: Seq[DependencyVersions]) - -private[dependency] final case class DependencyVersions( - dependency: coursier.Dependency, - currentVersion: Version, - allversions: Set[Version]) diff --git a/scalalib/src/mill/scalalib/dependency/versions/Version.scala b/scalalib/src/mill/scalalib/dependency/versions/Version.scala deleted file mode 100644 index a2719023..00000000 --- a/scalalib/src/mill/scalalib/dependency/versions/Version.scala +++ /dev/null @@ -1,227 +0,0 @@ -/* - * This file contains code originally published under the following license: - * - * Copyright (c) 2012, Roman Timushev - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. 
- * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -package mill.scalalib.dependency.versions - -import scala.util.matching.Regex -import scala.util.matching.Regex.Groups - -private[dependency] sealed trait Version { - def major: Long - - def minor: Long - - def patch: Long -} - -private[dependency] case class ValidVersion(text: String, - releasePart: List[Long], - preReleasePart: List[String], - buildPart: List[String]) - extends Version { - def major: Long = releasePart.headOption getOrElse 0 - - def minor: Long = releasePart.drop(1).headOption getOrElse 1 - - def patch: Long = releasePart.drop(2).headOption getOrElse 1 - - override def toString: String = text -} - -private[dependency] case class InvalidVersion(text: String) extends Version { - def major: Long = -1 - - def minor: Long = -1 - - def patch: Long = -1 -} - -private[dependency] object ReleaseVersion { - private val releaseKeyword: Regex = "(?i)final|release".r - - def unapply(v: Version): Option[List[Long]] = v match { - case ValidVersion(_, releasePart, Nil, Nil) => Some(releasePart) - case ValidVersion(_, releasePart, releaseKeyword() :: Nil, Nil) => - Some(releasePart) - case _ => None - } -} - -private[dependency] object PreReleaseVersion { - def unapply(v: Version): Option[(List[Long], List[String])] = v match { - case ValidVersion(_, releasePart, preReleasePart, Nil) - if preReleasePart.nonEmpty => - Some(releasePart, preReleasePart) - case _ => None - } -} - -private[dependency] object PreReleaseBuildVersion { - def unapply(v: Version): Option[(List[Long], List[String], List[String])] = - v match { - case ValidVersion(_, releasePart, preReleasePart, buildPart) - if preReleasePart.nonEmpty && buildPart.nonEmpty => - Some(releasePart, preReleasePart, buildPart) - case _ => None - } -} - -private[dependency] object SnapshotVersion { - def unapply(v: Version): Option[(List[Long], List[String], List[String])] = - v match { - case ValidVersion(_, releasePart, preReleasePart, buildPart) - if preReleasePart.lastOption.contains("SNAPSHOT") => - Some(releasePart, preReleasePart, buildPart) - case _ => None - } -} - -private[dependency] object BuildVersion { - def unapply(v: Version): Option[(List[Long], List[String])] = v match { - case ValidVersion(_, releasePart, Nil, buildPart) if buildPart.nonEmpty => - Some(releasePart, buildPart) - case _ => None - } -} - -private[dependency] object Version { - def apply(text: String): Version = synchronized { - 
VersionParser - .parse(text) - .fold( - (_, _, _) => InvalidVersion(text), - { case ((a, b, c), _) => ValidVersion(text, a.toList, b.toList, c.toList)} - ) - } - - implicit def versionOrdering: Ordering[Version] = VersionOrdering -} - -private[dependency] object VersionOrdering extends Ordering[Version] { - - private val subParts = "(\\d+)?(\\D+)?".r - - private def parsePart(s: String): Seq[Either[Int, String]] = - try { - subParts - .findAllIn(s) - .matchData - .flatMap { - case Groups(num, str) => - Seq(Option(num).map(_.toInt).map(Left.apply), - Option(str).map(Right.apply)) - } - .flatten - .toList - } catch { - case _: NumberFormatException => List(Right(s)) - } - - private def toOpt(x: Int): Option[Int] = if (x == 0) None else Some(x) - - private def comparePart(a: String, b: String) = { - if (a == b) None - else - (parsePart(a) zip parsePart(b)) map { - case (Left(x), Left(y)) => x compareTo y - case (Left(_), Right(_)) => -1 - case (Right(_), Left(_)) => 1 - case (Right(x), Right(y)) => x compareTo y - } find (0 != _) orElse Some(a compareTo b) - } - - private def compareNumericParts(a: List[Long], b: List[Long]): Option[Int] = - (a, b) match { - case (ah :: at, bh :: bt) => - toOpt(ah compareTo bh) orElse compareNumericParts(at, bt) - case (ah :: at, Nil) => - toOpt(ah compareTo 0L) orElse compareNumericParts(at, Nil) - case (Nil, bh :: bt) => - toOpt(0L compareTo bh) orElse compareNumericParts(Nil, bt) - case (Nil, Nil) => - None - } - - private def compareParts(a: List[String], b: List[String]): Option[Int] = - (a, b) match { - case (ah :: at, bh :: bt) => - comparePart(ah, bh) orElse compareParts(at, bt) - case (_ :: _, Nil) => - Some(1) - case (Nil, _ :: _) => - Some(-1) - case (Nil, Nil) => - None - } - - def compare(x: Version, y: Version): Int = (x, y) match { - case (InvalidVersion(a), InvalidVersion(b)) => - a compareTo b - case (InvalidVersion(_), _) => - -1 - case (_, InvalidVersion(_)) => - 1 - case (ReleaseVersion(r1), ReleaseVersion(r2)) => - compareNumericParts(r1, r2) getOrElse 0 - case (ReleaseVersion(r1), PreReleaseVersion(r2, p2)) => - compareNumericParts(r1, r2) getOrElse 1 - case (ReleaseVersion(r1), PreReleaseBuildVersion(r2, p2, b2)) => - compareNumericParts(r1, r2) getOrElse 1 - case (ReleaseVersion(r1), BuildVersion(r2, b2)) => - compareNumericParts(r1, r2) getOrElse -1 - case (PreReleaseVersion(r1, p1), ReleaseVersion(r2)) => - compareNumericParts(r1, r2) getOrElse -1 - case (PreReleaseVersion(r1, p1), PreReleaseVersion(r2, p2)) => - compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse 0 - case (PreReleaseVersion(r1, p1), PreReleaseBuildVersion(r2, p2, b2)) => - compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse -1 - case (PreReleaseVersion(r1, p1), BuildVersion(r2, b2)) => - compareNumericParts(r1, r2) getOrElse -1 - case (PreReleaseBuildVersion(r1, p1, b1), ReleaseVersion(r2)) => - compareNumericParts(r1, r2) getOrElse -1 - case (PreReleaseBuildVersion(r1, p1, b1), PreReleaseVersion(r2, p2)) => - compareNumericParts(r1, r2) orElse compareParts(p1, p2) getOrElse 1 - case (PreReleaseBuildVersion(r1, p1, b1), - PreReleaseBuildVersion(r2, p2, b2)) => - compareNumericParts(r1, r2) orElse - compareParts(p1, p2) orElse - compareParts(b1, b2) getOrElse - 0 - case (PreReleaseBuildVersion(r1, p1, b1), BuildVersion(r2, b2)) => - compareNumericParts(r1, r2) getOrElse -1 - case (BuildVersion(r1, b1), ReleaseVersion(r2)) => - compareNumericParts(r1, r2) getOrElse 1 - case (BuildVersion(r1, b1), PreReleaseVersion(r2, p2)) => - 
compareNumericParts(r1, r2) getOrElse 1 - case (BuildVersion(r1, b1), PreReleaseBuildVersion(r2, p2, b2)) => - compareNumericParts(r1, r2) getOrElse 1 - case (BuildVersion(r1, b1), BuildVersion(r2, b2)) => - compareNumericParts(r1, r2) orElse compareParts(b1, b2) getOrElse 0 - } - -} diff --git a/scalalib/src/mill/scalalib/dependency/versions/VersionParser.scala b/scalalib/src/mill/scalalib/dependency/versions/VersionParser.scala deleted file mode 100644 index 10aebd73..00000000 --- a/scalalib/src/mill/scalalib/dependency/versions/VersionParser.scala +++ /dev/null @@ -1,30 +0,0 @@ -package mill.scalalib.dependency.versions - -import fastparse._, NoWhitespace._ - -private[dependency] object VersionParser { - - private def numberParser[_: P] = - P(CharIn("0-9").rep(1).!.map(_.toLong)) - private def numericPartParser[_: P] = - P(numberParser ~ &(CharIn(".\\-+") | End)).rep(min = 1, sep = ".") - - private def tokenParser[_: P] = - CharPred(c => c != '.' && c != '-' && c != '+').rep(1).! - private def tokenPartParser[_: P] = - tokenParser.rep(sep = CharIn(".\\-")) - - private def firstPartParser[_: P] = - P(CharIn(".\\-") ~ tokenPartParser).? - - private def secondPartParser[_: P] = - P("+" ~ tokenPartParser).? - - private def versionParser[_: P] = - P(numericPartParser ~ firstPartParser ~ secondPartParser).map { - case (a, b, c) => (a, b.getOrElse(Seq.empty), c.getOrElse(Seq.empty)) - } - - def parse(text: String): Parsed[(Seq[Long], Seq[String], Seq[String])] = - fastparse.parse(text, versionParser(_)) -} diff --git a/scalalib/src/mill/scalalib/dependency/versions/VersionsFinder.scala b/scalalib/src/mill/scalalib/dependency/versions/VersionsFinder.scala deleted file mode 100644 index a831ffc3..00000000 --- a/scalalib/src/mill/scalalib/dependency/versions/VersionsFinder.scala +++ /dev/null @@ -1,73 +0,0 @@ -package mill.scalalib.dependency.versions - -import mill.define.{BaseModule, Task} -import mill.eval.Evaluator -import mill.scalalib.dependency.metadata.MetadataLoaderFactory -import mill.scalalib.{Dep, JavaModule, Lib} -import mill.api.Ctx.{Home, Log} -import mill.util.{Loose, Strict} - -private[dependency] object VersionsFinder { - - def findVersions(ctx: Log with Home, - rootModule: BaseModule): Seq[ModuleDependenciesVersions] = { - val evaluator = - new Evaluator(ctx.home, os.pwd / 'out, os.pwd / 'out, rootModule, ctx.log) - - val javaModules = rootModule.millInternal.modules.collect { - case javaModule: JavaModule => javaModule - } - - val resolvedDependencies = resolveDependencies(evaluator, javaModules) - resolveVersions(resolvedDependencies) - } - - private def resolveDependencies(evaluator: Evaluator, - javaModules: Seq[JavaModule]) = - javaModules.map { javaModule => - val depToDependency = - eval(evaluator, javaModule.resolveCoursierDependency) - val deps = evalOrElse(evaluator, javaModule.ivyDeps, Loose.Agg.empty[Dep]) - - val (dependencies, _) = - Lib.resolveDependenciesMetadata(javaModule.repositories, - depToDependency, - deps) - - (javaModule, dependencies) - } - - private def resolveVersions(resolvedDependencies: Seq[ResolvedDependencies]) = - resolvedDependencies.map { - case (javaModule, dependencies) => - val metadataLoaders = - javaModule.repositories.flatMap(MetadataLoaderFactory(_)) - - val versions = dependencies.map { dependency => - val currentVersion = Version(dependency.version) - val allVersions = - metadataLoaders - .flatMap(_.getVersions(dependency.module)) - .toSet - DependencyVersions(dependency, currentVersion, allVersions) - } - - 
ModuleDependenciesVersions(javaModule, versions) - } - - private def eval[T](evaluator: Evaluator, e: Task[T]): T = - evaluator.evaluate(Strict.Agg(e)).values match { - case Seq() => throw new NoSuchElementException - case Seq(e: T) => e - } - - private def evalOrElse[T](evaluator: Evaluator, - e: Task[T], - default: => T): T = - evaluator.evaluate(Strict.Agg(e)).values match { - case Seq() => default - case Seq(e: T) => e - } - - private type ResolvedDependencies = (JavaModule, Seq[coursier.Dependency]) -} diff --git a/scalalib/src/mill/scalalib/package.scala b/scalalib/src/mill/scalalib/package.scala deleted file mode 100644 index 5a282e82..00000000 --- a/scalalib/src/mill/scalalib/package.scala +++ /dev/null @@ -1,12 +0,0 @@ -package mill - -package object scalalib { - implicit class DepSyntax(ctx: StringContext){ - def ivy(args: Any*) = Dep.parse{ - ( - ctx.parts.take(args.length).zip(args).flatMap{case (p, a) => Seq(p, a)} ++ - ctx.parts.drop(args.length) - ).mkString - } - } -} diff --git a/scalalib/src/mill/scalalib/publish/Ivy.scala b/scalalib/src/mill/scalalib/publish/Ivy.scala deleted file mode 100644 index 22e26ff6..00000000 --- a/scalalib/src/mill/scalalib/publish/Ivy.scala +++ /dev/null @@ -1,59 +0,0 @@ -package mill.scalalib.publish - -import mill.util.Loose.Agg - -import scala.xml.PrettyPrinter - -object Ivy { - - val head = "\n" - - def apply( - artifact: Artifact, - dependencies: Agg[Dependency] - ): String = { - val xml = - - - - - - - - - - - - - - - - - - - - {dependencies.map(renderDependency).toSeq} - - - val pp = new PrettyPrinter(120, 4) - head + pp.format(xml).replaceAll(">", ">") - } - - private def renderDependency(dep: Dependency) = { - if (dep.exclusions.isEmpty) - ${dep.configuration.getOrElse("default(compile)")}"} /> - else - ${dep.configuration.getOrElse("default(compile)")}"}> - {dep.exclusions.map(ex => )} - - } - - private def scopeToConf(s: Scope): String = s match { - case Scope.Compile => "compile" - case Scope.Provided => "provided" - case Scope.Test => "test" - case Scope.Runtime => "runtime" - } - -} diff --git a/scalalib/src/mill/scalalib/publish/JsonFormatters.scala b/scalalib/src/mill/scalalib/publish/JsonFormatters.scala deleted file mode 100644 index 8fc90632..00000000 --- a/scalalib/src/mill/scalalib/publish/JsonFormatters.scala +++ /dev/null @@ -1,11 +0,0 @@ -package mill.scalalib.publish - -import upickle.default.{ReadWriter => RW} - -trait JsonFormatters { - implicit lazy val artifactFormat: RW[Artifact] = upickle.default.macroRW - implicit lazy val developerFormat: RW[Developer] = upickle.default.macroRW - implicit lazy val licenseFormat: RW[License] = upickle.default.macroRW - implicit lazy val versionControlFormat: RW[VersionControl] = upickle.default.macroRW - implicit lazy val pomSettingsFormat: RW[PomSettings] = upickle.default.macroRW -} diff --git a/scalalib/src/mill/scalalib/publish/Licence.scala b/scalalib/src/mill/scalalib/publish/Licence.scala deleted file mode 100644 index 8838ef69..00000000 --- a/scalalib/src/mill/scalalib/publish/Licence.scala +++ /dev/null @@ -1,479 +0,0 @@ -package mill.scalalib.publish - -case class License( - id: String, - name: String, - url: String, - isOsiApproved: Boolean, - isFsfLibre: Boolean, - distribution: String -) - -object License { - @deprecated("use License.LicenseName (ex: License.`Apache-2.0`)", "0.1.0") - def apply(name: String, url: String): License = - License(name, name, url, false, false, "repo") - - /* - wget 
https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json - - ``` - val circeVersion = "0.9.1" - libraryDependencies ++= Seq( - "io.circe" %% "circe-core", - "io.circe" %% "circe-generic", - "io.circe" %% "circe-parser" - ).map(_ % circeVersion) - - import io.circe._, io.circe.generic.auto._, io.circe.parser._, io.circe.syntax._ - import java.nio.file._ - import System.{lineSeparator => nl} - case class License( - reference: String, - isDeprecatedLicenseId: Boolean, - isFsfLibre: Option[Boolean], - detailsUrl: String, - referenceNumber: String, - name: String, - licenseId: String, - seeAlso: Option[List[String]], - isOsiApproved: Boolean - ) { - def ident: String = { - val startsWithDigit = (0 to 9).map(_.toString).exists(licenseId.startsWith) - if (licenseId.contains("-") || !startsWithDigit) s"`$licenseId`" - else licenseId - } - - def syntax(identPadding: Int, namePadding: Int): String = { - val s1 = " " * (identPadding - ident.size) - val s2 = " " * (namePadding - name.size) - val ticks = if (ident == licenseId) 2 else 0 - val s3 = " " * (identPadding - ticks - ident.size) - val s4 = if(isOsiApproved) " " else "" - s"""val ${ident}${s1} = spdx(\"\"\"$name\"\"\",$s2 "$licenseId", $s3 $isOsiApproved, $s4 ${isFsfLibre.getOrElse(false)})""" - } - } - - - case class Data(licenses: List[License]) - - val json = new String(Files.readAllBytes(Paths.get("data.json"))) - - val licences = decode[Data](json).right.get.licenses - - val identPadding = licences.map(_.licenseId.size + 2).max - val namePadding = licences.map(_.name.size).max - - val output = licences.map(license => license.syntax(identPadding, namePadding)).mkString(nl) - Files.write(Paths.get("out.scala"), output.getBytes("utf-8")) - */ - val `0BSD` = spdx("BSD Zero Clause License", "0BSD", false, false) - val AAL = spdx("Attribution Assurance License", "AAL", true, false) - val Abstyles = spdx("Abstyles License", "Abstyles", false, false) - val `Adobe-2006` = spdx("Adobe Systems Incorporated Source Code License Agreement", "Adobe-2006", false, false) - val `Adobe-Glyph` = spdx("Adobe Glyph List License", "Adobe-Glyph", false, false) - val ADSL = spdx("Amazon Digital Services License", "ADSL", false, false) - val `AFL-1.1` = spdx("Academic Free License v1.1", "AFL-1.1", true, true) - val `AFL-1.2` = spdx("Academic Free License v1.2", "AFL-1.2", true, true) - val `AFL-2.0` = spdx("Academic Free License v2.0", "AFL-2.0", true, true) - val `AFL-2.1` = spdx("Academic Free License v2.1", "AFL-2.1", true, true) - val `AFL-3.0` = spdx("Academic Free License v3.0", "AFL-3.0", true, true) - val Afmparse = spdx("Afmparse License", "Afmparse", false, false) - val `AGPL-1.0` = spdx("Affero General Public License v1.0", "AGPL-1.0", false, true) - val `AGPL-3.0-only` = spdx("GNU Affero General Public License v3.0 only", "AGPL-3.0-only", true, false) - val `AGPL-3.0-or-later` = spdx("GNU Affero General Public License v3.0 or later", "AGPL-3.0-or-later", true, false) - val Aladdin = spdx("Aladdin Free Public License", "Aladdin", false, false) - val AMDPLPA = spdx("AMD's plpa_map.c License", "AMDPLPA", false, false) - val AML = spdx("Apple MIT License", "AML", false, false) - val AMPAS = spdx("Academy of Motion Picture Arts and Sciences BSD", "AMPAS", false, false) - val `ANTLR-PD` = spdx("ANTLR Software Rights Notice", "ANTLR-PD", false, false) - val `Apache-1.0` = spdx("Apache License 1.0", "Apache-1.0", false, true) - val `Apache-1.1` = spdx("Apache License 1.1", "Apache-1.1", true, true) - val `Apache-2.0` = spdx("Apache 
License 2.0", "Apache-2.0", true, true) - val APAFML = spdx("Adobe Postscript AFM License", "APAFML", false, false) - val `APL-1.0` = spdx("Adaptive Public License 1.0", "APL-1.0", true, false) - val `APSL-1.0` = spdx("Apple Public Source License 1.0", "APSL-1.0", true, false) - val `APSL-1.1` = spdx("Apple Public Source License 1.1", "APSL-1.1", true, false) - val `APSL-1.2` = spdx("Apple Public Source License 1.2", "APSL-1.2", true, false) - val `APSL-2.0` = spdx("Apple Public Source License 2.0", "APSL-2.0", true, true) - val `Artistic-1.0-cl8` = spdx("Artistic License 1.0 w/clause 8", "Artistic-1.0-cl8", true, false) - val `Artistic-1.0-Perl` = spdx("Artistic License 1.0 (Perl)", "Artistic-1.0-Perl", true, false) - val `Artistic-1.0` = spdx("Artistic License 1.0", "Artistic-1.0", true, false) - val `Artistic-2.0` = spdx("Artistic License 2.0", "Artistic-2.0", true, true) - val Bahyph = spdx("Bahyph License", "Bahyph", false, false) - val Barr = spdx("Barr License", "Barr", false, false) - val Beerware = spdx("Beerware License", "Beerware", false, false) - val `BitTorrent-1.0` = spdx("BitTorrent Open Source License v1.0", "BitTorrent-1.0", false, false) - val `BitTorrent-1.1` = spdx("BitTorrent Open Source License v1.1", "BitTorrent-1.1", false, true) - val Borceux = spdx("Borceux license", "Borceux", false, false) - val `BSD-1-Clause` = spdx("BSD 1-Clause License", "BSD-1-Clause", false, false) - val `BSD-2-Clause-FreeBSD` = spdx("BSD 2-Clause FreeBSD License", "BSD-2-Clause-FreeBSD", false, true) - val `BSD-2-Clause-NetBSD` = spdx("BSD 2-Clause NetBSD License", "BSD-2-Clause-NetBSD", false, false) - val `BSD-2-Clause-Patent` = spdx("BSD-2-Clause Plus Patent License", "BSD-2-Clause-Patent", true, false) - val `BSD-2-Clause` = spdx("BSD 2-Clause \"Simplified\" License", "BSD-2-Clause", true, false) - val `BSD-3-Clause-Attribution` = spdx("BSD with attribution", "BSD-3-Clause-Attribution", false, false) - val `BSD-3-Clause-Clear` = spdx("BSD 3-Clause Clear License", "BSD-3-Clause-Clear", false, true) - val `BSD-3-Clause-LBNL` = spdx("Lawrence Berkeley National Labs BSD variant license", "BSD-3-Clause-LBNL", false, false) - val `BSD-3-Clause-No-Nuclear-License-2014` = spdx("BSD 3-Clause No Nuclear License 2014", "BSD-3-Clause-No-Nuclear-License-2014", false, false) - val `BSD-3-Clause-No-Nuclear-License` = spdx("BSD 3-Clause No Nuclear License", "BSD-3-Clause-No-Nuclear-License", false, false) - val `BSD-3-Clause-No-Nuclear-Warranty` = spdx("BSD 3-Clause No Nuclear Warranty", "BSD-3-Clause-No-Nuclear-Warranty", false, false) - val `BSD-3-Clause` = spdx("BSD 3-Clause \"New\" or \"Revised\" License", "BSD-3-Clause", true, true) - val `BSD-4-Clause-UC` = spdx("BSD-4-Clause (University of California-Specific)", "BSD-4-Clause-UC", false, false) - val `BSD-4-Clause` = spdx("BSD 4-Clause \"Original\" or \"Old\" License", "BSD-4-Clause", false, true) - val `BSD-Protection` = spdx("BSD Protection License", "BSD-Protection", false, false) - val `BSD-Source-Code` = spdx("BSD Source Code Attribution", "BSD-Source-Code", false, false) - val `BSL-1.0` = spdx("Boost Software License 1.0", "BSL-1.0", true, true) - val `bzip2-1.0.5` = spdx("bzip2 and libbzip2 License v1.0.5", "bzip2-1.0.5", false, false) - val `bzip2-1.0.6` = spdx("bzip2 and libbzip2 License v1.0.6", "bzip2-1.0.6", false, false) - val Caldera = spdx("Caldera License", "Caldera", false, false) - val `CATOSL-1.1` = spdx("Computer Associates Trusted Open Source License 1.1", "CATOSL-1.1", true, false) - val `CC-BY-1.0` = spdx("Creative 
Commons Attribution 1.0", "CC-BY-1.0", false, false) - val `CC-BY-2.0` = spdx("Creative Commons Attribution 2.0", "CC-BY-2.0", false, false) - val `CC-BY-2.5` = spdx("Creative Commons Attribution 2.5", "CC-BY-2.5", false, false) - val `CC-BY-3.0` = spdx("Creative Commons Attribution 3.0", "CC-BY-3.0", false, false) - val `CC-BY-4.0` = spdx("Creative Commons Attribution 4.0", "CC-BY-4.0", false, true) - val `CC-BY-NC-1.0` = spdx("Creative Commons Attribution Non Commercial 1.0", "CC-BY-NC-1.0", false, false) - val `CC-BY-NC-2.0` = spdx("Creative Commons Attribution Non Commercial 2.0", "CC-BY-NC-2.0", false, false) - val `CC-BY-NC-2.5` = spdx("Creative Commons Attribution Non Commercial 2.5", "CC-BY-NC-2.5", false, false) - val `CC-BY-NC-3.0` = spdx("Creative Commons Attribution Non Commercial 3.0", "CC-BY-NC-3.0", false, false) - val `CC-BY-NC-4.0` = spdx("Creative Commons Attribution Non Commercial 4.0", "CC-BY-NC-4.0", false, false) - val `CC-BY-NC-ND-1.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 1.0", "CC-BY-NC-ND-1.0", false, false) - val `CC-BY-NC-ND-2.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 2.0", "CC-BY-NC-ND-2.0", false, false) - val `CC-BY-NC-ND-2.5` = spdx("Creative Commons Attribution Non Commercial No Derivatives 2.5", "CC-BY-NC-ND-2.5", false, false) - val `CC-BY-NC-ND-3.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 3.0", "CC-BY-NC-ND-3.0", false, false) - val `CC-BY-NC-ND-4.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 4.0", "CC-BY-NC-ND-4.0", false, false) - val `CC-BY-NC-SA-1.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 1.0", "CC-BY-NC-SA-1.0", false, false) - val `CC-BY-NC-SA-2.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 2.0", "CC-BY-NC-SA-2.0", false, false) - val `CC-BY-NC-SA-2.5` = spdx("Creative Commons Attribution Non Commercial Share Alike 2.5", "CC-BY-NC-SA-2.5", false, false) - val `CC-BY-NC-SA-3.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 3.0", "CC-BY-NC-SA-3.0", false, false) - val `CC-BY-NC-SA-4.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 4.0", "CC-BY-NC-SA-4.0", false, false) - val `CC-BY-ND-1.0` = spdx("Creative Commons Attribution No Derivatives 1.0", "CC-BY-ND-1.0", false, false) - val `CC-BY-ND-2.0` = spdx("Creative Commons Attribution No Derivatives 2.0", "CC-BY-ND-2.0", false, false) - val `CC-BY-ND-2.5` = spdx("Creative Commons Attribution No Derivatives 2.5", "CC-BY-ND-2.5", false, false) - val `CC-BY-ND-3.0` = spdx("Creative Commons Attribution No Derivatives 3.0", "CC-BY-ND-3.0", false, false) - val `CC-BY-ND-4.0` = spdx("Creative Commons Attribution No Derivatives 4.0", "CC-BY-ND-4.0", false, false) - val `CC-BY-SA-1.0` = spdx("Creative Commons Attribution Share Alike 1.0", "CC-BY-SA-1.0", false, false) - val `CC-BY-SA-2.0` = spdx("Creative Commons Attribution Share Alike 2.0", "CC-BY-SA-2.0", false, false) - val `CC-BY-SA-2.5` = spdx("Creative Commons Attribution Share Alike 2.5", "CC-BY-SA-2.5", false, false) - val `CC-BY-SA-3.0` = spdx("Creative Commons Attribution Share Alike 3.0", "CC-BY-SA-3.0", false, false) - val `CC-BY-SA-4.0` = spdx("Creative Commons Attribution Share Alike 4.0", "CC-BY-SA-4.0", false, true) - val `CC0-1.0` = spdx("Creative Commons Zero v1.0 Universal", "CC0-1.0", false, true) - val `CDDL-1.0` = spdx("Common Development and Distribution License 1.0", "CDDL-1.0", true, true) - val `CDDL-1.1` = spdx("Common Development and Distribution 
License 1.1", "CDDL-1.1", false, false) - val `CDLA-Permissive-1.0` = spdx("Community Data License Agreement Permissive 1.0", "CDLA-Permissive-1.0", false, false) - val `CDLA-Sharing-1.0` = spdx("Community Data License Agreement Sharing 1.0", "CDLA-Sharing-1.0", false, false) - val `CECILL-1.0` = spdx("CeCILL Free Software License Agreement v1.0", "CECILL-1.0", false, false) - val `CECILL-1.1` = spdx("CeCILL Free Software License Agreement v1.1", "CECILL-1.1", false, false) - val `CECILL-2.0` = spdx("CeCILL Free Software License Agreement v2.0", "CECILL-2.0", false, true) - val `CECILL-2.1` = spdx("CeCILL Free Software License Agreement v2.1", "CECILL-2.1", true, false) - val `CECILL-B` = spdx("CeCILL-B Free Software License Agreement", "CECILL-B", false, true) - val `CECILL-C` = spdx("CeCILL-C Free Software License Agreement", "CECILL-C", false, true) - val ClArtistic = spdx("Clarified Artistic License", "ClArtistic", false, true) - val `CNRI-Jython` = spdx("CNRI Jython License", "CNRI-Jython", false, false) - val `CNRI-Python-GPL-Compatible` = spdx("CNRI Python Open Source GPL Compatible License Agreement", "CNRI-Python-GPL-Compatible", false, false) - val `CNRI-Python` = spdx("CNRI Python License", "CNRI-Python", true, false) - val `Condor-1.1` = spdx("Condor Public License v1.1", "Condor-1.1", false, true) - val `CPAL-1.0` = spdx("Common Public Attribution License 1.0", "CPAL-1.0", true, true) - val `CPL-1.0` = spdx("Common Public License 1.0", "CPL-1.0", true, true) - val `CPOL-1.02` = spdx("Code Project Open License 1.02", "CPOL-1.02", false, false) - val Crossword = spdx("Crossword License", "Crossword", false, false) - val CrystalStacker = spdx("CrystalStacker License", "CrystalStacker", false, false) - val `CUA-OPL-1.0` = spdx("CUA Office Public License v1.0", "CUA-OPL-1.0", true, false) - val Cube = spdx("Cube License", "Cube", false, false) - val curl = spdx("curl License", "curl", false, false) - val `D-FSL-1.0` = spdx("Deutsche Freie Software Lizenz", "D-FSL-1.0", false, false) - val diffmark = spdx("diffmark license", "diffmark", false, false) - val DOC = spdx("DOC License", "DOC", false, false) - val Dotseqn = spdx("Dotseqn License", "Dotseqn", false, false) - val DSDP = spdx("DSDP License", "DSDP", false, false) - val dvipdfm = spdx("dvipdfm License", "dvipdfm", false, false) - val `ECL-1.0` = spdx("Educational Community License v1.0", "ECL-1.0", true, false) - val `ECL-2.0` = spdx("Educational Community License v2.0", "ECL-2.0", true, true) - val `EFL-1.0` = spdx("Eiffel Forum License v1.0", "EFL-1.0", true, false) - val `EFL-2.0` = spdx("Eiffel Forum License v2.0", "EFL-2.0", true, true) - val eGenix = spdx("eGenix.com Public License 1.1.0", "eGenix", false, false) - val Entessa = spdx("Entessa Public License v1.0", "Entessa", true, false) - val `EPL-1.0` = spdx("Eclipse Public License 1.0", "EPL-1.0", true, true) - val `EPL-2.0` = spdx("Eclipse Public License 2.0", "EPL-2.0", true, true) - val `ErlPL-1.1` = spdx("Erlang Public License v1.1", "ErlPL-1.1", false, false) - val EUDatagrid = spdx("EU DataGrid Software License", "EUDatagrid", true, true) - val `EUPL-1.0` = spdx("European Union Public License 1.0", "EUPL-1.0", false, false) - val `EUPL-1.1` = spdx("European Union Public License 1.1", "EUPL-1.1", true, true) - val `EUPL-1.2` = spdx("European Union Public License 1.2", "EUPL-1.2", true, false) - val Eurosym = spdx("Eurosym License", "Eurosym", false, false) - val Fair = spdx("Fair License", "Fair", true, false) - val `Frameworx-1.0` = spdx("Frameworx Open License 
1.0", "Frameworx-1.0", true, false) - val FreeImage = spdx("FreeImage Public License v1.0", "FreeImage", false, false) - val FSFAP = spdx("FSF All Permissive License", "FSFAP", false, true) - val FSFUL = spdx("FSF Unlimited License", "FSFUL", false, false) - val FSFULLR = spdx("FSF Unlimited License (with License Retention)", "FSFULLR", false, false) - val FTL = spdx("Freetype Project License", "FTL", false, true) - val `GFDL-1.1-only` = spdx("GNU Free Documentation License v1.1 only", "GFDL-1.1-only", false, false) - val `GFDL-1.1-or-later` = spdx("GNU Free Documentation License v1.1 or later", "GFDL-1.1-or-later", false, false) - val `GFDL-1.2-only` = spdx("GNU Free Documentation License v1.2 only", "GFDL-1.2-only", false, false) - val `GFDL-1.2-or-later` = spdx("GNU Free Documentation License v1.2 or later", "GFDL-1.2-or-later", false, false) - val `GFDL-1.3-only` = spdx("GNU Free Documentation License v1.3 only", "GFDL-1.3-only", false, false) - val `GFDL-1.3-or-later` = spdx("GNU Free Documentation License v1.3 or later", "GFDL-1.3-or-later", false, false) - val Giftware = spdx("Giftware License", "Giftware", false, false) - val GL2PS = spdx("GL2PS License", "GL2PS", false, false) - val Glide = spdx("3dfx Glide License", "Glide", false, false) - val Glulxe = spdx("Glulxe License", "Glulxe", false, false) - val gnuplot = spdx("gnuplot License", "gnuplot", false, true) - val `GPL-1.0-only` = spdx("GNU General Public License v1.0 only", "GPL-1.0-only", false, false) - val `GPL-1.0-or-later` = spdx("GNU General Public License v1.0 or later", "GPL-1.0-or-later", false, false) - val `GPL-2.0-only` = spdx("GNU General Public License v2.0 only", "GPL-2.0-only", true, false) - val `GPL-2.0-or-later` = spdx("GNU General Public License v2.0 or later", "GPL-2.0-or-later", true, false) - val `GPL-3.0-only` = spdx("GNU General Public License v3.0 only", "GPL-3.0-only", true, false) - val `GPL-3.0-or-later` = spdx("GNU General Public License v3.0 or later", "GPL-3.0-or-later", true, false) - val `gSOAP-1.3b` = spdx("gSOAP Public License v1.3b", "gSOAP-1.3b", false, false) - val HaskellReport = spdx("Haskell Language Report License", "HaskellReport", false, false) - val HPND = spdx("Historical Permission Notice and Disclaimer", "HPND", true, true) - val `IBM-pibs` = spdx("IBM PowerPC Initialization and Boot Software", "IBM-pibs", false, false) - val ICU = spdx("ICU License", "ICU", false, false) - val IJG = spdx("Independent JPEG Group License", "IJG", false, true) - val ImageMagick = spdx("ImageMagick License", "ImageMagick", false, false) - val iMatix = spdx("iMatix Standard Function Library Agreement", "iMatix", false, true) - val Imlib2 = spdx("Imlib2 License", "Imlib2", false, true) - val `Info-ZIP` = spdx("Info-ZIP License", "Info-ZIP", false, false) - val `Intel-ACPI` = spdx("Intel ACPI Software License Agreement", "Intel-ACPI", false, false) - val Intel = spdx("Intel Open Source License", "Intel", true, true) - val `Interbase-1.0` = spdx("Interbase Public License v1.0", "Interbase-1.0", false, false) - val IPA = spdx("IPA Font License", "IPA", true, true) - val `IPL-1.0` = spdx("IBM Public License v1.0", "IPL-1.0", true, true) - val ISC = spdx("ISC License", "ISC", true, true) - val `JasPer-2.0` = spdx("JasPer License", "JasPer-2.0", false, false) - val JSON = spdx("JSON License", "JSON", false, false) - val `LAL-1.2` = spdx("Licence Art Libre 1.2", "LAL-1.2", false, false) - val `LAL-1.3` = spdx("Licence Art Libre 1.3", "LAL-1.3", false, false) - val Latex2e = spdx("Latex2e License", 
"Latex2e", false, false) - val Leptonica = spdx("Leptonica License", "Leptonica", false, false) - val `LGPL-2.0-only` = spdx("GNU Library General Public License v2 only", "LGPL-2.0-only", true, false) - val `LGPL-2.0-or-later` = spdx("GNU Library General Public License v2 or later", "LGPL-2.0-or-later", true, false) - val `LGPL-2.1-only` = spdx("GNU Lesser General Public License v2.1 only", "LGPL-2.1-only", true, false) - val `LGPL-2.1-or-later` = spdx("GNU Lesser General Public License v2.1 or later", "LGPL-2.1-or-later", true, false) - val `LGPL-3.0-only` = spdx("GNU Lesser General Public License v3.0 only", "LGPL-3.0-only", true, false) - val `LGPL-3.0-or-later` = spdx("GNU Lesser General Public License v3.0 or later", "LGPL-3.0-or-later", true, false) - val LGPLLR = spdx("Lesser General Public License For Linguistic Resources", "LGPLLR", false, false) - val Libpng = spdx("libpng License", "Libpng", false, false) - val libtiff = spdx("libtiff License", "libtiff", false, false) - val `LiLiQ-P-1.1` = spdx("Licence Libre du Québec – Permissive version 1.1", "LiLiQ-P-1.1", true, false) - val `LiLiQ-R-1.1` = spdx("Licence Libre du Québec – Réciprocité version 1.1", "LiLiQ-R-1.1", true, false) - val `LiLiQ-Rplus-1.1` = spdx("Licence Libre du Québec – Réciprocité forte version 1.1", "LiLiQ-Rplus-1.1", true, false) - val `LPL-1.0` = spdx("Lucent Public License Version 1.0", "LPL-1.0", true, false) - val `LPL-1.02` = spdx("Lucent Public License v1.02", "LPL-1.02", true, true) - val `LPPL-1.0` = spdx("LaTeX Project Public License v1.0", "LPPL-1.0", false, false) - val `LPPL-1.1` = spdx("LaTeX Project Public License v1.1", "LPPL-1.1", false, false) - val `LPPL-1.2` = spdx("LaTeX Project Public License v1.2", "LPPL-1.2", false, true) - val `LPPL-1.3a` = spdx("LaTeX Project Public License v1.3a", "LPPL-1.3a", false, true) - val `LPPL-1.3c` = spdx("LaTeX Project Public License v1.3c", "LPPL-1.3c", true, false) - val MakeIndex = spdx("MakeIndex License", "MakeIndex", false, false) - val MirOS = spdx("MirOS License", "MirOS", true, false) - val `MIT-advertising` = spdx("Enlightenment License (e16)", "MIT-advertising", false, false) - val `MIT-CMU` = spdx("CMU License", "MIT-CMU", false, false) - val `MIT-enna` = spdx("enna License", "MIT-enna", false, false) - val `MIT-feh` = spdx("feh License", "MIT-feh", false, false) - val MIT = spdx("MIT License", "MIT", true, true) - val MITNFA = spdx("MIT +no-false-attribs license", "MITNFA", false, false) - val Motosoto = spdx("Motosoto License", "Motosoto", true, false) - val mpich2 = spdx("mpich2 License", "mpich2", false, false) - val `MPL-1.0` = spdx("Mozilla Public License 1.0", "MPL-1.0", true, false) - val `MPL-1.1` = spdx("Mozilla Public License 1.1", "MPL-1.1", true, true) - val `MPL-2.0-no-copyleft-exception` = spdx("Mozilla Public License 2.0 (no copyleft exception)", "MPL-2.0-no-copyleft-exception", true, false) - val `MPL-2.0` = spdx("Mozilla Public License 2.0", "MPL-2.0", true, true) - val `MS-PL` = spdx("Microsoft Public License", "MS-PL", true, true) - val `MS-RL` = spdx("Microsoft Reciprocal License", "MS-RL", true, true) - val MTLL = spdx("Matrix Template Library License", "MTLL", false, false) - val Multics = spdx("Multics License", "Multics", true, false) - val Mup = spdx("Mup License", "Mup", false, false) - val `NASA-1.3` = spdx("NASA Open Source Agreement 1.3", "NASA-1.3", true, false) - val Naumen = spdx("Naumen Public License", "Naumen", true, false) - val `NBPL-1.0` = spdx("Net Boolean Public License v1", "NBPL-1.0", false, false) - 
val NCSA = spdx("University of Illinois/NCSA Open Source License", "NCSA", true, true) - val `Net-SNMP` = spdx("Net-SNMP License", "Net-SNMP", false, false) - val NetCDF = spdx("NetCDF license", "NetCDF", false, false) - val Newsletr = spdx("Newsletr License", "Newsletr", false, false) - val NGPL = spdx("Nethack General Public License", "NGPL", true, false) - val `NLOD-1.0` = spdx("Norwegian Licence for Open Government Data", "NLOD-1.0", false, false) - val NLPL = spdx("No Limit Public License", "NLPL", false, false) - val Nokia = spdx("Nokia Open Source License", "Nokia", true, true) - val NOSL = spdx("Netizen Open Source License", "NOSL", false, true) - val Noweb = spdx("Noweb License", "Noweb", false, false) - val `NPL-1.0` = spdx("Netscape Public License v1.0", "NPL-1.0", false, true) - val `NPL-1.1` = spdx("Netscape Public License v1.1", "NPL-1.1", false, true) - val `NPOSL-3.0` = spdx("Non-Profit Open Software License 3.0", "NPOSL-3.0", true, false) - val NRL = spdx("NRL License", "NRL", false, false) - val NTP = spdx("NTP License", "NTP", true, false) - val `OCCT-PL` = spdx("Open CASCADE Technology Public License", "OCCT-PL", false, false) - val `OCLC-2.0` = spdx("OCLC Research Public License 2.0", "OCLC-2.0", true, false) - val `ODbL-1.0` = spdx("ODC Open Database License v1.0", "ODbL-1.0", false, true) - val `OFL-1.0` = spdx("SIL Open Font License 1.0", "OFL-1.0", false, false) - val `OFL-1.1` = spdx("SIL Open Font License 1.1", "OFL-1.1", true, true) - val OGTSL = spdx("Open Group Test Suite License", "OGTSL", true, false) - val `OLDAP-1.1` = spdx("Open LDAP Public License v1.1", "OLDAP-1.1", false, false) - val `OLDAP-1.2` = spdx("Open LDAP Public License v1.2", "OLDAP-1.2", false, false) - val `OLDAP-1.3` = spdx("Open LDAP Public License v1.3", "OLDAP-1.3", false, false) - val `OLDAP-1.4` = spdx("Open LDAP Public License v1.4", "OLDAP-1.4", false, false) - val `OLDAP-2.0.1` = spdx("Open LDAP Public License v2.0.1", "OLDAP-2.0.1", false, false) - val `OLDAP-2.0` = spdx("Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", "OLDAP-2.0", false, false) - val `OLDAP-2.1` = spdx("Open LDAP Public License v2.1", "OLDAP-2.1", false, false) - val `OLDAP-2.2.1` = spdx("Open LDAP Public License v2.2.1", "OLDAP-2.2.1", false, false) - val `OLDAP-2.2.2` = spdx("Open LDAP Public License 2.2.2", "OLDAP-2.2.2", false, false) - val `OLDAP-2.2` = spdx("Open LDAP Public License v2.2", "OLDAP-2.2", false, false) - val `OLDAP-2.3` = spdx("Open LDAP Public License v2.3", "OLDAP-2.3", false, true) - val `OLDAP-2.4` = spdx("Open LDAP Public License v2.4", "OLDAP-2.4", false, false) - val `OLDAP-2.5` = spdx("Open LDAP Public License v2.5", "OLDAP-2.5", false, false) - val `OLDAP-2.6` = spdx("Open LDAP Public License v2.6", "OLDAP-2.6", false, false) - val `OLDAP-2.7` = spdx("Open LDAP Public License v2.7", "OLDAP-2.7", false, true) - val `OLDAP-2.8` = spdx("Open LDAP Public License v2.8", "OLDAP-2.8", false, false) - val OML = spdx("Open Market License", "OML", false, false) - val OpenSSL = spdx("OpenSSL License", "OpenSSL", false, true) - val `OPL-1.0` = spdx("Open Public License v1.0", "OPL-1.0", false, false) - val `OSET-PL-2.1` = spdx("OSET Public License version 2.1", "OSET-PL-2.1", true, false) - val `OSL-1.0` = spdx("Open Software License 1.0", "OSL-1.0", true, true) - val `OSL-1.1` = spdx("Open Software License 1.1", "OSL-1.1", false, true) - val `OSL-2.0` = spdx("Open Software License 2.0", "OSL-2.0", true, true) - val `OSL-2.1` = spdx("Open Software License 2.1", "OSL-2.1", true, true) 
- val `OSL-3.0` = spdx("Open Software License 3.0", "OSL-3.0", true, true) - val `PDDL-1.0` = spdx("ODC Public Domain Dedication & License 1.0", "PDDL-1.0", false, false) - val `PHP-3.0` = spdx("PHP License v3.0", "PHP-3.0", true, false) - val `PHP-3.01` = spdx("PHP License v3.01", "PHP-3.01", false, true) - val Plexus = spdx("Plexus Classworlds License", "Plexus", false, false) - val PostgreSQL = spdx("PostgreSQL License", "PostgreSQL", true, false) - val psfrag = spdx("psfrag License", "psfrag", false, false) - val psutils = spdx("psutils License", "psutils", false, false) - val `Python-2.0` = spdx("Python License 2.0", "Python-2.0", true, true) - val Qhull = spdx("Qhull License", "Qhull", false, false) - val `QPL-1.0` = spdx("Q Public License 1.0", "QPL-1.0", true, true) - val Rdisc = spdx("Rdisc License", "Rdisc", false, false) - val `RHeCos-1.1` = spdx("Red Hat eCos Public License v1.1", "RHeCos-1.1", false, false) - val `RPL-1.1` = spdx("Reciprocal Public License 1.1", "RPL-1.1", true, false) - val `RPL-1.5` = spdx("Reciprocal Public License 1.5", "RPL-1.5", true, false) - val `RPSL-1.0` = spdx("RealNetworks Public Source License v1.0", "RPSL-1.0", true, true) - val `RSA-MD` = spdx("RSA Message-Digest License ", "RSA-MD", false, false) - val RSCPL = spdx("Ricoh Source Code Public License", "RSCPL", true, false) - val Ruby = spdx("Ruby License", "Ruby", false, true) - val `SAX-PD` = spdx("Sax Public Domain Notice", "SAX-PD", false, false) - val Saxpath = spdx("Saxpath License", "Saxpath", false, false) - val SCEA = spdx("SCEA Shared Source License", "SCEA", false, false) - val Sendmail = spdx("Sendmail License", "Sendmail", false, false) - val `SGI-B-1.0` = spdx("SGI Free Software License B v1.0", "SGI-B-1.0", false, false) - val `SGI-B-1.1` = spdx("SGI Free Software License B v1.1", "SGI-B-1.1", false, false) - val `SGI-B-2.0` = spdx("SGI Free Software License B v2.0", "SGI-B-2.0", false, true) - val `SimPL-2.0` = spdx("Simple Public License 2.0", "SimPL-2.0", true, false) - val `SISSL-1.2` = spdx("Sun Industry Standards Source License v1.2", "SISSL-1.2", false, false) - val SISSL = spdx("Sun Industry Standards Source License v1.1", "SISSL", true, false) - val Sleepycat = spdx("Sleepycat License", "Sleepycat", true, true) - val SMLNJ = spdx("Standard ML of New Jersey License", "SMLNJ", false, true) - val SMPPL = spdx("Secure Messaging Protocol Public License", "SMPPL", false, false) - val SNIA = spdx("SNIA Public License 1.1", "SNIA", false, false) - val `Spencer-86` = spdx("Spencer License 86", "Spencer-86", false, false) - val `Spencer-94` = spdx("Spencer License 94", "Spencer-94", false, false) - val `Spencer-99` = spdx("Spencer License 99", "Spencer-99", false, false) - val `SPL-1.0` = spdx("Sun Public License v1.0", "SPL-1.0", true, true) - val `SugarCRM-1.1.3` = spdx("SugarCRM Public License v1.1.3", "SugarCRM-1.1.3", false, false) - val SWL = spdx("Scheme Widget Library (SWL) Software License Agreement", "SWL", false, false) - val TCL = spdx("TCL/TK License", "TCL", false, false) - val `TCP-wrappers` = spdx("TCP Wrappers License", "TCP-wrappers", false, false) - val TMate = spdx("TMate Open Source License", "TMate", false, false) - val `TORQUE-1.1` = spdx("TORQUE v2.5+ Software License v1.1", "TORQUE-1.1", false, false) - val TOSL = spdx("Trusster Open Source License", "TOSL", false, false) - val `Unicode-DFS-2015` = spdx("Unicode License Agreement - Data Files and Software (2015)", "Unicode-DFS-2015", false, false) - val `Unicode-DFS-2016` = spdx("Unicode License Agreement - 
Data Files and Software (2016)", "Unicode-DFS-2016", false, false) - val `Unicode-TOU` = spdx("Unicode Terms of Use", "Unicode-TOU", false, false) - val Unlicense = spdx("The Unlicense", "Unlicense", false, true) - val `UPL-1.0` = spdx("Universal Permissive License v1.0", "UPL-1.0", true, true) - val Vim = spdx("Vim License", "Vim", false, true) - val VOSTROM = spdx("VOSTROM Public License for Open Source", "VOSTROM", false, false) - val `VSL-1.0` = spdx("Vovida Software License v1.0", "VSL-1.0", true, false) - val `W3C-19980720` = spdx("W3C Software Notice and License (1998-07-20)", "W3C-19980720", false, false) - val `W3C-20150513` = spdx("W3C Software Notice and Document License (2015-05-13)", "W3C-20150513", false, false) - val W3C = spdx("W3C Software Notice and License (2002-12-31)", "W3C", true, true) - val `Watcom-1.0` = spdx("Sybase Open Watcom Public License 1.0", "Watcom-1.0", true, false) - val Wsuipa = spdx("Wsuipa License", "Wsuipa", false, false) - val WTFPL = spdx("Do What The F*ck You Want To Public License", "WTFPL", false, true) - val X11 = spdx("X11 License", "X11", false, true) - val Xerox = spdx("Xerox License", "Xerox", false, false) - val `XFree86-1.1` = spdx("XFree86 License 1.1", "XFree86-1.1", false, true) - val xinetd = spdx("xinetd License", "xinetd", false, true) - val Xnet = spdx("X.Net License", "Xnet", true, false) - val xpp = spdx("XPP License", "xpp", false, false) - val XSkat = spdx("XSkat License", "XSkat", false, false) - val `YPL-1.0` = spdx("Yahoo! Public License v1.0", "YPL-1.0", false, false) - val `YPL-1.1` = spdx("Yahoo! Public License v1.1", "YPL-1.1", false, true) - val Zed = spdx("Zed License", "Zed", false, false) - val `Zend-2.0` = spdx("Zend License v2.0", "Zend-2.0", false, true) - val `Zimbra-1.3` = spdx("Zimbra Public License v1.3", "Zimbra-1.3", false, true) - val `Zimbra-1.4` = spdx("Zimbra Public License v1.4", "Zimbra-1.4", false, false) - val `zlib-acknowledgement` = spdx("zlib/libpng License with Acknowledgement", "zlib-acknowledgement", false, false) - val Zlib = spdx("zlib License", "Zlib", true, true) - val `ZPL-1.1` = spdx("Zope Public License 1.1", "ZPL-1.1", false, false) - val `ZPL-2.0` = spdx("Zope Public License 2.0", "ZPL-2.0", true, true) - val `ZPL-2.1` = spdx("Zope Public License 2.1", "ZPL-2.1", false, true) - val `AGPL-3.0` = spdx("GNU Affero General Public License v3.0", "AGPL-3.0", true, false) - val `eCos-2.0` = spdx("eCos license version 2.0", "eCos-2.0", false, false) - val `GFDL-1.1` = spdx("GNU Free Documentation License v1.1", "GFDL-1.1", false, false) - val `GFDL-1.2` = spdx("GNU Free Documentation License v1.2", "GFDL-1.2", false, false) - val `GFDL-1.3` = spdx("GNU Free Documentation License v1.3", "GFDL-1.3", false, false) - val `GPL-1.0+` = spdx("GNU General Public License v1.0 or later", "GPL-1.0+", false, false) - val `GPL-1.0` = spdx("GNU General Public License v1.0 only", "GPL-1.0", false, false) - val `GPL-2.0+` = spdx("GNU General Public License v2.0 or later", "GPL-2.0+", true, false) - val `GPL-2.0-with-autoconf-exception` = spdx("GNU General Public License v2.0 w/Autoconf exception", "GPL-2.0-with-autoconf-exception", false, false) - val `GPL-2.0-with-bison-exception` = spdx("GNU General Public License v2.0 w/Bison exception", "GPL-2.0-with-bison-exception", false, false) - val `GPL-2.0-with-classpath-exception` = spdx("GNU General Public License v2.0 w/Classpath exception", "GPL-2.0-with-classpath-exception", false, false) - val `GPL-2.0-with-font-exception` = spdx("GNU General Public License 
v2.0 w/Font exception", "GPL-2.0-with-font-exception", false, false) - val `GPL-2.0-with-GCC-exception` = spdx("GNU General Public License v2.0 w/GCC Runtime Library exception", "GPL-2.0-with-GCC-exception", false, false) - val `GPL-2.0` = spdx("GNU General Public License v2.0 only", "GPL-2.0", true, false) - val `GPL-3.0+` = spdx("GNU General Public License v3.0 or later", "GPL-3.0+", true, false) - val `GPL-3.0-with-autoconf-exception` = spdx("GNU General Public License v3.0 w/Autoconf exception", "GPL-3.0-with-autoconf-exception", false, false) - val `GPL-3.0-with-GCC-exception` = spdx("GNU General Public License v3.0 w/GCC Runtime Library exception", "GPL-3.0-with-GCC-exception", true, false) - val `GPL-3.0` = spdx("GNU General Public License v3.0 only", "GPL-3.0", true, false) - val `LGPL-2.0+` = spdx("GNU Library General Public License v2 or later", "LGPL-2.0+", true, false) - val `LGPL-2.0` = spdx("GNU Library General Public License v2 only", "LGPL-2.0", true, false) - val `LGPL-2.1+` = spdx("GNU Library General Public License v2 or later", "LGPL-2.1+", true, false) - val `LGPL-2.1` = spdx("GNU Lesser General Public License v2.1 only", "LGPL-2.1", true, false) - val `LGPL-3.0+` = spdx("GNU Lesser General Public License v3.0 or later", "LGPL-3.0+", true, false) - val `LGPL-3.0` = spdx("GNU Lesser General Public License v3.0 only", "LGPL-3.0", true, false) - val Nunit = spdx("Nunit License", "Nunit", false, false) - val `StandardML-NJ` = spdx("Standard ML of New Jersey License", "StandardML-NJ", false, false) - val wxWindows = spdx("wxWindows Library License", "wxWindows", false, false) - - private def spdx(fullName: String, id: String, isOsiApproved: Boolean, isFsfLibre: Boolean): License = - License(fullName, id, s"https://spdx.org/licenses/$id.html", isOsiApproved, isFsfLibre, "repo") - - val PublicDomain = License( - id = "Public Domain", - name = "Public Domain", - url = "https://creativecommons.org/publicdomain/zero/1.0/", - isOsiApproved = true, // sort of: https://opensource.org/faq#public-domain - isFsfLibre = true, // I'm not sure about this - distribution = "repo" - ) - - val Scala = License( - id = "Scala License", - name = "Scala License", - url = "http://www.scala-lang.org/license.html", - isOsiApproved = false, - isFsfLibre = false, - distribution = "repo" - ) - - val TypesafeSubscriptionAgreement = License( - id = "Typesafe Subscription Agreement", - name = "Typesafe Subscription Agreement", - url = "http://downloads.typesafe.com/website/legal/TypesafeSubscriptionAgreement.pdf", - isOsiApproved = false, - isFsfLibre = false, - distribution = "repo" - ) - - // https://github.com/sbt/sbt/issues/1937#issuecomment-214963983 - object Common { - val Apache2 = License.`Apache-2.0` - val MIT = License.MIT - val BSD4 = License.`BSD-4-Clause` - val Typesafe = License.TypesafeSubscriptionAgreement - val BSD3 = License.`BSD-3-Clause` - } -} \ No newline at end of file diff --git a/scalalib/src/mill/scalalib/publish/LocalPublisher.scala b/scalalib/src/mill/scalalib/publish/LocalPublisher.scala deleted file mode 100644 index d9839831..00000000 --- a/scalalib/src/mill/scalalib/publish/LocalPublisher.scala +++ /dev/null @@ -1,32 +0,0 @@ -package mill.scalalib.publish - - -object LocalPublisher { - - private val root: os.Path = os.home / ".ivy2" / "local" - - def publish(jar: os.Path, - sourcesJar: os.Path, - docJar: os.Path, - pom: os.Path, - ivy: os.Path, - artifact: Artifact): Unit = { - val releaseDir = root / artifact.group / artifact.id / artifact.version - writeFiles( - jar -> 
releaseDir / "jars" / s"${artifact.id}.jar", - sourcesJar -> releaseDir / "srcs" / s"${artifact.id}-sources.jar", - docJar -> releaseDir / "docs" / s"${artifact.id}-javadoc.jar", - pom -> releaseDir / "poms" / s"${artifact.id}.pom", - ivy -> releaseDir / "ivys" / "ivy.xml" - ) - } - - private def writeFiles(fromTo: (os.Path, os.Path)*): Unit = { - fromTo.foreach { - case (from, to) => - os.makeDir.all(to / os.up) - os.copy.over(from, to) - } - } - -} diff --git a/scalalib/src/mill/scalalib/publish/Pom.scala b/scalalib/src/mill/scalalib/publish/Pom.scala deleted file mode 100644 index 57a0e196..00000000 --- a/scalalib/src/mill/scalalib/publish/Pom.scala +++ /dev/null @@ -1,117 +0,0 @@ -package mill.scalalib.publish - -import mill.util.Loose.Agg - -import scala.xml.{Atom, Elem, NodeSeq, PrettyPrinter} - -object Pom { - - val head = "\n" - - implicit class XmlOps(val e: Elem) extends AnyVal { - // source: https://stackoverflow.com/a/5254068/449071 - def optional : NodeSeq = { - require(e.child.length == 1) - e.child.head match { - case atom: Atom[Option[_]] => atom.data match { - case None => NodeSeq.Empty - case Some(x) => e.copy(child = x match { - case n: NodeSeq => n - case x => new Atom(x) - }) - } - case _ => e - } - } - } - - //TODO - not only jar packaging support? - def apply(artifact: Artifact, - dependencies: Agg[Dependency], - name: String, - pomSettings: PomSettings): String = { - val xml = - - - 4.0.0 - {name} - {artifact.group} - {artifact.id} - jar - {pomSettings.description} - - {artifact.version} - {pomSettings.url} - - {pomSettings.licenses.map(renderLicense)} - - - { {pomSettings.versionControl.connection}.optional } - { {pomSettings.versionControl.developerConnection}.optional } - { {pomSettings.versionControl.tag}.optional } - { {pomSettings.versionControl.browsableRepository}.optional } - - - {pomSettings.developers.map(renderDeveloper)} - - - {dependencies.map(renderDependency).toSeq} - - - - val pp = new PrettyPrinter(120, 4) - head + pp.format(xml) - } - - private def renderLicense(l: License): Elem = { - - {l.name} - {l.url} - {l.distribution} - - } - - private def renderDeveloper(d: Developer): Elem = { - - {d.id} - {d.name} - { {d.organization}.optional } - { {d.organizationUrl}.optional } - - } - - private def renderDependency(d: Dependency): Elem = { - val scope = d.scope match { - case Scope.Compile => NodeSeq.Empty - case Scope.Provided => provided - case Scope.Test => test - case Scope.Runtime => runtime - } - if (d.exclusions.isEmpty) - - {d.artifact.group} - {d.artifact.id} - {d.artifact.version} - {scope} - - else - - {d.artifact.group} - {d.artifact.id} - {d.artifact.version} - - {d.exclusions.map(ex => - - {ex._1} - {ex._2} - - )} - - {scope} - - } - -} diff --git a/scalalib/src/mill/scalalib/publish/SonatypeHttpApi.scala b/scalalib/src/mill/scalalib/publish/SonatypeHttpApi.scala deleted file mode 100644 index 12defa93..00000000 --- a/scalalib/src/mill/scalalib/publish/SonatypeHttpApi.scala +++ /dev/null @@ -1,134 +0,0 @@ -package mill.scalalib.publish - -import java.util.Base64 - - - -import scala.concurrent.duration._ -import scalaj.http.{BaseHttp, HttpOptions, HttpRequest, HttpResponse} - -object PatientHttp - extends BaseHttp( - options = Seq( - HttpOptions.connTimeout(5.seconds.toMillis.toInt), - HttpOptions.readTimeout(1.minute.toMillis.toInt), - HttpOptions.followRedirects(false) - ) - ) - -class SonatypeHttpApi(uri: String, credentials: String) { - - private val base64Creds = base64(credentials) - - private val commonHeaders = Seq( - 
"Authorization" -> s"Basic $base64Creds", - "Accept" -> "application/json", - "Content-Type" -> "application/json" - ) - - // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles.html - def getStagingProfileUri(groupId: String): String = { - val response = withRetry( - PatientHttp(s"$uri/staging/profiles").headers(commonHeaders)) - .throwError - - val resourceUri = - ujson - .read(response.body)("data") - .arr - .find(profile => - groupId.split('.').startsWith(profile("name").str.split('.'))) - .map(_("resourceURI").str.toString) - - resourceUri.getOrElse( - throw new RuntimeException( - s"Could not find staging profile for groupId: ${groupId}") - ) - } - - def getStagingRepoState(stagingRepoId: String): String = { - val response = PatientHttp(s"${uri}/staging/repository/${stagingRepoId}") - .option(HttpOptions.readTimeout(60000)) - .headers(commonHeaders) - .asString - .throwError - - ujson.read(response.body)("type").str.toString - } - - // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_start.html - def createStagingRepo(profileUri: String, groupId: String): String = { - val response = withRetry(PatientHttp(s"${profileUri}/start") - .headers(commonHeaders) - .postData( - s"""{"data": {"description": "fresh staging profile for ${groupId}"}}""")) - .throwError - - ujson.read(response.body)("data")("stagedRepositoryId").str.toString - } - - // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_finish.html - def closeStagingRepo(profileUri: String, repositoryId: String): Boolean = { - val response = withRetry( - PatientHttp(s"${profileUri}/finish") - .headers(commonHeaders) - .postData( - s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "closing staging repository"}}""" - )) - - response.code == 201 - } - - // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_promote.html - def promoteStagingRepo(profileUri: String, repositoryId: String): Boolean = { - val response = withRetry( - PatientHttp(s"${profileUri}/promote") - .headers(commonHeaders) - .postData( - s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "promote staging repository"}}""" - )) - - response.code == 201 - } - - // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_drop.html - def dropStagingRepo(profileUri: String, repositoryId: String): Boolean = { - val response = withRetry( - PatientHttp(s"${profileUri}/drop") - .headers(commonHeaders) - .postData( - s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "drop staging repository"}}""" - )) - - response.code == 201 - } - - private val uploadTimeout = 5.minutes.toMillis.toInt - - def upload(uri: String, data: Array[Byte]): HttpResponse[String] = { - PatientHttp(uri) - .option(HttpOptions.readTimeout(uploadTimeout)) - .method("PUT") - .headers( - "Content-Type" -> "application/binary", - "Authorization" -> s"Basic ${base64Creds}" - ) - .put(data) - .asString - } - - private def withRetry(request: HttpRequest, - retries: Int = 10): HttpResponse[String] = { - val resp = request.asString - if (resp.is5xx && retries > 0) { - Thread.sleep(500) - withRetry(request, retries - 1) - } else { - resp - } - } - - private def base64(s: String) = - new String(Base64.getEncoder.encode(s.getBytes)) - -} diff --git a/scalalib/src/mill/scalalib/publish/SonatypePublisher.scala 
b/scalalib/src/mill/scalalib/publish/SonatypePublisher.scala deleted file mode 100644 index 1843943b..00000000 --- a/scalalib/src/mill/scalalib/publish/SonatypePublisher.scala +++ /dev/null @@ -1,164 +0,0 @@ -package mill.scalalib.publish - -import java.math.BigInteger -import java.security.MessageDigest - -import mill.api.Logger - -import scalaj.http.HttpResponse - -class SonatypePublisher(uri: String, - snapshotUri: String, - credentials: String, - gpgPassphrase: Option[String], - signed: Boolean, - log: Logger) { - - private val api = new SonatypeHttpApi(uri, credentials) - - def publish(fileMapping: Seq[(os.Path, String)], artifact: Artifact, release: Boolean): Unit = { - publishAll(release, fileMapping -> artifact) - } - def publishAll(release: Boolean, artifacts: (Seq[(os.Path, String)], Artifact)*): Unit = { - - val mappings = for ((fileMapping0, artifact) <- artifacts) yield { - val publishPath = Seq( - artifact.group.replace(".", "/"), - artifact.id, - artifact.version - ).mkString("/") - val fileMapping = fileMapping0.map{ case (file, name) => (file, publishPath+"/"+name) } - - val signedArtifacts = if (signed) fileMapping.map { - case (file, name) => poorMansSign(file, gpgPassphrase) -> s"$name.asc" - } else Seq() - - artifact -> (fileMapping ++ signedArtifacts).flatMap { - case (file, name) => - val content = os.read.bytes(file) - - Seq( - name -> content, - (name + ".md5") -> md5hex(content), - (name + ".sha1") -> sha1hex(content) - ) - } - } - - val (snapshots, releases) = mappings.partition(_._1.isSnapshot) - if(snapshots.nonEmpty) { - publishSnapshot(snapshots.flatMap(_._2), snapshots.map(_._1)) - } - val releaseGroups = releases.groupBy(_._1.group) - for((group, groupReleases) <- releaseGroups){ - publishRelease(release, groupReleases.flatMap(_._2), group, releases.map(_._1)) - } - } - - private def publishSnapshot(payloads: Seq[(String, Array[Byte])], - artifacts: Seq[Artifact]): Unit = { - - val publishResults = payloads.map { - case (fileName, data) => - log.info(s"Uploading $fileName") - val resp = api.upload(s"$snapshotUri/$fileName", data) - resp - } - reportPublishResults(publishResults, artifacts) - } - - private def publishRelease(release: Boolean, - payloads: Seq[(String, Array[Byte])], - stagingProfile: String, - artifacts: Seq[Artifact]): Unit = { - val profileUri = api.getStagingProfileUri(stagingProfile) - val stagingRepoId = - api.createStagingRepo(profileUri, stagingProfile) - val baseUri = s"$uri/staging/deployByRepositoryId/$stagingRepoId/" - - val publishResults = payloads.map { - case (fileName, data) => - log.info(s"Uploading ${fileName}") - api.upload(s"$baseUri/$fileName", data) - } - reportPublishResults(publishResults, artifacts) - - if (release) { - log.info("Closing staging repository") - api.closeStagingRepo(profileUri, stagingRepoId) - - log.info("Waiting for staging repository to close") - awaitRepoStatus("closed", stagingRepoId) - - log.info("Promoting staging repository") - api.promoteStagingRepo(profileUri, stagingRepoId) - - log.info("Waiting for staging repository to release") - awaitRepoStatus("released", stagingRepoId) - - log.info("Dropping staging repository") - api.dropStagingRepo(profileUri, stagingRepoId) - - log.info(s"Published ${artifacts.map(_.id).mkString(", ")} successfully") - } - } - - private def reportPublishResults(publishResults: Seq[HttpResponse[String]], - artifacts: Seq[Artifact]) = { - if (publishResults.forall(_.is2xx)) { - log.info(s"Published ${artifacts.map(_.id).mkString(", ")} to Sonatype") - } else { - val 
errors = publishResults.filterNot(_.is2xx).map { response => - s"Code: ${response.code}, message: ${response.body}" - } - throw new RuntimeException( - s"Failed to publish ${artifacts.map(_.id).mkString(", ")} to Sonatype. Errors: \n${errors.mkString("\n")}" - ) - } - } - - private def awaitRepoStatus(status: String, - stagingRepoId: String, - attempts: Int = 20): Unit = { - def isRightStatus = - api.getStagingRepoState(stagingRepoId).equalsIgnoreCase(status) - var attemptsLeft = attempts - - while (attemptsLeft > 0 && !isRightStatus) { - Thread.sleep(3000) - attemptsLeft -= 1 - if (attemptsLeft == 0) { - throw new RuntimeException( - s"Couldn't wait for staging repository to be ${status}. Failing") - } - } - } - - // http://central.sonatype.org/pages/working-with-pgp-signatures.html#signing-a-file - private def poorMansSign(file: os.Path, maybePassphrase: Option[String]): os.Path = { - val fileName = file.toString - maybePassphrase match { - case Some(passphrase) => - os.proc("gpg", "--passphrase", passphrase, "--batch", "--yes", "-a", "-b", fileName) - .call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) - case None => - os.proc("gpg", "--batch", "--yes", "-a", "-b", fileName) - .call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) - } - os.Path(fileName + ".asc") - } - - private def md5hex(bytes: Array[Byte]): Array[Byte] = - hexArray(md5.digest(bytes)).getBytes - - private def sha1hex(bytes: Array[Byte]): Array[Byte] = - hexArray(sha1.digest(bytes)).getBytes - - private def md5 = MessageDigest.getInstance("md5") - - private def sha1 = MessageDigest.getInstance("sha1") - - private def hexArray(arr: Array[Byte]) = - String.format("%0" + (arr.length << 1) + "x", new BigInteger(1, arr)) - -} diff --git a/scalalib/src/mill/scalalib/publish/VersionControl.scala b/scalalib/src/mill/scalalib/publish/VersionControl.scala deleted file mode 100644 index aad38ac3..00000000 --- a/scalalib/src/mill/scalalib/publish/VersionControl.scala +++ /dev/null @@ -1,131 +0,0 @@ -package mill.scalalib.publish - -// https://maven.apache.org/pom.html#SCM -/* - * @param browsableRepository: a publicly browsable repository - * (example: https://github.com/lihaoyi/mill) - * @param connection: read-only connection to repository - * (example: scm:git:git://github.com/lihaoyi/mill.git) - * @param developerConnection: read-write connection to repository - * (example: scm:git:git@github.com:lihaoyi/mill.git) - * @param tag: tag that was created for this release. This is useful for - * git and mercurial since it's not possible to include the tag in - * the connection url. - * (example: v2.12.4, HEAD, my-branch, fd8a2567ad32c11bcf8adbaca85bdba72bb4f935, ...) 
- */ -case class VersionControl( - browsableRepository: Option[String] = None, - connection: Option[String] = None, - developerConnection: Option[String] = None, - tag: Option[String] = None -) - -@deprecated("use VersionControl", "0.1.3") -case class SCM( - url: String, - connection: String -) - -object VersionControl { - def github(owner: String, repo: String, tag: Option[String] = None): VersionControl = - VersionControl( - browsableRepository = Some(s"https://github.com/$owner/$repo"), - connection = Some(VersionControlConnection.gitGit("github.com", s"$owner/$repo.git")), - developerConnection = Some(VersionControlConnection.gitSsh("github.com", s":$owner/$repo.git", username = Some("git"))), - tag = tag - ) - def gitlab(owner: String, repo: String, tag: Option[String] = None): VersionControl = - VersionControl( - browsableRepository = Some(s"https://gitlab.com/$owner/$repo"), - connection = Some(VersionControlConnection.gitGit("gitlab.com", s"$owner/$repo.git")), - developerConnection = Some(VersionControlConnection.gitSsh("gitlab.com", s":$owner/$repo.git", username = Some("git"))), - tag = tag - ) -} - -object VersionControlConnection { - def network(scm: String, - protocol: String, - hostname: String, - path: String, - username: Option[String] = None, - password: Option[String] = None, - port: Option[Int] = None): String = { - val portPart = port.map(":" + _).getOrElse("") - val credentials = - username match { - case Some(user) => - val pass = password.map(":" + _).getOrElse("") - s"${user}${pass}@" - case None => - password match { - case Some(p) => sys.error(s"no username set for password: $p") - case _ => "" - } - } - - val path0 = - if(path.startsWith(":") || path.startsWith("/")) path - else "/" + path - - s"scm:${scm}:${protocol}://${credentials}${hostname}${portPart}${path0}" - } - - def file(scm: String, path: String): String = { - s"scm:$scm:file://$path" - } - - def gitGit(hostname: String, - path: String = "", - port: Option[Int] = None): String = - network("git", "git", hostname, path, port = port) - - def gitHttp(hostname: String, - path: String = "", - port: Option[Int] = None): String = - network("git", "http", hostname, path, port = port) - - def gitHttps(hostname: String, - path: String = "", - port: Option[Int] = None): String = - network("git", "https", hostname, path, port = port) - - def gitSsh(hostname: String, - path: String = "", - username: Option[String] = None, - port: Option[Int] = None): String = - network("git", "ssh", hostname, path, username = username, port = port) - - def gitFile(path: String): String = - file("git", path) - - def svnSsh(hostname: String, - path: String = "", - username: Option[String] = None, - port: Option[Int] = None): String = - network("svn", "svn+ssh", hostname, path, username, None, port) - - def svnHttp(hostname: String, - path: String = "", - username: Option[String] = None, - password: Option[String] = None, - port: Option[Int] = None): String = - network("svn", "http", hostname, path, username, password, port) - - def svnHttps(hostname: String, - path: String = "", - username: Option[String] = None, - password: Option[String] = None, - port: Option[Int] = None): String = - network("svn", "https", hostname, path, username, password, port) - - def svnSvn(hostname: String, - path: String = "", - username: Option[String] = None, - password: Option[String] = None, - port: Option[Int] = None): String = - network("svn", "svn", hostname, path, username, password, port) - - def svnFile(path: String): String = - file("svn", 
path) -} \ No newline at end of file diff --git a/scalalib/src/mill/scalalib/publish/package.scala b/scalalib/src/mill/scalalib/publish/package.scala deleted file mode 100644 index 99eeec14..00000000 --- a/scalalib/src/mill/scalalib/publish/package.scala +++ /dev/null @@ -1,3 +0,0 @@ -package mill.scalalib - -package object publish extends JsonFormatters diff --git a/scalalib/src/mill/scalalib/publish/settings.scala b/scalalib/src/mill/scalalib/publish/settings.scala deleted file mode 100644 index bca81cf0..00000000 --- a/scalalib/src/mill/scalalib/publish/settings.scala +++ /dev/null @@ -1,91 +0,0 @@ -package mill.scalalib.publish - -import mill.scalalib.Dep - -case class Artifact(group: String, id: String, version: String) { - def isSnapshot: Boolean = version.endsWith("-SNAPSHOT") -} - -object Artifact { - def fromDepJava(dep: Dep) = { - assert(dep.cross.isConstant, s"Not a Java dependency: $dep") - fromDep(dep, "", "", "") - } - - def fromDep(dep: Dep, - scalaFull: String, - scalaBin: String, - platformSuffix: String): Dependency = { - val name = dep.artifactName( - binaryVersion = scalaBin, - fullVersion = scalaFull, - platformSuffix = platformSuffix - ) - Dependency( - Artifact( - dep.dep.module.organization, - name, - dep.dep.version - ), - Scope.Compile, - if (dep.dep.configuration == "") None else Some(dep.dep.configuration), - dep.dep.exclusions.toList - ) - } -} - -sealed trait Scope -object Scope { - case object Compile extends Scope - case object Provided extends Scope - case object Runtime extends Scope - case object Test extends Scope -} - -case class Dependency( - artifact: Artifact, - scope: Scope, - configuration: Option[String] = None, - exclusions: Seq[(String, String)] = Nil -) - -case class Developer( - id: String, - name: String, - url: String, - organization: Option[String] = None, - organizationUrl: Option[String] = None -) - -case class PomSettings( - description: String, - organization: String, - url: String, - licenses: Seq[License], - versionControl: VersionControl, - developers: Seq[Developer] -) - -object PomSettings { - @deprecated("use VersionControl instead of SCM", "0.1.3") - def apply(description: String, - organization: String, - url: String, - licenses: Seq[License], - scm: SCM, - developers: Seq[Developer]): PomSettings = { - PomSettings( - description = description, - organization = organization, - url = url, - licenses = licenses, - versionControl = VersionControl( - browsableRepository = Some(scm.url), - connection = Some(scm.connection), - developerConnection = None, - tag = None - ), - developers = developers - ) - } -} diff --git a/scalalib/src/mill/scalalib/scalafmt/ScalafmtModule.scala b/scalalib/src/mill/scalalib/scalafmt/ScalafmtModule.scala deleted file mode 100644 index 6a81d975..00000000 --- a/scalalib/src/mill/scalalib/scalafmt/ScalafmtModule.scala +++ /dev/null @@ -1,57 +0,0 @@ -package mill.scalalib.scalafmt - -import mill._ -import mill.define._ -import mill.scalalib._ - -trait ScalafmtModule extends JavaModule { - - def reformat(): Command[Unit] = T.command { - ScalafmtWorkerModule - .worker() - .reformat( - filesToFormat(sources()), - scalafmtConfig().head, - scalafmtDeps().map(_.path) - ) - } - - def scalafmtVersion: T[String] = "1.5.1" - - def scalafmtConfig: Sources = T.sources(os.pwd / ".scalafmt.conf") - - def scalafmtDeps: T[Agg[PathRef]] = T { - Lib.resolveDependencies( - zincWorker.repositories, - Lib.depToDependency(_, "2.12.4"), - Seq(ivy"com.geirsson::scalafmt-cli:${scalafmtVersion()}") - ) - } - - protected def 
filesToFormat(sources: Seq[PathRef]) = { - for { - pathRef <- sources if os.exists(pathRef.path) - file <- os.walk(pathRef.path) if os.isFile(file) && file.ext == "scala" - } yield PathRef(file) - } - -} - -object ScalafmtModule extends ExternalModule with ScalafmtModule { - - def reformatAll(sources: mill.main.Tasks[Seq[PathRef]]): Command[Unit] = - T.command { - val files = Task.sequence(sources.value)().flatMap(filesToFormat) - ScalafmtWorkerModule - .worker() - .reformat( - files, - scalafmtConfig().head, - scalafmtDeps().map(_.path) - ) - } - - implicit def millScoptTargetReads[T] = new mill.main.Tasks.Scopt[T]() - - lazy val millDiscover = Discover[this.type] -} diff --git a/scalalib/src/mill/scalalib/scalafmt/ScalafmtWorker.scala b/scalalib/src/mill/scalalib/scalafmt/ScalafmtWorker.scala deleted file mode 100644 index 47d8375f..00000000 --- a/scalalib/src/mill/scalalib/scalafmt/ScalafmtWorker.scala +++ /dev/null @@ -1,57 +0,0 @@ -package mill.scalalib.scalafmt - -import mill._ -import mill.define.{Discover, ExternalModule, Worker} -import mill.modules.Jvm -import mill.api.Ctx - -import scala.collection.mutable - -object ScalafmtWorkerModule extends ExternalModule { - def worker: Worker[ScalafmtWorker] = T.worker { new ScalafmtWorker() } - - lazy val millDiscover = Discover[this.type] -} - -private[scalafmt] class ScalafmtWorker { - private val reformatted: mutable.Map[os.Path, Int] = mutable.Map.empty - private var configSig: Int = 0 - - def reformat(input: Seq[PathRef], - scalafmtConfig: PathRef, - scalafmtClasspath: Agg[os.Path])(implicit ctx: Ctx): Unit = { - val toFormat = - if (scalafmtConfig.sig != configSig) input - else - input.filterNot(ref => reformatted.get(ref.path).contains(ref.sig)) - - if (toFormat.nonEmpty) { - ctx.log.info(s"Formatting ${toFormat.size} Scala sources") - reformatAction(toFormat.map(_.path), - scalafmtConfig.path, - scalafmtClasspath) - reformatted ++= toFormat.map { ref => - val updRef = PathRef(ref.path) - updRef.path -> updRef.sig - } - configSig = scalafmtConfig.sig - } else { - ctx.log.info(s"Everything is formatted already") - } - } - - private val cliFlags = Seq("--non-interactive", "--quiet") - - private def reformatAction(toFormat: Seq[os.Path], - config: os.Path, - classpath: Agg[os.Path])(implicit ctx: Ctx) = { - val configFlags = - if (os.exists(config)) Seq("--config", config.toString) else Seq.empty - Jvm.runSubprocess( - "org.scalafmt.cli.Cli", - classpath, - mainArgs = toFormat.map(_.toString) ++ configFlags ++ cliFlags - ) - } - -} diff --git a/scalalib/src/package.scala b/scalalib/src/package.scala new file mode 100644 index 00000000..5a282e82 --- /dev/null +++ b/scalalib/src/package.scala @@ -0,0 +1,12 @@ +package mill + +package object scalalib { + implicit class DepSyntax(ctx: StringContext){ + def ivy(args: Any*) = Dep.parse{ + ( + ctx.parts.take(args.length).zip(args).flatMap{case (p, a) => Seq(p, a)} ++ + ctx.parts.drop(args.length) + ).mkString + } + } +} diff --git a/scalalib/src/publish/Ivy.scala b/scalalib/src/publish/Ivy.scala new file mode 100644 index 00000000..22e26ff6 --- /dev/null +++ b/scalalib/src/publish/Ivy.scala @@ -0,0 +1,59 @@ +package mill.scalalib.publish + +import mill.util.Loose.Agg + +import scala.xml.PrettyPrinter + +object Ivy { + + val head = "\n" + + def apply( + artifact: Artifact, + dependencies: Agg[Dependency] + ): String = { + val xml = + + + + + + + + + + + + + + + + + + + + {dependencies.map(renderDependency).toSeq} + + + val pp = new PrettyPrinter(120, 4) + head + pp.format(xml).replaceAll(">", 
">") + } + + private def renderDependency(dep: Dependency) = { + if (dep.exclusions.isEmpty) + ${dep.configuration.getOrElse("default(compile)")}"} /> + else + ${dep.configuration.getOrElse("default(compile)")}"}> + {dep.exclusions.map(ex => )} + + } + + private def scopeToConf(s: Scope): String = s match { + case Scope.Compile => "compile" + case Scope.Provided => "provided" + case Scope.Test => "test" + case Scope.Runtime => "runtime" + } + +} diff --git a/scalalib/src/publish/JsonFormatters.scala b/scalalib/src/publish/JsonFormatters.scala new file mode 100644 index 00000000..8fc90632 --- /dev/null +++ b/scalalib/src/publish/JsonFormatters.scala @@ -0,0 +1,11 @@ +package mill.scalalib.publish + +import upickle.default.{ReadWriter => RW} + +trait JsonFormatters { + implicit lazy val artifactFormat: RW[Artifact] = upickle.default.macroRW + implicit lazy val developerFormat: RW[Developer] = upickle.default.macroRW + implicit lazy val licenseFormat: RW[License] = upickle.default.macroRW + implicit lazy val versionControlFormat: RW[VersionControl] = upickle.default.macroRW + implicit lazy val pomSettingsFormat: RW[PomSettings] = upickle.default.macroRW +} diff --git a/scalalib/src/publish/Licence.scala b/scalalib/src/publish/Licence.scala new file mode 100644 index 00000000..8838ef69 --- /dev/null +++ b/scalalib/src/publish/Licence.scala @@ -0,0 +1,479 @@ +package mill.scalalib.publish + +case class License( + id: String, + name: String, + url: String, + isOsiApproved: Boolean, + isFsfLibre: Boolean, + distribution: String +) + +object License { + @deprecated("use License.LicenseName (ex: License.`Apache-2.0`)", "0.1.0") + def apply(name: String, url: String): License = + License(name, name, url, false, false, "repo") + + /* + wget https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json + + ``` + val circeVersion = "0.9.1" + libraryDependencies ++= Seq( + "io.circe" %% "circe-core", + "io.circe" %% "circe-generic", + "io.circe" %% "circe-parser" + ).map(_ % circeVersion) + + import io.circe._, io.circe.generic.auto._, io.circe.parser._, io.circe.syntax._ + import java.nio.file._ + import System.{lineSeparator => nl} + case class License( + reference: String, + isDeprecatedLicenseId: Boolean, + isFsfLibre: Option[Boolean], + detailsUrl: String, + referenceNumber: String, + name: String, + licenseId: String, + seeAlso: Option[List[String]], + isOsiApproved: Boolean + ) { + def ident: String = { + val startsWithDigit = (0 to 9).map(_.toString).exists(licenseId.startsWith) + if (licenseId.contains("-") || !startsWithDigit) s"`$licenseId`" + else licenseId + } + + def syntax(identPadding: Int, namePadding: Int): String = { + val s1 = " " * (identPadding - ident.size) + val s2 = " " * (namePadding - name.size) + val ticks = if (ident == licenseId) 2 else 0 + val s3 = " " * (identPadding - ticks - ident.size) + val s4 = if(isOsiApproved) " " else "" + s"""val ${ident}${s1} = spdx(\"\"\"$name\"\"\",$s2 "$licenseId", $s3 $isOsiApproved, $s4 ${isFsfLibre.getOrElse(false)})""" + } + } + + + case class Data(licenses: List[License]) + + val json = new String(Files.readAllBytes(Paths.get("data.json"))) + + val licences = decode[Data](json).right.get.licenses + + val identPadding = licences.map(_.licenseId.size + 2).max + val namePadding = licences.map(_.name.size).max + + val output = licences.map(license => license.syntax(identPadding, namePadding)).mkString(nl) + Files.write(Paths.get("out.scala"), output.getBytes("utf-8")) + */ + val `0BSD` = spdx("BSD Zero Clause License", 
"0BSD", false, false) + val AAL = spdx("Attribution Assurance License", "AAL", true, false) + val Abstyles = spdx("Abstyles License", "Abstyles", false, false) + val `Adobe-2006` = spdx("Adobe Systems Incorporated Source Code License Agreement", "Adobe-2006", false, false) + val `Adobe-Glyph` = spdx("Adobe Glyph List License", "Adobe-Glyph", false, false) + val ADSL = spdx("Amazon Digital Services License", "ADSL", false, false) + val `AFL-1.1` = spdx("Academic Free License v1.1", "AFL-1.1", true, true) + val `AFL-1.2` = spdx("Academic Free License v1.2", "AFL-1.2", true, true) + val `AFL-2.0` = spdx("Academic Free License v2.0", "AFL-2.0", true, true) + val `AFL-2.1` = spdx("Academic Free License v2.1", "AFL-2.1", true, true) + val `AFL-3.0` = spdx("Academic Free License v3.0", "AFL-3.0", true, true) + val Afmparse = spdx("Afmparse License", "Afmparse", false, false) + val `AGPL-1.0` = spdx("Affero General Public License v1.0", "AGPL-1.0", false, true) + val `AGPL-3.0-only` = spdx("GNU Affero General Public License v3.0 only", "AGPL-3.0-only", true, false) + val `AGPL-3.0-or-later` = spdx("GNU Affero General Public License v3.0 or later", "AGPL-3.0-or-later", true, false) + val Aladdin = spdx("Aladdin Free Public License", "Aladdin", false, false) + val AMDPLPA = spdx("AMD's plpa_map.c License", "AMDPLPA", false, false) + val AML = spdx("Apple MIT License", "AML", false, false) + val AMPAS = spdx("Academy of Motion Picture Arts and Sciences BSD", "AMPAS", false, false) + val `ANTLR-PD` = spdx("ANTLR Software Rights Notice", "ANTLR-PD", false, false) + val `Apache-1.0` = spdx("Apache License 1.0", "Apache-1.0", false, true) + val `Apache-1.1` = spdx("Apache License 1.1", "Apache-1.1", true, true) + val `Apache-2.0` = spdx("Apache License 2.0", "Apache-2.0", true, true) + val APAFML = spdx("Adobe Postscript AFM License", "APAFML", false, false) + val `APL-1.0` = spdx("Adaptive Public License 1.0", "APL-1.0", true, false) + val `APSL-1.0` = spdx("Apple Public Source License 1.0", "APSL-1.0", true, false) + val `APSL-1.1` = spdx("Apple Public Source License 1.1", "APSL-1.1", true, false) + val `APSL-1.2` = spdx("Apple Public Source License 1.2", "APSL-1.2", true, false) + val `APSL-2.0` = spdx("Apple Public Source License 2.0", "APSL-2.0", true, true) + val `Artistic-1.0-cl8` = spdx("Artistic License 1.0 w/clause 8", "Artistic-1.0-cl8", true, false) + val `Artistic-1.0-Perl` = spdx("Artistic License 1.0 (Perl)", "Artistic-1.0-Perl", true, false) + val `Artistic-1.0` = spdx("Artistic License 1.0", "Artistic-1.0", true, false) + val `Artistic-2.0` = spdx("Artistic License 2.0", "Artistic-2.0", true, true) + val Bahyph = spdx("Bahyph License", "Bahyph", false, false) + val Barr = spdx("Barr License", "Barr", false, false) + val Beerware = spdx("Beerware License", "Beerware", false, false) + val `BitTorrent-1.0` = spdx("BitTorrent Open Source License v1.0", "BitTorrent-1.0", false, false) + val `BitTorrent-1.1` = spdx("BitTorrent Open Source License v1.1", "BitTorrent-1.1", false, true) + val Borceux = spdx("Borceux license", "Borceux", false, false) + val `BSD-1-Clause` = spdx("BSD 1-Clause License", "BSD-1-Clause", false, false) + val `BSD-2-Clause-FreeBSD` = spdx("BSD 2-Clause FreeBSD License", "BSD-2-Clause-FreeBSD", false, true) + val `BSD-2-Clause-NetBSD` = spdx("BSD 2-Clause NetBSD License", "BSD-2-Clause-NetBSD", false, false) + val `BSD-2-Clause-Patent` = spdx("BSD-2-Clause Plus Patent License", "BSD-2-Clause-Patent", true, false) + val `BSD-2-Clause` = spdx("BSD 2-Clause \"Simplified\" 
License", "BSD-2-Clause", true, false) + val `BSD-3-Clause-Attribution` = spdx("BSD with attribution", "BSD-3-Clause-Attribution", false, false) + val `BSD-3-Clause-Clear` = spdx("BSD 3-Clause Clear License", "BSD-3-Clause-Clear", false, true) + val `BSD-3-Clause-LBNL` = spdx("Lawrence Berkeley National Labs BSD variant license", "BSD-3-Clause-LBNL", false, false) + val `BSD-3-Clause-No-Nuclear-License-2014` = spdx("BSD 3-Clause No Nuclear License 2014", "BSD-3-Clause-No-Nuclear-License-2014", false, false) + val `BSD-3-Clause-No-Nuclear-License` = spdx("BSD 3-Clause No Nuclear License", "BSD-3-Clause-No-Nuclear-License", false, false) + val `BSD-3-Clause-No-Nuclear-Warranty` = spdx("BSD 3-Clause No Nuclear Warranty", "BSD-3-Clause-No-Nuclear-Warranty", false, false) + val `BSD-3-Clause` = spdx("BSD 3-Clause \"New\" or \"Revised\" License", "BSD-3-Clause", true, true) + val `BSD-4-Clause-UC` = spdx("BSD-4-Clause (University of California-Specific)", "BSD-4-Clause-UC", false, false) + val `BSD-4-Clause` = spdx("BSD 4-Clause \"Original\" or \"Old\" License", "BSD-4-Clause", false, true) + val `BSD-Protection` = spdx("BSD Protection License", "BSD-Protection", false, false) + val `BSD-Source-Code` = spdx("BSD Source Code Attribution", "BSD-Source-Code", false, false) + val `BSL-1.0` = spdx("Boost Software License 1.0", "BSL-1.0", true, true) + val `bzip2-1.0.5` = spdx("bzip2 and libbzip2 License v1.0.5", "bzip2-1.0.5", false, false) + val `bzip2-1.0.6` = spdx("bzip2 and libbzip2 License v1.0.6", "bzip2-1.0.6", false, false) + val Caldera = spdx("Caldera License", "Caldera", false, false) + val `CATOSL-1.1` = spdx("Computer Associates Trusted Open Source License 1.1", "CATOSL-1.1", true, false) + val `CC-BY-1.0` = spdx("Creative Commons Attribution 1.0", "CC-BY-1.0", false, false) + val `CC-BY-2.0` = spdx("Creative Commons Attribution 2.0", "CC-BY-2.0", false, false) + val `CC-BY-2.5` = spdx("Creative Commons Attribution 2.5", "CC-BY-2.5", false, false) + val `CC-BY-3.0` = spdx("Creative Commons Attribution 3.0", "CC-BY-3.0", false, false) + val `CC-BY-4.0` = spdx("Creative Commons Attribution 4.0", "CC-BY-4.0", false, true) + val `CC-BY-NC-1.0` = spdx("Creative Commons Attribution Non Commercial 1.0", "CC-BY-NC-1.0", false, false) + val `CC-BY-NC-2.0` = spdx("Creative Commons Attribution Non Commercial 2.0", "CC-BY-NC-2.0", false, false) + val `CC-BY-NC-2.5` = spdx("Creative Commons Attribution Non Commercial 2.5", "CC-BY-NC-2.5", false, false) + val `CC-BY-NC-3.0` = spdx("Creative Commons Attribution Non Commercial 3.0", "CC-BY-NC-3.0", false, false) + val `CC-BY-NC-4.0` = spdx("Creative Commons Attribution Non Commercial 4.0", "CC-BY-NC-4.0", false, false) + val `CC-BY-NC-ND-1.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 1.0", "CC-BY-NC-ND-1.0", false, false) + val `CC-BY-NC-ND-2.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 2.0", "CC-BY-NC-ND-2.0", false, false) + val `CC-BY-NC-ND-2.5` = spdx("Creative Commons Attribution Non Commercial No Derivatives 2.5", "CC-BY-NC-ND-2.5", false, false) + val `CC-BY-NC-ND-3.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 3.0", "CC-BY-NC-ND-3.0", false, false) + val `CC-BY-NC-ND-4.0` = spdx("Creative Commons Attribution Non Commercial No Derivatives 4.0", "CC-BY-NC-ND-4.0", false, false) + val `CC-BY-NC-SA-1.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 1.0", "CC-BY-NC-SA-1.0", false, false) + val `CC-BY-NC-SA-2.0` = spdx("Creative Commons Attribution Non 
Commercial Share Alike 2.0", "CC-BY-NC-SA-2.0", false, false) + val `CC-BY-NC-SA-2.5` = spdx("Creative Commons Attribution Non Commercial Share Alike 2.5", "CC-BY-NC-SA-2.5", false, false) + val `CC-BY-NC-SA-3.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 3.0", "CC-BY-NC-SA-3.0", false, false) + val `CC-BY-NC-SA-4.0` = spdx("Creative Commons Attribution Non Commercial Share Alike 4.0", "CC-BY-NC-SA-4.0", false, false) + val `CC-BY-ND-1.0` = spdx("Creative Commons Attribution No Derivatives 1.0", "CC-BY-ND-1.0", false, false) + val `CC-BY-ND-2.0` = spdx("Creative Commons Attribution No Derivatives 2.0", "CC-BY-ND-2.0", false, false) + val `CC-BY-ND-2.5` = spdx("Creative Commons Attribution No Derivatives 2.5", "CC-BY-ND-2.5", false, false) + val `CC-BY-ND-3.0` = spdx("Creative Commons Attribution No Derivatives 3.0", "CC-BY-ND-3.0", false, false) + val `CC-BY-ND-4.0` = spdx("Creative Commons Attribution No Derivatives 4.0", "CC-BY-ND-4.0", false, false) + val `CC-BY-SA-1.0` = spdx("Creative Commons Attribution Share Alike 1.0", "CC-BY-SA-1.0", false, false) + val `CC-BY-SA-2.0` = spdx("Creative Commons Attribution Share Alike 2.0", "CC-BY-SA-2.0", false, false) + val `CC-BY-SA-2.5` = spdx("Creative Commons Attribution Share Alike 2.5", "CC-BY-SA-2.5", false, false) + val `CC-BY-SA-3.0` = spdx("Creative Commons Attribution Share Alike 3.0", "CC-BY-SA-3.0", false, false) + val `CC-BY-SA-4.0` = spdx("Creative Commons Attribution Share Alike 4.0", "CC-BY-SA-4.0", false, true) + val `CC0-1.0` = spdx("Creative Commons Zero v1.0 Universal", "CC0-1.0", false, true) + val `CDDL-1.0` = spdx("Common Development and Distribution License 1.0", "CDDL-1.0", true, true) + val `CDDL-1.1` = spdx("Common Development and Distribution License 1.1", "CDDL-1.1", false, false) + val `CDLA-Permissive-1.0` = spdx("Community Data License Agreement Permissive 1.0", "CDLA-Permissive-1.0", false, false) + val `CDLA-Sharing-1.0` = spdx("Community Data License Agreement Sharing 1.0", "CDLA-Sharing-1.0", false, false) + val `CECILL-1.0` = spdx("CeCILL Free Software License Agreement v1.0", "CECILL-1.0", false, false) + val `CECILL-1.1` = spdx("CeCILL Free Software License Agreement v1.1", "CECILL-1.1", false, false) + val `CECILL-2.0` = spdx("CeCILL Free Software License Agreement v2.0", "CECILL-2.0", false, true) + val `CECILL-2.1` = spdx("CeCILL Free Software License Agreement v2.1", "CECILL-2.1", true, false) + val `CECILL-B` = spdx("CeCILL-B Free Software License Agreement", "CECILL-B", false, true) + val `CECILL-C` = spdx("CeCILL-C Free Software License Agreement", "CECILL-C", false, true) + val ClArtistic = spdx("Clarified Artistic License", "ClArtistic", false, true) + val `CNRI-Jython` = spdx("CNRI Jython License", "CNRI-Jython", false, false) + val `CNRI-Python-GPL-Compatible` = spdx("CNRI Python Open Source GPL Compatible License Agreement", "CNRI-Python-GPL-Compatible", false, false) + val `CNRI-Python` = spdx("CNRI Python License", "CNRI-Python", true, false) + val `Condor-1.1` = spdx("Condor Public License v1.1", "Condor-1.1", false, true) + val `CPAL-1.0` = spdx("Common Public Attribution License 1.0", "CPAL-1.0", true, true) + val `CPL-1.0` = spdx("Common Public License 1.0", "CPL-1.0", true, true) + val `CPOL-1.02` = spdx("Code Project Open License 1.02", "CPOL-1.02", false, false) + val Crossword = spdx("Crossword License", "Crossword", false, false) + val CrystalStacker = spdx("CrystalStacker License", "CrystalStacker", false, false) + val `CUA-OPL-1.0` = spdx("CUA Office Public 
License v1.0", "CUA-OPL-1.0", true, false) + val Cube = spdx("Cube License", "Cube", false, false) + val curl = spdx("curl License", "curl", false, false) + val `D-FSL-1.0` = spdx("Deutsche Freie Software Lizenz", "D-FSL-1.0", false, false) + val diffmark = spdx("diffmark license", "diffmark", false, false) + val DOC = spdx("DOC License", "DOC", false, false) + val Dotseqn = spdx("Dotseqn License", "Dotseqn", false, false) + val DSDP = spdx("DSDP License", "DSDP", false, false) + val dvipdfm = spdx("dvipdfm License", "dvipdfm", false, false) + val `ECL-1.0` = spdx("Educational Community License v1.0", "ECL-1.0", true, false) + val `ECL-2.0` = spdx("Educational Community License v2.0", "ECL-2.0", true, true) + val `EFL-1.0` = spdx("Eiffel Forum License v1.0", "EFL-1.0", true, false) + val `EFL-2.0` = spdx("Eiffel Forum License v2.0", "EFL-2.0", true, true) + val eGenix = spdx("eGenix.com Public License 1.1.0", "eGenix", false, false) + val Entessa = spdx("Entessa Public License v1.0", "Entessa", true, false) + val `EPL-1.0` = spdx("Eclipse Public License 1.0", "EPL-1.0", true, true) + val `EPL-2.0` = spdx("Eclipse Public License 2.0", "EPL-2.0", true, true) + val `ErlPL-1.1` = spdx("Erlang Public License v1.1", "ErlPL-1.1", false, false) + val EUDatagrid = spdx("EU DataGrid Software License", "EUDatagrid", true, true) + val `EUPL-1.0` = spdx("European Union Public License 1.0", "EUPL-1.0", false, false) + val `EUPL-1.1` = spdx("European Union Public License 1.1", "EUPL-1.1", true, true) + val `EUPL-1.2` = spdx("European Union Public License 1.2", "EUPL-1.2", true, false) + val Eurosym = spdx("Eurosym License", "Eurosym", false, false) + val Fair = spdx("Fair License", "Fair", true, false) + val `Frameworx-1.0` = spdx("Frameworx Open License 1.0", "Frameworx-1.0", true, false) + val FreeImage = spdx("FreeImage Public License v1.0", "FreeImage", false, false) + val FSFAP = spdx("FSF All Permissive License", "FSFAP", false, true) + val FSFUL = spdx("FSF Unlimited License", "FSFUL", false, false) + val FSFULLR = spdx("FSF Unlimited License (with License Retention)", "FSFULLR", false, false) + val FTL = spdx("Freetype Project License", "FTL", false, true) + val `GFDL-1.1-only` = spdx("GNU Free Documentation License v1.1 only", "GFDL-1.1-only", false, false) + val `GFDL-1.1-or-later` = spdx("GNU Free Documentation License v1.1 or later", "GFDL-1.1-or-later", false, false) + val `GFDL-1.2-only` = spdx("GNU Free Documentation License v1.2 only", "GFDL-1.2-only", false, false) + val `GFDL-1.2-or-later` = spdx("GNU Free Documentation License v1.2 or later", "GFDL-1.2-or-later", false, false) + val `GFDL-1.3-only` = spdx("GNU Free Documentation License v1.3 only", "GFDL-1.3-only", false, false) + val `GFDL-1.3-or-later` = spdx("GNU Free Documentation License v1.3 or later", "GFDL-1.3-or-later", false, false) + val Giftware = spdx("Giftware License", "Giftware", false, false) + val GL2PS = spdx("GL2PS License", "GL2PS", false, false) + val Glide = spdx("3dfx Glide License", "Glide", false, false) + val Glulxe = spdx("Glulxe License", "Glulxe", false, false) + val gnuplot = spdx("gnuplot License", "gnuplot", false, true) + val `GPL-1.0-only` = spdx("GNU General Public License v1.0 only", "GPL-1.0-only", false, false) + val `GPL-1.0-or-later` = spdx("GNU General Public License v1.0 or later", "GPL-1.0-or-later", false, false) + val `GPL-2.0-only` = spdx("GNU General Public License v2.0 only", "GPL-2.0-only", true, false) + val `GPL-2.0-or-later` = spdx("GNU General Public License v2.0 or later", 
"GPL-2.0-or-later", true, false) + val `GPL-3.0-only` = spdx("GNU General Public License v3.0 only", "GPL-3.0-only", true, false) + val `GPL-3.0-or-later` = spdx("GNU General Public License v3.0 or later", "GPL-3.0-or-later", true, false) + val `gSOAP-1.3b` = spdx("gSOAP Public License v1.3b", "gSOAP-1.3b", false, false) + val HaskellReport = spdx("Haskell Language Report License", "HaskellReport", false, false) + val HPND = spdx("Historical Permission Notice and Disclaimer", "HPND", true, true) + val `IBM-pibs` = spdx("IBM PowerPC Initialization and Boot Software", "IBM-pibs", false, false) + val ICU = spdx("ICU License", "ICU", false, false) + val IJG = spdx("Independent JPEG Group License", "IJG", false, true) + val ImageMagick = spdx("ImageMagick License", "ImageMagick", false, false) + val iMatix = spdx("iMatix Standard Function Library Agreement", "iMatix", false, true) + val Imlib2 = spdx("Imlib2 License", "Imlib2", false, true) + val `Info-ZIP` = spdx("Info-ZIP License", "Info-ZIP", false, false) + val `Intel-ACPI` = spdx("Intel ACPI Software License Agreement", "Intel-ACPI", false, false) + val Intel = spdx("Intel Open Source License", "Intel", true, true) + val `Interbase-1.0` = spdx("Interbase Public License v1.0", "Interbase-1.0", false, false) + val IPA = spdx("IPA Font License", "IPA", true, true) + val `IPL-1.0` = spdx("IBM Public License v1.0", "IPL-1.0", true, true) + val ISC = spdx("ISC License", "ISC", true, true) + val `JasPer-2.0` = spdx("JasPer License", "JasPer-2.0", false, false) + val JSON = spdx("JSON License", "JSON", false, false) + val `LAL-1.2` = spdx("Licence Art Libre 1.2", "LAL-1.2", false, false) + val `LAL-1.3` = spdx("Licence Art Libre 1.3", "LAL-1.3", false, false) + val Latex2e = spdx("Latex2e License", "Latex2e", false, false) + val Leptonica = spdx("Leptonica License", "Leptonica", false, false) + val `LGPL-2.0-only` = spdx("GNU Library General Public License v2 only", "LGPL-2.0-only", true, false) + val `LGPL-2.0-or-later` = spdx("GNU Library General Public License v2 or later", "LGPL-2.0-or-later", true, false) + val `LGPL-2.1-only` = spdx("GNU Lesser General Public License v2.1 only", "LGPL-2.1-only", true, false) + val `LGPL-2.1-or-later` = spdx("GNU Lesser General Public License v2.1 or later", "LGPL-2.1-or-later", true, false) + val `LGPL-3.0-only` = spdx("GNU Lesser General Public License v3.0 only", "LGPL-3.0-only", true, false) + val `LGPL-3.0-or-later` = spdx("GNU Lesser General Public License v3.0 or later", "LGPL-3.0-or-later", true, false) + val LGPLLR = spdx("Lesser General Public License For Linguistic Resources", "LGPLLR", false, false) + val Libpng = spdx("libpng License", "Libpng", false, false) + val libtiff = spdx("libtiff License", "libtiff", false, false) + val `LiLiQ-P-1.1` = spdx("Licence Libre du Québec – Permissive version 1.1", "LiLiQ-P-1.1", true, false) + val `LiLiQ-R-1.1` = spdx("Licence Libre du Québec – Réciprocité version 1.1", "LiLiQ-R-1.1", true, false) + val `LiLiQ-Rplus-1.1` = spdx("Licence Libre du Québec – Réciprocité forte version 1.1", "LiLiQ-Rplus-1.1", true, false) + val `LPL-1.0` = spdx("Lucent Public License Version 1.0", "LPL-1.0", true, false) + val `LPL-1.02` = spdx("Lucent Public License v1.02", "LPL-1.02", true, true) + val `LPPL-1.0` = spdx("LaTeX Project Public License v1.0", "LPPL-1.0", false, false) + val `LPPL-1.1` = spdx("LaTeX Project Public License v1.1", "LPPL-1.1", false, false) + val `LPPL-1.2` = spdx("LaTeX Project Public License v1.2", "LPPL-1.2", false, true) + val `LPPL-1.3a` = 
spdx("LaTeX Project Public License v1.3a", "LPPL-1.3a", false, true) + val `LPPL-1.3c` = spdx("LaTeX Project Public License v1.3c", "LPPL-1.3c", true, false) + val MakeIndex = spdx("MakeIndex License", "MakeIndex", false, false) + val MirOS = spdx("MirOS License", "MirOS", true, false) + val `MIT-advertising` = spdx("Enlightenment License (e16)", "MIT-advertising", false, false) + val `MIT-CMU` = spdx("CMU License", "MIT-CMU", false, false) + val `MIT-enna` = spdx("enna License", "MIT-enna", false, false) + val `MIT-feh` = spdx("feh License", "MIT-feh", false, false) + val MIT = spdx("MIT License", "MIT", true, true) + val MITNFA = spdx("MIT +no-false-attribs license", "MITNFA", false, false) + val Motosoto = spdx("Motosoto License", "Motosoto", true, false) + val mpich2 = spdx("mpich2 License", "mpich2", false, false) + val `MPL-1.0` = spdx("Mozilla Public License 1.0", "MPL-1.0", true, false) + val `MPL-1.1` = spdx("Mozilla Public License 1.1", "MPL-1.1", true, true) + val `MPL-2.0-no-copyleft-exception` = spdx("Mozilla Public License 2.0 (no copyleft exception)", "MPL-2.0-no-copyleft-exception", true, false) + val `MPL-2.0` = spdx("Mozilla Public License 2.0", "MPL-2.0", true, true) + val `MS-PL` = spdx("Microsoft Public License", "MS-PL", true, true) + val `MS-RL` = spdx("Microsoft Reciprocal License", "MS-RL", true, true) + val MTLL = spdx("Matrix Template Library License", "MTLL", false, false) + val Multics = spdx("Multics License", "Multics", true, false) + val Mup = spdx("Mup License", "Mup", false, false) + val `NASA-1.3` = spdx("NASA Open Source Agreement 1.3", "NASA-1.3", true, false) + val Naumen = spdx("Naumen Public License", "Naumen", true, false) + val `NBPL-1.0` = spdx("Net Boolean Public License v1", "NBPL-1.0", false, false) + val NCSA = spdx("University of Illinois/NCSA Open Source License", "NCSA", true, true) + val `Net-SNMP` = spdx("Net-SNMP License", "Net-SNMP", false, false) + val NetCDF = spdx("NetCDF license", "NetCDF", false, false) + val Newsletr = spdx("Newsletr License", "Newsletr", false, false) + val NGPL = spdx("Nethack General Public License", "NGPL", true, false) + val `NLOD-1.0` = spdx("Norwegian Licence for Open Government Data", "NLOD-1.0", false, false) + val NLPL = spdx("No Limit Public License", "NLPL", false, false) + val Nokia = spdx("Nokia Open Source License", "Nokia", true, true) + val NOSL = spdx("Netizen Open Source License", "NOSL", false, true) + val Noweb = spdx("Noweb License", "Noweb", false, false) + val `NPL-1.0` = spdx("Netscape Public License v1.0", "NPL-1.0", false, true) + val `NPL-1.1` = spdx("Netscape Public License v1.1", "NPL-1.1", false, true) + val `NPOSL-3.0` = spdx("Non-Profit Open Software License 3.0", "NPOSL-3.0", true, false) + val NRL = spdx("NRL License", "NRL", false, false) + val NTP = spdx("NTP License", "NTP", true, false) + val `OCCT-PL` = spdx("Open CASCADE Technology Public License", "OCCT-PL", false, false) + val `OCLC-2.0` = spdx("OCLC Research Public License 2.0", "OCLC-2.0", true, false) + val `ODbL-1.0` = spdx("ODC Open Database License v1.0", "ODbL-1.0", false, true) + val `OFL-1.0` = spdx("SIL Open Font License 1.0", "OFL-1.0", false, false) + val `OFL-1.1` = spdx("SIL Open Font License 1.1", "OFL-1.1", true, true) + val OGTSL = spdx("Open Group Test Suite License", "OGTSL", true, false) + val `OLDAP-1.1` = spdx("Open LDAP Public License v1.1", "OLDAP-1.1", false, false) + val `OLDAP-1.2` = spdx("Open LDAP Public License v1.2", "OLDAP-1.2", false, false) + val `OLDAP-1.3` = spdx("Open LDAP Public 
License v1.3", "OLDAP-1.3", false, false) + val `OLDAP-1.4` = spdx("Open LDAP Public License v1.4", "OLDAP-1.4", false, false) + val `OLDAP-2.0.1` = spdx("Open LDAP Public License v2.0.1", "OLDAP-2.0.1", false, false) + val `OLDAP-2.0` = spdx("Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", "OLDAP-2.0", false, false) + val `OLDAP-2.1` = spdx("Open LDAP Public License v2.1", "OLDAP-2.1", false, false) + val `OLDAP-2.2.1` = spdx("Open LDAP Public License v2.2.1", "OLDAP-2.2.1", false, false) + val `OLDAP-2.2.2` = spdx("Open LDAP Public License 2.2.2", "OLDAP-2.2.2", false, false) + val `OLDAP-2.2` = spdx("Open LDAP Public License v2.2", "OLDAP-2.2", false, false) + val `OLDAP-2.3` = spdx("Open LDAP Public License v2.3", "OLDAP-2.3", false, true) + val `OLDAP-2.4` = spdx("Open LDAP Public License v2.4", "OLDAP-2.4", false, false) + val `OLDAP-2.5` = spdx("Open LDAP Public License v2.5", "OLDAP-2.5", false, false) + val `OLDAP-2.6` = spdx("Open LDAP Public License v2.6", "OLDAP-2.6", false, false) + val `OLDAP-2.7` = spdx("Open LDAP Public License v2.7", "OLDAP-2.7", false, true) + val `OLDAP-2.8` = spdx("Open LDAP Public License v2.8", "OLDAP-2.8", false, false) + val OML = spdx("Open Market License", "OML", false, false) + val OpenSSL = spdx("OpenSSL License", "OpenSSL", false, true) + val `OPL-1.0` = spdx("Open Public License v1.0", "OPL-1.0", false, false) + val `OSET-PL-2.1` = spdx("OSET Public License version 2.1", "OSET-PL-2.1", true, false) + val `OSL-1.0` = spdx("Open Software License 1.0", "OSL-1.0", true, true) + val `OSL-1.1` = spdx("Open Software License 1.1", "OSL-1.1", false, true) + val `OSL-2.0` = spdx("Open Software License 2.0", "OSL-2.0", true, true) + val `OSL-2.1` = spdx("Open Software License 2.1", "OSL-2.1", true, true) + val `OSL-3.0` = spdx("Open Software License 3.0", "OSL-3.0", true, true) + val `PDDL-1.0` = spdx("ODC Public Domain Dedication & License 1.0", "PDDL-1.0", false, false) + val `PHP-3.0` = spdx("PHP License v3.0", "PHP-3.0", true, false) + val `PHP-3.01` = spdx("PHP License v3.01", "PHP-3.01", false, true) + val Plexus = spdx("Plexus Classworlds License", "Plexus", false, false) + val PostgreSQL = spdx("PostgreSQL License", "PostgreSQL", true, false) + val psfrag = spdx("psfrag License", "psfrag", false, false) + val psutils = spdx("psutils License", "psutils", false, false) + val `Python-2.0` = spdx("Python License 2.0", "Python-2.0", true, true) + val Qhull = spdx("Qhull License", "Qhull", false, false) + val `QPL-1.0` = spdx("Q Public License 1.0", "QPL-1.0", true, true) + val Rdisc = spdx("Rdisc License", "Rdisc", false, false) + val `RHeCos-1.1` = spdx("Red Hat eCos Public License v1.1", "RHeCos-1.1", false, false) + val `RPL-1.1` = spdx("Reciprocal Public License 1.1", "RPL-1.1", true, false) + val `RPL-1.5` = spdx("Reciprocal Public License 1.5", "RPL-1.5", true, false) + val `RPSL-1.0` = spdx("RealNetworks Public Source License v1.0", "RPSL-1.0", true, true) + val `RSA-MD` = spdx("RSA Message-Digest License ", "RSA-MD", false, false) + val RSCPL = spdx("Ricoh Source Code Public License", "RSCPL", true, false) + val Ruby = spdx("Ruby License", "Ruby", false, true) + val `SAX-PD` = spdx("Sax Public Domain Notice", "SAX-PD", false, false) + val Saxpath = spdx("Saxpath License", "Saxpath", false, false) + val SCEA = spdx("SCEA Shared Source License", "SCEA", false, false) + val Sendmail = spdx("Sendmail License", "Sendmail", false, false) + val `SGI-B-1.0` = spdx("SGI Free Software License B v1.0", "SGI-B-1.0", false, false) + val 
`SGI-B-1.1` = spdx("SGI Free Software License B v1.1", "SGI-B-1.1", false, false) + val `SGI-B-2.0` = spdx("SGI Free Software License B v2.0", "SGI-B-2.0", false, true) + val `SimPL-2.0` = spdx("Simple Public License 2.0", "SimPL-2.0", true, false) + val `SISSL-1.2` = spdx("Sun Industry Standards Source License v1.2", "SISSL-1.2", false, false) + val SISSL = spdx("Sun Industry Standards Source License v1.1", "SISSL", true, false) + val Sleepycat = spdx("Sleepycat License", "Sleepycat", true, true) + val SMLNJ = spdx("Standard ML of New Jersey License", "SMLNJ", false, true) + val SMPPL = spdx("Secure Messaging Protocol Public License", "SMPPL", false, false) + val SNIA = spdx("SNIA Public License 1.1", "SNIA", false, false) + val `Spencer-86` = spdx("Spencer License 86", "Spencer-86", false, false) + val `Spencer-94` = spdx("Spencer License 94", "Spencer-94", false, false) + val `Spencer-99` = spdx("Spencer License 99", "Spencer-99", false, false) + val `SPL-1.0` = spdx("Sun Public License v1.0", "SPL-1.0", true, true) + val `SugarCRM-1.1.3` = spdx("SugarCRM Public License v1.1.3", "SugarCRM-1.1.3", false, false) + val SWL = spdx("Scheme Widget Library (SWL) Software License Agreement", "SWL", false, false) + val TCL = spdx("TCL/TK License", "TCL", false, false) + val `TCP-wrappers` = spdx("TCP Wrappers License", "TCP-wrappers", false, false) + val TMate = spdx("TMate Open Source License", "TMate", false, false) + val `TORQUE-1.1` = spdx("TORQUE v2.5+ Software License v1.1", "TORQUE-1.1", false, false) + val TOSL = spdx("Trusster Open Source License", "TOSL", false, false) + val `Unicode-DFS-2015` = spdx("Unicode License Agreement - Data Files and Software (2015)", "Unicode-DFS-2015", false, false) + val `Unicode-DFS-2016` = spdx("Unicode License Agreement - Data Files and Software (2016)", "Unicode-DFS-2016", false, false) + val `Unicode-TOU` = spdx("Unicode Terms of Use", "Unicode-TOU", false, false) + val Unlicense = spdx("The Unlicense", "Unlicense", false, true) + val `UPL-1.0` = spdx("Universal Permissive License v1.0", "UPL-1.0", true, true) + val Vim = spdx("Vim License", "Vim", false, true) + val VOSTROM = spdx("VOSTROM Public License for Open Source", "VOSTROM", false, false) + val `VSL-1.0` = spdx("Vovida Software License v1.0", "VSL-1.0", true, false) + val `W3C-19980720` = spdx("W3C Software Notice and License (1998-07-20)", "W3C-19980720", false, false) + val `W3C-20150513` = spdx("W3C Software Notice and Document License (2015-05-13)", "W3C-20150513", false, false) + val W3C = spdx("W3C Software Notice and License (2002-12-31)", "W3C", true, true) + val `Watcom-1.0` = spdx("Sybase Open Watcom Public License 1.0", "Watcom-1.0", true, false) + val Wsuipa = spdx("Wsuipa License", "Wsuipa", false, false) + val WTFPL = spdx("Do What The F*ck You Want To Public License", "WTFPL", false, true) + val X11 = spdx("X11 License", "X11", false, true) + val Xerox = spdx("Xerox License", "Xerox", false, false) + val `XFree86-1.1` = spdx("XFree86 License 1.1", "XFree86-1.1", false, true) + val xinetd = spdx("xinetd License", "xinetd", false, true) + val Xnet = spdx("X.Net License", "Xnet", true, false) + val xpp = spdx("XPP License", "xpp", false, false) + val XSkat = spdx("XSkat License", "XSkat", false, false) + val `YPL-1.0` = spdx("Yahoo! Public License v1.0", "YPL-1.0", false, false) + val `YPL-1.1` = spdx("Yahoo! 
Public License v1.1", "YPL-1.1", false, true)
+  val Zed = spdx("Zed License", "Zed", false, false)
+  val `Zend-2.0` = spdx("Zend License v2.0", "Zend-2.0", false, true)
+  val `Zimbra-1.3` = spdx("Zimbra Public License v1.3", "Zimbra-1.3", false, true)
+  val `Zimbra-1.4` = spdx("Zimbra Public License v1.4", "Zimbra-1.4", false, false)
+  val `zlib-acknowledgement` = spdx("zlib/libpng License with Acknowledgement", "zlib-acknowledgement", false, false)
+  val Zlib = spdx("zlib License", "Zlib", true, true)
+  val `ZPL-1.1` = spdx("Zope Public License 1.1", "ZPL-1.1", false, false)
+  val `ZPL-2.0` = spdx("Zope Public License 2.0", "ZPL-2.0", true, true)
+  val `ZPL-2.1` = spdx("Zope Public License 2.1", "ZPL-2.1", false, true)
+  val `AGPL-3.0` = spdx("GNU Affero General Public License v3.0", "AGPL-3.0", true, false)
+  val `eCos-2.0` = spdx("eCos license version 2.0", "eCos-2.0", false, false)
+  val `GFDL-1.1` = spdx("GNU Free Documentation License v1.1", "GFDL-1.1", false, false)
+  val `GFDL-1.2` = spdx("GNU Free Documentation License v1.2", "GFDL-1.2", false, false)
+  val `GFDL-1.3` = spdx("GNU Free Documentation License v1.3", "GFDL-1.3", false, false)
+  val `GPL-1.0+` = spdx("GNU General Public License v1.0 or later", "GPL-1.0+", false, false)
+  val `GPL-1.0` = spdx("GNU General Public License v1.0 only", "GPL-1.0", false, false)
+  val `GPL-2.0+` = spdx("GNU General Public License v2.0 or later", "GPL-2.0+", true, false)
+  val `GPL-2.0-with-autoconf-exception` = spdx("GNU General Public License v2.0 w/Autoconf exception", "GPL-2.0-with-autoconf-exception", false, false)
+  val `GPL-2.0-with-bison-exception` = spdx("GNU General Public License v2.0 w/Bison exception", "GPL-2.0-with-bison-exception", false, false)
+  val `GPL-2.0-with-classpath-exception` = spdx("GNU General Public License v2.0 w/Classpath exception", "GPL-2.0-with-classpath-exception", false, false)
+  val `GPL-2.0-with-font-exception` = spdx("GNU General Public License v2.0 w/Font exception", "GPL-2.0-with-font-exception", false, false)
+  val `GPL-2.0-with-GCC-exception` = spdx("GNU General Public License v2.0 w/GCC Runtime Library exception", "GPL-2.0-with-GCC-exception", false, false)
+  val `GPL-2.0` = spdx("GNU General Public License v2.0 only", "GPL-2.0", true, false)
+  val `GPL-3.0+` = spdx("GNU General Public License v3.0 or later", "GPL-3.0+", true, false)
+  val `GPL-3.0-with-autoconf-exception` = spdx("GNU General Public License v3.0 w/Autoconf exception", "GPL-3.0-with-autoconf-exception", false, false)
+  val `GPL-3.0-with-GCC-exception` = spdx("GNU General Public License v3.0 w/GCC Runtime Library exception", "GPL-3.0-with-GCC-exception", true, false)
+  val `GPL-3.0` = spdx("GNU General Public License v3.0 only", "GPL-3.0", true, false)
+  val `LGPL-2.0+` = spdx("GNU Library General Public License v2 or later", "LGPL-2.0+", true, false)
+  val `LGPL-2.0` = spdx("GNU Library General Public License v2 only", "LGPL-2.0", true, false)
+  val `LGPL-2.1+` = spdx("GNU Lesser General Public License v2.1 or later", "LGPL-2.1+", true, false)
+  val `LGPL-2.1` = spdx("GNU Lesser General Public License v2.1 only", "LGPL-2.1", true, false)
+  val `LGPL-3.0+` = spdx("GNU Lesser General Public License v3.0 or later", "LGPL-3.0+", true, false)
+  val `LGPL-3.0` = spdx("GNU Lesser General Public License v3.0 only", "LGPL-3.0", true, false)
+  val Nunit = spdx("Nunit License", "Nunit", false, false)
+  val `StandardML-NJ` = spdx("Standard ML of New Jersey License", "StandardML-NJ", false, false)
+  val wxWindows = spdx("wxWindows Library License", "wxWindows", false, false)
+
+  private def spdx(fullName: String, id: String, isOsiApproved: Boolean, isFsfLibre: Boolean): License =
+    License(fullName, id, s"https://spdx.org/licenses/$id.html", isOsiApproved, isFsfLibre, "repo")
+
+  val PublicDomain = License(
+    id = "Public Domain",
+    name = "Public Domain",
+    url = "https://creativecommons.org/publicdomain/zero/1.0/",
+    isOsiApproved = true, // sort of: https://opensource.org/faq#public-domain
+    isFsfLibre = true, // I'm not sure about this
+    distribution = "repo"
+  )
+
+  val Scala = License(
+    id = "Scala License",
+    name = "Scala License",
+    url = "http://www.scala-lang.org/license.html",
+    isOsiApproved = false,
+    isFsfLibre = false,
+    distribution = "repo"
+  )
+
+  val TypesafeSubscriptionAgreement = License(
+    id = "Typesafe Subscription Agreement",
+    name = "Typesafe Subscription Agreement",
+    url = "http://downloads.typesafe.com/website/legal/TypesafeSubscriptionAgreement.pdf",
+    isOsiApproved = false,
+    isFsfLibre = false,
+    distribution = "repo"
+  )
+
+  // https://github.com/sbt/sbt/issues/1937#issuecomment-214963983
+  object Common {
+    val Apache2 = License.`Apache-2.0`
+    val MIT = License.MIT
+    val BSD4 = License.`BSD-4-Clause`
+    val Typesafe = License.TypesafeSubscriptionAgreement
+    val BSD3 = License.`BSD-3-Clause`
+  }
+}
\ No newline at end of file
diff --git a/scalalib/src/publish/LocalPublisher.scala b/scalalib/src/publish/LocalPublisher.scala
new file mode 100644
index 00000000..d9839831
--- /dev/null
+++ b/scalalib/src/publish/LocalPublisher.scala
@@ -0,0 +1,32 @@
+package mill.scalalib.publish
+
+
+object LocalPublisher {
+
+  private val root: os.Path = os.home / ".ivy2" / "local"
+
+  def publish(jar: os.Path,
+              sourcesJar: os.Path,
+              docJar: os.Path,
+              pom: os.Path,
+              ivy: os.Path,
+              artifact: Artifact): Unit = {
+    val releaseDir = root / artifact.group / artifact.id / artifact.version
+    writeFiles(
+      jar -> releaseDir / "jars" / s"${artifact.id}.jar",
+      sourcesJar -> releaseDir / "srcs" / s"${artifact.id}-sources.jar",
+      docJar -> releaseDir / "docs" / s"${artifact.id}-javadoc.jar",
+      pom -> releaseDir / "poms" / s"${artifact.id}.pom",
+      ivy -> releaseDir / "ivys" / "ivy.xml"
+    )
+  }
+
+  private def writeFiles(fromTo: (os.Path, os.Path)*): Unit = {
+    fromTo.foreach {
+      case (from, to) =>
+        os.makeDir.all(to / os.up)
+        os.copy.over(from, to)
+    }
+  }
+
+}
diff --git a/scalalib/src/publish/Pom.scala b/scalalib/src/publish/Pom.scala
new file mode 100644
index 00000000..57a0e196
--- /dev/null
+++ b/scalalib/src/publish/Pom.scala
@@ -0,0 +1,117 @@
+package mill.scalalib.publish
+
+import mill.util.Loose.Agg
+
+import scala.xml.{Atom, Elem, NodeSeq, PrettyPrinter}
+
+object Pom {
+
+  val head = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+
+  implicit class XmlOps(val e: Elem) extends AnyVal {
+    // source: https://stackoverflow.com/a/5254068/449071
+    def optional: NodeSeq = {
+      require(e.child.length == 1)
+      e.child.head match {
+        case atom: Atom[Option[_]] => atom.data match {
+          case None => NodeSeq.Empty
+          case Some(x) => e.copy(child = x match {
+            case n: NodeSeq => n
+            case x => new Atom(x)
+          })
+        }
+        case _ => e
+      }
+    }
+  }
+
+  //TODO - not only jar packaging support?
+  def apply(artifact: Artifact,
+            dependencies: Agg[Dependency],
+            name: String,
+            pomSettings: PomSettings): String = {
+    val xml =
+      <project
+        xmlns="http://maven.apache.org/POM/4.0.0"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+        <modelVersion>4.0.0</modelVersion>
+        <name>{name}</name>
+        <groupId>{artifact.group}</groupId>
+        <artifactId>{artifact.id}</artifactId>
+        <packaging>jar</packaging>
+        <description>{pomSettings.description}</description>
+
+        <version>{artifact.version}</version>
+        <url>{pomSettings.url}</url>
+        <licenses>
+          {pomSettings.licenses.map(renderLicense)}
+        </licenses>
+        <scm>
+          { <connection>{pomSettings.versionControl.connection}</connection>.optional }
+          { <developerConnection>{pomSettings.versionControl.developerConnection}</developerConnection>.optional }
+          { <tag>{pomSettings.versionControl.tag}</tag>.optional }
+          { <url>{pomSettings.versionControl.browsableRepository}</url>.optional }
+        </scm>
+        <developers>
+          {pomSettings.developers.map(renderDeveloper)}
+        </developers>
+        <dependencies>
+          {dependencies.map(renderDependency).toSeq}
+        </dependencies>
+      </project>
+
+    val pp = new PrettyPrinter(120, 4)
+    head + pp.format(xml)
+  }
+
+  private def renderLicense(l: License): Elem = {
+    <license>
+      <name>{l.name}</name>
+      <url>{l.url}</url>
+      <distribution>{l.distribution}</distribution>
+    </license>
+  }
+
+  private def renderDeveloper(d: Developer): Elem = {
+    <developer>
+      <id>{d.id}</id>
+      <name>{d.name}</name>
+      { <organization>{d.organization}</organization>.optional }
+      { <organizationUrl>{d.organizationUrl}</organizationUrl>.optional }
+    </developer>
+  }
+
+  private def renderDependency(d: Dependency): Elem = {
+    val scope = d.scope match {
+      case Scope.Compile => NodeSeq.Empty
+      case Scope.Provided => <scope>provided</scope>
+      case Scope.Test => <scope>test</scope>
+      case Scope.Runtime => <scope>runtime</scope>
+    }
+    if (d.exclusions.isEmpty)
+      <dependency>
+        <groupId>{d.artifact.group}</groupId>
+        <artifactId>{d.artifact.id}</artifactId>
+        <version>{d.artifact.version}</version>
+        {scope}
+      </dependency>
+    else
+      <dependency>
+        <groupId>{d.artifact.group}</groupId>
+        <artifactId>{d.artifact.id}</artifactId>
+        <version>{d.artifact.version}</version>
+        <exclusions>
+          {d.exclusions.map(ex =>
+            <exclusion>
+              <groupId>{ex._1}</groupId>
+              <artifactId>{ex._2}</artifactId>
+            </exclusion>
+          )}
+        </exclusions>
+        {scope}
+      </dependency>
+  }
+
+}
diff --git a/scalalib/src/publish/SonatypeHttpApi.scala b/scalalib/src/publish/SonatypeHttpApi.scala
new file mode 100644
index 00000000..12defa93
--- /dev/null
+++ b/scalalib/src/publish/SonatypeHttpApi.scala
@@ -0,0 +1,134 @@
+package mill.scalalib.publish
+
+import java.util.Base64
+
+
+
+import scala.concurrent.duration._
+import scalaj.http.{BaseHttp, HttpOptions, HttpRequest, HttpResponse}
+
+object PatientHttp
+  extends BaseHttp(
+    options = Seq(
+      HttpOptions.connTimeout(5.seconds.toMillis.toInt),
+      HttpOptions.readTimeout(1.minute.toMillis.toInt),
+      HttpOptions.followRedirects(false)
+    )
+  )
+
+class SonatypeHttpApi(uri: String, credentials: String) {
+
+  private val base64Creds = base64(credentials)
+
+  private val commonHeaders = Seq(
+    "Authorization" -> s"Basic $base64Creds",
+    "Accept" -> "application/json",
+    "Content-Type" -> "application/json"
+  )
+
+  // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles.html
+  def getStagingProfileUri(groupId: String): String = {
+    val response = withRetry(
+      PatientHttp(s"$uri/staging/profiles").headers(commonHeaders))
+      .throwError
+
+    val resourceUri =
+      ujson
+        .read(response.body)("data")
+        .arr
+        .find(profile =>
+          groupId.split('.').startsWith(profile("name").str.split('.')))
+        .map(_("resourceURI").str.toString)
+
+    resourceUri.getOrElse(
+      throw new RuntimeException(
+        s"Could not find staging profile for groupId: ${groupId}")
+    )
+  }
+
+  def getStagingRepoState(stagingRepoId: String): String = {
+    val response = PatientHttp(s"${uri}/staging/repository/${stagingRepoId}")
+      .option(HttpOptions.readTimeout(60000))
+      .headers(commonHeaders)
+      .asString
+      .throwError
+
+    ujson.read(response.body)("type").str.toString
+  }
+
+  // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_start.html
+  def createStagingRepo(profileUri: String, groupId: String): String = {
+    val response = withRetry(PatientHttp(s"${profileUri}/start")
+      .headers(commonHeaders)
+      .postData(
+        s"""{"data": {"description": "fresh staging profile for ${groupId}"}}"""))
+
.throwError + + ujson.read(response.body)("data")("stagedRepositoryId").str.toString + } + + // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_finish.html + def closeStagingRepo(profileUri: String, repositoryId: String): Boolean = { + val response = withRetry( + PatientHttp(s"${profileUri}/finish") + .headers(commonHeaders) + .postData( + s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "closing staging repository"}}""" + )) + + response.code == 201 + } + + // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_promote.html + def promoteStagingRepo(profileUri: String, repositoryId: String): Boolean = { + val response = withRetry( + PatientHttp(s"${profileUri}/promote") + .headers(commonHeaders) + .postData( + s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "promote staging repository"}}""" + )) + + response.code == 201 + } + + // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_drop.html + def dropStagingRepo(profileUri: String, repositoryId: String): Boolean = { + val response = withRetry( + PatientHttp(s"${profileUri}/drop") + .headers(commonHeaders) + .postData( + s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "drop staging repository"}}""" + )) + + response.code == 201 + } + + private val uploadTimeout = 5.minutes.toMillis.toInt + + def upload(uri: String, data: Array[Byte]): HttpResponse[String] = { + PatientHttp(uri) + .option(HttpOptions.readTimeout(uploadTimeout)) + .method("PUT") + .headers( + "Content-Type" -> "application/binary", + "Authorization" -> s"Basic ${base64Creds}" + ) + .put(data) + .asString + } + + private def withRetry(request: HttpRequest, + retries: Int = 10): HttpResponse[String] = { + val resp = request.asString + if (resp.is5xx && retries > 0) { + Thread.sleep(500) + withRetry(request, retries - 1) + } else { + resp + } + } + + private def base64(s: String) = + new String(Base64.getEncoder.encode(s.getBytes)) + +} diff --git a/scalalib/src/publish/SonatypePublisher.scala b/scalalib/src/publish/SonatypePublisher.scala new file mode 100644 index 00000000..1843943b --- /dev/null +++ b/scalalib/src/publish/SonatypePublisher.scala @@ -0,0 +1,164 @@ +package mill.scalalib.publish + +import java.math.BigInteger +import java.security.MessageDigest + +import mill.api.Logger + +import scalaj.http.HttpResponse + +class SonatypePublisher(uri: String, + snapshotUri: String, + credentials: String, + gpgPassphrase: Option[String], + signed: Boolean, + log: Logger) { + + private val api = new SonatypeHttpApi(uri, credentials) + + def publish(fileMapping: Seq[(os.Path, String)], artifact: Artifact, release: Boolean): Unit = { + publishAll(release, fileMapping -> artifact) + } + def publishAll(release: Boolean, artifacts: (Seq[(os.Path, String)], Artifact)*): Unit = { + + val mappings = for ((fileMapping0, artifact) <- artifacts) yield { + val publishPath = Seq( + artifact.group.replace(".", "/"), + artifact.id, + artifact.version + ).mkString("/") + val fileMapping = fileMapping0.map{ case (file, name) => (file, publishPath+"/"+name) } + + val signedArtifacts = if (signed) fileMapping.map { + case (file, name) => poorMansSign(file, gpgPassphrase) -> s"$name.asc" + } else Seq() + + artifact -> (fileMapping ++ signedArtifacts).flatMap { + case (file, name) => + val content = os.read.bytes(file) + + Seq( + name -> content, + (name + ".md5") -> md5hex(content), + (name + 
".sha1") -> sha1hex(content) + ) + } + } + + val (snapshots, releases) = mappings.partition(_._1.isSnapshot) + if(snapshots.nonEmpty) { + publishSnapshot(snapshots.flatMap(_._2), snapshots.map(_._1)) + } + val releaseGroups = releases.groupBy(_._1.group) + for((group, groupReleases) <- releaseGroups){ + publishRelease(release, groupReleases.flatMap(_._2), group, releases.map(_._1)) + } + } + + private def publishSnapshot(payloads: Seq[(String, Array[Byte])], + artifacts: Seq[Artifact]): Unit = { + + val publishResults = payloads.map { + case (fileName, data) => + log.info(s"Uploading $fileName") + val resp = api.upload(s"$snapshotUri/$fileName", data) + resp + } + reportPublishResults(publishResults, artifacts) + } + + private def publishRelease(release: Boolean, + payloads: Seq[(String, Array[Byte])], + stagingProfile: String, + artifacts: Seq[Artifact]): Unit = { + val profileUri = api.getStagingProfileUri(stagingProfile) + val stagingRepoId = + api.createStagingRepo(profileUri, stagingProfile) + val baseUri = s"$uri/staging/deployByRepositoryId/$stagingRepoId/" + + val publishResults = payloads.map { + case (fileName, data) => + log.info(s"Uploading ${fileName}") + api.upload(s"$baseUri/$fileName", data) + } + reportPublishResults(publishResults, artifacts) + + if (release) { + log.info("Closing staging repository") + api.closeStagingRepo(profileUri, stagingRepoId) + + log.info("Waiting for staging repository to close") + awaitRepoStatus("closed", stagingRepoId) + + log.info("Promoting staging repository") + api.promoteStagingRepo(profileUri, stagingRepoId) + + log.info("Waiting for staging repository to release") + awaitRepoStatus("released", stagingRepoId) + + log.info("Dropping staging repository") + api.dropStagingRepo(profileUri, stagingRepoId) + + log.info(s"Published ${artifacts.map(_.id).mkString(", ")} successfully") + } + } + + private def reportPublishResults(publishResults: Seq[HttpResponse[String]], + artifacts: Seq[Artifact]) = { + if (publishResults.forall(_.is2xx)) { + log.info(s"Published ${artifacts.map(_.id).mkString(", ")} to Sonatype") + } else { + val errors = publishResults.filterNot(_.is2xx).map { response => + s"Code: ${response.code}, message: ${response.body}" + } + throw new RuntimeException( + s"Failed to publish ${artifacts.map(_.id).mkString(", ")} to Sonatype. Errors: \n${errors.mkString("\n")}" + ) + } + } + + private def awaitRepoStatus(status: String, + stagingRepoId: String, + attempts: Int = 20): Unit = { + def isRightStatus = + api.getStagingRepoState(stagingRepoId).equalsIgnoreCase(status) + var attemptsLeft = attempts + + while (attemptsLeft > 0 && !isRightStatus) { + Thread.sleep(3000) + attemptsLeft -= 1 + if (attemptsLeft == 0) { + throw new RuntimeException( + s"Couldn't wait for staging repository to be ${status}. 
Failing") + } + } + } + + // http://central.sonatype.org/pages/working-with-pgp-signatures.html#signing-a-file + private def poorMansSign(file: os.Path, maybePassphrase: Option[String]): os.Path = { + val fileName = file.toString + maybePassphrase match { + case Some(passphrase) => + os.proc("gpg", "--passphrase", passphrase, "--batch", "--yes", "-a", "-b", fileName) + .call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) + case None => + os.proc("gpg", "--batch", "--yes", "-a", "-b", fileName) + .call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) + } + os.Path(fileName + ".asc") + } + + private def md5hex(bytes: Array[Byte]): Array[Byte] = + hexArray(md5.digest(bytes)).getBytes + + private def sha1hex(bytes: Array[Byte]): Array[Byte] = + hexArray(sha1.digest(bytes)).getBytes + + private def md5 = MessageDigest.getInstance("md5") + + private def sha1 = MessageDigest.getInstance("sha1") + + private def hexArray(arr: Array[Byte]) = + String.format("%0" + (arr.length << 1) + "x", new BigInteger(1, arr)) + +} diff --git a/scalalib/src/publish/VersionControl.scala b/scalalib/src/publish/VersionControl.scala new file mode 100644 index 00000000..aad38ac3 --- /dev/null +++ b/scalalib/src/publish/VersionControl.scala @@ -0,0 +1,131 @@ +package mill.scalalib.publish + +// https://maven.apache.org/pom.html#SCM +/* + * @param browsableRepository: a publicly browsable repository + * (example: https://github.com/lihaoyi/mill) + * @param connection: read-only connection to repository + * (example: scm:git:git://github.com/lihaoyi/mill.git) + * @param developerConnection: read-write connection to repository + * (example: scm:git:git@github.com:lihaoyi/mill.git) + * @param tag: tag that was created for this release. This is useful for + * git and mercurial since it's not possible to include the tag in + * the connection url. + * (example: v2.12.4, HEAD, my-branch, fd8a2567ad32c11bcf8adbaca85bdba72bb4f935, ...) 
+ */ +case class VersionControl( + browsableRepository: Option[String] = None, + connection: Option[String] = None, + developerConnection: Option[String] = None, + tag: Option[String] = None +) + +@deprecated("use VersionControl", "0.1.3") +case class SCM( + url: String, + connection: String +) + +object VersionControl { + def github(owner: String, repo: String, tag: Option[String] = None): VersionControl = + VersionControl( + browsableRepository = Some(s"https://github.com/$owner/$repo"), + connection = Some(VersionControlConnection.gitGit("github.com", s"$owner/$repo.git")), + developerConnection = Some(VersionControlConnection.gitSsh("github.com", s":$owner/$repo.git", username = Some("git"))), + tag = tag + ) + def gitlab(owner: String, repo: String, tag: Option[String] = None): VersionControl = + VersionControl( + browsableRepository = Some(s"https://gitlab.com/$owner/$repo"), + connection = Some(VersionControlConnection.gitGit("gitlab.com", s"$owner/$repo.git")), + developerConnection = Some(VersionControlConnection.gitSsh("gitlab.com", s":$owner/$repo.git", username = Some("git"))), + tag = tag + ) +} + +object VersionControlConnection { + def network(scm: String, + protocol: String, + hostname: String, + path: String, + username: Option[String] = None, + password: Option[String] = None, + port: Option[Int] = None): String = { + val portPart = port.map(":" + _).getOrElse("") + val credentials = + username match { + case Some(user) => + val pass = password.map(":" + _).getOrElse("") + s"${user}${pass}@" + case None => + password match { + case Some(p) => sys.error(s"no username set for password: $p") + case _ => "" + } + } + + val path0 = + if(path.startsWith(":") || path.startsWith("/")) path + else "/" + path + + s"scm:${scm}:${protocol}://${credentials}${hostname}${portPart}${path0}" + } + + def file(scm: String, path: String): String = { + s"scm:$scm:file://$path" + } + + def gitGit(hostname: String, + path: String = "", + port: Option[Int] = None): String = + network("git", "git", hostname, path, port = port) + + def gitHttp(hostname: String, + path: String = "", + port: Option[Int] = None): String = + network("git", "http", hostname, path, port = port) + + def gitHttps(hostname: String, + path: String = "", + port: Option[Int] = None): String = + network("git", "https", hostname, path, port = port) + + def gitSsh(hostname: String, + path: String = "", + username: Option[String] = None, + port: Option[Int] = None): String = + network("git", "ssh", hostname, path, username = username, port = port) + + def gitFile(path: String): String = + file("git", path) + + def svnSsh(hostname: String, + path: String = "", + username: Option[String] = None, + port: Option[Int] = None): String = + network("svn", "svn+ssh", hostname, path, username, None, port) + + def svnHttp(hostname: String, + path: String = "", + username: Option[String] = None, + password: Option[String] = None, + port: Option[Int] = None): String = + network("svn", "http", hostname, path, username, password, port) + + def svnHttps(hostname: String, + path: String = "", + username: Option[String] = None, + password: Option[String] = None, + port: Option[Int] = None): String = + network("svn", "https", hostname, path, username, password, port) + + def svnSvn(hostname: String, + path: String = "", + username: Option[String] = None, + password: Option[String] = None, + port: Option[Int] = None): String = + network("svn", "svn", hostname, path, username, password, port) + + def svnFile(path: String): String = + file("svn", 
path) +} \ No newline at end of file diff --git a/scalalib/src/publish/package.scala b/scalalib/src/publish/package.scala new file mode 100644 index 00000000..99eeec14 --- /dev/null +++ b/scalalib/src/publish/package.scala @@ -0,0 +1,3 @@ +package mill.scalalib + +package object publish extends JsonFormatters diff --git a/scalalib/src/publish/settings.scala b/scalalib/src/publish/settings.scala new file mode 100644 index 00000000..bca81cf0 --- /dev/null +++ b/scalalib/src/publish/settings.scala @@ -0,0 +1,91 @@ +package mill.scalalib.publish + +import mill.scalalib.Dep + +case class Artifact(group: String, id: String, version: String) { + def isSnapshot: Boolean = version.endsWith("-SNAPSHOT") +} + +object Artifact { + def fromDepJava(dep: Dep) = { + assert(dep.cross.isConstant, s"Not a Java dependency: $dep") + fromDep(dep, "", "", "") + } + + def fromDep(dep: Dep, + scalaFull: String, + scalaBin: String, + platformSuffix: String): Dependency = { + val name = dep.artifactName( + binaryVersion = scalaBin, + fullVersion = scalaFull, + platformSuffix = platformSuffix + ) + Dependency( + Artifact( + dep.dep.module.organization, + name, + dep.dep.version + ), + Scope.Compile, + if (dep.dep.configuration == "") None else Some(dep.dep.configuration), + dep.dep.exclusions.toList + ) + } +} + +sealed trait Scope +object Scope { + case object Compile extends Scope + case object Provided extends Scope + case object Runtime extends Scope + case object Test extends Scope +} + +case class Dependency( + artifact: Artifact, + scope: Scope, + configuration: Option[String] = None, + exclusions: Seq[(String, String)] = Nil +) + +case class Developer( + id: String, + name: String, + url: String, + organization: Option[String] = None, + organizationUrl: Option[String] = None +) + +case class PomSettings( + description: String, + organization: String, + url: String, + licenses: Seq[License], + versionControl: VersionControl, + developers: Seq[Developer] +) + +object PomSettings { + @deprecated("use VersionControl instead of SCM", "0.1.3") + def apply(description: String, + organization: String, + url: String, + licenses: Seq[License], + scm: SCM, + developers: Seq[Developer]): PomSettings = { + PomSettings( + description = description, + organization = organization, + url = url, + licenses = licenses, + versionControl = VersionControl( + browsableRepository = Some(scm.url), + connection = Some(scm.connection), + developerConnection = None, + tag = None + ), + developers = developers + ) + } +} diff --git a/scalalib/src/scalafmt/ScalafmtModule.scala b/scalalib/src/scalafmt/ScalafmtModule.scala new file mode 100644 index 00000000..6a81d975 --- /dev/null +++ b/scalalib/src/scalafmt/ScalafmtModule.scala @@ -0,0 +1,57 @@ +package mill.scalalib.scalafmt + +import mill._ +import mill.define._ +import mill.scalalib._ + +trait ScalafmtModule extends JavaModule { + + def reformat(): Command[Unit] = T.command { + ScalafmtWorkerModule + .worker() + .reformat( + filesToFormat(sources()), + scalafmtConfig().head, + scalafmtDeps().map(_.path) + ) + } + + def scalafmtVersion: T[String] = "1.5.1" + + def scalafmtConfig: Sources = T.sources(os.pwd / ".scalafmt.conf") + + def scalafmtDeps: T[Agg[PathRef]] = T { + Lib.resolveDependencies( + zincWorker.repositories, + Lib.depToDependency(_, "2.12.4"), + Seq(ivy"com.geirsson::scalafmt-cli:${scalafmtVersion()}") + ) + } + + protected def filesToFormat(sources: Seq[PathRef]) = { + for { + pathRef <- sources if os.exists(pathRef.path) + file <- os.walk(pathRef.path) if os.isFile(file) 
&& file.ext == "scala" + } yield PathRef(file) + } + +} + +object ScalafmtModule extends ExternalModule with ScalafmtModule { + + def reformatAll(sources: mill.main.Tasks[Seq[PathRef]]): Command[Unit] = + T.command { + val files = Task.sequence(sources.value)().flatMap(filesToFormat) + ScalafmtWorkerModule + .worker() + .reformat( + files, + scalafmtConfig().head, + scalafmtDeps().map(_.path) + ) + } + + implicit def millScoptTargetReads[T] = new mill.main.Tasks.Scopt[T]() + + lazy val millDiscover = Discover[this.type] +} diff --git a/scalalib/src/scalafmt/ScalafmtWorker.scala b/scalalib/src/scalafmt/ScalafmtWorker.scala new file mode 100644 index 00000000..47d8375f --- /dev/null +++ b/scalalib/src/scalafmt/ScalafmtWorker.scala @@ -0,0 +1,57 @@ +package mill.scalalib.scalafmt + +import mill._ +import mill.define.{Discover, ExternalModule, Worker} +import mill.modules.Jvm +import mill.api.Ctx + +import scala.collection.mutable + +object ScalafmtWorkerModule extends ExternalModule { + def worker: Worker[ScalafmtWorker] = T.worker { new ScalafmtWorker() } + + lazy val millDiscover = Discover[this.type] +} + +private[scalafmt] class ScalafmtWorker { + private val reformatted: mutable.Map[os.Path, Int] = mutable.Map.empty + private var configSig: Int = 0 + + def reformat(input: Seq[PathRef], + scalafmtConfig: PathRef, + scalafmtClasspath: Agg[os.Path])(implicit ctx: Ctx): Unit = { + val toFormat = + if (scalafmtConfig.sig != configSig) input + else + input.filterNot(ref => reformatted.get(ref.path).contains(ref.sig)) + + if (toFormat.nonEmpty) { + ctx.log.info(s"Formatting ${toFormat.size} Scala sources") + reformatAction(toFormat.map(_.path), + scalafmtConfig.path, + scalafmtClasspath) + reformatted ++= toFormat.map { ref => + val updRef = PathRef(ref.path) + updRef.path -> updRef.sig + } + configSig = scalafmtConfig.sig + } else { + ctx.log.info(s"Everything is formatted already") + } + } + + private val cliFlags = Seq("--non-interactive", "--quiet") + + private def reformatAction(toFormat: Seq[os.Path], + config: os.Path, + classpath: Agg[os.Path])(implicit ctx: Ctx) = { + val configFlags = + if (os.exists(config)) Seq("--config", config.toString) else Seq.empty + Jvm.runSubprocess( + "org.scalafmt.cli.Cli", + classpath, + mainArgs = toFormat.map(_.toString) ++ configFlags ++ cliFlags + ) + } + +} diff --git a/scalalib/test/resources/hello-java/app/src/Main.java b/scalalib/test/resources/hello-java/app/src/Main.java new file mode 100644 index 00000000..23ddd679 --- /dev/null +++ b/scalalib/test/resources/hello-java/app/src/Main.java @@ -0,0 +1,10 @@ +package hello; + +public class Main{ + public static String getMessage(String[] args){ + return Core.msg() + " " + args[0]; + } + public static void main(String[] args){ + System.out.println(getMessage(args)); + } +} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/app/src/hello/Main.java b/scalalib/test/resources/hello-java/app/src/hello/Main.java deleted file mode 100644 index 23ddd679..00000000 --- a/scalalib/test/resources/hello-java/app/src/hello/Main.java +++ /dev/null @@ -1,10 +0,0 @@ -package hello; - -public class Main{ - public static String getMessage(String[] args){ - return Core.msg() + " " + args[0]; - } - public static void main(String[] args){ - System.out.println(getMessage(args)); - } -} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/app/test/src/MyAppTests.java b/scalalib/test/resources/hello-java/app/test/src/MyAppTests.java new file mode 100644 index 00000000..df0d0351 
--- /dev/null +++ b/scalalib/test/resources/hello-java/app/test/src/MyAppTests.java @@ -0,0 +1,18 @@ +package hello; + +import static org.junit.Assert.assertEquals; +import org.junit.Test; + +public class MyAppTests { + + @Test + public void coreTest() { + assertEquals(Core.msg(), "Hello World"); + } + + @Test + public void appTest() { + assertEquals(Main.getMessage(new String[]{"lols"}), "Hello World lols"); + } + +} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/app/test/src/hello/MyAppTests.java b/scalalib/test/resources/hello-java/app/test/src/hello/MyAppTests.java deleted file mode 100644 index df0d0351..00000000 --- a/scalalib/test/resources/hello-java/app/test/src/hello/MyAppTests.java +++ /dev/null @@ -1,18 +0,0 @@ -package hello; - -import static org.junit.Assert.assertEquals; -import org.junit.Test; - -public class MyAppTests { - - @Test - public void coreTest() { - assertEquals(Core.msg(), "Hello World"); - } - - @Test - public void appTest() { - assertEquals(Main.getMessage(new String[]{"lols"}), "Hello World lols"); - } - -} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/core/src/Core.java b/scalalib/test/resources/hello-java/core/src/Core.java new file mode 100644 index 00000000..3ecb1f61 --- /dev/null +++ b/scalalib/test/resources/hello-java/core/src/Core.java @@ -0,0 +1,7 @@ +package hello; + +public class Core{ + public static String msg(){ + return "Hello World"; + } +} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/core/src/hello/Core.java b/scalalib/test/resources/hello-java/core/src/hello/Core.java deleted file mode 100644 index 3ecb1f61..00000000 --- a/scalalib/test/resources/hello-java/core/src/hello/Core.java +++ /dev/null @@ -1,7 +0,0 @@ -package hello; - -public class Core{ - public static String msg(){ - return "Hello World"; - } -} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/core/test/src/MyCoreTests.java b/scalalib/test/resources/hello-java/core/test/src/MyCoreTests.java new file mode 100644 index 00000000..38bebaeb --- /dev/null +++ b/scalalib/test/resources/hello-java/core/test/src/MyCoreTests.java @@ -0,0 +1,15 @@ +package hello; + +import static org.junit.Assert.assertEquals; +import org.junit.Test; + +public class MyCoreTests { + @Test + public void msgTest() { + assertEquals(Core.msg(), "Hello World!!"); + } + @Test + public void lengthTest() { + assertEquals(Core.msg().length(), 11); + } +} \ No newline at end of file diff --git a/scalalib/test/resources/hello-java/core/test/src/hello/MyCoreTests.java b/scalalib/test/resources/hello-java/core/test/src/hello/MyCoreTests.java deleted file mode 100644 index 38bebaeb..00000000 --- a/scalalib/test/resources/hello-java/core/test/src/hello/MyCoreTests.java +++ /dev/null @@ -1,15 +0,0 @@ -package hello; - -import static org.junit.Assert.assertEquals; -import org.junit.Test; - -public class MyCoreTests { - @Test - public void msgTest() { - assertEquals(Core.msg(), "Hello World!!"); - } - @Test - public void lengthTest() { - assertEquals(Core.msg().length(), 11); - } -} \ No newline at end of file diff --git a/scalalib/test/src/GenIdeaTests.scala b/scalalib/test/src/GenIdeaTests.scala new file mode 100644 index 00000000..50db95c0 --- /dev/null +++ b/scalalib/test/src/GenIdeaTests.scala @@ -0,0 +1,62 @@ +package mill.scalalib + +import coursier.Cache +import mill._ +import mill.util.{TestEvaluator, TestUtil} +import utest._ + +object GenIdeaTests extends TestSuite { + + val millSourcePath = os.pwd 
/ 'target / 'workspace / "gen-idea" + + trait HelloWorldModule extends scalalib.ScalaModule { + def scalaVersion = "2.12.4" + def millSourcePath = GenIdeaTests.millSourcePath + object test extends super.Tests { + def testFrameworks = Seq("utest.runner.Framework") + } + } + + object HelloWorld extends TestUtil.BaseModule with HelloWorldModule + + val helloWorldEvaluator = TestEvaluator.static(HelloWorld) + + def tests: Tests = Tests { + 'genIdeaTests - { + val pp = new scala.xml.PrettyPrinter(999, 4) + + val layout = GenIdeaImpl.xmlFileLayout( + helloWorldEvaluator.evaluator, + HelloWorld, + ("JDK_1_8", "1.8 (1)"), fetchMillModules = false) + for((relPath, xml) <- layout){ + os.write.over(millSourcePath/ "generated"/ relPath, pp.format(xml), createFolders = true) + } + + Seq( + "gen-idea/idea_modules/iml" -> + millSourcePath / "generated" / ".idea_modules" /".iml", + "gen-idea/idea_modules/test.iml" -> + millSourcePath / "generated" / ".idea_modules" /"test.iml", + "gen-idea/idea_modules/mill-build.iml" -> + millSourcePath / "generated" / ".idea_modules" /"mill-build.iml", + "gen-idea/idea/libraries/scala-library-2.12.4.jar.xml" -> + millSourcePath / "generated" / ".idea" / "libraries" / "scala-library-2.12.4.jar.xml", + "gen-idea/idea/modules.xml" -> + millSourcePath / "generated" / ".idea" / "modules.xml", + "gen-idea/idea/misc.xml" -> + millSourcePath / "generated" / ".idea" / "misc.xml" + ).foreach { case (resource, generated) => + val resourceString = scala.io.Source.fromResource(resource).getLines().mkString("\n") + val generatedString = normaliseLibraryPaths(os.read(generated)) + + assert(resourceString == generatedString) + } + } + } + + + private def normaliseLibraryPaths(in: String): String = { + in.replaceAll(Cache.default.toPath.toAbsolutePath.toString, "COURSIER_HOME") + } +} diff --git a/scalalib/test/src/HelloJavaTests.scala b/scalalib/test/src/HelloJavaTests.scala new file mode 100644 index 00000000..02c2567f --- /dev/null +++ b/scalalib/test/src/HelloJavaTests.scala @@ -0,0 +1,114 @@ +package mill +package scalalib + + +import mill.api.Result +import mill.util.{TestEvaluator, TestUtil} +import utest._ +import utest.framework.TestPath + + +object HelloJavaTests extends TestSuite { + + object HelloJava extends TestUtil.BaseModule{ + def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + trait JUnitTests extends TestModule{ + def testFrameworks = Seq("com.novocode.junit.JUnitFramework") + def ivyDeps = Agg(ivy"com.novocode:junit-interface:0.11") + } + + object core extends JavaModule{ + object test extends Tests with JUnitTests + } + object app extends JavaModule{ + def moduleDeps = Seq(core) + object test extends Tests with JUnitTests + } + } + val resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-java" + + def init()(implicit tp: TestPath) = { + val eval = new TestEvaluator(HelloJava) + os.remove.all(HelloJava.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(HelloJava.millSourcePath / os.up) + os.copy(resourcePath, HelloJava.millSourcePath) + eval + } + def tests: Tests = Tests { + 'compile - { + val eval = init() + + val Right((res1, n1)) = eval.apply(HelloJava.core.compile) + val Right((res2, 0)) = eval.apply(HelloJava.core.compile) + val Right((res3, n2)) = eval.apply(HelloJava.app.compile) + + assert( + res1 == res2, + n1 != 0, + n2 != 0, + os.walk(res1.classes.path).exists(_.last == "Core.class"), + !os.walk(res1.classes.path).exists(_.last == "Main.class"), + os.walk(res3.classes.path).exists(_.last == 
"Main.class"), + !os.walk(res3.classes.path).exists(_.last == "Core.class") + ) + } + 'docJar - { + val eval = init() + + val Right((ref1, _)) = eval.apply(HelloJava.core.docJar) + val Right((ref2, _)) = eval.apply(HelloJava.app.docJar) + + assert( + os.proc("jar", "tf", ref1.path).call().out.lines.contains("hello/Core.html"), + os.proc("jar", "tf", ref2.path).call().out.lines.contains("hello/Main.html") + ) + } + 'test - { + val eval = init() + + val Left(Result.Failure(ref1, Some(v1))) = eval.apply(HelloJava.core.test.test()) + + assert( + v1._2(0).fullyQualifiedName == "hello.MyCoreTests.lengthTest", + v1._2(0).status == "Success", + v1._2(1).fullyQualifiedName == "hello.MyCoreTests.msgTest", + v1._2(1).status == "Failure" + ) + + val Right((v2, _)) = eval.apply(HelloJava.app.test.test()) + + assert( + v2._2(0).fullyQualifiedName == "hello.MyAppTests.appTest", + v2._2(0).status == "Success", + v2._2(1).fullyQualifiedName == "hello.MyAppTests.coreTest", + v2._2(1).status == "Success" + ) + } + 'failures - { + val eval = init() + + val mainJava = HelloJava.millSourcePath / 'app / 'src / "Main.java" + val coreJava = HelloJava.millSourcePath / 'core / 'src / "Core.java" + + val Right(_) = eval.apply(HelloJava.core.compile) + val Right(_) = eval.apply(HelloJava.app.compile) + + ammonite.ops.write.over(mainJava, ammonite.ops.read(mainJava) + "}") + + val Right(_) = eval.apply(HelloJava.core.compile) + val Left(_) = eval.apply(HelloJava.app.compile) + + ammonite.ops.write.over(coreJava, ammonite.ops.read(coreJava) + "}") + + val Left(_) = eval.apply(HelloJava.core.compile) + val Left(_) = eval.apply(HelloJava.app.compile) + + ammonite.ops.write.over(mainJava, ammonite.ops.read(mainJava).dropRight(1)) + ammonite.ops.write.over(coreJava, ammonite.ops.read(coreJava).dropRight(1)) + + val Right(_) = eval.apply(HelloJava.core.compile) + val Right(_) = eval.apply(HelloJava.app.compile) + } + } +} diff --git a/scalalib/test/src/HelloWorldTests.scala b/scalalib/test/src/HelloWorldTests.scala new file mode 100644 index 00000000..da08f056 --- /dev/null +++ b/scalalib/test/src/HelloWorldTests.scala @@ -0,0 +1,934 @@ +package mill.scalalib + +import java.io.ByteArrayOutputStream +import java.util.jar.JarFile + +import mill._ +import mill.define.Target +import mill.api.Result.Exception +import mill.eval.{Evaluator, Result} +import mill.modules.Assembly +import mill.scalalib.publish._ +import mill.util.{TestEvaluator, TestUtil} +import mill.scalalib.publish.VersionControl +import utest._ +import utest.framework.TestPath + +import scala.collection.JavaConverters._ +import scala.util.Properties.isJavaAtLeast + + +object HelloWorldTests extends TestSuite { + trait HelloBase extends TestUtil.BaseModule{ + def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + } + + trait HelloWorldModule extends scalalib.ScalaModule { + def scalaVersion = "2.12.4" + } + + trait HelloWorldModuleWithMain extends HelloWorldModule { + def mainClass = Some("Main") + } + + object HelloWorld extends HelloBase { + object core extends HelloWorldModule + } + object CrossHelloWorld extends HelloBase { + object core extends Cross[HelloWorldCross]("2.10.6", "2.11.11", "2.12.3", "2.12.4", "2.13.0-M3") + class HelloWorldCross(val crossScalaVersion: String) extends CrossScalaModule + } + + object HelloWorldDefaultMain extends HelloBase { + object core extends HelloWorldModule + } + + object HelloWorldWithoutMain extends HelloBase { + object core extends HelloWorldModule{ + def mainClass = None + } + } + + object 
HelloWorldWithMain extends HelloBase { + object core extends HelloWorldModuleWithMain + } + + val akkaHttpDeps = Agg(ivy"com.typesafe.akka::akka-http:10.0.13") + + object HelloWorldAkkaHttpAppend extends HelloBase { + object core extends HelloWorldModuleWithMain { + def ivyDeps = akkaHttpDeps + + def assemblyRules = Seq(Assembly.Rule.Append("reference.conf")) + } + } + + object HelloWorldAkkaHttpExclude extends HelloBase { + object core extends HelloWorldModuleWithMain { + def ivyDeps = akkaHttpDeps + + def assemblyRules = Seq(Assembly.Rule.Exclude("reference.conf")) + } + } + + object HelloWorldAkkaHttpAppendPattern extends HelloBase { + object core extends HelloWorldModuleWithMain { + def ivyDeps = akkaHttpDeps + + def assemblyRules = Seq(Assembly.Rule.AppendPattern(".*.conf")) + } + } + + object HelloWorldAkkaHttpExcludePattern extends HelloBase { + object core extends HelloWorldModuleWithMain { + def ivyDeps = akkaHttpDeps + + def assemblyRules = Seq(Assembly.Rule.ExcludePattern(".*.conf")) + } + } + + object HelloWorldAkkaHttpNoRules extends HelloBase { + object core extends HelloWorldModuleWithMain { + def ivyDeps = akkaHttpDeps + + def assemblyRules = Seq.empty + } + } + + object HelloWorldMultiAppend extends HelloBase { + object core extends HelloWorldModuleWithMain { + def moduleDeps = Seq(model) + + def assemblyRules = Seq(Assembly.Rule.Append("reference.conf")) + } + object model extends HelloWorldModule + } + + object HelloWorldMultiExclude extends HelloBase { + object core extends HelloWorldModuleWithMain { + def moduleDeps = Seq(model) + + def assemblyRules = Seq(Assembly.Rule.Exclude("reference.conf")) + } + object model extends HelloWorldModule + } + + object HelloWorldMultiAppendPattern extends HelloBase { + object core extends HelloWorldModuleWithMain { + def moduleDeps = Seq(model) + + def assemblyRules = Seq(Assembly.Rule.AppendPattern(".*.conf")) + } + object model extends HelloWorldModule + } + + object HelloWorldMultiExcludePattern extends HelloBase { + object core extends HelloWorldModuleWithMain { + def moduleDeps = Seq(model) + + def assemblyRules = Seq(Assembly.Rule.ExcludePattern(".*.conf")) + } + object model extends HelloWorldModule + } + + object HelloWorldMultiNoRules extends HelloBase { + object core extends HelloWorldModuleWithMain { + def moduleDeps = Seq(model) + + def assemblyRules = Seq.empty + } + object model extends HelloWorldModule + } + + object HelloWorldWarnUnused extends HelloBase { + object core extends HelloWorldModule { + def scalacOptions = T(Seq("-Ywarn-unused")) + } + } + + object HelloWorldFatalWarnings extends HelloBase { + object core extends HelloWorldModule { + def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) + } + } + + object HelloWorldWithDocVersion extends HelloBase { + object core extends HelloWorldModule { + def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) + def scalaDocOptions = super.scalaDocOptions() ++ Seq("-doc-version", "1.2.3") + } + } + + object HelloWorldOnlyDocVersion extends HelloBase { + object core extends HelloWorldModule { + def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) + def scalaDocOptions = T(Seq("-doc-version", "1.2.3")) + } + } + + object HelloWorldDocTitle extends HelloBase { + object core extends HelloWorldModule { + def scalaDocOptions = T(Seq("-doc-title", "Hello World")) + } + } + + object HelloWorldWithPublish extends HelloBase{ + object core extends HelloWorldModule with PublishModule{ + + def artifactName = "hello-world" + def publishVersion = "0.0.1" + 
+ def pomSettings = PomSettings( + organization = "com.lihaoyi", + description = "hello world ready for real world publishing", + url = "https://github.com/lihaoyi/hello-world-publish", + licenses = Seq(License.Common.Apache2), + versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), + developers = + Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) + ) + } + } + + object HelloWorldScalaOverride extends HelloBase{ + object core extends HelloWorldModule { + + override def scalaVersion: Target[String] = "2.11.11" + } + } + + object HelloWorldIvyDeps extends HelloBase{ + object moduleA extends HelloWorldModule { + + override def ivyDeps = Agg(ivy"com.lihaoyi::sourcecode:0.1.3") + } + object moduleB extends HelloWorldModule { + override def moduleDeps = Seq(moduleA) + override def ivyDeps = Agg(ivy"com.lihaoyi::sourcecode:0.1.4") + } + } + + object HelloWorldTypeLevel extends HelloBase{ + object foo extends ScalaModule { + def scalaVersion = "2.11.8" + override def scalaOrganization = "org.typelevel" + + def ivyDeps = Agg( + ivy"com.github.julien-truffaut::monocle-macro::1.4.0" + ) + def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg( + ivy"org.scalamacros:::paradise:2.1.0" + ) + def scalaDocPluginIvyDeps = super.scalaDocPluginIvyDeps() ++ Agg( + ivy"com.typesafe.genjavadoc:::genjavadoc-plugin:0.11" + ) + } + } + + object HelloWorldMacros extends HelloBase{ + object core extends ScalaModule { + def scalaVersion = "2.12.4" + + def ivyDeps = Agg( + ivy"com.github.julien-truffaut::monocle-macro::1.4.0" + ) + def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg( + ivy"org.scalamacros:::paradise:2.1.0" + ) + } + } + + object HelloWorldFlags extends HelloBase{ + object core extends ScalaModule { + def scalaVersion = "2.12.4" + + def scalacOptions = super.scalacOptions() ++ Seq( + "-Ypartial-unification" + ) + } + } + + object HelloScalacheck extends HelloBase{ + object foo extends ScalaModule { + def scalaVersion = "2.12.4" + object test extends Tests { + def ivyDeps = Agg(ivy"org.scalacheck::scalacheck:1.13.5") + def testFrameworks = Seq("org.scalacheck.ScalaCheckFramework") + } + } + } + + object HelloDotty extends HelloBase{ + object foo extends ScalaModule { + def scalaVersion = "0.9.0-RC1" + def ivyDeps = Agg(ivy"org.typelevel::cats-core:1.2.0".withDottyCompat(scalaVersion())) + } + } + + val resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" + + def jarMainClass(jar: JarFile): Option[String] = { + import java.util.jar.Attributes._ + val attrs = jar.getManifest.getMainAttributes.asScala + attrs.get(Name.MAIN_CLASS).map(_.asInstanceOf[String]) + } + + def jarEntries(jar: JarFile): Set[String] = { + jar.entries().asScala.map(_.getName).toSet + } + + def readFileFromJar(jar: JarFile, name: String): String = { + val is = jar.getInputStream(jar.getEntry(name)) + val baos = new ByteArrayOutputStream() + os.Internals.transfer(is, baos) + new String(baos.toByteArray) + } + + def compileClassfiles = Seq[os.RelPath]( + "Main.class", + "Main$.class", + "Main0.class", + "Main0$.class", + "Main$delayedInit$body.class", + "Person.class", + "Person$.class" + ) + + def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) + (t: TestEvaluator => T) + (implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(m.millSourcePath / os.up) + os.copy(resourcePath, m.millSourcePath) + t(eval) + } + + + + + def tests: Tests = Tests { + 
'scalaVersion - { + + 'fromBuild - workspaceTest(HelloWorld){eval => + val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalaVersion) + + assert( + result == "2.12.4", + evalCount > 0 + ) + } + 'override - workspaceTest(HelloWorldScalaOverride){eval => + val Right((result, evalCount)) = eval.apply(HelloWorldScalaOverride.core.scalaVersion) + + assert( + result == "2.11.11", + evalCount > 0 + ) + } + } + + 'scalacOptions - { + 'emptyByDefault - workspaceTest(HelloWorld){eval => + val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalacOptions) + + assert( + result.isEmpty, + evalCount > 0 + ) + } + 'override - workspaceTest(HelloWorldFatalWarnings){ eval => + val Right((result, evalCount)) = eval.apply(HelloWorldFatalWarnings.core.scalacOptions) + + assert( + result == Seq("-Ywarn-unused", "-Xfatal-warnings"), + evalCount > 0 + ) + } + } + + 'scalaDocOptions - { + 'emptyByDefault - workspaceTest(HelloWorld){eval => + val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalaDocOptions) + assert( + result.isEmpty, + evalCount > 0 + ) + } + 'override - workspaceTest(HelloWorldDocTitle){ eval => + val Right((result, evalCount)) = eval.apply(HelloWorldDocTitle.core.scalaDocOptions) + assert( + result == Seq("-doc-title", "Hello World"), + evalCount > 0 + ) + } + 'extend - workspaceTest(HelloWorldWithDocVersion){ eval => + val Right((result, evalCount)) = eval.apply(HelloWorldWithDocVersion.core.scalaDocOptions) + assert( + result == Seq("-Ywarn-unused", "-Xfatal-warnings", "-doc-version", "1.2.3"), + evalCount > 0 + ) + } + // make sure options are passed during ScalaDoc generation + 'docJarWithTitle - workspaceTest( + HelloWorldDocTitle, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" + ){ eval => + val Right((_, evalCount)) = eval.apply(HelloWorldDocTitle.core.docJar) + assert( + evalCount > 0, + os.read(eval.outPath / 'core / 'docJar / 'dest / 'javadoc / "index.html").contains("Hello World") + ) + } + 'docJarWithVersion - workspaceTest( + HelloWorldWithDocVersion, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" + ){ eval => + // scaladoc generation fails because of "-Xfatal-warnings" flag + val Left(Result.Failure("docJar generation failed", None)) = eval.apply(HelloWorldWithDocVersion.core.docJar) + } + 'docJarOnlyVersion - workspaceTest( + HelloWorldOnlyDocVersion, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" + ){ eval => + val Right((_, evalCount)) = eval.apply(HelloWorldOnlyDocVersion.core.docJar) + assert( + evalCount > 0, + os.read(eval.outPath / 'core / 'docJar / 'dest / 'javadoc / "index.html").contains("1.2.3") + ) + } + } + + 'scalacPluginClasspath - { + 'withMacroParadise - workspaceTest(HelloWorldTypeLevel){eval => + val Right((result, evalCount)) = eval.apply(HelloWorldTypeLevel.foo.scalacPluginClasspath) + assert( + result.nonEmpty, + result.exists { pathRef => pathRef.path.segments.contains("scalamacros") }, + evalCount > 0 + ) + } + } + + 'scalaDocPluginClasspath - { + 'extend - workspaceTest(HelloWorldTypeLevel){eval => + val Right((result, evalCount)) = eval.apply(HelloWorldTypeLevel.foo.scalaDocPluginClasspath) + assert( + result.nonEmpty, + result.exists { pathRef => pathRef.path.segments.contains("scalamacros") }, + result.exists { pathRef => pathRef.path.segments.contains("genjavadoc") }, + evalCount > 0 + ) + } + } + + 'compile - { + 'fromScratch - workspaceTest(HelloWorld){eval => + val Right((result, evalCount)) = eval.apply(HelloWorld.core.compile) + + val 
analysisFile = result.analysisFile + val outputFiles = os.walk(result.classes.path) + val expectedClassfiles = compileClassfiles.map( + eval.outPath / 'core / 'compile / 'dest / 'classes / _ + ) + assert( + result.classes.path == eval.outPath / 'core / 'compile / 'dest / 'classes, + os.exists(analysisFile), + outputFiles.nonEmpty, + outputFiles.forall(expectedClassfiles.contains), + evalCount > 0 + ) + + // don't recompile if nothing changed + val Right((_, unchangedEvalCount)) = eval.apply(HelloWorld.core.compile) + + assert(unchangedEvalCount == 0) + } + 'recompileOnChange - workspaceTest(HelloWorld){eval => + val Right((_, freshCount)) = eval.apply(HelloWorld.core.compile) + assert(freshCount > 0) + + os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "\n") + + val Right((_, incCompileCount)) = eval.apply(HelloWorld.core.compile) + assert(incCompileCount > 0, incCompileCount < freshCount) + } + 'failOnError - workspaceTest(HelloWorld){eval => + os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "val x: ") + + val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorld.core.compile) + + + val paths = Evaluator.resolveDestPaths( + eval.outPath, + HelloWorld.core.compile.ctx.segments + ) + + assert( + os.walk(paths.dest / 'classes).isEmpty, + !os.exists(paths.meta) + ) + // Works when fixed + os.write.over( + HelloWorld.millSourcePath / 'core / 'src / "Main.scala", + os.read(HelloWorld.millSourcePath / 'core / 'src / "Main.scala").dropRight("val x: ".length) + ) + + val Right((result, evalCount)) = eval.apply(HelloWorld.core.compile) + } + 'passScalacOptions - workspaceTest(HelloWorldFatalWarnings){ eval => + // compilation fails because of "-Xfatal-warnings" flag + val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorldFatalWarnings.core.compile) + } + } + + 'runMain - { + 'runMainObject - workspaceTest(HelloWorld){eval => + val runResult = eval.outPath / 'core / 'runMain / 'dest / "hello-mill" + + val Right((_, evalCount)) = eval.apply(HelloWorld.core.runMain("Main", runResult.toString)) + assert(evalCount > 0) + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + 'runCross - { + def cross(eval: TestEvaluator, v: String, expectedOut: String) { + + val runResult = eval.outPath / "hello-mill" + + val Right((_, evalCount)) = eval.apply( + CrossHelloWorld.core(v).runMain("Shim", runResult.toString) + ) + + assert(evalCount > 0) + + + assert( + os.exists(runResult), + os.read(runResult) == expectedOut + ) + } + 'v210 - TestUtil.disableInJava9OrAbove(workspaceTest(CrossHelloWorld)(cross(_, "2.10.6", "2.10.6 rox"))) + 'v211 - TestUtil.disableInJava9OrAbove(workspaceTest(CrossHelloWorld)(cross(_, "2.11.11", "2.11.11 pwns"))) + 'v2123 - workspaceTest(CrossHelloWorld)(cross(_, "2.12.3", "2.12.3 leet")) + 'v2124 - workspaceTest(CrossHelloWorld)(cross(_, "2.12.4", "2.12.4 leet")) + 'v2130M3 - workspaceTest(CrossHelloWorld)(cross(_, "2.13.0-M3", "2.13.0-M3 idk")) + } + + + 'notRunInvalidMainObject - workspaceTest(HelloWorld){eval => + val Left(Result.Failure("subprocess failed", _)) = eval.apply(HelloWorld.core.runMain("Invalid")) + } + 'notRunWhenCompileFailed - workspaceTest(HelloWorld){eval => + os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "val x: ") + + val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorld.core.runMain("Main")) + + } + } + + 'forkRun - { + 'runIfMainClassProvided - workspaceTest(HelloWorldWithMain){eval => + 
val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" + val Right((_, evalCount)) = eval.apply( + HelloWorldWithMain.core.run(runResult.toString) + ) + + assert(evalCount > 0) + + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + 'notRunWithoutMainClass - workspaceTest( + HelloWorldWithoutMain, + os.pwd / 'scalalib / 'test / 'resources / "hello-world-no-main" + ){eval => + val Left(Result.Failure(_, None)) = eval.apply(HelloWorldWithoutMain.core.run()) + } + + 'runDiscoverMainClass - workspaceTest(HelloWorldWithoutMain){eval => + // Make sure even if there isn't a main class defined explicitly, it gets + // discovered by Zinc and used + val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" + val Right((_, evalCount)) = eval.apply( + HelloWorldWithoutMain.core.run(runResult.toString) + ) + + assert(evalCount > 0) + + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + } + + 'run - { + 'runIfMainClassProvided - workspaceTest(HelloWorldWithMain){eval => + val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" + val Right((_, evalCount)) = eval.apply( + HelloWorldWithMain.core.runLocal(runResult.toString) + ) + + assert(evalCount > 0) + + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + 'runWithDefaultMain - workspaceTest(HelloWorldDefaultMain){eval => + val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" + val Right((_, evalCount)) = eval.apply( + HelloWorldDefaultMain.core.runLocal(runResult.toString) + ) + + assert(evalCount > 0) + + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + 'notRunWithoutMainClass - workspaceTest( + HelloWorldWithoutMain, + os.pwd / 'scalalib / 'test / 'resources / "hello-world-no-main" + ){eval => + val Left(Result.Failure(_, None)) = eval.apply(HelloWorldWithoutMain.core.runLocal()) + + } + } + + 'jar - { + 'nonEmpty - workspaceTest(HelloWorldWithMain){eval => + val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.jar) + + assert( + os.exists(result.path), + evalCount > 0 + ) + + val jarFile = new JarFile(result.path.toIO) + val entries = jarFile.entries().asScala.map(_.getName).toSet + + val otherFiles = Seq[os.RelPath]( + os.rel / "META-INF" / "MANIFEST.MF", + "reference.conf" + ) + val expectedFiles = compileClassfiles ++ otherFiles + + assert( + entries.nonEmpty, + entries == expectedFiles.map(_.toString()).toSet + ) + + val mainClass = jarMainClass(jarFile) + assert(mainClass.contains("Main")) + } + + 'logOutputToFile - workspaceTest(HelloWorld){eval => + val outPath = eval.outPath + eval.apply(HelloWorld.core.compile) + + val logFile = outPath / 'core / 'compile / 'log + assert(os.exists(logFile)) + } + } + + 'assembly - { + 'assembly - workspaceTest(HelloWorldWithMain){ eval => + val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.assembly) + assert( + os.exists(result.path), + evalCount > 0 + ) + val jarFile = new JarFile(result.path.toIO) + val entries = jarEntries(jarFile) + + val mainPresent = entries.contains("Main.class") + assert(mainPresent) + assert(entries.exists(s => s.contains("scala/Predef.class"))) + + val mainClass = jarMainClass(jarFile) + assert(mainClass.contains("Main")) + } + + 'assemblyRules - { + def checkAppend[M <: TestUtil.BaseModule](module: M, + target: Target[PathRef]) = + workspaceTest(module) { eval => + val Right((result, _)) = 
eval.apply(target) + + val jarFile = new JarFile(result.path.toIO) + + assert(jarEntries(jarFile).contains("reference.conf")) + + val referenceContent = readFileFromJar(jarFile, "reference.conf") + + assert( + // akka modules configs are present + referenceContent.contains("akka-http Reference Config File"), + referenceContent.contains("akka-http-core Reference Config File"), + referenceContent.contains("Akka Actor Reference Config File"), + referenceContent.contains("Akka Stream Reference Config File"), + // our application config is present too + referenceContent.contains("My application Reference Config File"), + referenceContent.contains( + """akka.http.client.user-agent-header="hello-world-client"""" + ) + ) + } + + val helloWorldMultiResourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-multi" + + def checkAppendMulti[M <: TestUtil.BaseModule]( + module: M, + target: Target[PathRef]) = + workspaceTest( + module, + resourcePath = helloWorldMultiResourcePath + ) { eval => + val Right((result, _)) = eval.apply(target) + + val jarFile = new JarFile(result.path.toIO) + + assert(jarEntries(jarFile).contains("reference.conf")) + + val referenceContent = readFileFromJar(jarFile, "reference.conf") + + assert( + // reference config from core module + referenceContent.contains("Core Reference Config File"), + // reference config from model module + referenceContent.contains("Model Reference Config File"), + // concatenated content + referenceContent.contains("bar.baz=hello"), + referenceContent.contains("foo.bar=2") + ) + } + + 'appendWithDeps - checkAppend( + HelloWorldAkkaHttpAppend, + HelloWorldAkkaHttpAppend.core.assembly + ) + 'appendMultiModule - checkAppendMulti( + HelloWorldMultiAppend, + HelloWorldMultiAppend.core.assembly + ) + 'appendPatternWithDeps - checkAppend( + HelloWorldAkkaHttpAppendPattern, + HelloWorldAkkaHttpAppendPattern.core.assembly + ) + 'appendPatternMultiModule - checkAppendMulti( + HelloWorldMultiAppendPattern, + HelloWorldMultiAppendPattern.core.assembly + ) + + def checkExclude[M <: TestUtil.BaseModule](module: M, + target: Target[PathRef], + resourcePath: os.Path = resourcePath + ) = + workspaceTest(module, resourcePath) { eval => + val Right((result, _)) = eval.apply(target) + + val jarFile = new JarFile(result.path.toIO) + + assert(!jarEntries(jarFile).contains("reference.conf")) + } + + 'excludeWithDeps - checkExclude( + HelloWorldAkkaHttpExclude, + HelloWorldAkkaHttpExclude.core.assembly + ) + 'excludeMultiModule - checkExclude( + HelloWorldMultiExclude, + HelloWorldMultiExclude.core.assembly, + resourcePath = helloWorldMultiResourcePath + + ) + 'excludePatternWithDeps - checkExclude( + HelloWorldAkkaHttpExcludePattern, + HelloWorldAkkaHttpExcludePattern.core.assembly + ) + 'excludePatternMultiModule - checkExclude( + HelloWorldMultiExcludePattern, + HelloWorldMultiExcludePattern.core.assembly, + resourcePath = helloWorldMultiResourcePath + ) + + 'writeFirstWhenNoRule - { + 'withDeps - workspaceTest(HelloWorldAkkaHttpNoRules) { eval => + val Right((result, _)) = eval.apply(HelloWorldAkkaHttpNoRules.core.assembly) + + val jarFile = new JarFile(result.path.toIO) + + assert(jarEntries(jarFile).contains("reference.conf")) + + val referenceContent = readFileFromJar(jarFile, "reference.conf") + + val allOccurrences = Seq( + referenceContent.contains("akka-http Reference Config File"), + referenceContent.contains("akka-http-core Reference Config File"), + referenceContent.contains("Akka Actor Reference Config File"), + referenceContent.contains("Akka 
Stream Reference Config File"), + referenceContent.contains("My application Reference Config File") + ) + + val timesOcccurres = allOccurrences.find(identity).size + + assert(timesOcccurres == 1) + } + + 'multiModule - workspaceTest( + HelloWorldMultiNoRules, + resourcePath = helloWorldMultiResourcePath + ) { eval => + val Right((result, _)) = eval.apply(HelloWorldMultiNoRules.core.assembly) + + val jarFile = new JarFile(result.path.toIO) + + assert(jarEntries(jarFile).contains("reference.conf")) + + val referenceContent = readFileFromJar(jarFile, "reference.conf") + + assert( + referenceContent.contains("Model Reference Config File"), + referenceContent.contains("foo.bar=2"), + + !referenceContent.contains("Core Reference Config File"), + !referenceContent.contains("bar.baz=hello") + ) + } + } + } + + 'run - workspaceTest(HelloWorldWithMain){eval => + val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.assembly) + + assert( + os.exists(result.path), + evalCount > 0 + ) + val runResult = eval.outPath / "hello-mill" + + os.proc("java", "-jar", result.path, runResult).call(cwd = eval.outPath) + + assert( + os.exists(runResult), + os.read(runResult) == "hello rockjam, your age is: 25" + ) + } + } + + 'ivyDeps - workspaceTest(HelloWorldIvyDeps){ eval => + val Right((result, _)) = eval.apply(HelloWorldIvyDeps.moduleA.runClasspath) + assert( + result.exists(_.path.last == "sourcecode_2.12-0.1.3.jar"), + !result.exists(_.path.last == "sourcecode_2.12-0.1.4.jar") + ) + + val Right((result2, _)) = eval.apply(HelloWorldIvyDeps.moduleB.runClasspath) + assert( + result2.exists(_.path.last == "sourcecode_2.12-0.1.4.jar"), + !result2.exists(_.path.last == "sourcecode_2.12-0.1.3.jar") + ) + } + + 'typeLevel - workspaceTest(HelloWorldTypeLevel){ eval => + val classPathsToCheck = Seq( + HelloWorldTypeLevel.foo.runClasspath, + HelloWorldTypeLevel.foo.ammoniteReplClasspath, + HelloWorldTypeLevel.foo.compileClasspath + ) + for(cp <- classPathsToCheck){ + val Right((result, _)) = eval.apply(cp) + assert( + // Make sure every relevant piece org.scala-lang has been substituted for org.typelevel + !result.map(_.toString).exists(x => + x.contains("scala-lang") && + (x.contains("scala-library") || x.contains("scala-compiler") || x.contains("scala-reflect")) + ), + result.map(_.toString).exists(x => x.contains("typelevel") && x.contains("scala-library")) + + ) + } + } + + 'macros - { + // make sure macros are applied when compiling/running + 'runMain - workspaceTest( + HelloWorldMacros, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-macros" + ){ eval => + val Right((_, evalCount)) = eval.apply(HelloWorldMacros.core.runMain("Main")) + assert(evalCount > 0) + } + // make sure macros are applied when compiling during scaladoc generation + 'docJar - workspaceTest( + HelloWorldMacros, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-macros" + ){ eval => + val Right((_, evalCount)) = eval.apply(HelloWorldMacros.core.docJar) + assert(evalCount > 0) + } + } + + 'flags - { + // make sure flags are passed when compiling/running + 'runMain - workspaceTest( + HelloWorldFlags, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-flags" + ){ eval => + val Right((_, evalCount)) = eval.apply(HelloWorldFlags.core.runMain("Main")) + assert(evalCount > 0) + } + // make sure flags are passed during ScalaDoc generation + 'docJar - workspaceTest( + HelloWorldFlags, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-flags" + ){ eval => 
+ val Right((_, evalCount)) = eval.apply(HelloWorldFlags.core.docJar) + assert(evalCount > 0) + } + } + + 'scalacheck - workspaceTest( + HelloScalacheck, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-scalacheck" + ){ eval => + val Right((res, evalCount)) = eval.apply(HelloScalacheck.foo.test.test()) + assert( + evalCount > 0, + res._2.map(_.selector) == Seq( + "String.startsWith", + "String.endsWith", + "String.substring", + "String.substring" + ) + ) + } + + 'dotty - workspaceTest( + HelloDotty, + resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-dotty" + ){ eval => + if (isJavaAtLeast("9")) { + // Skip the test because Dotty does not support Java >= 9 yet + // (see https://github.com/lampepfl/dotty/pull/3138) + } else { + val Right((_, evalCount)) = eval.apply(HelloDotty.foo.run()) + assert(evalCount > 0) + } + } + } +} diff --git a/scalalib/test/src/ResolveDepsTests.scala b/scalalib/test/src/ResolveDepsTests.scala new file mode 100644 index 00000000..78361625 --- /dev/null +++ b/scalalib/test/src/ResolveDepsTests.scala @@ -0,0 +1,77 @@ +package mill.scalalib + +import coursier.Cache +import coursier.maven.MavenRepository +import mill.api.Result.{Failure, Success} +import mill.eval.{PathRef, Result} +import mill.util.Loose.Agg +import utest._ + +object ResolveDepsTests extends TestSuite { + val repos = Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")) + + def evalDeps(deps: Agg[Dep]): Result[Agg[PathRef]] = Lib.resolveDependencies( + repos, + Lib.depToDependency(_, "2.12.4", ""), + deps + ) + + val tests = Tests { + 'resolveValidDeps - { + val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3") + val Success(paths) = evalDeps(deps) + assert(paths.nonEmpty) + } + + 'resolveValidDepsWithClassifier - { + val deps = Agg(ivy"org.lwjgl:lwjgl:3.1.1;classifier=natives-macos") + val Success(paths) = evalDeps(deps) + assert(paths.nonEmpty) + assert(paths.items.next.path.toString.contains("natives-macos")) + } + + 'resolveTransitiveRuntimeDeps - { + val deps = Agg(ivy"org.mockito:mockito-core:2.7.22") + val Success(paths) = evalDeps(deps) + assert(paths.nonEmpty) + assert(paths.exists(_.path.toString.contains("objenesis"))) + assert(paths.exists(_.path.toString.contains("byte-buddy"))) + } + + 'excludeTransitiveDeps - { + val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".exclude("com.lihaoyi" -> "fansi_2.12")) + val Success(paths) = evalDeps(deps) + assert(!paths.exists(_.path.toString.contains("fansi_2.12"))) + } + + 'excludeTransitiveDepsByOrg - { + val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".excludeOrg("com.lihaoyi")) + val Success(paths) = evalDeps(deps) + assert(!paths.exists(path => path.path.toString.contains("com/lihaoyi") && !path.path.toString.contains("pprint_2.12"))) + } + + 'excludeTransitiveDepsByName - { + val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".excludeName("fansi_2.12")) + val Success(paths) = evalDeps(deps) + assert(!paths.exists(_.path.toString.contains("fansi_2.12"))) + } + + 'errOnInvalidOrgDeps - { + val deps = Agg(ivy"xxx.yyy.invalid::pprint:0.5.3") + val Failure(errMsg, _) = evalDeps(deps) + assert(errMsg.contains("xxx.yyy.invalid")) + } + + 'errOnInvalidVersionDeps - { + val deps = Agg(ivy"com.lihaoyi::pprint:invalid.version.num") + val Failure(errMsg, _) = evalDeps(deps) + assert(errMsg.contains("invalid.version.num")) + } + + 'errOnPartialSuccess - { + val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3", ivy"fake::fake:fake") + val Failure(errMsg, _) = evalDeps(deps) + assert(errMsg.contains("fake")) + } + } +} diff --git 
a/scalalib/test/src/VersionControlTests.scala b/scalalib/test/src/VersionControlTests.scala new file mode 100644 index 00000000..fafdca2d --- /dev/null +++ b/scalalib/test/src/VersionControlTests.scala @@ -0,0 +1,74 @@ +package mill.scalalib + +import mill.scalalib.publish.{VersionControl, VersionControlConnection} + +import utest._ + +object VersionContolTests extends TestSuite { + + import VersionControl._ + import VersionControlConnection._ + + val tests = Tests { + 'github - { + assert( + github("lihaoyi", "mill") == + VersionControl( + browsableRepository = Some("https://github.com/lihaoyi/mill"), + connection = Some("scm:git:git://github.com/lihaoyi/mill.git"), + developerConnection = Some("scm:git:ssh://git@github.com:lihaoyi/mill.git"), + tag = None + ) + ) + } + 'git - { + assert( + gitGit("example.org", "path.git", port = Some(9418)) == + "scm:git:git://example.org:9418/path.git" + ) + + assert( + gitHttp("example.org") == + "scm:git:http://example.org/" + ) + + assert( + gitHttps("example.org", "path.git") == + "scm:git:https://example.org/path.git" + ) + + assert( + gitSsh("example.org", "path.git") == + "scm:git:ssh://example.org/path.git" + ) + + assert( + gitFile("/home/gui/repos/foo/bare.git") == + "scm:git:file:///home/gui/repos/foo/bare.git" + ) + + } + 'svn - { + assert( + svnSsh("example.org", "repo") == + "scm:svn:svn+ssh://example.org/repo" + ) + assert( + svnHttp("example.org", "repo", Some("user"), Some("pass")) == + "scm:svn:http://user:pass@example.org/repo" + ) + assert( + svnHttps("example.org", "repo", Some("user")) == + "scm:svn:https://user@example.org/repo" + ) + assert( + svnSvn("example.org", "repo", port = Some(3690)) == + "scm:svn:svn://example.org:3690/repo" + ) + assert( + svnFile("/var/svn/repo") == + "scm:svn:file:///var/svn/repo" + ) + } + } +} \ No newline at end of file diff --git a/scalalib/test/src/dependency/metadata/MetadataLoaderFactoryTests.scala b/scalalib/test/src/dependency/metadata/MetadataLoaderFactoryTests.scala new file mode 100644 index 00000000..4c2206b8 --- /dev/null +++ b/scalalib/test/src/dependency/metadata/MetadataLoaderFactoryTests.scala @@ -0,0 +1,64 @@ +/* + * This file contains code originally published under the following license: + * + * Copyright (c) 2012, Roman Timushev + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package mill.scalalib.dependency.metadata + +import coursier.Fetch.Content +import coursier.core.{Artifact, Module, Project, Repository} +import coursier.ivy.IvyRepository +import coursier.maven.MavenRepository +import coursier.util.{EitherT, Monad} +import utest._ + +object MetadataLoaderFactoryTests extends TestSuite { + + val tests = Tests { + 'mavenRepository - { + val mavenRepo = MavenRepository("https://repo1.maven.org/maven2") + assertMatch(MetadataLoaderFactory(mavenRepo)) { + case Some(MavenMetadataLoader(`mavenRepo`)) => + } + } + 'ivyRepository - { + val Right(ivyRepo) = IvyRepository.parse( + "https://dl.bintray.com/sbt/sbt-plugin-releases/" + coursier.ivy.Pattern.default.string, + dropInfoAttributes = true) + assertMatch(MetadataLoaderFactory(ivyRepo)) { case None => } + } + 'otherRepository - { + val otherRepo = new CustomRepository + assertMatch(MetadataLoaderFactory(otherRepo)) { case None => } + } + } + + case class CustomRepository() extends Repository { + override def find[F[_]](module: Module, version: String, fetch: Content[F])( + implicit F: Monad[F]): EitherT[F, String, (Artifact.Source, Project)] = + ??? + } +} diff --git a/scalalib/test/src/dependency/updates/UpdatesFinderTests.scala b/scalalib/test/src/dependency/updates/UpdatesFinderTests.scala new file mode 100644 index 00000000..7b6e6e36 --- /dev/null +++ b/scalalib/test/src/dependency/updates/UpdatesFinderTests.scala @@ -0,0 +1,173 @@ +/* + * This file contains code originally published under the following license: + * + * Copyright (c) 2012, Roman Timushev + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package mill.scalalib.dependency.updates + +import mill.scalalib.dependency.versions.{DependencyVersions, Version} +import utest._ + +object UpdatesFinderTests extends TestSuite { + + private def updates(current: String, + available: Seq[String], + allowPreRelease: Boolean) = { + val dependency = coursier.Dependency( + coursier.Module("com.example.organization", "example-artifact"), + current) + val currentVersion = Version(current) + val allVersions = available.map(Version(_)).toSet + + UpdatesFinder + .findUpdates(DependencyVersions(dependency, currentVersion, allVersions), + allowPreRelease) + .updates + .map(_.toString) + } + + val available = Seq( + "0.9.9-SNAPSHOT", + "0.9.9-M3", + "0.9.9", + "1.0.0-SNAPSHOT", + "1.0.0-M2", + "1.0.0-M3", + "1.0.0", + "1.0.1-SNAPSHOT", + "1.0.1-M3", + "1.0.1" + ) + + val tests = Tests { + + 'snapshotArtifacts - { + val u = updates("1.0.0-SNAPSHOT", available, allowPreRelease = false) + val pu = updates("1.0.0-SNAPSHOT", available, allowPreRelease = true) + + 'noOldStableVersions - { + assert(!u.contains("0.9.9")) + } + 'noOldMilestones - { + assert(!u.contains("0.9.9-M3")) + } + 'noOldSnapshots - { + assert(!u.contains("0.9.9-SNAPSHOT")) + } + 'noCurrentMilestones - { + assert(!u.contains("1.0.0-M3")) + } + 'noCurrentSnapshot - { + assert(!u.contains("1.0.0-SNAPSHOT")) + } + 'stableUpdates - { + assert(u.contains("1.0.0") && u.contains("1.0.1")) + } + 'milestoneUpdates - { + assert(u.contains("1.0.1-M3")) + } + 'snapshotUpdates - { + assert(u.contains("1.0.1-SNAPSHOT")) + } + 'noDifferencesRegardingOptionalPreReleases - { + assert(u == pu) + } + } + + 'milestoneArtifacts - { + val u = updates("1.0.0-M2", available, allowPreRelease = false) + val pu = updates("1.0.0-M2", available, allowPreRelease = true) + + 'noOldStableVersions - { + assert(!u.contains("0.9.9")) + } + 'noOldSnapshots - { + assert(!u.contains("0.9.9-SNAPSHOT")) + } + 'noOldMilestones - { + assert(!u.contains("0.9.9-M3")) + } + 'noCurrentSnapshot - { + assert(!u.contains("1.0.0-SNAPSHOT")) + } + 'currentMilestones - { + assert(u.contains("1.0.0-M3")) + } + 'stableUpdates - { + assert(u.contains("1.0.1")) + } + 'noSnapshotUpdates - { + assert(!u.contains("1.0.1-SNAPSHOT")) + } + 'milestoneUpdates - { + assert(u.contains("1.0.1-M3")) + } + 'noDifferencesRegardingOptionalPreReleases - { + assert(u == pu) + } + } + + 'stableArtifacts - { + val u = updates("1.0.0", available, allowPreRelease = false) + val pu = updates("1.0.0", available, allowPreRelease = true) + + 'noOldStableVersions - { + assert(!u.contains("0.9.9")) + assert(!pu.contains("0.9.9")) + } + 'noOldSnapshots - { + assert(!u.contains("0.9.9-SNAPSHOT")) + assert(!pu.contains("0.9.9-SNAPSHOT")) + } + 'noOldMilestones - { + assert(!u.contains("0.9.9-M3")) + assert(!pu.contains("0.9.9-M3")) + } + 'noCurrentSnapshot - { + assert(!u.contains("1.0.0-SNAPSHOT")) + assert(!pu.contains("1.0.0-SNAPSHOT")) + } + 'noCurrentMilestones - { + assert(!u.contains("1.0.0-M3")) + assert(!pu.contains("1.0.0-M3")) + } + 'stableUpdates - { + assert(u.contains("1.0.1")) + assert(pu.contains("1.0.1")) + } + 'noSnapshotUpdates - { + assert(!u.contains("1.0.1-SNAPSHOT")) + assert(!pu.contains("1.0.1-SNAPSHOT")) + } + 'noMilestoneUpdates - { + assert(!u.contains("1.0.1-M3")) + } + 'milestoneUpdatesWhenAllowingPreReleases - { + assert(pu.contains("1.0.1-M3")) + } + } + } +} diff --git a/scalalib/test/src/dependency/versions/VersionTests.scala b/scalalib/test/src/dependency/versions/VersionTests.scala new file mode 100644 index 00000000..b916c86f 
--- /dev/null +++ b/scalalib/test/src/dependency/versions/VersionTests.scala @@ -0,0 +1,138 @@ +/* + * This file contains code originally published under the following license: + * + * Copyright (c) 2012, Roman Timushev + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package mill.scalalib.dependency.versions + +import utest._ +import fastparse.Parsed + +object VersionTests extends TestSuite { + + val tests = Tests { + 'versionsClassification - { + 'ReleaseVersion - { + List("1.0.0", "1.0.0.Final", "1.0.0-FINAL", "1.0.0.RELEASE") foreach { + rel => + assertMatch(Version(rel)) { + case ReleaseVersion(List(1, 0, 0)) => + } + } + } + 'PreReleaseVersion - { + assertMatch(Version("1.0.0-alpha.1")) { + case PreReleaseVersion(List(1, 0, 0), List("alpha", "1")) => + } + } + 'PreReleaseBuildVersion - { + assertMatch(Version("1.0.0-alpha.1+build.10")) { + case PreReleaseBuildVersion(List(1, 0, 0), + List("alpha", "1"), + List("build", "10")) => + } + } + 'BuildVersion - { + assertMatch(Version("1.0.0+build.10")) { + case BuildVersion(List(1, 0, 0), List("build", "10")) => + } + } + } + + 'semverVersionsOrdering - { + import scala.Ordered._ + + val v = List( + "invalid", + "1.0.0-20131213005945", + "1.0.0-alpha", + "1.0.0-alpha.1", + "1.0.0-beta.2", + "1.0.0-beta.11", + "1.0.0-rc.1", + "1.0.0-rc.1+build.1", + "1.0.0", + "1.0.0+0.3.7", + "1.33.7+build", + "1.33.7+build.2.b8f12d7", + "1.33.7+build.11.e0f985a", + "2.0.M5b", + "2.0.M6-SNAP9", + "2.0.M6-SNAP23", + "2.0.M6-SNAP23a" + ).map(Version.apply) + val pairs = v.tails.flatMap { + case h :: t => t.map((h, _)) + case Nil => List.empty + } + pairs.foreach { + case (a, b) => + assert(a < b) + assert(b > a) + } + } + + 'parser - { + + Symbol("parse 1.0.5") - { + assertMatch(VersionParser.parse("1.0.5")) { + case Parsed.Success((Seq(1, 0, 5), Seq(), Seq()), _) => + } + } + + Symbol("parse 1.0.M3") - { + assertMatch(VersionParser.parse("1.0.M3")) { + case Parsed.Success((Seq(1, 0), Seq("M3"), Seq()), _) => + } + } + Symbol("parse 1.0.3m") - { + assertMatch(VersionParser.parse("1.0.3m")) { + case Parsed.Success((Seq(1, 0), Seq("3m"), Seq()), _) => + } + } + 
Symbol("parse 1.0.3m.4") - { + assertMatch(VersionParser.parse("1.0.3m.4")) { + case Parsed.Success((Seq(1, 0), Seq("3m", "4"), Seq()), _) => + } + } + Symbol("parse 9.1-901-1.jdbc4") - { + assertMatch(VersionParser.parse("9.1-901-1.jdbc4")) { + case Parsed.Success((Seq(9, 1), Seq("901", "1", "jdbc4"), Seq()), _) => + } + } + Symbol("parse 1.33.7+build/11.e0f985a") - { + assertMatch(VersionParser.parse("1.33.7+build/11.e0f985a")) { + case Parsed.Success((Seq(1, 33, 7), Seq(), Seq("build/11", "e0f985a")), _) => + } + } + Symbol("parse 9.1-901-1.jdbc4+build/11.e0f985a") - { + assertMatch(VersionParser.parse("9.1-901-1.jdbc4+build/11.e0f985a")) { + case Parsed.Success((Seq(9, 1), Seq("901", "1", "jdbc4"), Seq("build/11", "e0f985a")), _) => + } + } + } + } +} diff --git a/scalalib/test/src/mill/scalalib/GenIdeaTests.scala b/scalalib/test/src/mill/scalalib/GenIdeaTests.scala deleted file mode 100644 index 50db95c0..00000000 --- a/scalalib/test/src/mill/scalalib/GenIdeaTests.scala +++ /dev/null @@ -1,62 +0,0 @@ -package mill.scalalib - -import coursier.Cache -import mill._ -import mill.util.{TestEvaluator, TestUtil} -import utest._ - -object GenIdeaTests extends TestSuite { - - val millSourcePath = os.pwd / 'target / 'workspace / "gen-idea" - - trait HelloWorldModule extends scalalib.ScalaModule { - def scalaVersion = "2.12.4" - def millSourcePath = GenIdeaTests.millSourcePath - object test extends super.Tests { - def testFrameworks = Seq("utest.runner.Framework") - } - } - - object HelloWorld extends TestUtil.BaseModule with HelloWorldModule - - val helloWorldEvaluator = TestEvaluator.static(HelloWorld) - - def tests: Tests = Tests { - 'genIdeaTests - { - val pp = new scala.xml.PrettyPrinter(999, 4) - - val layout = GenIdeaImpl.xmlFileLayout( - helloWorldEvaluator.evaluator, - HelloWorld, - ("JDK_1_8", "1.8 (1)"), fetchMillModules = false) - for((relPath, xml) <- layout){ - os.write.over(millSourcePath/ "generated"/ relPath, pp.format(xml), createFolders = true) - } - - Seq( - "gen-idea/idea_modules/iml" -> - millSourcePath / "generated" / ".idea_modules" /".iml", - "gen-idea/idea_modules/test.iml" -> - millSourcePath / "generated" / ".idea_modules" /"test.iml", - "gen-idea/idea_modules/mill-build.iml" -> - millSourcePath / "generated" / ".idea_modules" /"mill-build.iml", - "gen-idea/idea/libraries/scala-library-2.12.4.jar.xml" -> - millSourcePath / "generated" / ".idea" / "libraries" / "scala-library-2.12.4.jar.xml", - "gen-idea/idea/modules.xml" -> - millSourcePath / "generated" / ".idea" / "modules.xml", - "gen-idea/idea/misc.xml" -> - millSourcePath / "generated" / ".idea" / "misc.xml" - ).foreach { case (resource, generated) => - val resourceString = scala.io.Source.fromResource(resource).getLines().mkString("\n") - val generatedString = normaliseLibraryPaths(os.read(generated)) - - assert(resourceString == generatedString) - } - } - } - - - private def normaliseLibraryPaths(in: String): String = { - in.replaceAll(Cache.default.toPath.toAbsolutePath.toString, "COURSIER_HOME") - } -} diff --git a/scalalib/test/src/mill/scalalib/HelloJavaTests.scala b/scalalib/test/src/mill/scalalib/HelloJavaTests.scala deleted file mode 100644 index 5b7b93b2..00000000 --- a/scalalib/test/src/mill/scalalib/HelloJavaTests.scala +++ /dev/null @@ -1,114 +0,0 @@ -package mill -package scalalib - - -import mill.api.Result -import mill.util.{TestEvaluator, TestUtil} -import utest._ -import utest.framework.TestPath - - -object HelloJavaTests extends TestSuite { - - object HelloJava extends TestUtil.BaseModule{ - 
def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - trait JUnitTests extends TestModule{ - def testFrameworks = Seq("com.novocode.junit.JUnitFramework") - def ivyDeps = Agg(ivy"com.novocode:junit-interface:0.11") - } - - object core extends JavaModule{ - object test extends Tests with JUnitTests - } - object app extends JavaModule{ - def moduleDeps = Seq(core) - object test extends Tests with JUnitTests - } - } - val resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-java" - - def init()(implicit tp: TestPath) = { - val eval = new TestEvaluator(HelloJava) - os.remove.all(HelloJava.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(HelloJava.millSourcePath / os.up) - os.copy(resourcePath, HelloJava.millSourcePath) - eval - } - def tests: Tests = Tests { - 'compile - { - val eval = init() - - val Right((res1, n1)) = eval.apply(HelloJava.core.compile) - val Right((res2, 0)) = eval.apply(HelloJava.core.compile) - val Right((res3, n2)) = eval.apply(HelloJava.app.compile) - - assert( - res1 == res2, - n1 != 0, - n2 != 0, - os.walk(res1.classes.path).exists(_.last == "Core.class"), - !os.walk(res1.classes.path).exists(_.last == "Main.class"), - os.walk(res3.classes.path).exists(_.last == "Main.class"), - !os.walk(res3.classes.path).exists(_.last == "Core.class") - ) - } - 'docJar - { - val eval = init() - - val Right((ref1, _)) = eval.apply(HelloJava.core.docJar) - val Right((ref2, _)) = eval.apply(HelloJava.app.docJar) - - assert( - os.proc("jar", "tf", ref1.path).call().out.lines.contains("hello/Core.html"), - os.proc("jar", "tf", ref2.path).call().out.lines.contains("hello/Main.html") - ) - } - 'test - { - val eval = init() - - val Left(Result.Failure(ref1, Some(v1))) = eval.apply(HelloJava.core.test.test()) - - assert( - v1._2(0).fullyQualifiedName == "hello.MyCoreTests.lengthTest", - v1._2(0).status == "Success", - v1._2(1).fullyQualifiedName == "hello.MyCoreTests.msgTest", - v1._2(1).status == "Failure" - ) - - val Right((v2, _)) = eval.apply(HelloJava.app.test.test()) - - assert( - v2._2(0).fullyQualifiedName == "hello.MyAppTests.appTest", - v2._2(0).status == "Success", - v2._2(1).fullyQualifiedName == "hello.MyAppTests.coreTest", - v2._2(1).status == "Success" - ) - } - 'failures - { - val eval = init() - - val mainJava = HelloJava.millSourcePath / 'app / 'src / 'hello / "Main.java" - val coreJava = HelloJava.millSourcePath / 'core / 'src / 'hello / "Core.java" - - val Right(_) = eval.apply(HelloJava.core.compile) - val Right(_) = eval.apply(HelloJava.app.compile) - - ammonite.ops.write.over(mainJava, ammonite.ops.read(mainJava) + "}") - - val Right(_) = eval.apply(HelloJava.core.compile) - val Left(_) = eval.apply(HelloJava.app.compile) - - ammonite.ops.write.over(coreJava, ammonite.ops.read(coreJava) + "}") - - val Left(_) = eval.apply(HelloJava.core.compile) - val Left(_) = eval.apply(HelloJava.app.compile) - - ammonite.ops.write.over(mainJava, ammonite.ops.read(mainJava).dropRight(1)) - ammonite.ops.write.over(coreJava, ammonite.ops.read(coreJava).dropRight(1)) - - val Right(_) = eval.apply(HelloJava.core.compile) - val Right(_) = eval.apply(HelloJava.app.compile) - } - } -} diff --git a/scalalib/test/src/mill/scalalib/HelloWorldTests.scala b/scalalib/test/src/mill/scalalib/HelloWorldTests.scala deleted file mode 100644 index da08f056..00000000 --- a/scalalib/test/src/mill/scalalib/HelloWorldTests.scala +++ /dev/null @@ -1,934 +0,0 @@ -package mill.scalalib - -import java.io.ByteArrayOutputStream -import java.util.jar.JarFile 
- -import mill._ -import mill.define.Target -import mill.api.Result.Exception -import mill.eval.{Evaluator, Result} -import mill.modules.Assembly -import mill.scalalib.publish._ -import mill.util.{TestEvaluator, TestUtil} -import mill.scalalib.publish.VersionControl -import utest._ -import utest.framework.TestPath - -import scala.collection.JavaConverters._ -import scala.util.Properties.isJavaAtLeast - - -object HelloWorldTests extends TestSuite { - trait HelloBase extends TestUtil.BaseModule{ - def millSourcePath = TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - } - - trait HelloWorldModule extends scalalib.ScalaModule { - def scalaVersion = "2.12.4" - } - - trait HelloWorldModuleWithMain extends HelloWorldModule { - def mainClass = Some("Main") - } - - object HelloWorld extends HelloBase { - object core extends HelloWorldModule - } - object CrossHelloWorld extends HelloBase { - object core extends Cross[HelloWorldCross]("2.10.6", "2.11.11", "2.12.3", "2.12.4", "2.13.0-M3") - class HelloWorldCross(val crossScalaVersion: String) extends CrossScalaModule - } - - object HelloWorldDefaultMain extends HelloBase { - object core extends HelloWorldModule - } - - object HelloWorldWithoutMain extends HelloBase { - object core extends HelloWorldModule{ - def mainClass = None - } - } - - object HelloWorldWithMain extends HelloBase { - object core extends HelloWorldModuleWithMain - } - - val akkaHttpDeps = Agg(ivy"com.typesafe.akka::akka-http:10.0.13") - - object HelloWorldAkkaHttpAppend extends HelloBase { - object core extends HelloWorldModuleWithMain { - def ivyDeps = akkaHttpDeps - - def assemblyRules = Seq(Assembly.Rule.Append("reference.conf")) - } - } - - object HelloWorldAkkaHttpExclude extends HelloBase { - object core extends HelloWorldModuleWithMain { - def ivyDeps = akkaHttpDeps - - def assemblyRules = Seq(Assembly.Rule.Exclude("reference.conf")) - } - } - - object HelloWorldAkkaHttpAppendPattern extends HelloBase { - object core extends HelloWorldModuleWithMain { - def ivyDeps = akkaHttpDeps - - def assemblyRules = Seq(Assembly.Rule.AppendPattern(".*.conf")) - } - } - - object HelloWorldAkkaHttpExcludePattern extends HelloBase { - object core extends HelloWorldModuleWithMain { - def ivyDeps = akkaHttpDeps - - def assemblyRules = Seq(Assembly.Rule.ExcludePattern(".*.conf")) - } - } - - object HelloWorldAkkaHttpNoRules extends HelloBase { - object core extends HelloWorldModuleWithMain { - def ivyDeps = akkaHttpDeps - - def assemblyRules = Seq.empty - } - } - - object HelloWorldMultiAppend extends HelloBase { - object core extends HelloWorldModuleWithMain { - def moduleDeps = Seq(model) - - def assemblyRules = Seq(Assembly.Rule.Append("reference.conf")) - } - object model extends HelloWorldModule - } - - object HelloWorldMultiExclude extends HelloBase { - object core extends HelloWorldModuleWithMain { - def moduleDeps = Seq(model) - - def assemblyRules = Seq(Assembly.Rule.Exclude("reference.conf")) - } - object model extends HelloWorldModule - } - - object HelloWorldMultiAppendPattern extends HelloBase { - object core extends HelloWorldModuleWithMain { - def moduleDeps = Seq(model) - - def assemblyRules = Seq(Assembly.Rule.AppendPattern(".*.conf")) - } - object model extends HelloWorldModule - } - - object HelloWorldMultiExcludePattern extends HelloBase { - object core extends HelloWorldModuleWithMain { - def moduleDeps = Seq(model) - - def assemblyRules = Seq(Assembly.Rule.ExcludePattern(".*.conf")) - } - object model extends HelloWorldModule - } - - object 
HelloWorldMultiNoRules extends HelloBase { - object core extends HelloWorldModuleWithMain { - def moduleDeps = Seq(model) - - def assemblyRules = Seq.empty - } - object model extends HelloWorldModule - } - - object HelloWorldWarnUnused extends HelloBase { - object core extends HelloWorldModule { - def scalacOptions = T(Seq("-Ywarn-unused")) - } - } - - object HelloWorldFatalWarnings extends HelloBase { - object core extends HelloWorldModule { - def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) - } - } - - object HelloWorldWithDocVersion extends HelloBase { - object core extends HelloWorldModule { - def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) - def scalaDocOptions = super.scalaDocOptions() ++ Seq("-doc-version", "1.2.3") - } - } - - object HelloWorldOnlyDocVersion extends HelloBase { - object core extends HelloWorldModule { - def scalacOptions = T(Seq("-Ywarn-unused", "-Xfatal-warnings")) - def scalaDocOptions = T(Seq("-doc-version", "1.2.3")) - } - } - - object HelloWorldDocTitle extends HelloBase { - object core extends HelloWorldModule { - def scalaDocOptions = T(Seq("-doc-title", "Hello World")) - } - } - - object HelloWorldWithPublish extends HelloBase{ - object core extends HelloWorldModule with PublishModule{ - - def artifactName = "hello-world" - def publishVersion = "0.0.1" - - def pomSettings = PomSettings( - organization = "com.lihaoyi", - description = "hello world ready for real world publishing", - url = "https://github.com/lihaoyi/hello-world-publish", - licenses = Seq(License.Common.Apache2), - versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), - developers = - Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) - ) - } - } - - object HelloWorldScalaOverride extends HelloBase{ - object core extends HelloWorldModule { - - override def scalaVersion: Target[String] = "2.11.11" - } - } - - object HelloWorldIvyDeps extends HelloBase{ - object moduleA extends HelloWorldModule { - - override def ivyDeps = Agg(ivy"com.lihaoyi::sourcecode:0.1.3") - } - object moduleB extends HelloWorldModule { - override def moduleDeps = Seq(moduleA) - override def ivyDeps = Agg(ivy"com.lihaoyi::sourcecode:0.1.4") - } - } - - object HelloWorldTypeLevel extends HelloBase{ - object foo extends ScalaModule { - def scalaVersion = "2.11.8" - override def scalaOrganization = "org.typelevel" - - def ivyDeps = Agg( - ivy"com.github.julien-truffaut::monocle-macro::1.4.0" - ) - def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg( - ivy"org.scalamacros:::paradise:2.1.0" - ) - def scalaDocPluginIvyDeps = super.scalaDocPluginIvyDeps() ++ Agg( - ivy"com.typesafe.genjavadoc:::genjavadoc-plugin:0.11" - ) - } - } - - object HelloWorldMacros extends HelloBase{ - object core extends ScalaModule { - def scalaVersion = "2.12.4" - - def ivyDeps = Agg( - ivy"com.github.julien-truffaut::monocle-macro::1.4.0" - ) - def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg( - ivy"org.scalamacros:::paradise:2.1.0" - ) - } - } - - object HelloWorldFlags extends HelloBase{ - object core extends ScalaModule { - def scalaVersion = "2.12.4" - - def scalacOptions = super.scalacOptions() ++ Seq( - "-Ypartial-unification" - ) - } - } - - object HelloScalacheck extends HelloBase{ - object foo extends ScalaModule { - def scalaVersion = "2.12.4" - object test extends Tests { - def ivyDeps = Agg(ivy"org.scalacheck::scalacheck:1.13.5") - def testFrameworks = Seq("org.scalacheck.ScalaCheckFramework") - } - } - } - - object HelloDotty extends HelloBase{ - 
object foo extends ScalaModule { - def scalaVersion = "0.9.0-RC1" - def ivyDeps = Agg(ivy"org.typelevel::cats-core:1.2.0".withDottyCompat(scalaVersion())) - } - } - - val resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" - - def jarMainClass(jar: JarFile): Option[String] = { - import java.util.jar.Attributes._ - val attrs = jar.getManifest.getMainAttributes.asScala - attrs.get(Name.MAIN_CLASS).map(_.asInstanceOf[String]) - } - - def jarEntries(jar: JarFile): Set[String] = { - jar.entries().asScala.map(_.getName).toSet - } - - def readFileFromJar(jar: JarFile, name: String): String = { - val is = jar.getInputStream(jar.getEntry(name)) - val baos = new ByteArrayOutputStream() - os.Internals.transfer(is, baos) - new String(baos.toByteArray) - } - - def compileClassfiles = Seq[os.RelPath]( - "Main.class", - "Main$.class", - "Main0.class", - "Main0$.class", - "Main$delayedInit$body.class", - "Person.class", - "Person$.class" - ) - - def workspaceTest[T](m: TestUtil.BaseModule, resourcePath: os.Path = resourcePath) - (t: TestEvaluator => T) - (implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(m.millSourcePath / os.up) - os.copy(resourcePath, m.millSourcePath) - t(eval) - } - - - - - def tests: Tests = Tests { - 'scalaVersion - { - - 'fromBuild - workspaceTest(HelloWorld){eval => - val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalaVersion) - - assert( - result == "2.12.4", - evalCount > 0 - ) - } - 'override - workspaceTest(HelloWorldScalaOverride){eval => - val Right((result, evalCount)) = eval.apply(HelloWorldScalaOverride.core.scalaVersion) - - assert( - result == "2.11.11", - evalCount > 0 - ) - } - } - - 'scalacOptions - { - 'emptyByDefault - workspaceTest(HelloWorld){eval => - val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalacOptions) - - assert( - result.isEmpty, - evalCount > 0 - ) - } - 'override - workspaceTest(HelloWorldFatalWarnings){ eval => - val Right((result, evalCount)) = eval.apply(HelloWorldFatalWarnings.core.scalacOptions) - - assert( - result == Seq("-Ywarn-unused", "-Xfatal-warnings"), - evalCount > 0 - ) - } - } - - 'scalaDocOptions - { - 'emptyByDefault - workspaceTest(HelloWorld){eval => - val Right((result, evalCount)) = eval.apply(HelloWorld.core.scalaDocOptions) - assert( - result.isEmpty, - evalCount > 0 - ) - } - 'override - workspaceTest(HelloWorldDocTitle){ eval => - val Right((result, evalCount)) = eval.apply(HelloWorldDocTitle.core.scalaDocOptions) - assert( - result == Seq("-doc-title", "Hello World"), - evalCount > 0 - ) - } - 'extend - workspaceTest(HelloWorldWithDocVersion){ eval => - val Right((result, evalCount)) = eval.apply(HelloWorldWithDocVersion.core.scalaDocOptions) - assert( - result == Seq("-Ywarn-unused", "-Xfatal-warnings", "-doc-version", "1.2.3"), - evalCount > 0 - ) - } - // make sure options are passed during ScalaDoc generation - 'docJarWithTitle - workspaceTest( - HelloWorldDocTitle, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldDocTitle.core.docJar) - assert( - evalCount > 0, - os.read(eval.outPath / 'core / 'docJar / 'dest / 'javadoc / "index.html").contains("Hello World") - ) - } - 'docJarWithVersion - workspaceTest( - HelloWorldWithDocVersion, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" - ){ eval => - // scaladoc generation fails because of "-Xfatal-warnings" flag - val 
Left(Result.Failure("docJar generation failed", None)) = eval.apply(HelloWorldWithDocVersion.core.docJar) - } - 'docJarOnlyVersion - workspaceTest( - HelloWorldOnlyDocVersion, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldOnlyDocVersion.core.docJar) - assert( - evalCount > 0, - os.read(eval.outPath / 'core / 'docJar / 'dest / 'javadoc / "index.html").contains("1.2.3") - ) - } - } - - 'scalacPluginClasspath - { - 'withMacroParadise - workspaceTest(HelloWorldTypeLevel){eval => - val Right((result, evalCount)) = eval.apply(HelloWorldTypeLevel.foo.scalacPluginClasspath) - assert( - result.nonEmpty, - result.exists { pathRef => pathRef.path.segments.contains("scalamacros") }, - evalCount > 0 - ) - } - } - - 'scalaDocPluginClasspath - { - 'extend - workspaceTest(HelloWorldTypeLevel){eval => - val Right((result, evalCount)) = eval.apply(HelloWorldTypeLevel.foo.scalaDocPluginClasspath) - assert( - result.nonEmpty, - result.exists { pathRef => pathRef.path.segments.contains("scalamacros") }, - result.exists { pathRef => pathRef.path.segments.contains("genjavadoc") }, - evalCount > 0 - ) - } - } - - 'compile - { - 'fromScratch - workspaceTest(HelloWorld){eval => - val Right((result, evalCount)) = eval.apply(HelloWorld.core.compile) - - val analysisFile = result.analysisFile - val outputFiles = os.walk(result.classes.path) - val expectedClassfiles = compileClassfiles.map( - eval.outPath / 'core / 'compile / 'dest / 'classes / _ - ) - assert( - result.classes.path == eval.outPath / 'core / 'compile / 'dest / 'classes, - os.exists(analysisFile), - outputFiles.nonEmpty, - outputFiles.forall(expectedClassfiles.contains), - evalCount > 0 - ) - - // don't recompile if nothing changed - val Right((_, unchangedEvalCount)) = eval.apply(HelloWorld.core.compile) - - assert(unchangedEvalCount == 0) - } - 'recompileOnChange - workspaceTest(HelloWorld){eval => - val Right((_, freshCount)) = eval.apply(HelloWorld.core.compile) - assert(freshCount > 0) - - os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "\n") - - val Right((_, incCompileCount)) = eval.apply(HelloWorld.core.compile) - assert(incCompileCount > 0, incCompileCount < freshCount) - } - 'failOnError - workspaceTest(HelloWorld){eval => - os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "val x: ") - - val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorld.core.compile) - - - val paths = Evaluator.resolveDestPaths( - eval.outPath, - HelloWorld.core.compile.ctx.segments - ) - - assert( - os.walk(paths.dest / 'classes).isEmpty, - !os.exists(paths.meta) - ) - // Works when fixed - os.write.over( - HelloWorld.millSourcePath / 'core / 'src / "Main.scala", - os.read(HelloWorld.millSourcePath / 'core / 'src / "Main.scala").dropRight("val x: ".length) - ) - - val Right((result, evalCount)) = eval.apply(HelloWorld.core.compile) - } - 'passScalacOptions - workspaceTest(HelloWorldFatalWarnings){ eval => - // compilation fails because of "-Xfatal-warnings" flag - val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorldFatalWarnings.core.compile) - } - } - - 'runMain - { - 'runMainObject - workspaceTest(HelloWorld){eval => - val runResult = eval.outPath / 'core / 'runMain / 'dest / "hello-mill" - - val Right((_, evalCount)) = eval.apply(HelloWorld.core.runMain("Main", runResult.toString)) - assert(evalCount > 0) - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 
25" - ) - } - 'runCross - { - def cross(eval: TestEvaluator, v: String, expectedOut: String) { - - val runResult = eval.outPath / "hello-mill" - - val Right((_, evalCount)) = eval.apply( - CrossHelloWorld.core(v).runMain("Shim", runResult.toString) - ) - - assert(evalCount > 0) - - - assert( - os.exists(runResult), - os.read(runResult) == expectedOut - ) - } - 'v210 - TestUtil.disableInJava9OrAbove(workspaceTest(CrossHelloWorld)(cross(_, "2.10.6", "2.10.6 rox"))) - 'v211 - TestUtil.disableInJava9OrAbove(workspaceTest(CrossHelloWorld)(cross(_, "2.11.11", "2.11.11 pwns"))) - 'v2123 - workspaceTest(CrossHelloWorld)(cross(_, "2.12.3", "2.12.3 leet")) - 'v2124 - workspaceTest(CrossHelloWorld)(cross(_, "2.12.4", "2.12.4 leet")) - 'v2130M3 - workspaceTest(CrossHelloWorld)(cross(_, "2.13.0-M3", "2.13.0-M3 idk")) - } - - - 'notRunInvalidMainObject - workspaceTest(HelloWorld){eval => - val Left(Result.Failure("subprocess failed", _)) = eval.apply(HelloWorld.core.runMain("Invalid")) - } - 'notRunWhenCompileFailed - workspaceTest(HelloWorld){eval => - os.write.append(HelloWorld.millSourcePath / 'core / 'src / "Main.scala", "val x: ") - - val Left(Result.Failure("Compilation failed", _)) = eval.apply(HelloWorld.core.runMain("Main")) - - } - } - - 'forkRun - { - 'runIfMainClassProvided - workspaceTest(HelloWorldWithMain){eval => - val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" - val Right((_, evalCount)) = eval.apply( - HelloWorldWithMain.core.run(runResult.toString) - ) - - assert(evalCount > 0) - - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 25" - ) - } - 'notRunWithoutMainClass - workspaceTest( - HelloWorldWithoutMain, - os.pwd / 'scalalib / 'test / 'resources / "hello-world-no-main" - ){eval => - val Left(Result.Failure(_, None)) = eval.apply(HelloWorldWithoutMain.core.run()) - } - - 'runDiscoverMainClass - workspaceTest(HelloWorldWithoutMain){eval => - // Make sure even if there isn't a main class defined explicitly, it gets - // discovered by Zinc and used - val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" - val Right((_, evalCount)) = eval.apply( - HelloWorldWithoutMain.core.run(runResult.toString) - ) - - assert(evalCount > 0) - - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 25" - ) - } - } - - 'run - { - 'runIfMainClassProvided - workspaceTest(HelloWorldWithMain){eval => - val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" - val Right((_, evalCount)) = eval.apply( - HelloWorldWithMain.core.runLocal(runResult.toString) - ) - - assert(evalCount > 0) - - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 25" - ) - } - 'runWithDefaultMain - workspaceTest(HelloWorldDefaultMain){eval => - val runResult = eval.outPath / 'core / 'run / 'dest / "hello-mill" - val Right((_, evalCount)) = eval.apply( - HelloWorldDefaultMain.core.runLocal(runResult.toString) - ) - - assert(evalCount > 0) - - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 25" - ) - } - 'notRunWithoutMainClass - workspaceTest( - HelloWorldWithoutMain, - os.pwd / 'scalalib / 'test / 'resources / "hello-world-no-main" - ){eval => - val Left(Result.Failure(_, None)) = eval.apply(HelloWorldWithoutMain.core.runLocal()) - - } - } - - 'jar - { - 'nonEmpty - workspaceTest(HelloWorldWithMain){eval => - val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.jar) - - assert( - os.exists(result.path), - evalCount > 0 - ) - - val 
jarFile = new JarFile(result.path.toIO) - val entries = jarFile.entries().asScala.map(_.getName).toSet - - val otherFiles = Seq[os.RelPath]( - os.rel / "META-INF" / "MANIFEST.MF", - "reference.conf" - ) - val expectedFiles = compileClassfiles ++ otherFiles - - assert( - entries.nonEmpty, - entries == expectedFiles.map(_.toString()).toSet - ) - - val mainClass = jarMainClass(jarFile) - assert(mainClass.contains("Main")) - } - - 'logOutputToFile - workspaceTest(HelloWorld){eval => - val outPath = eval.outPath - eval.apply(HelloWorld.core.compile) - - val logFile = outPath / 'core / 'compile / 'log - assert(os.exists(logFile)) - } - } - - 'assembly - { - 'assembly - workspaceTest(HelloWorldWithMain){ eval => - val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.assembly) - assert( - os.exists(result.path), - evalCount > 0 - ) - val jarFile = new JarFile(result.path.toIO) - val entries = jarEntries(jarFile) - - val mainPresent = entries.contains("Main.class") - assert(mainPresent) - assert(entries.exists(s => s.contains("scala/Predef.class"))) - - val mainClass = jarMainClass(jarFile) - assert(mainClass.contains("Main")) - } - - 'assemblyRules - { - def checkAppend[M <: TestUtil.BaseModule](module: M, - target: Target[PathRef]) = - workspaceTest(module) { eval => - val Right((result, _)) = eval.apply(target) - - val jarFile = new JarFile(result.path.toIO) - - assert(jarEntries(jarFile).contains("reference.conf")) - - val referenceContent = readFileFromJar(jarFile, "reference.conf") - - assert( - // akka modules configs are present - referenceContent.contains("akka-http Reference Config File"), - referenceContent.contains("akka-http-core Reference Config File"), - referenceContent.contains("Akka Actor Reference Config File"), - referenceContent.contains("Akka Stream Reference Config File"), - // our application config is present too - referenceContent.contains("My application Reference Config File"), - referenceContent.contains( - """akka.http.client.user-agent-header="hello-world-client"""" - ) - ) - } - - val helloWorldMultiResourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-multi" - - def checkAppendMulti[M <: TestUtil.BaseModule]( - module: M, - target: Target[PathRef]) = - workspaceTest( - module, - resourcePath = helloWorldMultiResourcePath - ) { eval => - val Right((result, _)) = eval.apply(target) - - val jarFile = new JarFile(result.path.toIO) - - assert(jarEntries(jarFile).contains("reference.conf")) - - val referenceContent = readFileFromJar(jarFile, "reference.conf") - - assert( - // reference config from core module - referenceContent.contains("Core Reference Config File"), - // reference config from model module - referenceContent.contains("Model Reference Config File"), - // concatenated content - referenceContent.contains("bar.baz=hello"), - referenceContent.contains("foo.bar=2") - ) - } - - 'appendWithDeps - checkAppend( - HelloWorldAkkaHttpAppend, - HelloWorldAkkaHttpAppend.core.assembly - ) - 'appendMultiModule - checkAppendMulti( - HelloWorldMultiAppend, - HelloWorldMultiAppend.core.assembly - ) - 'appendPatternWithDeps - checkAppend( - HelloWorldAkkaHttpAppendPattern, - HelloWorldAkkaHttpAppendPattern.core.assembly - ) - 'appendPatternMultiModule - checkAppendMulti( - HelloWorldMultiAppendPattern, - HelloWorldMultiAppendPattern.core.assembly - ) - - def checkExclude[M <: TestUtil.BaseModule](module: M, - target: Target[PathRef], - resourcePath: os.Path = resourcePath - ) = - workspaceTest(module, resourcePath) { eval => - val Right((result, 
_)) = eval.apply(target) - - val jarFile = new JarFile(result.path.toIO) - - assert(!jarEntries(jarFile).contains("reference.conf")) - } - - 'excludeWithDeps - checkExclude( - HelloWorldAkkaHttpExclude, - HelloWorldAkkaHttpExclude.core.assembly - ) - 'excludeMultiModule - checkExclude( - HelloWorldMultiExclude, - HelloWorldMultiExclude.core.assembly, - resourcePath = helloWorldMultiResourcePath - - ) - 'excludePatternWithDeps - checkExclude( - HelloWorldAkkaHttpExcludePattern, - HelloWorldAkkaHttpExcludePattern.core.assembly - ) - 'excludePatternMultiModule - checkExclude( - HelloWorldMultiExcludePattern, - HelloWorldMultiExcludePattern.core.assembly, - resourcePath = helloWorldMultiResourcePath - ) - - 'writeFirstWhenNoRule - { - 'withDeps - workspaceTest(HelloWorldAkkaHttpNoRules) { eval => - val Right((result, _)) = eval.apply(HelloWorldAkkaHttpNoRules.core.assembly) - - val jarFile = new JarFile(result.path.toIO) - - assert(jarEntries(jarFile).contains("reference.conf")) - - val referenceContent = readFileFromJar(jarFile, "reference.conf") - - val allOccurrences = Seq( - referenceContent.contains("akka-http Reference Config File"), - referenceContent.contains("akka-http-core Reference Config File"), - referenceContent.contains("Akka Actor Reference Config File"), - referenceContent.contains("Akka Stream Reference Config File"), - referenceContent.contains("My application Reference Config File") - ) - - val timesOcccurres = allOccurrences.find(identity).size - - assert(timesOcccurres == 1) - } - - 'multiModule - workspaceTest( - HelloWorldMultiNoRules, - resourcePath = helloWorldMultiResourcePath - ) { eval => - val Right((result, _)) = eval.apply(HelloWorldMultiNoRules.core.assembly) - - val jarFile = new JarFile(result.path.toIO) - - assert(jarEntries(jarFile).contains("reference.conf")) - - val referenceContent = readFileFromJar(jarFile, "reference.conf") - - assert( - referenceContent.contains("Model Reference Config File"), - referenceContent.contains("foo.bar=2"), - - !referenceContent.contains("Core Reference Config File"), - !referenceContent.contains("bar.baz=hello") - ) - } - } - } - - 'run - workspaceTest(HelloWorldWithMain){eval => - val Right((result, evalCount)) = eval.apply(HelloWorldWithMain.core.assembly) - - assert( - os.exists(result.path), - evalCount > 0 - ) - val runResult = eval.outPath / "hello-mill" - - os.proc("java", "-jar", result.path, runResult).call(cwd = eval.outPath) - - assert( - os.exists(runResult), - os.read(runResult) == "hello rockjam, your age is: 25" - ) - } - } - - 'ivyDeps - workspaceTest(HelloWorldIvyDeps){ eval => - val Right((result, _)) = eval.apply(HelloWorldIvyDeps.moduleA.runClasspath) - assert( - result.exists(_.path.last == "sourcecode_2.12-0.1.3.jar"), - !result.exists(_.path.last == "sourcecode_2.12-0.1.4.jar") - ) - - val Right((result2, _)) = eval.apply(HelloWorldIvyDeps.moduleB.runClasspath) - assert( - result2.exists(_.path.last == "sourcecode_2.12-0.1.4.jar"), - !result2.exists(_.path.last == "sourcecode_2.12-0.1.3.jar") - ) - } - - 'typeLevel - workspaceTest(HelloWorldTypeLevel){ eval => - val classPathsToCheck = Seq( - HelloWorldTypeLevel.foo.runClasspath, - HelloWorldTypeLevel.foo.ammoniteReplClasspath, - HelloWorldTypeLevel.foo.compileClasspath - ) - for(cp <- classPathsToCheck){ - val Right((result, _)) = eval.apply(cp) - assert( - // Make sure every relevant piece org.scala-lang has been substituted for org.typelevel - !result.map(_.toString).exists(x => - x.contains("scala-lang") && - (x.contains("scala-library") || 
x.contains("scala-compiler") || x.contains("scala-reflect")) - ), - result.map(_.toString).exists(x => x.contains("typelevel") && x.contains("scala-library")) - - ) - } - } - - 'macros - { - // make sure macros are applied when compiling/running - 'runMain - workspaceTest( - HelloWorldMacros, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-macros" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldMacros.core.runMain("Main")) - assert(evalCount > 0) - } - // make sure macros are applied when compiling during scaladoc generation - 'docJar - workspaceTest( - HelloWorldMacros, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-macros" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldMacros.core.docJar) - assert(evalCount > 0) - } - } - - 'flags - { - // make sure flags are passed when compiling/running - 'runMain - workspaceTest( - HelloWorldFlags, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-flags" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldFlags.core.runMain("Main")) - assert(evalCount > 0) - } - // make sure flags are passed during ScalaDoc generation - 'docJar - workspaceTest( - HelloWorldFlags, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-world-flags" - ){ eval => - val Right((_, evalCount)) = eval.apply(HelloWorldFlags.core.docJar) - assert(evalCount > 0) - } - } - - 'scalacheck - workspaceTest( - HelloScalacheck, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-scalacheck" - ){ eval => - val Right((res, evalCount)) = eval.apply(HelloScalacheck.foo.test.test()) - assert( - evalCount > 0, - res._2.map(_.selector) == Seq( - "String.startsWith", - "String.endsWith", - "String.substring", - "String.substring" - ) - ) - } - - 'dotty - workspaceTest( - HelloDotty, - resourcePath = os.pwd / 'scalalib / 'test / 'resources / "hello-dotty" - ){ eval => - if (isJavaAtLeast("9")) { - // Skip the test because Dotty does not support Java >= 9 yet - // (see https://github.com/lampepfl/dotty/pull/3138) - } else { - val Right((_, evalCount)) = eval.apply(HelloDotty.foo.run()) - assert(evalCount > 0) - } - } - } -} diff --git a/scalalib/test/src/mill/scalalib/ResolveDepsTests.scala b/scalalib/test/src/mill/scalalib/ResolveDepsTests.scala deleted file mode 100644 index 78361625..00000000 --- a/scalalib/test/src/mill/scalalib/ResolveDepsTests.scala +++ /dev/null @@ -1,77 +0,0 @@ -package mill.scalalib - -import coursier.Cache -import coursier.maven.MavenRepository -import mill.api.Result.{Failure, Success} -import mill.eval.{PathRef, Result} -import mill.util.Loose.Agg -import utest._ - -object ResolveDepsTests extends TestSuite { - val repos = Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")) - - def evalDeps(deps: Agg[Dep]): Result[Agg[PathRef]] = Lib.resolveDependencies( - repos, - Lib.depToDependency(_, "2.12.4", ""), - deps - ) - - val tests = Tests { - 'resolveValidDeps - { - val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3") - val Success(paths) = evalDeps(deps) - assert(paths.nonEmpty) - } - - 'resolveValidDepsWithClassifier - { - val deps = Agg(ivy"org.lwjgl:lwjgl:3.1.1;classifier=natives-macos") - val Success(paths) = evalDeps(deps) - assert(paths.nonEmpty) - assert(paths.items.next.path.toString.contains("natives-macos")) - } - - 'resolveTransitiveRuntimeDeps - { - val deps = Agg(ivy"org.mockito:mockito-core:2.7.22") - val Success(paths) = evalDeps(deps) - assert(paths.nonEmpty) - 
assert(paths.exists(_.path.toString.contains("objenesis"))) - assert(paths.exists(_.path.toString.contains("byte-buddy"))) - } - - 'excludeTransitiveDeps - { - val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".exclude("com.lihaoyi" -> "fansi_2.12")) - val Success(paths) = evalDeps(deps) - assert(!paths.exists(_.path.toString.contains("fansi_2.12"))) - } - - 'excludeTransitiveDepsByOrg - { - val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".excludeOrg("com.lihaoyi")) - val Success(paths) = evalDeps(deps) - assert(!paths.exists(path => path.path.toString.contains("com/lihaoyi") && !path.path.toString.contains("pprint_2.12"))) - } - - 'excludeTransitiveDepsByName - { - val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3".excludeName("fansi_2.12")) - val Success(paths) = evalDeps(deps) - assert(!paths.exists(_.path.toString.contains("fansi_2.12"))) - } - - 'errOnInvalidOrgDeps - { - val deps = Agg(ivy"xxx.yyy.invalid::pprint:0.5.3") - val Failure(errMsg, _) = evalDeps(deps) - assert(errMsg.contains("xxx.yyy.invalid")) - } - - 'errOnInvalidVersionDeps - { - val deps = Agg(ivy"com.lihaoyi::pprint:invalid.version.num") - val Failure(errMsg, _) = evalDeps(deps) - assert(errMsg.contains("invalid.version.num")) - } - - 'errOnPartialSuccess - { - val deps = Agg(ivy"com.lihaoyi::pprint:0.5.3", ivy"fake::fake:fake") - val Failure(errMsg, _) = evalDeps(deps) - assert(errMsg.contains("fake")) - } - } -} diff --git a/scalalib/test/src/mill/scalalib/VersionControlTests.scala b/scalalib/test/src/mill/scalalib/VersionControlTests.scala deleted file mode 100644 index fafdca2d..00000000 --- a/scalalib/test/src/mill/scalalib/VersionControlTests.scala +++ /dev/null @@ -1,74 +0,0 @@ -package mill.scalalib - -import mill.scalalib.publish.{VersionControl, VersionControlConnection} - -import utest._ - -object VersionContolTests extends TestSuite { - - import VersionControl._ - import VersionControlConnection._ - - val tests = Tests { - 'github - { - assert( - github("lihaoyi", "mill") == - VersionControl( - browsableRepository = Some("https://github.com/lihaoyi/mill"), - connection = Some("scm:git:git://github.com/lihaoyi/mill.git"), - developerConnection = Some("scm:git:ssh://git@github.com:lihaoyi/mill.git"), - tag = None - ) - ) - } - 'git - { - assert( - gitGit("example.org", "path.git", port = Some(9418)) == - "scm:git:git://example.org:9418/path.git" - ) - - assert( - gitHttp("example.org") == - "scm:git:http://example.org/" - ) - - assert( - gitHttps("example.org", "path.git") == - "scm:git:https://example.org/path.git" - ) - - assert( - gitSsh("example.org", "path.git") == - "scm:git:ssh://example.org/path.git" - ) - - assert( - gitFile("/home/gui/repos/foo/bare.git") == - "scm:git:file:///home/gui/repos/foo/bare.git" - ) - - } - 'svn - { - assert( - svnSsh("example.org", "repo") == - "scm:svn:svn+ssh://example.org/repo" - ) - assert( - svnHttp("example.org", "repo", Some("user"), Some("pass")) == - "scm:svn:http://user:pass@example.org/repo" - ) - assert( - svnHttps("example.org", "repo", Some("user")) == - "scm:svn:https://user@example.org/repo" - ) - assert( - svnSvn("example.org", "repo", port = Some(3690)) == - "scm:svn:svn://example.org:3690/repo" - ) - assert( - svnFile("/var/svn/repo") == - "scm:svn:file:///var/svn/repo" - ) - } - } -} \ No newline at end of file diff --git a/scalalib/test/src/mill/scalalib/dependency/metadata/MetadataLoaderFactoryTests.scala b/scalalib/test/src/mill/scalalib/dependency/metadata/MetadataLoaderFactoryTests.scala deleted file mode 100644 index 4c2206b8..00000000 --- 
a/scalalib/test/src/mill/scalalib/dependency/metadata/MetadataLoaderFactoryTests.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * This file contains code originally published under the following license: - * - * Copyright (c) 2012, Roman Timushev - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -package mill.scalalib.dependency.metadata - -import coursier.Fetch.Content -import coursier.core.{Artifact, Module, Project, Repository} -import coursier.ivy.IvyRepository -import coursier.maven.MavenRepository -import coursier.util.{EitherT, Monad} -import utest._ - -object MetadataLoaderFactoryTests extends TestSuite { - - val tests = Tests { - 'mavenRepository - { - val mavenRepo = MavenRepository("https://repo1.maven.org/maven2") - assertMatch(MetadataLoaderFactory(mavenRepo)) { - case Some(MavenMetadataLoader(`mavenRepo`)) => - } - } - 'ivyRepository - { - val Right(ivyRepo) = IvyRepository.parse( - "https://dl.bintray.com/sbt/sbt-plugin-releases/" + coursier.ivy.Pattern.default.string, - dropInfoAttributes = true) - assertMatch(MetadataLoaderFactory(ivyRepo)) { case None => } - } - 'otherRepository - { - val otherRepo = new CustomRepository - assertMatch(MetadataLoaderFactory(otherRepo)) { case None => } - } - } - - case class CustomRepository() extends Repository { - override def find[F[_]](module: Module, version: String, fetch: Content[F])( - implicit F: Monad[F]): EitherT[F, String, (Artifact.Source, Project)] = - ??? - } -} diff --git a/scalalib/test/src/mill/scalalib/dependency/updates/UpdatesFinderTests.scala b/scalalib/test/src/mill/scalalib/dependency/updates/UpdatesFinderTests.scala deleted file mode 100644 index 7b6e6e36..00000000 --- a/scalalib/test/src/mill/scalalib/dependency/updates/UpdatesFinderTests.scala +++ /dev/null @@ -1,173 +0,0 @@ -/* - * This file contains code originally published under the following license: - * - * Copyright (c) 2012, Roman Timushev - * All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -package mill.scalalib.dependency.updates - -import mill.scalalib.dependency.versions.{DependencyVersions, Version} -import utest._ - -object UpdatesFinderTests extends TestSuite { - - private def updates(current: String, - available: Seq[String], - allowPreRelease: Boolean) = { - val dependency = coursier.Dependency( - coursier.Module("com.example.organization", "example-artifact"), - current) - val currentVersion = Version(current) - val allVersions = available.map(Version(_)).toSet - - UpdatesFinder - .findUpdates(DependencyVersions(dependency, currentVersion, allVersions), - allowPreRelease) - .updates - .map(_.toString) - } - - val available = Seq( - "0.9.9-SNAPSHOT", - "0.9.9-M3", - "0.9.9", - "1.0.0-SNAPSHOT", - "1.0.0-M2", - "1.0.0-M3", - "1.0.0", - "1.0.1-SNAPSHOT", - "1.0.1-M3", - "1.0.1" - ) - - val tests = Tests { - - 'snapshotArtifacts - { - val u = updates("1.0.0-SNAPSHOT", available, allowPreRelease = false) - val pu = updates("1.0.0-SNAPSHOT", available, allowPreRelease = true) - - 'noOldStableVersions - { - assert(!u.contains("0.9.9")) - } - 'noOldMilestones - { - assert(!u.contains("0.9.9-M3")) - } - 'noOldSnapshots - { - assert(!u.contains("0.9.9-SNAPSHOT")) - } - 'noCurrentMilestones - { - assert(!u.contains("1.0.0-M3")) - } - 'noCurrentSnapshot - { - assert(!u.contains("1.0.0-SNAPSHOT")) - } - 'stableUpdates - { - assert(u.contains("1.0.0") && u.contains("1.0.1")) - } - 'milestoneUpdates - { - assert(u.contains("1.0.1-M3")) - } - 'snapshotUpdates - { - assert(u.contains("1.0.1-SNAPSHOT")) - } - 'noDifferencesRegardingOptionalPreReleases - { - assert(u == pu) - } - } - - 'milestoneArtifacts - { - val u = updates("1.0.0-M2", available, allowPreRelease = false) - val pu = updates("1.0.0-M2", available, allowPreRelease = true) - - 'noOldStableVersions - { - assert(!u.contains("0.9.9")) - } - 'noOldSnapshots - { - assert(!u.contains("0.9.9-SNAPSHOT")) - } - 'noOldMilestones - { - assert(!u.contains("0.9.9-M3")) - } - 'noCurrentSnapshot - { - assert(!u.contains("1.0.0-SNAPSHOT")) - } - 'currentMilestones - { - assert(u.contains("1.0.0-M3")) - } - 'stableUpdates - 
{ - assert(u.contains("1.0.1")) - } - 'noSnapshotUpdates - { - assert(!u.contains("1.0.1-SNAPSHOT")) - } - 'milestoneUpdates - { - assert(u.contains("1.0.1-M3")) - } - 'noDifferencesRegardingOptionalPreReleases - { - assert(u == pu) - } - } - - 'stableArtifacts - { - val u = updates("1.0.0", available, allowPreRelease = false) - val pu = updates("1.0.0", available, allowPreRelease = true) - - 'noOldStableVersions - { - assert(!u.contains("0.9.9")) - assert(!pu.contains("0.9.9")) - } - 'noOldSnapshots - { - assert(!u.contains("0.9.9-SNAPSHOT")) - assert(!pu.contains("0.9.9-SNAPSHOT")) - } - 'noOldMilestones - { - assert(!u.contains("0.9.9-M3")) - assert(!pu.contains("0.9.9-M3")) - } - 'noCurrentSnapshot - { - assert(!u.contains("1.0.0-SNAPSHOT")) - assert(!pu.contains("1.0.0-SNAPSHOT")) - } - 'noCurrentMilestones - { - assert(!u.contains("1.0.0-M3")) - assert(!pu.contains("1.0.0-M3")) - } - 'stableUpdates - { - assert(u.contains("1.0.1")) - assert(pu.contains("1.0.1")) - } - 'noSnapshotUpdates - { - assert(!u.contains("1.0.1-SNAPSHOT")) - assert(!pu.contains("1.0.1-SNAPSHOT")) - } - 'noMilestoneUpdates - { - assert(!u.contains("1.0.1-M3")) - } - 'milestoneUpdatesWhenAllowingPreReleases - { - assert(pu.contains("1.0.1-M3")) - } - } - } -} diff --git a/scalalib/test/src/mill/scalalib/dependency/versions/VersionTests.scala b/scalalib/test/src/mill/scalalib/dependency/versions/VersionTests.scala deleted file mode 100644 index b916c86f..00000000 --- a/scalalib/test/src/mill/scalalib/dependency/versions/VersionTests.scala +++ /dev/null @@ -1,138 +0,0 @@ -/* - * This file contains code originally published under the following license: - * - * Copyright (c) 2012, Roman Timushev - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY - * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ -package mill.scalalib.dependency.versions - -import utest._ -import fastparse.Parsed - -object VersionTests extends TestSuite { - - val tests = Tests { - 'versionsClassification - { - 'ReleaseVersion - { - List("1.0.0", "1.0.0.Final", "1.0.0-FINAL", "1.0.0.RELEASE") foreach { - rel => - assertMatch(Version(rel)) { - case ReleaseVersion(List(1, 0, 0)) => - } - } - } - 'PreReleaseVersion - { - assertMatch(Version("1.0.0-alpha.1")) { - case PreReleaseVersion(List(1, 0, 0), List("alpha", "1")) => - } - } - 'PreReleaseBuildVersion - { - assertMatch(Version("1.0.0-alpha.1+build.10")) { - case PreReleaseBuildVersion(List(1, 0, 0), - List("alpha", "1"), - List("build", "10")) => - } - } - 'BuildVersion - { - assertMatch(Version("1.0.0+build.10")) { - case BuildVersion(List(1, 0, 0), List("build", "10")) => - } - } - } - - 'semverVersionsOrdering - { - import scala.Ordered._ - - val v = List( - "invalid", - "1.0.0-20131213005945", - "1.0.0-alpha", - "1.0.0-alpha.1", - "1.0.0-beta.2", - "1.0.0-beta.11", - "1.0.0-rc.1", - "1.0.0-rc.1+build.1", - "1.0.0", - "1.0.0+0.3.7", - "1.33.7+build", - "1.33.7+build.2.b8f12d7", - "1.33.7+build.11.e0f985a", - "2.0.M5b", - "2.0.M6-SNAP9", - "2.0.M6-SNAP23", - "2.0.M6-SNAP23a" - ).map(Version.apply) - val pairs = v.tails.flatMap { - case h :: t => t.map((h, _)) - case Nil => List.empty - } - pairs.foreach { - case (a, b) => - assert(a < b) - assert(b > a) - } - } - - 'parser - { - - Symbol("parse 1.0.5") - { - assertMatch(VersionParser.parse("1.0.5")) { - case Parsed.Success((Seq(1, 0, 5), Seq(), Seq()), _) => - } - } - - Symbol("parse 1.0.M3") - { - assertMatch(VersionParser.parse("1.0.M3")) { - case Parsed.Success((Seq(1, 0), Seq("M3"), Seq()), _) => - } - } - Symbol("parse 1.0.3m") - { - assertMatch(VersionParser.parse("1.0.3m")) { - case Parsed.Success((Seq(1, 0), Seq("3m"), Seq()), _) => - } - } - Symbol("parse 1.0.3m.4") - { - assertMatch(VersionParser.parse("1.0.3m.4")) { - case Parsed.Success((Seq(1, 0), Seq("3m", "4"), Seq()), _) => - } - } - Symbol("parse 9.1-901-1.jdbc4") - { - assertMatch(VersionParser.parse("9.1-901-1.jdbc4")) { - case Parsed.Success((Seq(9, 1), Seq("901", "1", "jdbc4"), Seq()), _) => - } - } - Symbol("parse 1.33.7+build/11.e0f985a") - { - assertMatch(VersionParser.parse("1.33.7+build/11.e0f985a")) { - case Parsed.Success((Seq(1, 33, 7), Seq(), Seq("build/11", "e0f985a")), _) => - } - } - Symbol("parse 9.1-901-1.jdbc4+build/11.e0f985a") - { - assertMatch(VersionParser.parse("9.1-901-1.jdbc4+build/11.e0f985a")) { - case Parsed.Success((Seq(9, 1), Seq("901", "1", "jdbc4"), Seq("build/11", "e0f985a")), _) => - } - } - } - } -} diff --git a/scalalib/test/src/mill/scalalib/publish/IvyTests.scala b/scalalib/test/src/mill/scalalib/publish/IvyTests.scala deleted file mode 100644 index d187f969..00000000 --- a/scalalib/test/src/mill/scalalib/publish/IvyTests.scala +++ /dev/null @@ -1,60 +0,0 @@ -package mill.scalalib.publish - -import utest._ -import mill._ - -import scala.xml.{Node, NodeSeq, XML} - -object IvyTests extends TestSuite { - - def tests: Tests = Tests { - val artifactId = "mill-scalalib_2.12" - val artifact = - Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1") - val deps = Agg( - Dependency(Artifact("com.lihaoyi", "mill-main_2.12", "0.1.4"), - Scope.Compile), - Dependency(Artifact("org.scala-sbt", "test-interface", "1.0"), - Scope.Compile), - Dependency(Artifact("com.lihaoyi", "pprint_2.12", "0.5.3"), - Scope.Compile, exclusions = List("com.lihaoyi" -> "fansi_2.12", "*" -> "sourcecode_2.12")) - ) - - 'fullIvy - { - val 
fullIvy = XML.loadString(Ivy(artifact, deps)) - - 'topLevel - { - val info = singleNode(fullIvy \ "info") - assert( - singleAttr(info, "organisation") == artifact.group, - singleAttr(info, "module") == artifact.id, - singleAttr(info, "revision") == artifact.version - ) - } - - 'dependencies - { - val dependencies = fullIvy \ "dependencies" \ "dependency" - val ivyDeps = deps.indexed - - assert(dependencies.size == ivyDeps.size) - - dependencies.zipWithIndex.foreach { case (dep, index) => - assert( - singleAttr(dep, "org") == ivyDeps(index).artifact.group, - singleAttr(dep, "name") == ivyDeps(index).artifact.id, - singleAttr(dep, "rev") == ivyDeps(index).artifact.version, - (dep \ "exclude").zipWithIndex forall { case (exclude, j) => - singleAttr(exclude, "org") == ivyDeps(index).exclusions(j)._1 && - singleAttr(exclude, "name") == ivyDeps(index).exclusions(j)._2 - } - ) - } - } - } - } - - def singleNode(seq: NodeSeq): Node = - seq.headOption.getOrElse(throw new RuntimeException("empty seq")) - def singleAttr(node: Node, attr: String): String = - node.attribute(attr).flatMap(_.headOption.map(_.text)).getOrElse(throw new RuntimeException(s"empty attr $attr")) -} diff --git a/scalalib/test/src/mill/scalalib/publish/PomTests.scala b/scalalib/test/src/mill/scalalib/publish/PomTests.scala deleted file mode 100644 index 307ae379..00000000 --- a/scalalib/test/src/mill/scalalib/publish/PomTests.scala +++ /dev/null @@ -1,205 +0,0 @@ -package mill.scalalib.publish - -import utest._ -import mill._ - -import scala.xml.{NodeSeq, XML} - -object PomTests extends TestSuite { - - def tests: Tests = Tests { - val artifactId = "mill-scalalib_2.12" - val artifact = - Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1") - val deps = Agg( - Dependency(Artifact("com.lihaoyi", "mill-main_2.12", "0.1.4"), - Scope.Compile), - Dependency(Artifact("org.scala-sbt", "test-interface", "1.0"), - Scope.Compile), - Dependency(Artifact("com.lihaoyi", "pprint_2.12", "0.5.3"), - Scope.Compile, exclusions = List("com.lihaoyi" -> "fansi_2.12", "*" -> "sourcecode_2.12")) - ) - val settings = PomSettings( - description = "mill-scalalib", - organization = "com.lihaoyi", - url = "https://github.com/lihaoyi/mill", - licenses = Seq(License.`MIT`), - versionControl = VersionControl.github("lihaoyi", "mill"), - developers = List( - Developer("lihaoyi", - "Li Haoyi", - "https://github.com/lihaoyi", - None, - None), - Developer("rockjam", - "Nikolai Tatarinov", - "https://github.com/rockjam", - Some("80pct done Inc."), - Some("https://80pctdone.com/")) - ) - ) - - 'fullPom - { - val fullPom = pomXml(artifact, deps, artifactId, settings) - - 'topLevel - { - assert( - singleText(fullPom \ "modelVersion") == "4.0.0", - singleText(fullPom \ "name") == artifactId, - singleText(fullPom \ "groupId") == artifact.group, - singleText(fullPom \ "artifactId") == artifact.id, - singleText(fullPom \ "packaging") == "jar", - singleText(fullPom \ "description") == settings.description, - singleText(fullPom \ "version") == artifact.version, - singleText(fullPom \ "url") == settings.url - ) - } - - 'licenses - { - val licenses = fullPom \ "licenses" \ "license" - - assert(licenses.size == 1) - - val license = licenses.head - val pomLicense = settings.licenses.head - assert( - singleText(license \ "name") == pomLicense.name, - singleText(license \ "url") == pomLicense.url, - singleText(license \ "distribution") == pomLicense.distribution - ) - } - - 'scm - { - val scm = (fullPom \ "scm").head - val pomScm = settings.versionControl - - assert( - 
optText(scm \ "connection") == pomScm.connection, - optText(scm \ "developerConnection") == pomScm.developerConnection, - optText(scm \ "tag").isEmpty, - optText(scm \ "url") == pomScm.browsableRepository - ) - } - - 'developers - { - val developers = fullPom \ "developers" \ "developer" - - assert(developers.size == 2) - - val pomDevelopers = settings.developers - - assert( - singleText(developers.head \ "id") == pomDevelopers.head.id, - singleText(developers.head \ "name") == pomDevelopers.head.name, - optText(developers.head \ "organization").isEmpty, - optText(developers.head \ "organizationUrl").isEmpty - ) - - assert( - singleText(developers.last \ "id") == pomDevelopers.last.id, - singleText(developers.last \ "name") == pomDevelopers.last.name, - optText(developers.last \ "organization") == pomDevelopers.last.organization, - optText(developers.last \ "organizationUrl") == pomDevelopers.last.organizationUrl - ) - } - - 'dependencies - { - val dependencies = fullPom \ "dependencies" \ "dependency" - - assert(dependencies.size == 3) - - val pomDeps = deps.indexed - - dependencies.zipWithIndex.foreach { - case (dep, index) => - assert( - singleText(dep \ "groupId") == pomDeps(index).artifact.group, - singleText(dep \ "artifactId") == pomDeps(index).artifact.id, - singleText(dep \ "version") == pomDeps(index).artifact.version, - optText(dep \ "scope").isEmpty, - (dep \ "exclusions").zipWithIndex.forall { case (node, j) => - singleText(node \ "exclusion" \ "groupId") == pomDeps(index).exclusions(j)._1 && - singleText(node \ "exclusion" \ "artifactId") == pomDeps(index).exclusions(j)._2 - } - ) - } - } - } - - 'pomEmptyScm - { - val updatedSettings = settings.copy( - versionControl = VersionControl( - browsableRepository = Some("git://github.com/lihaoyi/mill.git"), - connection = None, - developerConnection = None, - tag = None - )) - val pomEmptyScm = pomXml(artifact, deps, artifactId, updatedSettings) - - 'scm - { - val scm = (pomEmptyScm \ "scm").head - val pomScm = updatedSettings.versionControl - - assert( - optText(scm \ "connection").isEmpty, - optText(scm \ "developerConnection").isEmpty, - optText(scm \ "tag").isEmpty, - optText(scm \ "url") == pomScm.browsableRepository - ) - } - } - - 'pomNoLicenses - { - val updatedSettings = settings.copy(licenses = Seq.empty) - val pomNoLicenses = pomXml(artifact, deps, artifactId, updatedSettings) - - 'licenses - { - assert( - (pomNoLicenses \ "licenses").nonEmpty, - (pomNoLicenses \ "licenses" \ "licenses").isEmpty - ) - } - } - - 'pomNoDeps - { - val pomNoDeps = pomXml(artifact, - dependencies = Agg.empty, - artifactId = artifactId, - pomSettings = settings) - - 'dependencies - { - assert( - (pomNoDeps \ "dependencies").nonEmpty, - (pomNoDeps \ "dependencies" \ "dependency").isEmpty - ) - } - } - - 'pomNoDevelopers - { - val updatedSettings = settings.copy(developers = Seq.empty) - val pomNoDevelopers = pomXml(artifact, deps, artifactId, updatedSettings) - - 'developers - { - assert( - (pomNoDevelopers \ "developers").nonEmpty, - (pomNoDevelopers \ "developers" \ "developer").isEmpty - ) - } - } - } - - def pomXml(artifact: Artifact, - dependencies: Agg[Dependency], - artifactId: String, - pomSettings: PomSettings) = - XML.loadString(Pom(artifact, dependencies, artifactId, pomSettings)) - - def singleText(seq: NodeSeq) = - seq - .map(_.text) - .headOption - .getOrElse(throw new RuntimeException("seq was empty")) - - def optText(seq: NodeSeq) = seq.map(_.text).headOption - -} diff --git 
a/scalalib/test/src/mill/scalalib/scalafmt/ScalafmtTests.scala b/scalalib/test/src/mill/scalalib/scalafmt/ScalafmtTests.scala deleted file mode 100644 index dcbdb769..00000000 --- a/scalalib/test/src/mill/scalalib/scalafmt/ScalafmtTests.scala +++ /dev/null @@ -1,104 +0,0 @@ -package mill.scalalib.scalafmt - -import mill.main.Tasks -import mill.scalalib.ScalaModule -import mill.util.{TestEvaluator, TestUtil} -import utest._ -import utest.framework.TestPath - -object ScalafmtTests extends TestSuite { - - trait TestBase extends TestUtil.BaseModule { - def millSourcePath = - TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') - } - - object ScalafmtTestModule extends TestBase { - object core extends ScalaModule with ScalafmtModule { - def scalaVersion = "2.12.4" - } - } - - val resourcePath = os.pwd / 'scalalib / 'test / 'resources / 'scalafmt - - def workspaceTest[T]( - m: TestUtil.BaseModule, - resourcePath: os.Path = resourcePath)(t: TestEvaluator => T)( - implicit tp: TestPath): T = { - val eval = new TestEvaluator(m) - os.remove.all(m.millSourcePath) - os.remove.all(eval.outPath) - os.makeDir.all(m.millSourcePath / os.up) - os.copy(resourcePath, m.millSourcePath) - t(eval) - } - - def tests: Tests = Tests { - 'scalafmt - { - def checkReformat(reformatCommand: mill.define.Command[Unit]) = - workspaceTest(ScalafmtTestModule) { eval => - val before = getProjectFiles(ScalafmtTestModule.core, eval) - - // first reformat - val Right(_) = eval.apply(reformatCommand) - - val firstReformat = getProjectFiles(ScalafmtTestModule.core, eval) - - assert( - firstReformat("Main.scala").modifyTime > before("Main.scala").modifyTime, - firstReformat("Main.scala").content != before("Main.scala").content, - firstReformat("Person.scala").modifyTime > before("Person.scala").modifyTime, - firstReformat("Person.scala").content != before("Person.scala").content, - // resources files aren't modified - firstReformat("application.conf").modifyTime == before( - "application.conf").modifyTime - ) - - // cached reformat - val Right(_) = eval.apply(reformatCommand) - - val cached = getProjectFiles(ScalafmtTestModule.core, eval) - - assert( - cached("Main.scala").modifyTime == firstReformat("Main.scala").modifyTime, - cached("Person.scala").modifyTime == firstReformat("Person.scala").modifyTime, - cached("application.conf").modifyTime == firstReformat( - "application.conf").modifyTime - ) - - // reformat after change - os.write.over(cached("Main.scala").path, - cached("Main.scala").content + "\n object Foo") - - val Right(_) = eval.apply(reformatCommand) - - val afterChange = getProjectFiles(ScalafmtTestModule.core, eval) - - assert( - afterChange("Main.scala").modifyTime > cached("Main.scala").modifyTime, - afterChange("Person.scala").modifyTime == cached("Person.scala").modifyTime, - afterChange("application.conf").modifyTime == cached( - "application.conf").modifyTime - ) - } - - 'reformat - checkReformat(ScalafmtTestModule.core.reformat()) - 'reformatAll - checkReformat( - ScalafmtModule.reformatAll(Tasks(Seq(ScalafmtTestModule.core.sources)))) - } - } - - case class FileInfo(content: String, modifyTime: Long, path: os.Path) - - def getProjectFiles(m: ScalaModule, eval: TestEvaluator) = { - val Right((sources, _)) = eval.apply(m.sources) - val Right((resources, _)) = eval.apply(m.resources) - - val sourcesFiles = sources.flatMap(p => os.walk(p.path)) - val resourcesFiles = resources.flatMap(p => os.walk(p.path)) - (sourcesFiles ++ resourcesFiles).map { p => - p.last -> FileInfo(os.read(p), os.mtime(p), p) - 
}.toMap - } - -} diff --git a/scalalib/test/src/publish/IvyTests.scala b/scalalib/test/src/publish/IvyTests.scala new file mode 100644 index 00000000..d187f969 --- /dev/null +++ b/scalalib/test/src/publish/IvyTests.scala @@ -0,0 +1,60 @@ +package mill.scalalib.publish + +import utest._ +import mill._ + +import scala.xml.{Node, NodeSeq, XML} + +object IvyTests extends TestSuite { + + def tests: Tests = Tests { + val artifactId = "mill-scalalib_2.12" + val artifact = + Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1") + val deps = Agg( + Dependency(Artifact("com.lihaoyi", "mill-main_2.12", "0.1.4"), + Scope.Compile), + Dependency(Artifact("org.scala-sbt", "test-interface", "1.0"), + Scope.Compile), + Dependency(Artifact("com.lihaoyi", "pprint_2.12", "0.5.3"), + Scope.Compile, exclusions = List("com.lihaoyi" -> "fansi_2.12", "*" -> "sourcecode_2.12")) + ) + + 'fullIvy - { + val fullIvy = XML.loadString(Ivy(artifact, deps)) + + 'topLevel - { + val info = singleNode(fullIvy \ "info") + assert( + singleAttr(info, "organisation") == artifact.group, + singleAttr(info, "module") == artifact.id, + singleAttr(info, "revision") == artifact.version + ) + } + + 'dependencies - { + val dependencies = fullIvy \ "dependencies" \ "dependency" + val ivyDeps = deps.indexed + + assert(dependencies.size == ivyDeps.size) + + dependencies.zipWithIndex.foreach { case (dep, index) => + assert( + singleAttr(dep, "org") == ivyDeps(index).artifact.group, + singleAttr(dep, "name") == ivyDeps(index).artifact.id, + singleAttr(dep, "rev") == ivyDeps(index).artifact.version, + (dep \ "exclude").zipWithIndex forall { case (exclude, j) => + singleAttr(exclude, "org") == ivyDeps(index).exclusions(j)._1 && + singleAttr(exclude, "name") == ivyDeps(index).exclusions(j)._2 + } + ) + } + } + } + } + + def singleNode(seq: NodeSeq): Node = + seq.headOption.getOrElse(throw new RuntimeException("empty seq")) + def singleAttr(node: Node, attr: String): String = + node.attribute(attr).flatMap(_.headOption.map(_.text)).getOrElse(throw new RuntimeException(s"empty attr $attr")) +} diff --git a/scalalib/test/src/publish/PomTests.scala b/scalalib/test/src/publish/PomTests.scala new file mode 100644 index 00000000..307ae379 --- /dev/null +++ b/scalalib/test/src/publish/PomTests.scala @@ -0,0 +1,205 @@ +package mill.scalalib.publish + +import utest._ +import mill._ + +import scala.xml.{NodeSeq, XML} + +object PomTests extends TestSuite { + + def tests: Tests = Tests { + val artifactId = "mill-scalalib_2.12" + val artifact = + Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1") + val deps = Agg( + Dependency(Artifact("com.lihaoyi", "mill-main_2.12", "0.1.4"), + Scope.Compile), + Dependency(Artifact("org.scala-sbt", "test-interface", "1.0"), + Scope.Compile), + Dependency(Artifact("com.lihaoyi", "pprint_2.12", "0.5.3"), + Scope.Compile, exclusions = List("com.lihaoyi" -> "fansi_2.12", "*" -> "sourcecode_2.12")) + ) + val settings = PomSettings( + description = "mill-scalalib", + organization = "com.lihaoyi", + url = "https://github.com/lihaoyi/mill", + licenses = Seq(License.`MIT`), + versionControl = VersionControl.github("lihaoyi", "mill"), + developers = List( + Developer("lihaoyi", + "Li Haoyi", + "https://github.com/lihaoyi", + None, + None), + Developer("rockjam", + "Nikolai Tatarinov", + "https://github.com/rockjam", + Some("80pct done Inc."), + Some("https://80pctdone.com/")) + ) + ) + + 'fullPom - { + val fullPom = pomXml(artifact, deps, artifactId, settings) + + 'topLevel - { + assert( + singleText(fullPom \ "modelVersion") == 
"4.0.0", + singleText(fullPom \ "name") == artifactId, + singleText(fullPom \ "groupId") == artifact.group, + singleText(fullPom \ "artifactId") == artifact.id, + singleText(fullPom \ "packaging") == "jar", + singleText(fullPom \ "description") == settings.description, + singleText(fullPom \ "version") == artifact.version, + singleText(fullPom \ "url") == settings.url + ) + } + + 'licenses - { + val licenses = fullPom \ "licenses" \ "license" + + assert(licenses.size == 1) + + val license = licenses.head + val pomLicense = settings.licenses.head + assert( + singleText(license \ "name") == pomLicense.name, + singleText(license \ "url") == pomLicense.url, + singleText(license \ "distribution") == pomLicense.distribution + ) + } + + 'scm - { + val scm = (fullPom \ "scm").head + val pomScm = settings.versionControl + + assert( + optText(scm \ "connection") == pomScm.connection, + optText(scm \ "developerConnection") == pomScm.developerConnection, + optText(scm \ "tag").isEmpty, + optText(scm \ "url") == pomScm.browsableRepository + ) + } + + 'developers - { + val developers = fullPom \ "developers" \ "developer" + + assert(developers.size == 2) + + val pomDevelopers = settings.developers + + assert( + singleText(developers.head \ "id") == pomDevelopers.head.id, + singleText(developers.head \ "name") == pomDevelopers.head.name, + optText(developers.head \ "organization").isEmpty, + optText(developers.head \ "organizationUrl").isEmpty + ) + + assert( + singleText(developers.last \ "id") == pomDevelopers.last.id, + singleText(developers.last \ "name") == pomDevelopers.last.name, + optText(developers.last \ "organization") == pomDevelopers.last.organization, + optText(developers.last \ "organizationUrl") == pomDevelopers.last.organizationUrl + ) + } + + 'dependencies - { + val dependencies = fullPom \ "dependencies" \ "dependency" + + assert(dependencies.size == 3) + + val pomDeps = deps.indexed + + dependencies.zipWithIndex.foreach { + case (dep, index) => + assert( + singleText(dep \ "groupId") == pomDeps(index).artifact.group, + singleText(dep \ "artifactId") == pomDeps(index).artifact.id, + singleText(dep \ "version") == pomDeps(index).artifact.version, + optText(dep \ "scope").isEmpty, + (dep \ "exclusions").zipWithIndex.forall { case (node, j) => + singleText(node \ "exclusion" \ "groupId") == pomDeps(index).exclusions(j)._1 && + singleText(node \ "exclusion" \ "artifactId") == pomDeps(index).exclusions(j)._2 + } + ) + } + } + } + + 'pomEmptyScm - { + val updatedSettings = settings.copy( + versionControl = VersionControl( + browsableRepository = Some("git://github.com/lihaoyi/mill.git"), + connection = None, + developerConnection = None, + tag = None + )) + val pomEmptyScm = pomXml(artifact, deps, artifactId, updatedSettings) + + 'scm - { + val scm = (pomEmptyScm \ "scm").head + val pomScm = updatedSettings.versionControl + + assert( + optText(scm \ "connection").isEmpty, + optText(scm \ "developerConnection").isEmpty, + optText(scm \ "tag").isEmpty, + optText(scm \ "url") == pomScm.browsableRepository + ) + } + } + + 'pomNoLicenses - { + val updatedSettings = settings.copy(licenses = Seq.empty) + val pomNoLicenses = pomXml(artifact, deps, artifactId, updatedSettings) + + 'licenses - { + assert( + (pomNoLicenses \ "licenses").nonEmpty, + (pomNoLicenses \ "licenses" \ "licenses").isEmpty + ) + } + } + + 'pomNoDeps - { + val pomNoDeps = pomXml(artifact, + dependencies = Agg.empty, + artifactId = artifactId, + pomSettings = settings) + + 'dependencies - { + assert( + (pomNoDeps \ 
"dependencies").nonEmpty, + (pomNoDeps \ "dependencies" \ "dependency").isEmpty + ) + } + } + + 'pomNoDevelopers - { + val updatedSettings = settings.copy(developers = Seq.empty) + val pomNoDevelopers = pomXml(artifact, deps, artifactId, updatedSettings) + + 'developers - { + assert( + (pomNoDevelopers \ "developers").nonEmpty, + (pomNoDevelopers \ "developers" \ "developer").isEmpty + ) + } + } + } + + def pomXml(artifact: Artifact, + dependencies: Agg[Dependency], + artifactId: String, + pomSettings: PomSettings) = + XML.loadString(Pom(artifact, dependencies, artifactId, pomSettings)) + + def singleText(seq: NodeSeq) = + seq + .map(_.text) + .headOption + .getOrElse(throw new RuntimeException("seq was empty")) + + def optText(seq: NodeSeq) = seq.map(_.text).headOption + +} diff --git a/scalalib/test/src/scalafmt/ScalafmtTests.scala b/scalalib/test/src/scalafmt/ScalafmtTests.scala new file mode 100644 index 00000000..dcbdb769 --- /dev/null +++ b/scalalib/test/src/scalafmt/ScalafmtTests.scala @@ -0,0 +1,104 @@ +package mill.scalalib.scalafmt + +import mill.main.Tasks +import mill.scalalib.ScalaModule +import mill.util.{TestEvaluator, TestUtil} +import utest._ +import utest.framework.TestPath + +object ScalafmtTests extends TestSuite { + + trait TestBase extends TestUtil.BaseModule { + def millSourcePath = + TestUtil.getSrcPathBase() / millOuterCtx.enclosing.split('.') + } + + object ScalafmtTestModule extends TestBase { + object core extends ScalaModule with ScalafmtModule { + def scalaVersion = "2.12.4" + } + } + + val resourcePath = os.pwd / 'scalalib / 'test / 'resources / 'scalafmt + + def workspaceTest[T]( + m: TestUtil.BaseModule, + resourcePath: os.Path = resourcePath)(t: TestEvaluator => T)( + implicit tp: TestPath): T = { + val eval = new TestEvaluator(m) + os.remove.all(m.millSourcePath) + os.remove.all(eval.outPath) + os.makeDir.all(m.millSourcePath / os.up) + os.copy(resourcePath, m.millSourcePath) + t(eval) + } + + def tests: Tests = Tests { + 'scalafmt - { + def checkReformat(reformatCommand: mill.define.Command[Unit]) = + workspaceTest(ScalafmtTestModule) { eval => + val before = getProjectFiles(ScalafmtTestModule.core, eval) + + // first reformat + val Right(_) = eval.apply(reformatCommand) + + val firstReformat = getProjectFiles(ScalafmtTestModule.core, eval) + + assert( + firstReformat("Main.scala").modifyTime > before("Main.scala").modifyTime, + firstReformat("Main.scala").content != before("Main.scala").content, + firstReformat("Person.scala").modifyTime > before("Person.scala").modifyTime, + firstReformat("Person.scala").content != before("Person.scala").content, + // resources files aren't modified + firstReformat("application.conf").modifyTime == before( + "application.conf").modifyTime + ) + + // cached reformat + val Right(_) = eval.apply(reformatCommand) + + val cached = getProjectFiles(ScalafmtTestModule.core, eval) + + assert( + cached("Main.scala").modifyTime == firstReformat("Main.scala").modifyTime, + cached("Person.scala").modifyTime == firstReformat("Person.scala").modifyTime, + cached("application.conf").modifyTime == firstReformat( + "application.conf").modifyTime + ) + + // reformat after change + os.write.over(cached("Main.scala").path, + cached("Main.scala").content + "\n object Foo") + + val Right(_) = eval.apply(reformatCommand) + + val afterChange = getProjectFiles(ScalafmtTestModule.core, eval) + + assert( + afterChange("Main.scala").modifyTime > cached("Main.scala").modifyTime, + afterChange("Person.scala").modifyTime == 
cached("Person.scala").modifyTime, + afterChange("application.conf").modifyTime == cached( + "application.conf").modifyTime + ) + } + + 'reformat - checkReformat(ScalafmtTestModule.core.reformat()) + 'reformatAll - checkReformat( + ScalafmtModule.reformatAll(Tasks(Seq(ScalafmtTestModule.core.sources)))) + } + } + + case class FileInfo(content: String, modifyTime: Long, path: os.Path) + + def getProjectFiles(m: ScalaModule, eval: TestEvaluator) = { + val Right((sources, _)) = eval.apply(m.sources) + val Right((resources, _)) = eval.apply(m.resources) + + val sourcesFiles = sources.flatMap(p => os.walk(p.path)) + val resourcesFiles = resources.flatMap(p => os.walk(p.path)) + (sourcesFiles ++ resourcesFiles).map { p => + p.last -> FileInfo(os.read(p), os.mtime(p), p) + }.toMap + } + +} diff --git a/scalalib/worker/src/ZincWorkerImpl.scala b/scalalib/worker/src/ZincWorkerImpl.scala new file mode 100644 index 00000000..705d4682 --- /dev/null +++ b/scalalib/worker/src/ZincWorkerImpl.scala @@ -0,0 +1,284 @@ +package mill.scalalib.worker + +import java.io.File +import java.util.Optional + +import mill.api.Loose.Agg +import mill.api.PathRef +import xsbti.compile.{CompilerCache => _, FileAnalysisStore => _, ScalaInstance => _, _} +import mill.scalalib.api.Util.{isDotty, grepJar, scalaBinaryVersion} +import sbt.internal.inc._ +import sbt.internal.util.{ConsoleOut, MainAppender} +import sbt.util.LogExchange +import mill.scalalib.api.CompilationResult +case class MockedLookup(am: File => Optional[CompileAnalysis]) extends PerClasspathEntryLookup { + override def analysis(classpathEntry: File): Optional[CompileAnalysis] = + am(classpathEntry) + + override def definesClass(classpathEntry: File): DefinesClass = + Locate.definesClass(classpathEntry) +} + +class ZincWorkerImpl(ctx0: mill.api.Ctx, + compilerBridgeClasspath: Array[String]) extends mill.scalalib.api.ZincWorkerApi{ + private val ic = new sbt.internal.inc.IncrementalCompilerImpl() + val javaOnlyCompilers = { + // Keep the classpath as written by the user + val classpathOptions = ClasspathOptions.of(false, false, false, false, false) + + val dummyFile = new java.io.File("") + // Zinc does not have an entry point for Java-only compilation, so we need + // to make up a dummy ScalaCompiler instance. + val scalac = ZincUtil.scalaCompiler( + new ScalaInstance("", null, null, dummyFile, dummyFile, new Array(0), Some("")), null, + classpathOptions // this is used for javac too + ) + + ic.compilers( + instance = null, + classpathOptions, + None, + scalac + ) + } + + @volatile var mixedCompilersCache = Option.empty[(Long, Compilers)] + + def docJar(scalaVersion: String, + compilerBridgeSources: os.Path, + compilerClasspath: Agg[os.Path], + scalacPluginClasspath: Agg[os.Path], + args: Seq[String]) + (implicit ctx: mill.api.Ctx): Boolean = { + val compilers: Compilers = prepareCompilers( + scalaVersion, + compilerBridgeSources, + compilerClasspath, + scalacPluginClasspath + ) + val scaladocClass = compilers.scalac().scalaInstance().loader().loadClass("scala.tools.nsc.ScalaDoc") + val scaladocMethod = scaladocClass.getMethod("process", classOf[Array[String]]) + scaladocMethod.invoke(scaladocClass.newInstance(), args.toArray).asInstanceOf[Boolean] + } + /** Compile the bridge if it doesn't exist yet and return the output directory. 
+ * TODO: Proper invalidation, see #389 + */ + def compileZincBridgeIfNeeded(scalaVersion: String, + sourcesJar: os.Path, + compilerJars: Array[File]): os.Path = { + val workingDir = ctx0.dest / scalaVersion + val compiledDest = workingDir / 'compiled + if (!os.exists(workingDir)) { + + ctx0.log.info("Compiling compiler interface...") + + os.makeDir.all(workingDir) + os.makeDir.all(compiledDest) + + val sourceFolder = mill.api.IO.unpackZip(sourcesJar)(workingDir) + val classloader = mill.api.ClassLoader.create(compilerJars.map(_.toURI.toURL), null)(ctx0) + val compilerMain = classloader.loadClass( + if (isDotty(scalaVersion)) + "dotty.tools.dotc.Main" + else + "scala.tools.nsc.Main" + ) + val argsArray = Array[String]( + "-d", compiledDest.toString, + "-classpath", (compilerJars ++ compilerBridgeClasspath).mkString(File.pathSeparator) + ) ++ os.walk(sourceFolder.path).filter(_.ext == "scala").map(_.toString) + + compilerMain.getMethod("process", classOf[Array[String]]) + .invoke(null, argsArray) + } + compiledDest + } + + + + def discoverMainClasses(compilationResult: CompilationResult)(implicit ctx: mill.api.Ctx): Seq[String] = { + def toScala[A](o: Optional[A]): Option[A] = if (o.isPresent) Some(o.get) else None + + toScala(FileAnalysisStore.binary(compilationResult.analysisFile.toIO).get()) + .map(_.getAnalysis) + .flatMap{ + case analysis: Analysis => + Some(analysis.infos.allInfos.values.map(_.getMainClasses).flatten.toSeq.sorted) + case _ => + None + } + .getOrElse(Seq.empty[String]) + } + + def compileJava(upstreamCompileOutput: Seq[CompilationResult], + sources: Agg[os.Path], + compileClasspath: Agg[os.Path], + javacOptions: Seq[String]) + (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { + compileInternal( + upstreamCompileOutput, + sources, + compileClasspath, + javacOptions, + scalacOptions = Nil, + javaOnlyCompilers + ) + } + + def compileMixed(upstreamCompileOutput: Seq[CompilationResult], + sources: Agg[os.Path], + compileClasspath: Agg[os.Path], + javacOptions: Seq[String], + scalaVersion: String, + scalacOptions: Seq[String], + compilerBridgeSources: os.Path, + compilerClasspath: Agg[os.Path], + scalacPluginClasspath: Agg[os.Path]) + (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { + val compilers: Compilers = prepareCompilers( + scalaVersion, + compilerBridgeSources, + compilerClasspath, + scalacPluginClasspath + ) + + compileInternal( + upstreamCompileOutput, + sources, + compileClasspath, + javacOptions, + scalacOptions = scalacPluginClasspath.map(jar => s"-Xplugin:${jar}").toSeq ++ scalacOptions, + compilers + ) + } + + private def prepareCompilers(scalaVersion: String, + compilerBridgeSources: os.Path, + compilerClasspath: Agg[os.Path], + scalacPluginClasspath: Agg[os.Path]) + (implicit ctx: mill.api.Ctx)= { + val combinedCompilerClasspath = compilerClasspath ++ scalacPluginClasspath + val combinedCompilerJars = combinedCompilerClasspath.toArray.map(_.toIO) + + val compilerBridge = compileZincBridgeIfNeeded( + scalaVersion, + compilerBridgeSources, + compilerClasspath.toArray.map(_.toIO) + ) + val compilerBridgeSig = os.mtime(compilerBridge) + + val compilersSig = + compilerBridgeSig + + combinedCompilerClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum + + val compilers = mixedCompilersCache match { + case Some((k, v)) if k == compilersSig => v + case _ => + val compilerName = + if (isDotty(scalaVersion)) + s"dotty-compiler_${scalaBinaryVersion(scalaVersion)}" + else + "scala-compiler" + val scalaInstance = new 
ScalaInstance( + version = scalaVersion, + loader = mill.api.ClassLoader.create(combinedCompilerJars.map(_.toURI.toURL), null), + libraryJar = grepJar(compilerClasspath, "scala-library", scalaVersion).toIO, + compilerJar = grepJar(compilerClasspath, compilerName, scalaVersion).toIO, + allJars = combinedCompilerJars, + explicitActual = None + ) + val compilers = ic.compilers( + scalaInstance, + ClasspathOptionsUtil.boot, + None, + ZincUtil.scalaCompiler(scalaInstance, compilerBridge.toIO) + ) + mixedCompilersCache = Some((compilersSig, compilers)) + compilers + } + compilers + } + + private def compileInternal(upstreamCompileOutput: Seq[CompilationResult], + sources: Agg[os.Path], + compileClasspath: Agg[os.Path], + javacOptions: Seq[String], + scalacOptions: Seq[String], + compilers: Compilers) + (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { + os.makeDir.all(ctx.dest) + + val logger = { + val consoleAppender = MainAppender.defaultScreen(ConsoleOut.printStreamOut( + ctx.log.outputStream + )) + val l = LogExchange.logger("Hello") + LogExchange.unbindLoggerAppenders("Hello") + LogExchange.bindLoggerAppenders("Hello", (consoleAppender -> sbt.util.Level.Info) :: Nil) + l + } + + def analysisMap(f: File): Optional[CompileAnalysis] = { + if (f.isFile) { + Optional.empty[CompileAnalysis] + } else { + upstreamCompileOutput.collectFirst { + case CompilationResult(zincPath, classFiles) if classFiles.path.toNIO == f.toPath => + FileAnalysisStore.binary(zincPath.toIO).get().map[CompileAnalysis](_.getAnalysis) + }.getOrElse(Optional.empty[CompileAnalysis]) + } + } + + val lookup = MockedLookup(analysisMap) + + val zincFile = ctx.dest / 'zinc + val classesDir = ctx.dest / 'classes + + val zincIOFile = zincFile.toIO + val classesIODir = classesDir.toIO + + val store = FileAnalysisStore.binary(zincIOFile) + + val inputs = ic.inputs( + classpath = classesIODir +: compileClasspath.map(_.toIO).toArray, + sources = sources.toArray.map(_.toIO), + classesDirectory = classesIODir, + scalacOptions = scalacOptions.toArray, + javacOptions = javacOptions.toArray, + maxErrors = 10, + sourcePositionMappers = Array(), + order = CompileOrder.Mixed, + compilers = compilers, + setup = ic.setup( + lookup, + skip = false, + zincIOFile, + new FreshCompilerCache, + IncOptions.of(), + new ManagedLoggedReporter(10, logger), + None, + Array() + ), + pr = { + val prev = store.get() + PreviousResult.of(prev.map(_.getAnalysis), prev.map(_.getMiniSetup)) + } + ) + + try { + val newResult = ic.compile( + in = inputs, + logger = logger + ) + + store.set( + AnalysisContents.create( + newResult.analysis(), + newResult.setup() + ) + ) + + mill.api.Result.Success(CompilationResult(zincFile, PathRef(classesDir))) + }catch{case e: CompileFailed => mill.api.Result.Failure(e.toString)} + } +} diff --git a/scalalib/worker/src/mill/scalalib/worker/ZincWorkerImpl.scala b/scalalib/worker/src/mill/scalalib/worker/ZincWorkerImpl.scala deleted file mode 100644 index 705d4682..00000000 --- a/scalalib/worker/src/mill/scalalib/worker/ZincWorkerImpl.scala +++ /dev/null @@ -1,284 +0,0 @@ -package mill.scalalib.worker - -import java.io.File -import java.util.Optional - -import mill.api.Loose.Agg -import mill.api.PathRef -import xsbti.compile.{CompilerCache => _, FileAnalysisStore => _, ScalaInstance => _, _} -import mill.scalalib.api.Util.{isDotty, grepJar, scalaBinaryVersion} -import sbt.internal.inc._ -import sbt.internal.util.{ConsoleOut, MainAppender} -import sbt.util.LogExchange -import mill.scalalib.api.CompilationResult -case 
class MockedLookup(am: File => Optional[CompileAnalysis]) extends PerClasspathEntryLookup { - override def analysis(classpathEntry: File): Optional[CompileAnalysis] = - am(classpathEntry) - - override def definesClass(classpathEntry: File): DefinesClass = - Locate.definesClass(classpathEntry) -} - -class ZincWorkerImpl(ctx0: mill.api.Ctx, - compilerBridgeClasspath: Array[String]) extends mill.scalalib.api.ZincWorkerApi{ - private val ic = new sbt.internal.inc.IncrementalCompilerImpl() - val javaOnlyCompilers = { - // Keep the classpath as written by the user - val classpathOptions = ClasspathOptions.of(false, false, false, false, false) - - val dummyFile = new java.io.File("") - // Zinc does not have an entry point for Java-only compilation, so we need - // to make up a dummy ScalaCompiler instance. - val scalac = ZincUtil.scalaCompiler( - new ScalaInstance("", null, null, dummyFile, dummyFile, new Array(0), Some("")), null, - classpathOptions // this is used for javac too - ) - - ic.compilers( - instance = null, - classpathOptions, - None, - scalac - ) - } - - @volatile var mixedCompilersCache = Option.empty[(Long, Compilers)] - - def docJar(scalaVersion: String, - compilerBridgeSources: os.Path, - compilerClasspath: Agg[os.Path], - scalacPluginClasspath: Agg[os.Path], - args: Seq[String]) - (implicit ctx: mill.api.Ctx): Boolean = { - val compilers: Compilers = prepareCompilers( - scalaVersion, - compilerBridgeSources, - compilerClasspath, - scalacPluginClasspath - ) - val scaladocClass = compilers.scalac().scalaInstance().loader().loadClass("scala.tools.nsc.ScalaDoc") - val scaladocMethod = scaladocClass.getMethod("process", classOf[Array[String]]) - scaladocMethod.invoke(scaladocClass.newInstance(), args.toArray).asInstanceOf[Boolean] - } - /** Compile the bridge if it doesn't exist yet and return the output directory. 
- * TODO: Proper invalidation, see #389 - */ - def compileZincBridgeIfNeeded(scalaVersion: String, - sourcesJar: os.Path, - compilerJars: Array[File]): os.Path = { - val workingDir = ctx0.dest / scalaVersion - val compiledDest = workingDir / 'compiled - if (!os.exists(workingDir)) { - - ctx0.log.info("Compiling compiler interface...") - - os.makeDir.all(workingDir) - os.makeDir.all(compiledDest) - - val sourceFolder = mill.api.IO.unpackZip(sourcesJar)(workingDir) - val classloader = mill.api.ClassLoader.create(compilerJars.map(_.toURI.toURL), null)(ctx0) - val compilerMain = classloader.loadClass( - if (isDotty(scalaVersion)) - "dotty.tools.dotc.Main" - else - "scala.tools.nsc.Main" - ) - val argsArray = Array[String]( - "-d", compiledDest.toString, - "-classpath", (compilerJars ++ compilerBridgeClasspath).mkString(File.pathSeparator) - ) ++ os.walk(sourceFolder.path).filter(_.ext == "scala").map(_.toString) - - compilerMain.getMethod("process", classOf[Array[String]]) - .invoke(null, argsArray) - } - compiledDest - } - - - - def discoverMainClasses(compilationResult: CompilationResult)(implicit ctx: mill.api.Ctx): Seq[String] = { - def toScala[A](o: Optional[A]): Option[A] = if (o.isPresent) Some(o.get) else None - - toScala(FileAnalysisStore.binary(compilationResult.analysisFile.toIO).get()) - .map(_.getAnalysis) - .flatMap{ - case analysis: Analysis => - Some(analysis.infos.allInfos.values.map(_.getMainClasses).flatten.toSeq.sorted) - case _ => - None - } - .getOrElse(Seq.empty[String]) - } - - def compileJava(upstreamCompileOutput: Seq[CompilationResult], - sources: Agg[os.Path], - compileClasspath: Agg[os.Path], - javacOptions: Seq[String]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { - compileInternal( - upstreamCompileOutput, - sources, - compileClasspath, - javacOptions, - scalacOptions = Nil, - javaOnlyCompilers - ) - } - - def compileMixed(upstreamCompileOutput: Seq[CompilationResult], - sources: Agg[os.Path], - compileClasspath: Agg[os.Path], - javacOptions: Seq[String], - scalaVersion: String, - scalacOptions: Seq[String], - compilerBridgeSources: os.Path, - compilerClasspath: Agg[os.Path], - scalacPluginClasspath: Agg[os.Path]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { - val compilers: Compilers = prepareCompilers( - scalaVersion, - compilerBridgeSources, - compilerClasspath, - scalacPluginClasspath - ) - - compileInternal( - upstreamCompileOutput, - sources, - compileClasspath, - javacOptions, - scalacOptions = scalacPluginClasspath.map(jar => s"-Xplugin:${jar}").toSeq ++ scalacOptions, - compilers - ) - } - - private def prepareCompilers(scalaVersion: String, - compilerBridgeSources: os.Path, - compilerClasspath: Agg[os.Path], - scalacPluginClasspath: Agg[os.Path]) - (implicit ctx: mill.api.Ctx)= { - val combinedCompilerClasspath = compilerClasspath ++ scalacPluginClasspath - val combinedCompilerJars = combinedCompilerClasspath.toArray.map(_.toIO) - - val compilerBridge = compileZincBridgeIfNeeded( - scalaVersion, - compilerBridgeSources, - compilerClasspath.toArray.map(_.toIO) - ) - val compilerBridgeSig = os.mtime(compilerBridge) - - val compilersSig = - compilerBridgeSig + - combinedCompilerClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum - - val compilers = mixedCompilersCache match { - case Some((k, v)) if k == compilersSig => v - case _ => - val compilerName = - if (isDotty(scalaVersion)) - s"dotty-compiler_${scalaBinaryVersion(scalaVersion)}" - else - "scala-compiler" - val scalaInstance = new 
ScalaInstance( - version = scalaVersion, - loader = mill.api.ClassLoader.create(combinedCompilerJars.map(_.toURI.toURL), null), - libraryJar = grepJar(compilerClasspath, "scala-library", scalaVersion).toIO, - compilerJar = grepJar(compilerClasspath, compilerName, scalaVersion).toIO, - allJars = combinedCompilerJars, - explicitActual = None - ) - val compilers = ic.compilers( - scalaInstance, - ClasspathOptionsUtil.boot, - None, - ZincUtil.scalaCompiler(scalaInstance, compilerBridge.toIO) - ) - mixedCompilersCache = Some((compilersSig, compilers)) - compilers - } - compilers - } - - private def compileInternal(upstreamCompileOutput: Seq[CompilationResult], - sources: Agg[os.Path], - compileClasspath: Agg[os.Path], - javacOptions: Seq[String], - scalacOptions: Seq[String], - compilers: Compilers) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = { - os.makeDir.all(ctx.dest) - - val logger = { - val consoleAppender = MainAppender.defaultScreen(ConsoleOut.printStreamOut( - ctx.log.outputStream - )) - val l = LogExchange.logger("Hello") - LogExchange.unbindLoggerAppenders("Hello") - LogExchange.bindLoggerAppenders("Hello", (consoleAppender -> sbt.util.Level.Info) :: Nil) - l - } - - def analysisMap(f: File): Optional[CompileAnalysis] = { - if (f.isFile) { - Optional.empty[CompileAnalysis] - } else { - upstreamCompileOutput.collectFirst { - case CompilationResult(zincPath, classFiles) if classFiles.path.toNIO == f.toPath => - FileAnalysisStore.binary(zincPath.toIO).get().map[CompileAnalysis](_.getAnalysis) - }.getOrElse(Optional.empty[CompileAnalysis]) - } - } - - val lookup = MockedLookup(analysisMap) - - val zincFile = ctx.dest / 'zinc - val classesDir = ctx.dest / 'classes - - val zincIOFile = zincFile.toIO - val classesIODir = classesDir.toIO - - val store = FileAnalysisStore.binary(zincIOFile) - - val inputs = ic.inputs( - classpath = classesIODir +: compileClasspath.map(_.toIO).toArray, - sources = sources.toArray.map(_.toIO), - classesDirectory = classesIODir, - scalacOptions = scalacOptions.toArray, - javacOptions = javacOptions.toArray, - maxErrors = 10, - sourcePositionMappers = Array(), - order = CompileOrder.Mixed, - compilers = compilers, - setup = ic.setup( - lookup, - skip = false, - zincIOFile, - new FreshCompilerCache, - IncOptions.of(), - new ManagedLoggedReporter(10, logger), - None, - Array() - ), - pr = { - val prev = store.get() - PreviousResult.of(prev.map(_.getAnalysis), prev.map(_.getMiniSetup)) - } - ) - - try { - val newResult = ic.compile( - in = inputs, - logger = logger - ) - - store.set( - AnalysisContents.create( - newResult.analysis(), - newResult.setup() - ) - ) - - mill.api.Result.Success(CompilationResult(zincFile, PathRef(classesDir))) - }catch{case e: CompileFailed => mill.api.Result.Failure(e.toString)} - } -} diff --git a/scalanativelib/api/src/ScalaNativeWorkerApi.scala b/scalanativelib/api/src/ScalaNativeWorkerApi.scala new file mode 100644 index 00000000..e1fee0da --- /dev/null +++ b/scalanativelib/api/src/ScalaNativeWorkerApi.scala @@ -0,0 +1,60 @@ +package mill.scalanativelib.api + +import upickle.default.{macroRW, ReadWriter => RW} +import java.io.File +import sbt.testing.Framework + +trait ScalaNativeWorkerApi { + def discoverClang: os.Path + def discoverClangPP: os.Path + def discoverTarget(clang: os.Path, workDir: os.Path): String + def discoverCompileOptions: Seq[String] + def discoverLinkingOptions: Seq[String] + + def config(nativeLibJar: os.Path, + mainClass: String, + classpath: Seq[os.Path], + nativeWorkdir: os.Path, + 
nativeClang: os.Path, + nativeClangPP: os.Path, + nativeTarget: String, + nativeCompileOptions: Seq[String], + nativeLinkingOptions: Seq[String], + nativeGC: String, + nativeLinkStubs: Boolean, + releaseMode: ReleaseMode, + logLevel: NativeLogLevel): NativeConfig + + def defaultGarbageCollector: String + def nativeLink(nativeConfig: NativeConfig, outPath: os.Path): os.Path + + def newScalaNativeFrameWork(framework: Framework, id: Int, testBinary: File, + logLevel: NativeLogLevel, envVars: Map[String, String]): Framework +} + + +sealed abstract class NativeLogLevel(val level: Int) extends Ordered[NativeLogLevel] { + def compare(that: NativeLogLevel) = this.level - that.level +} + +object NativeLogLevel { + case object Error extends NativeLogLevel(200) + case object Warn extends NativeLogLevel(300) + case object Info extends NativeLogLevel(400) + case object Debug extends NativeLogLevel(500) + case object Trace extends NativeLogLevel(600) + + implicit def rw: RW[NativeLogLevel] = macroRW +} + +sealed abstract class ReleaseMode(val name: String) + +object ReleaseMode { + case object Debug extends ReleaseMode("debug") + case object Release extends ReleaseMode("release") + + implicit def rw: RW[ReleaseMode] = macroRW +} + +// result wrapper to preserve some type safety +case class NativeConfig(config: Any) \ No newline at end of file diff --git a/scalanativelib/api/src/mill/scalanativelib/api/ScalaNativeWorkerApi.scala b/scalanativelib/api/src/mill/scalanativelib/api/ScalaNativeWorkerApi.scala deleted file mode 100644 index e1fee0da..00000000 --- a/scalanativelib/api/src/mill/scalanativelib/api/ScalaNativeWorkerApi.scala +++ /dev/null @@ -1,60 +0,0 @@ -package mill.scalanativelib.api - -import upickle.default.{macroRW, ReadWriter => RW} -import java.io.File -import sbt.testing.Framework - -trait ScalaNativeWorkerApi { - def discoverClang: os.Path - def discoverClangPP: os.Path - def discoverTarget(clang: os.Path, workDir: os.Path): String - def discoverCompileOptions: Seq[String] - def discoverLinkingOptions: Seq[String] - - def config(nativeLibJar: os.Path, - mainClass: String, - classpath: Seq[os.Path], - nativeWorkdir: os.Path, - nativeClang: os.Path, - nativeClangPP: os.Path, - nativeTarget: String, - nativeCompileOptions: Seq[String], - nativeLinkingOptions: Seq[String], - nativeGC: String, - nativeLinkStubs: Boolean, - releaseMode: ReleaseMode, - logLevel: NativeLogLevel): NativeConfig - - def defaultGarbageCollector: String - def nativeLink(nativeConfig: NativeConfig, outPath: os.Path): os.Path - - def newScalaNativeFrameWork(framework: Framework, id: Int, testBinary: File, - logLevel: NativeLogLevel, envVars: Map[String, String]): Framework -} - - -sealed abstract class NativeLogLevel(val level: Int) extends Ordered[NativeLogLevel] { - def compare(that: NativeLogLevel) = this.level - that.level -} - -object NativeLogLevel { - case object Error extends NativeLogLevel(200) - case object Warn extends NativeLogLevel(300) - case object Info extends NativeLogLevel(400) - case object Debug extends NativeLogLevel(500) - case object Trace extends NativeLogLevel(600) - - implicit def rw: RW[NativeLogLevel] = macroRW -} - -sealed abstract class ReleaseMode(val name: String) - -object ReleaseMode { - case object Debug extends ReleaseMode("debug") - case object Release extends ReleaseMode("release") - - implicit def rw: RW[ReleaseMode] = macroRW -} - -// result wrapper to preserve some type safety -case class NativeConfig(config: Any) \ No newline at end of file diff --git 
a/scalanativelib/src/ScalaNativeModule.scala b/scalanativelib/src/ScalaNativeModule.scala new file mode 100644 index 00000000..a7a2b96b --- /dev/null +++ b/scalanativelib/src/ScalaNativeModule.scala @@ -0,0 +1,294 @@ +package mill +package scalanativelib + +import java.net.URLClassLoader + +import coursier.Cache +import coursier.maven.MavenRepository +import mill.define.{Target, Task} +import mill.api.Result +import mill.modules.Jvm +import mill.scalalib.{Dep, DepSyntax, Lib, SbtModule, ScalaModule, TestModule, TestRunner} +import mill.util.Loose.Agg +import sbt.testing.{AnnotatedFingerprint, SubclassFingerprint} +import sbt.testing.Fingerprint +import upickle.default.{ReadWriter => RW, macroRW} +import mill.scalanativelib.api._ + + +trait ScalaNativeModule extends ScalaModule { outer => + def scalaNativeVersion: T[String] + override def platformSuffix = s"_native${scalaNativeBinaryVersion()}" + override def artifactSuffix: T[String] = s"${platformSuffix()}_${artifactScalaVersion()}" + + trait Tests extends TestScalaNativeModule { + override def zincWorker = outer.zincWorker + override def scalaOrganization = outer.scalaOrganization() + override def scalaVersion = outer.scalaVersion() + override def scalaNativeVersion = outer.scalaNativeVersion() + override def releaseMode = outer.releaseMode() + override def logLevel = outer.logLevel() + override def moduleDeps = Seq(outer) + } + + def scalaNativeBinaryVersion = T{ scalaNativeVersion().split('.').take(2).mkString(".") } + + // This allows compilation and testing versus SNAPSHOT versions of scala-native + def scalaNativeToolsVersion = T{ + if (scalaNativeVersion().endsWith("-SNAPSHOT")) + scalaNativeVersion() + else + scalaNativeBinaryVersion() + } + + def scalaNativeWorker = T.task{ ScalaNativeWorkerApi.scalaNativeWorker().impl(bridgeFullClassPath()) } + + def scalaNativeWorkerClasspath = T { + val workerKey = "MILL_SCALANATIVE_WORKER_" + scalaNativeBinaryVersion().replace('.', '_').replace('-', '_') + val workerPath = sys.props(workerKey) + if (workerPath != null) + Result.Success(Agg(workerPath.split(',').map(p => PathRef(os.Path(p), quick = true)): _*)) + else + Lib.resolveDependencies( + Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")), + Lib.depToDependency(_, "2.12.4", ""), + Seq(ivy"com.lihaoyi::mill-scalanativelib-worker-${scalaNativeBinaryVersion()}:${sys.props("MILL_VERSION")}") + ) + } + + def toolsIvyDeps = T{ + Seq( + ivy"org.scala-native:tools_2.12:${scalaNativeVersion()}", + ivy"org.scala-native:util_2.12:${scalaNativeVersion()}", + ivy"org.scala-native:nir_2.12:${scalaNativeVersion()}" + ) + } + + override def transitiveIvyDeps: T[Agg[Dep]] = T{ + ivyDeps() ++ nativeIvyDeps() ++ Task.traverse(moduleDeps)(_.transitiveIvyDeps)().flatten + } + + def nativeLibIvy = T{ ivy"org.scala-native::nativelib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" } + + def nativeIvyDeps = T{ + Seq(nativeLibIvy()) ++ + Seq( + ivy"org.scala-native::javalib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}", + ivy"org.scala-native::auxlib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}", + ivy"org.scala-native::scalalib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" + ) + } + + def bridgeFullClassPath = T { + Lib.resolveDependencies( + Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")), + Lib.depToDependency(_, scalaVersion(), platformSuffix()), + toolsIvyDeps() + ).map(t => (scalaNativeWorkerClasspath().toSeq ++ t.toSeq).map(_.path)) + } + + override def 
scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ + Agg(ivy"org.scala-native:nscplugin_${scalaVersion()}:${scalaNativeVersion()}") + + def logLevel: Target[NativeLogLevel] = T{ NativeLogLevel.Info } + + def releaseMode: Target[ReleaseMode] = T { ReleaseMode.Debug } + + def nativeWorkdir = T{ T.ctx().dest } + + // Location of the clang compiler + def nativeClang = T{ scalaNativeWorker().discoverClang } + + // Location of the clang++ compiler + def nativeClangPP = T{ scalaNativeWorker().discoverClangPP } + + // GC choice, either "none", "boehm" or "immix" + def nativeGC = T{ + Option(System.getenv.get("SCALANATIVE_GC")) + .getOrElse(scalaNativeWorker().defaultGarbageCollector) + } + + def nativeTarget = T{ scalaNativeWorker().discoverTarget(nativeClang(), nativeWorkdir()) } + + // Options that are passed to clang during compilation + def nativeCompileOptions = T{ scalaNativeWorker().discoverCompileOptions } + + // Options that are passed to clang during linking + def nativeLinkingOptions = T{ scalaNativeWorker().discoverLinkingOptions } + + // Whether to link `@stub` methods, or ignore them + def nativeLinkStubs = T { false } + + + def nativeLibJar = T{ + resolveDeps(T.task{Agg(nativeLibIvy())})() + .filter{p => p.toString.contains("scala-native") && p.toString.contains("nativelib")} + .toList + .head + } + + def nativeConfig = T.task { + val classpath = runClasspath().map(_.path).filter(_.toIO.exists).toList + + scalaNativeWorker().config( + nativeLibJar().path, + finalMainClass(), + classpath, + nativeWorkdir(), + nativeClang(), + nativeClangPP(), + nativeTarget(), + nativeCompileOptions(), + nativeLinkingOptions(), + nativeGC(), + nativeLinkStubs(), + releaseMode(), + logLevel()) + } + + // Generates native binary + def nativeLink = T{ scalaNativeWorker().nativeLink(nativeConfig(), (T.ctx().dest / 'out)) } + + // Runs the native binary + override def run(args: String*) = T.command{ + Jvm.baseInteractiveSubprocess( + Vector(nativeLink().toString) ++ args, + forkEnv(), + workingDir = ammonite.ops.pwd) + } +} + + +trait TestScalaNativeModule extends ScalaNativeModule with TestModule { testOuter => + case class TestDefinition(framework: String, clazz: Class[_], fingerprint: Fingerprint) { + def name = clazz.getName.reverse.dropWhile(_ == '$').reverse + } + + override def testLocal(args: String*) = T.command { test(args:_*) } + + override def test(args: String*) = T.command{ + val outputPath = T.ctx().dest / "out.json" + + // The test frameworks run under the JVM and communicate with the native binary over a socket + // therefore the test framework is loaded from a JVM classloader + val testClassloader = + new URLClassLoader(testClasspathJvm().map(_.path.toIO.toURI.toURL).toArray, + this.getClass.getClassLoader) + val frameworkInstances = TestRunner.frameworks(testFrameworks())(testClassloader) + val testBinary = testRunnerNative.nativeLink().toIO + val envVars = forkEnv() + + val nativeFrameworks = (cl: ClassLoader) => + frameworkInstances.zipWithIndex.map { case (f, id) => + scalaNativeWorker().newScalaNativeFrameWork(f, id, testBinary, logLevel(), envVars) + } + + val (doneMsg, results) = TestRunner.runTests( + nativeFrameworks, + testClasspathJvm().map(_.path), + Agg(compile().classes.path), + args + ) + + TestModule.handleResults(doneMsg, results) + } + + private val supportedTestFrameworks = Set("utest", "scalatest") + + // get the JVM classpath entries for supported test frameworks + def testFrameworksJvmClasspath = T{ + Lib.resolveDependencies( + repositories, + 
Lib.depToDependency(_, scalaVersion(), ""), + transitiveIvyDeps().filter(d => d.cross.isBinary && supportedTestFrameworks(d.dep.module.name)) + ) + } + + def testClasspathJvm = T{ + localClasspath() ++ + transitiveLocalClasspath() ++ + unmanagedClasspath() ++ + testFrameworksJvmClasspath() + } + + // creates a specific binary used for running tests - has a different (generated) main class + // which knows the names of all the tests and references to invoke them + object testRunnerNative extends ScalaNativeModule { + override def zincWorker = testOuter.zincWorker + override def scalaOrganization = testOuter.scalaOrganization() + override def scalaVersion = testOuter.scalaVersion() + override def scalaNativeVersion = testOuter.scalaNativeVersion() + override def moduleDeps = Seq(testOuter) + override def releaseMode = testOuter.releaseMode() + override def logLevel = testOuter.logLevel() + override def nativeLinkStubs = true + + override def ivyDeps = testOuter.ivyDeps() ++ Agg( + ivy"org.scala-native::test-interface_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" + ) + + override def mainClass = Some("scala.scalanative.testinterface.TestMain") + + override def generatedSources = T { + val outDir = T.ctx().dest + ammonite.ops.write.over(outDir / "TestMain.scala", makeTestMain()) + Seq(PathRef(outDir)) + } + } + + // generate a main class for the tests + def makeTestMain = T{ + val frameworkInstances = TestRunner.frameworks(testFrameworks()) _ + + val testClasses = + Jvm.inprocess(testClasspathJvm().map(_.path), classLoaderOverrideSbtTesting = true, isolated = true, closeContextClassLoaderWhenDone = true, + cl => { + frameworkInstances(cl).flatMap { framework => + val df = Lib.discoverTests(cl, framework, Agg(compile().classes.path)) + df.map(d => TestDefinition(framework.getClass.getName, d._1, d._2)) + } + } + ) + + val frameworks = testClasses.map(_.framework).distinct + + val frameworksList = + if (frameworks.nonEmpty) frameworks.mkString("List(new _root_.", ", new _root_.", ")") + else { + throw new Exception( + "Cannot find any tests; make sure you defined the test framework correctly, " + + "and extend whatever trait or annotation necessary to mark your test suites" + ) + } + + + val testsMap = makeTestsMap(testClasses) + + s"""package scala.scalanative.testinterface + |object TestMain extends TestMainBase { + | override val frameworks = $frameworksList + | override val tests = Map[String, AnyRef]($testsMap) + | def main(args: Array[String]): Unit = + | testMain(args) + |}""".stripMargin + } + + private def makeTestsMap(tests: Seq[TestDefinition]): String = { + tests + .map { t => + val isModule = t.fingerprint match { + case af: AnnotatedFingerprint => af.isModule + case sf: SubclassFingerprint => sf.isModule + } + + val inst = + if (isModule) s"_root_.${t.name}" else s"new _root_.${t.name}" + s""""${t.name}" -> $inst""" + } + .mkString(", ") + } +} + + +trait SbtNativeModule extends ScalaNativeModule with SbtModule + diff --git a/scalanativelib/src/ScalaNativeWorkerApi.scala b/scalanativelib/src/ScalaNativeWorkerApi.scala new file mode 100644 index 00000000..80325f1e --- /dev/null +++ b/scalanativelib/src/ScalaNativeWorkerApi.scala @@ -0,0 +1,44 @@ +package mill.scalanativelib + +import java.io.File +import java.net.URLClassLoader + +import mill.define.{Discover, Worker} +import mill.{Agg, T} +import mill.scalanativelib.api._ + + +class ScalaNativeWorker { + private var scalaInstanceCache = Option.empty[(Long, ScalaNativeWorkerApi)] + + def impl(toolsClasspath: 
Agg[os.Path]): ScalaNativeWorkerApi = { + val classloaderSig = toolsClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum + scalaInstanceCache match { + case Some((sig, bridge)) if sig == classloaderSig => bridge + case _ => + val cl = new URLClassLoader( + toolsClasspath.map(_.toIO.toURI.toURL).toArray, + getClass.getClassLoader + ) + try { + val bridge = cl + .loadClass("mill.scalanativelib.worker.ScalaNativeWorkerImpl") + .getDeclaredConstructor() + .newInstance() + .asInstanceOf[ScalaNativeWorkerApi] + scalaInstanceCache = Some((classloaderSig, bridge)) + bridge + } + catch { + case e: Exception => + e.printStackTrace() + throw e + } + } + } +} + +object ScalaNativeWorkerApi extends mill.define.ExternalModule { + def scalaNativeWorker: Worker[ScalaNativeWorker] = T.worker { new ScalaNativeWorker() } + lazy val millDiscover = Discover[this.type] +} diff --git a/scalanativelib/src/mill/scalanativelib/ScalaNativeModule.scala b/scalanativelib/src/mill/scalanativelib/ScalaNativeModule.scala deleted file mode 100644 index a7a2b96b..00000000 --- a/scalanativelib/src/mill/scalanativelib/ScalaNativeModule.scala +++ /dev/null @@ -1,294 +0,0 @@ -package mill -package scalanativelib - -import java.net.URLClassLoader - -import coursier.Cache -import coursier.maven.MavenRepository -import mill.define.{Target, Task} -import mill.api.Result -import mill.modules.Jvm -import mill.scalalib.{Dep, DepSyntax, Lib, SbtModule, ScalaModule, TestModule, TestRunner} -import mill.util.Loose.Agg -import sbt.testing.{AnnotatedFingerprint, SubclassFingerprint} -import sbt.testing.Fingerprint -import upickle.default.{ReadWriter => RW, macroRW} -import mill.scalanativelib.api._ - - -trait ScalaNativeModule extends ScalaModule { outer => - def scalaNativeVersion: T[String] - override def platformSuffix = s"_native${scalaNativeBinaryVersion()}" - override def artifactSuffix: T[String] = s"${platformSuffix()}_${artifactScalaVersion()}" - - trait Tests extends TestScalaNativeModule { - override def zincWorker = outer.zincWorker - override def scalaOrganization = outer.scalaOrganization() - override def scalaVersion = outer.scalaVersion() - override def scalaNativeVersion = outer.scalaNativeVersion() - override def releaseMode = outer.releaseMode() - override def logLevel = outer.logLevel() - override def moduleDeps = Seq(outer) - } - - def scalaNativeBinaryVersion = T{ scalaNativeVersion().split('.').take(2).mkString(".") } - - // This allows compilation and testing versus SNAPSHOT versions of scala-native - def scalaNativeToolsVersion = T{ - if (scalaNativeVersion().endsWith("-SNAPSHOT")) - scalaNativeVersion() - else - scalaNativeBinaryVersion() - } - - def scalaNativeWorker = T.task{ ScalaNativeWorkerApi.scalaNativeWorker().impl(bridgeFullClassPath()) } - - def scalaNativeWorkerClasspath = T { - val workerKey = "MILL_SCALANATIVE_WORKER_" + scalaNativeBinaryVersion().replace('.', '_').replace('-', '_') - val workerPath = sys.props(workerKey) - if (workerPath != null) - Result.Success(Agg(workerPath.split(',').map(p => PathRef(os.Path(p), quick = true)): _*)) - else - Lib.resolveDependencies( - Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")), - Lib.depToDependency(_, "2.12.4", ""), - Seq(ivy"com.lihaoyi::mill-scalanativelib-worker-${scalaNativeBinaryVersion()}:${sys.props("MILL_VERSION")}") - ) - } - - def toolsIvyDeps = T{ - Seq( - ivy"org.scala-native:tools_2.12:${scalaNativeVersion()}", - ivy"org.scala-native:util_2.12:${scalaNativeVersion()}", - 
ivy"org.scala-native:nir_2.12:${scalaNativeVersion()}" - ) - } - - override def transitiveIvyDeps: T[Agg[Dep]] = T{ - ivyDeps() ++ nativeIvyDeps() ++ Task.traverse(moduleDeps)(_.transitiveIvyDeps)().flatten - } - - def nativeLibIvy = T{ ivy"org.scala-native::nativelib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" } - - def nativeIvyDeps = T{ - Seq(nativeLibIvy()) ++ - Seq( - ivy"org.scala-native::javalib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}", - ivy"org.scala-native::auxlib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}", - ivy"org.scala-native::scalalib_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" - ) - } - - def bridgeFullClassPath = T { - Lib.resolveDependencies( - Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")), - Lib.depToDependency(_, scalaVersion(), platformSuffix()), - toolsIvyDeps() - ).map(t => (scalaNativeWorkerClasspath().toSeq ++ t.toSeq).map(_.path)) - } - - override def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ - Agg(ivy"org.scala-native:nscplugin_${scalaVersion()}:${scalaNativeVersion()}") - - def logLevel: Target[NativeLogLevel] = T{ NativeLogLevel.Info } - - def releaseMode: Target[ReleaseMode] = T { ReleaseMode.Debug } - - def nativeWorkdir = T{ T.ctx().dest } - - // Location of the clang compiler - def nativeClang = T{ scalaNativeWorker().discoverClang } - - // Location of the clang++ compiler - def nativeClangPP = T{ scalaNativeWorker().discoverClangPP } - - // GC choice, either "none", "boehm" or "immix" - def nativeGC = T{ - Option(System.getenv.get("SCALANATIVE_GC")) - .getOrElse(scalaNativeWorker().defaultGarbageCollector) - } - - def nativeTarget = T{ scalaNativeWorker().discoverTarget(nativeClang(), nativeWorkdir()) } - - // Options that are passed to clang during compilation - def nativeCompileOptions = T{ scalaNativeWorker().discoverCompileOptions } - - // Options that are passed to clang during linking - def nativeLinkingOptions = T{ scalaNativeWorker().discoverLinkingOptions } - - // Whether to link `@stub` methods, or ignore them - def nativeLinkStubs = T { false } - - - def nativeLibJar = T{ - resolveDeps(T.task{Agg(nativeLibIvy())})() - .filter{p => p.toString.contains("scala-native") && p.toString.contains("nativelib")} - .toList - .head - } - - def nativeConfig = T.task { - val classpath = runClasspath().map(_.path).filter(_.toIO.exists).toList - - scalaNativeWorker().config( - nativeLibJar().path, - finalMainClass(), - classpath, - nativeWorkdir(), - nativeClang(), - nativeClangPP(), - nativeTarget(), - nativeCompileOptions(), - nativeLinkingOptions(), - nativeGC(), - nativeLinkStubs(), - releaseMode(), - logLevel()) - } - - // Generates native binary - def nativeLink = T{ scalaNativeWorker().nativeLink(nativeConfig(), (T.ctx().dest / 'out)) } - - // Runs the native binary - override def run(args: String*) = T.command{ - Jvm.baseInteractiveSubprocess( - Vector(nativeLink().toString) ++ args, - forkEnv(), - workingDir = ammonite.ops.pwd) - } -} - - -trait TestScalaNativeModule extends ScalaNativeModule with TestModule { testOuter => - case class TestDefinition(framework: String, clazz: Class[_], fingerprint: Fingerprint) { - def name = clazz.getName.reverse.dropWhile(_ == '$').reverse - } - - override def testLocal(args: String*) = T.command { test(args:_*) } - - override def test(args: String*) = T.command{ - val outputPath = T.ctx().dest / "out.json" - - // The test frameworks run under the JVM and communicate with the native binary over a socket - // 
therefore the test framework is loaded from a JVM classloader - val testClassloader = - new URLClassLoader(testClasspathJvm().map(_.path.toIO.toURI.toURL).toArray, - this.getClass.getClassLoader) - val frameworkInstances = TestRunner.frameworks(testFrameworks())(testClassloader) - val testBinary = testRunnerNative.nativeLink().toIO - val envVars = forkEnv() - - val nativeFrameworks = (cl: ClassLoader) => - frameworkInstances.zipWithIndex.map { case (f, id) => - scalaNativeWorker().newScalaNativeFrameWork(f, id, testBinary, logLevel(), envVars) - } - - val (doneMsg, results) = TestRunner.runTests( - nativeFrameworks, - testClasspathJvm().map(_.path), - Agg(compile().classes.path), - args - ) - - TestModule.handleResults(doneMsg, results) - } - - private val supportedTestFrameworks = Set("utest", "scalatest") - - // get the JVM classpath entries for supported test frameworks - def testFrameworksJvmClasspath = T{ - Lib.resolveDependencies( - repositories, - Lib.depToDependency(_, scalaVersion(), ""), - transitiveIvyDeps().filter(d => d.cross.isBinary && supportedTestFrameworks(d.dep.module.name)) - ) - } - - def testClasspathJvm = T{ - localClasspath() ++ - transitiveLocalClasspath() ++ - unmanagedClasspath() ++ - testFrameworksJvmClasspath() - } - - // creates a specific binary used for running tests - has a different (generated) main class - // which knows the names of all the tests and references to invoke them - object testRunnerNative extends ScalaNativeModule { - override def zincWorker = testOuter.zincWorker - override def scalaOrganization = testOuter.scalaOrganization() - override def scalaVersion = testOuter.scalaVersion() - override def scalaNativeVersion = testOuter.scalaNativeVersion() - override def moduleDeps = Seq(testOuter) - override def releaseMode = testOuter.releaseMode() - override def logLevel = testOuter.logLevel() - override def nativeLinkStubs = true - - override def ivyDeps = testOuter.ivyDeps() ++ Agg( - ivy"org.scala-native::test-interface_native${scalaNativeToolsVersion()}:${scalaNativeVersion()}" - ) - - override def mainClass = Some("scala.scalanative.testinterface.TestMain") - - override def generatedSources = T { - val outDir = T.ctx().dest - ammonite.ops.write.over(outDir / "TestMain.scala", makeTestMain()) - Seq(PathRef(outDir)) - } - } - - // generate a main class for the tests - def makeTestMain = T{ - val frameworkInstances = TestRunner.frameworks(testFrameworks()) _ - - val testClasses = - Jvm.inprocess(testClasspathJvm().map(_.path), classLoaderOverrideSbtTesting = true, isolated = true, closeContextClassLoaderWhenDone = true, - cl => { - frameworkInstances(cl).flatMap { framework => - val df = Lib.discoverTests(cl, framework, Agg(compile().classes.path)) - df.map(d => TestDefinition(framework.getClass.getName, d._1, d._2)) - } - } - ) - - val frameworks = testClasses.map(_.framework).distinct - - val frameworksList = - if (frameworks.nonEmpty) frameworks.mkString("List(new _root_.", ", new _root_.", ")") - else { - throw new Exception( - "Cannot find any tests; make sure you defined the test framework correctly, " + - "and extend whatever trait or annotation necessary to mark your test suites" - ) - } - - - val testsMap = makeTestsMap(testClasses) - - s"""package scala.scalanative.testinterface - |object TestMain extends TestMainBase { - | override val frameworks = $frameworksList - | override val tests = Map[String, AnyRef]($testsMap) - | def main(args: Array[String]): Unit = - | testMain(args) - |}""".stripMargin - } - - private def 
makeTestsMap(tests: Seq[TestDefinition]): String = { - tests - .map { t => - val isModule = t.fingerprint match { - case af: AnnotatedFingerprint => af.isModule - case sf: SubclassFingerprint => sf.isModule - } - - val inst = - if (isModule) s"_root_.${t.name}" else s"new _root_.${t.name}" - s""""${t.name}" -> $inst""" - } - .mkString(", ") - } -} - - -trait SbtNativeModule extends ScalaNativeModule with SbtModule - diff --git a/scalanativelib/src/mill/scalanativelib/ScalaNativeWorkerApi.scala b/scalanativelib/src/mill/scalanativelib/ScalaNativeWorkerApi.scala deleted file mode 100644 index 80325f1e..00000000 --- a/scalanativelib/src/mill/scalanativelib/ScalaNativeWorkerApi.scala +++ /dev/null @@ -1,44 +0,0 @@ -package mill.scalanativelib - -import java.io.File -import java.net.URLClassLoader - -import mill.define.{Discover, Worker} -import mill.{Agg, T} -import mill.scalanativelib.api._ - - -class ScalaNativeWorker { - private var scalaInstanceCache = Option.empty[(Long, ScalaNativeWorkerApi)] - - def impl(toolsClasspath: Agg[os.Path]): ScalaNativeWorkerApi = { - val classloaderSig = toolsClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum - scalaInstanceCache match { - case Some((sig, bridge)) if sig == classloaderSig => bridge - case _ => - val cl = new URLClassLoader( - toolsClasspath.map(_.toIO.toURI.toURL).toArray, - getClass.getClassLoader - ) - try { - val bridge = cl - .loadClass("mill.scalanativelib.worker.ScalaNativeWorkerImpl") - .getDeclaredConstructor() - .newInstance() - .asInstanceOf[ScalaNativeWorkerApi] - scalaInstanceCache = Some((classloaderSig, bridge)) - bridge - } - catch { - case e: Exception => - e.printStackTrace() - throw e - } - } - } -} - -object ScalaNativeWorkerApi extends mill.define.ExternalModule { - def scalaNativeWorker: Worker[ScalaNativeWorker] = T.worker { new ScalaNativeWorker() } - lazy val millDiscover = Discover[this.type] -} diff --git a/scalanativelib/test/resources/hello-native-world/src/ArgsParser.scala b/scalanativelib/test/resources/hello-native-world/src/ArgsParser.scala new file mode 100644 index 00000000..8ad93598 --- /dev/null +++ b/scalanativelib/test/resources/hello-native-world/src/ArgsParser.scala @@ -0,0 +1,5 @@ +package hello + +object ArgsParser { + def parse(s:String): Seq[String] = s.split(":").toSeq +} diff --git a/scalanativelib/test/resources/hello-native-world/src/Main.scala b/scalanativelib/test/resources/hello-native-world/src/Main.scala new file mode 100644 index 00000000..5e04dbb3 --- /dev/null +++ b/scalanativelib/test/resources/hello-native-world/src/Main.scala @@ -0,0 +1,6 @@ +package hello + +object Main extends App { + println("Hello " + vmName) + def vmName = sys.props("java.vm.name") +} diff --git a/scalanativelib/test/resources/hello-native-world/src/hello/ArgsParser.scala b/scalanativelib/test/resources/hello-native-world/src/hello/ArgsParser.scala deleted file mode 100644 index 8ad93598..00000000 --- a/scalanativelib/test/resources/hello-native-world/src/hello/ArgsParser.scala +++ /dev/null @@ -1,5 +0,0 @@ -package hello - -object ArgsParser { - def parse(s:String): Seq[String] = s.split(":").toSeq -} diff --git a/scalanativelib/test/resources/hello-native-world/src/hello/Main.scala b/scalanativelib/test/resources/hello-native-world/src/hello/Main.scala deleted file mode 100644 index 5e04dbb3..00000000 --- a/scalanativelib/test/resources/hello-native-world/src/hello/Main.scala +++ /dev/null @@ -1,6 +0,0 @@ -package hello - -object Main extends App { - println("Hello " + vmName) - def vmName = 
sys.props("java.vm.name") -} diff --git a/scalanativelib/test/src/HelloNativeWorldTests.scala b/scalanativelib/test/src/HelloNativeWorldTests.scala new file mode 100644 index 00000000..74d4238a --- /dev/null +++ b/scalanativelib/test/src/HelloNativeWorldTests.scala @@ -0,0 +1,216 @@ +package mill.scalanativelib + +import java.util.jar.JarFile + +import mill._ +import mill.define.Discover +import mill.eval.{Evaluator, Result} +import mill.scalalib.{CrossScalaModule, DepSyntax, Lib, PublishModule, TestRunner} +import mill.scalalib.publish.{Developer, License, PomSettings, VersionControl} +import mill.util.{TestEvaluator, TestUtil} +import utest._ + + +import scala.collection.JavaConverters._ +import mill.scalanativelib.api._ + +object HelloNativeWorldTests extends TestSuite { + val workspacePath = TestUtil.getOutPathStatic() / "hello-native-world" + + trait HelloNativeWorldModule extends CrossScalaModule with ScalaNativeModule with PublishModule { + override def millSourcePath = workspacePath + def publishVersion = "0.0.1-SNAPSHOT" + override def mainClass = Some("hello.Main") + } + + object HelloNativeWorld extends TestUtil.BaseModule { + val matrix = for { + scala <- Seq("2.11.12") + scalaNative <- Seq("0.3.8") + mode <- List(ReleaseMode.Debug, ReleaseMode.Release) + } yield (scala, scalaNative, mode) + + object helloNativeWorld extends Cross[BuildModule](matrix:_*) + class BuildModule(val crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) extends HelloNativeWorldModule { + override def artifactName = "hello-native-world" + def scalaNativeVersion = sNativeVersion + def releaseMode = T{ mode } + def pomSettings = PomSettings( + organization = "com.lihaoyi", + description = "hello native world ready for real world publishing", + url = "https://github.com/lihaoyi/hello-world-publish", + licenses = Seq(License.Common.Apache2), + versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), + developers = + Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) + ) + } + + object buildUTest extends Cross[BuildModuleUtest](matrix:_*) + class BuildModuleUtest(crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) + extends BuildModule(crossScalaVersion, sNativeVersion, mode) { + object test extends super.Tests { + override def sources = T.sources{ millSourcePath / 'src / 'utest } + def testFrameworks = Seq("utest.runner.Framework") + override def ivyDeps = Agg( + ivy"com.lihaoyi::utest::0.6.4" + ) + } + } + + object buildScalaTest extends Cross[BuildModuleScalaTest](matrix:_*) + class BuildModuleScalaTest(crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) + extends BuildModule(crossScalaVersion, sNativeVersion, mode) { + object test extends super.Tests { + override def sources = T.sources{ millSourcePath / 'src / 'scalatest } + def testFrameworks = Seq("org.scalatest.tools.Framework") + override def ivyDeps = Agg( + ivy"org.scalatest::scalatest::3.2.0-SNAP10" + ) + } + } + override lazy val millDiscover = Discover[this.type] + } + + val millSourcePath = os.pwd / 'scalanativelib / 'test / 'resources / "hello-native-world" + + val helloWorldEvaluator = TestEvaluator.static(HelloNativeWorld) + + + val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" + + def tests: Tests = Tests { + prepareWorkspace() + 'compile - { + def testCompileFromScratch(scalaVersion: String, + scalaNativeVersion: String, + mode: ReleaseMode): Unit = { + val Right((result, evalCount)) = + 
helloWorldEvaluator(HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).compile) + + val outPath = result.classes.path + val outputFiles = os.walk(outPath).filter(os.isFile) + val expectedClassfiles = compileClassfiles(outPath / 'hello) + assert( + outputFiles.toSet == expectedClassfiles, + evalCount > 0 + ) + + // don't recompile if nothing changed + val Right((_, unchangedEvalCount)) = + helloWorldEvaluator(HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).compile) + assert(unchangedEvalCount == 0) + } + + 'fromScratch_21112_037 - testCompileFromScratch("2.11.12", "0.3.8", ReleaseMode.Debug) + } + + 'jar - { + 'containsNirs - { + val Right((result, evalCount)) = + helloWorldEvaluator(HelloNativeWorld.helloNativeWorld("2.11.12", "0.3.8", ReleaseMode.Debug).jar) + val jar = result.path + val entries = new JarFile(jar.toIO).entries().asScala.map(_.getName) + assert(entries.contains("hello/Main$.nir")) + } + } + 'publish - { + def testArtifactId(scalaVersion: String, + scalaNativeVersion: String, + mode: ReleaseMode, + artifactId: String): Unit = { + val Right((result, evalCount)) = helloWorldEvaluator( + HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode: ReleaseMode).artifactMetadata) + assert(result.id == artifactId) + } + 'artifactId_038 - testArtifactId("2.11.12", "0.3.8", ReleaseMode.Debug, "hello-native-world_native0.3_2.11") + } + 'test - { + def runTests(testTask: define.Command[(String, Seq[TestRunner.Result])]): Map[String, Map[String, TestRunner.Result]] = { + val Left(Result.Failure(_, Some(res))) = helloWorldEvaluator(testTask) + + val (doneMsg, testResults) = res + testResults + .groupBy(_.fullyQualifiedName) + .mapValues(_.map(e => e.selector -> e).toMap) + } + + def checkUtest(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode) = { + val resultMap = runTests(HelloNativeWorld.buildUTest(scalaVersion, scalaNativeVersion, mode).test.test()) + + val mainTests = resultMap("hellotest.MainTests") + val argParserTests = resultMap("hellotest.ArgsParserTests") + + assert( + mainTests.size == 2, + mainTests("hellotest.MainTests.vmName.containNative").status == "Success", + mainTests("hellotest.MainTests.vmName.containScala").status == "Success", + + argParserTests.size == 2, + argParserTests("hellotest.ArgsParserTests.one").status == "Success", + argParserTests("hellotest.ArgsParserTests.two").status == "Failure" + ) + } + + def checkScalaTest(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode) = { + val resultMap = runTests(HelloNativeWorld.buildScalaTest(scalaVersion, scalaNativeVersion, mode).test.test()) + + val mainSpec = resultMap("hellotest.MainSpec") + val argParserSpec = resultMap("hellotest.ArgsParserSpec") + + assert( + mainSpec.size == 2, + mainSpec("vmName should contain Native").status == "Success", + mainSpec("vmName should contain Scala").status == "Success", + + argParserSpec.size == 2, + argParserSpec("parse should one").status == "Success", + argParserSpec("parse should two").status == "Failure" + ) + } + + 'utest_21112_038_debug - (checkUtest("2.11.12", "0.3.8", ReleaseMode.Debug)) + 'utest_21112_038_release - (checkUtest("2.11.12", "0.3.8", ReleaseMode.Release)) + 'scalaTest_21112_038_debug - (checkScalaTest("2.11.12", "0.3.8", ReleaseMode.Debug)) + 'scalaTest_21112_038_release - (checkScalaTest("2.11.12", "0.3.8", ReleaseMode.Release)) + } + + def checkRun(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode): Unit = { + val task = 
HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).run() + + val Right((_, evalCount)) = helloWorldEvaluator(task) + + val paths = Evaluator.resolveDestPaths( + helloWorldEvaluator.outPath, + task.ctx.segments + ) + val log = os.read(paths.log) + assert( + evalCount > 0, + log.contains("Scala Native") + ) + } + + 'run - { + 'run_21112_038_debug - (checkRun("2.11.12", "0.3.8", ReleaseMode.Debug)) + 'run_21112_038_release - (checkRun("2.11.12", "0.3.8", ReleaseMode.Release)) + } + } + + def compileClassfiles(parentDir: os.Path) = Set( + parentDir / "ArgsParser$.class", + parentDir / "ArgsParser$.nir", + parentDir / "ArgsParser.class", + parentDir / "Main.class", + parentDir / "Main$.class", + parentDir / "Main$delayedInit$body.class", + parentDir / "Main$.nir", + parentDir / "Main$delayedInit$body.nir" + ) + + def prepareWorkspace(): Unit = { + os.remove.all(workspacePath) + os.makeDir.all(workspacePath / os.up) + os.copy(millSourcePath, workspacePath) + } +} diff --git a/scalanativelib/test/src/mill/scalanativelib/HelloNativeWorldTests.scala b/scalanativelib/test/src/mill/scalanativelib/HelloNativeWorldTests.scala deleted file mode 100644 index 74d4238a..00000000 --- a/scalanativelib/test/src/mill/scalanativelib/HelloNativeWorldTests.scala +++ /dev/null @@ -1,216 +0,0 @@ -package mill.scalanativelib - -import java.util.jar.JarFile - -import mill._ -import mill.define.Discover -import mill.eval.{Evaluator, Result} -import mill.scalalib.{CrossScalaModule, DepSyntax, Lib, PublishModule, TestRunner} -import mill.scalalib.publish.{Developer, License, PomSettings, VersionControl} -import mill.util.{TestEvaluator, TestUtil} -import utest._ - - -import scala.collection.JavaConverters._ -import mill.scalanativelib.api._ - -object HelloNativeWorldTests extends TestSuite { - val workspacePath = TestUtil.getOutPathStatic() / "hello-native-world" - - trait HelloNativeWorldModule extends CrossScalaModule with ScalaNativeModule with PublishModule { - override def millSourcePath = workspacePath - def publishVersion = "0.0.1-SNAPSHOT" - override def mainClass = Some("hello.Main") - } - - object HelloNativeWorld extends TestUtil.BaseModule { - val matrix = for { - scala <- Seq("2.11.12") - scalaNative <- Seq("0.3.8") - mode <- List(ReleaseMode.Debug, ReleaseMode.Release) - } yield (scala, scalaNative, mode) - - object helloNativeWorld extends Cross[BuildModule](matrix:_*) - class BuildModule(val crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) extends HelloNativeWorldModule { - override def artifactName = "hello-native-world" - def scalaNativeVersion = sNativeVersion - def releaseMode = T{ mode } - def pomSettings = PomSettings( - organization = "com.lihaoyi", - description = "hello native world ready for real world publishing", - url = "https://github.com/lihaoyi/hello-world-publish", - licenses = Seq(License.Common.Apache2), - versionControl = VersionControl.github("lihaoyi", "hello-world-publish"), - developers = - Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi")) - ) - } - - object buildUTest extends Cross[BuildModuleUtest](matrix:_*) - class BuildModuleUtest(crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) - extends BuildModule(crossScalaVersion, sNativeVersion, mode) { - object test extends super.Tests { - override def sources = T.sources{ millSourcePath / 'src / 'utest } - def testFrameworks = Seq("utest.runner.Framework") - override def ivyDeps = Agg( - ivy"com.lihaoyi::utest::0.6.4" - ) - } - } - - object 
buildScalaTest extends Cross[BuildModuleScalaTest](matrix:_*) - class BuildModuleScalaTest(crossScalaVersion: String, sNativeVersion: String, mode: ReleaseMode) - extends BuildModule(crossScalaVersion, sNativeVersion, mode) { - object test extends super.Tests { - override def sources = T.sources{ millSourcePath / 'src / 'scalatest } - def testFrameworks = Seq("org.scalatest.tools.Framework") - override def ivyDeps = Agg( - ivy"org.scalatest::scalatest::3.2.0-SNAP10" - ) - } - } - override lazy val millDiscover = Discover[this.type] - } - - val millSourcePath = os.pwd / 'scalanativelib / 'test / 'resources / "hello-native-world" - - val helloWorldEvaluator = TestEvaluator.static(HelloNativeWorld) - - - val mainObject = helloWorldEvaluator.outPath / 'src / "Main.scala" - - def tests: Tests = Tests { - prepareWorkspace() - 'compile - { - def testCompileFromScratch(scalaVersion: String, - scalaNativeVersion: String, - mode: ReleaseMode): Unit = { - val Right((result, evalCount)) = - helloWorldEvaluator(HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).compile) - - val outPath = result.classes.path - val outputFiles = os.walk(outPath).filter(os.isFile) - val expectedClassfiles = compileClassfiles(outPath / 'hello) - assert( - outputFiles.toSet == expectedClassfiles, - evalCount > 0 - ) - - // don't recompile if nothing changed - val Right((_, unchangedEvalCount)) = - helloWorldEvaluator(HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).compile) - assert(unchangedEvalCount == 0) - } - - 'fromScratch_21112_037 - testCompileFromScratch("2.11.12", "0.3.8", ReleaseMode.Debug) - } - - 'jar - { - 'containsNirs - { - val Right((result, evalCount)) = - helloWorldEvaluator(HelloNativeWorld.helloNativeWorld("2.11.12", "0.3.8", ReleaseMode.Debug).jar) - val jar = result.path - val entries = new JarFile(jar.toIO).entries().asScala.map(_.getName) - assert(entries.contains("hello/Main$.nir")) - } - } - 'publish - { - def testArtifactId(scalaVersion: String, - scalaNativeVersion: String, - mode: ReleaseMode, - artifactId: String): Unit = { - val Right((result, evalCount)) = helloWorldEvaluator( - HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode: ReleaseMode).artifactMetadata) - assert(result.id == artifactId) - } - 'artifactId_038 - testArtifactId("2.11.12", "0.3.8", ReleaseMode.Debug, "hello-native-world_native0.3_2.11") - } - 'test - { - def runTests(testTask: define.Command[(String, Seq[TestRunner.Result])]): Map[String, Map[String, TestRunner.Result]] = { - val Left(Result.Failure(_, Some(res))) = helloWorldEvaluator(testTask) - - val (doneMsg, testResults) = res - testResults - .groupBy(_.fullyQualifiedName) - .mapValues(_.map(e => e.selector -> e).toMap) - } - - def checkUtest(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode) = { - val resultMap = runTests(HelloNativeWorld.buildUTest(scalaVersion, scalaNativeVersion, mode).test.test()) - - val mainTests = resultMap("hellotest.MainTests") - val argParserTests = resultMap("hellotest.ArgsParserTests") - - assert( - mainTests.size == 2, - mainTests("hellotest.MainTests.vmName.containNative").status == "Success", - mainTests("hellotest.MainTests.vmName.containScala").status == "Success", - - argParserTests.size == 2, - argParserTests("hellotest.ArgsParserTests.one").status == "Success", - argParserTests("hellotest.ArgsParserTests.two").status == "Failure" - ) - } - - def checkScalaTest(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode) = { - val 
resultMap = runTests(HelloNativeWorld.buildScalaTest(scalaVersion, scalaNativeVersion, mode).test.test()) - - val mainSpec = resultMap("hellotest.MainSpec") - val argParserSpec = resultMap("hellotest.ArgsParserSpec") - - assert( - mainSpec.size == 2, - mainSpec("vmName should contain Native").status == "Success", - mainSpec("vmName should contain Scala").status == "Success", - - argParserSpec.size == 2, - argParserSpec("parse should one").status == "Success", - argParserSpec("parse should two").status == "Failure" - ) - } - - 'utest_21112_038_debug - (checkUtest("2.11.12", "0.3.8", ReleaseMode.Debug)) - 'utest_21112_038_release - (checkUtest("2.11.12", "0.3.8", ReleaseMode.Release)) - 'scalaTest_21112_038_debug - (checkScalaTest("2.11.12", "0.3.8", ReleaseMode.Debug)) - 'scalaTest_21112_038_release - (checkScalaTest("2.11.12", "0.3.8", ReleaseMode.Release)) - } - - def checkRun(scalaVersion: String, scalaNativeVersion: String, mode: ReleaseMode): Unit = { - val task = HelloNativeWorld.helloNativeWorld(scalaVersion, scalaNativeVersion, mode).run() - - val Right((_, evalCount)) = helloWorldEvaluator(task) - - val paths = Evaluator.resolveDestPaths( - helloWorldEvaluator.outPath, - task.ctx.segments - ) - val log = os.read(paths.log) - assert( - evalCount > 0, - log.contains("Scala Native") - ) - } - - 'run - { - 'run_21112_038_debug - (checkRun("2.11.12", "0.3.8", ReleaseMode.Debug)) - 'run_21112_038_release - (checkRun("2.11.12", "0.3.8", ReleaseMode.Release)) - } - } - - def compileClassfiles(parentDir: os.Path) = Set( - parentDir / "ArgsParser$.class", - parentDir / "ArgsParser$.nir", - parentDir / "ArgsParser.class", - parentDir / "Main.class", - parentDir / "Main$.class", - parentDir / "Main$delayedInit$body.class", - parentDir / "Main$.nir", - parentDir / "Main$delayedInit$body.nir" - ) - - def prepareWorkspace(): Unit = { - os.remove.all(workspacePath) - os.makeDir.all(workspacePath / os.up) - os.copy(millSourcePath, workspacePath) - } -} diff --git a/scalanativelib/worker/0.3/src/ScalaNativeWorkerImpl.scala b/scalanativelib/worker/0.3/src/ScalaNativeWorkerImpl.scala new file mode 100644 index 00000000..b32e84ff --- /dev/null +++ b/scalanativelib/worker/0.3/src/ScalaNativeWorkerImpl.scala @@ -0,0 +1,73 @@ +package mill.scalanativelib.worker + +import java.io.File +import java.lang.System.{err, out} + +import scala.scalanative.build.{Build, Config, Discover, GC, Logger, Mode} +import mill.scalanativelib.api.{NativeConfig, NativeLogLevel, ReleaseMode} +import sbt.testing.Framework + +import scala.scalanative.testinterface.ScalaNativeFramework + + +class ScalaNativeWorkerImpl extends mill.scalanativelib.api.ScalaNativeWorkerApi { + def logger(level: NativeLogLevel) = + Logger( + debugFn = msg => if (level >= NativeLogLevel.Debug) out.println(msg), + infoFn = msg => if (level >= NativeLogLevel.Info) out.println(msg), + warnFn = msg => if (level >= NativeLogLevel.Warn) out.println(msg), + errorFn = msg => if (level >= NativeLogLevel.Error) err.println(msg)) + + def discoverClang: os.Path = os.Path(Discover.clang()) + def discoverClangPP: os.Path = os.Path(Discover.clangpp()) + def discoverTarget(clang: os.Path, workdir: os.Path): String = Discover.targetTriple(clang.toNIO, workdir.toNIO) + def discoverCompileOptions: Seq[String] = Discover.compileOptions() + def discoverLinkingOptions: Seq[String] = Discover.linkingOptions() + def defaultGarbageCollector: String = GC.default.name + + def config(nativeLibJar: os.Path, + mainClass: String, + classpath: Seq[os.Path], + nativeWorkdir: os.Path, 
+ nativeClang: os.Path, + nativeClangPP: os.Path, + nativeTarget: String, + nativeCompileOptions: Seq[String], + nativeLinkingOptions: Seq[String], + nativeGC: String, + nativeLinkStubs: Boolean, + releaseMode: ReleaseMode, + logLevel: NativeLogLevel): NativeConfig = + { + val entry = mainClass + "$" + + val config = + Config.empty + .withNativelib(nativeLibJar.toNIO) + .withMainClass(entry) + .withClassPath(classpath.map(_.toNIO)) + .withWorkdir(nativeWorkdir.toNIO) + .withClang(nativeClang.toNIO) + .withClangPP(nativeClangPP.toNIO) + .withTargetTriple(nativeTarget) + .withCompileOptions(nativeCompileOptions) + .withLinkingOptions(nativeLinkingOptions) + .withGC(GC(nativeGC)) + .withLinkStubs(nativeLinkStubs) + .withMode(Mode(releaseMode.name)) + .withLogger(logger(logLevel)) + NativeConfig(config) + } + + def nativeLink(nativeConfig: NativeConfig, outPath: os.Path): os.Path = { + val config = nativeConfig.config.asInstanceOf[Config] + Build.build(config, outPath.toNIO) + outPath + } + + override def newScalaNativeFrameWork(framework: Framework, id: Int, testBinary: File, + logLevel: NativeLogLevel, envVars: Map[String, String]): Framework = + { + new ScalaNativeFramework(framework, id, logger(logLevel), testBinary, envVars) + } +} diff --git a/scalanativelib/worker/0.3/src/mill/scalanativelib/worker/ScalaNativeWorkerImpl.scala b/scalanativelib/worker/0.3/src/mill/scalanativelib/worker/ScalaNativeWorkerImpl.scala deleted file mode 100644 index b32e84ff..00000000 --- a/scalanativelib/worker/0.3/src/mill/scalanativelib/worker/ScalaNativeWorkerImpl.scala +++ /dev/null @@ -1,73 +0,0 @@ -package mill.scalanativelib.worker - -import java.io.File -import java.lang.System.{err, out} - -import scala.scalanative.build.{Build, Config, Discover, GC, Logger, Mode} -import mill.scalanativelib.api.{NativeConfig, NativeLogLevel, ReleaseMode} -import sbt.testing.Framework - -import scala.scalanative.testinterface.ScalaNativeFramework - - -class ScalaNativeWorkerImpl extends mill.scalanativelib.api.ScalaNativeWorkerApi { - def logger(level: NativeLogLevel) = - Logger( - debugFn = msg => if (level >= NativeLogLevel.Debug) out.println(msg), - infoFn = msg => if (level >= NativeLogLevel.Info) out.println(msg), - warnFn = msg => if (level >= NativeLogLevel.Warn) out.println(msg), - errorFn = msg => if (level >= NativeLogLevel.Error) err.println(msg)) - - def discoverClang: os.Path = os.Path(Discover.clang()) - def discoverClangPP: os.Path = os.Path(Discover.clangpp()) - def discoverTarget(clang: os.Path, workdir: os.Path): String = Discover.targetTriple(clang.toNIO, workdir.toNIO) - def discoverCompileOptions: Seq[String] = Discover.compileOptions() - def discoverLinkingOptions: Seq[String] = Discover.linkingOptions() - def defaultGarbageCollector: String = GC.default.name - - def config(nativeLibJar: os.Path, - mainClass: String, - classpath: Seq[os.Path], - nativeWorkdir: os.Path, - nativeClang: os.Path, - nativeClangPP: os.Path, - nativeTarget: String, - nativeCompileOptions: Seq[String], - nativeLinkingOptions: Seq[String], - nativeGC: String, - nativeLinkStubs: Boolean, - releaseMode: ReleaseMode, - logLevel: NativeLogLevel): NativeConfig = - { - val entry = mainClass + "$" - - val config = - Config.empty - .withNativelib(nativeLibJar.toNIO) - .withMainClass(entry) - .withClassPath(classpath.map(_.toNIO)) - .withWorkdir(nativeWorkdir.toNIO) - .withClang(nativeClang.toNIO) - .withClangPP(nativeClangPP.toNIO) - .withTargetTriple(nativeTarget) - .withCompileOptions(nativeCompileOptions) - 
.withLinkingOptions(nativeLinkingOptions) - .withGC(GC(nativeGC)) - .withLinkStubs(nativeLinkStubs) - .withMode(Mode(releaseMode.name)) - .withLogger(logger(logLevel)) - NativeConfig(config) - } - - def nativeLink(nativeConfig: NativeConfig, outPath: os.Path): os.Path = { - val config = nativeConfig.config.asInstanceOf[Config] - Build.build(config, outPath.toNIO) - outPath - } - - override def newScalaNativeFrameWork(framework: Framework, id: Int, testBinary: File, - logLevel: NativeLogLevel, envVars: Map[String, String]): Framework = - { - new ScalaNativeFramework(framework, id, logger(logLevel), testBinary, envVars) - } -} -- cgit v1.2.3 From 33fa0c8d2dd6fa8aa733ca0fc9c0d9c138fd5c7d Mon Sep 17 00:00:00 2001 From: Li Haoyi <32282535+lihaoyi-databricks@users.noreply.github.com> Date: Wed, 12 Dec 2018 18:27:19 -0800 Subject: Bump Mill Scala version to 2.12.8 (#507) --- build.sc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sc b/build.sc index 1cbbe992..42e76aba 100755 --- a/build.sc +++ b/build.sc @@ -27,7 +27,7 @@ trait MillPublishModule extends PublishModule{ def javacOptions = Seq("-source", "1.8", "-target", "1.8") } trait MillApiModule extends MillPublishModule with ScalaModule{ - def scalaVersion = T{ "2.12.6" } + def scalaVersion = T{ "2.12.8" } def compileIvyDeps = Agg(ivy"com.lihaoyi::acyclic:0.1.7") def scalacOptions = Seq("-P:acyclic:force") def scalacPluginIvyDeps = Agg(ivy"com.lihaoyi::acyclic:0.1.7") @@ -105,7 +105,7 @@ object main extends MillModule { } object moduledefs extends MillPublishModule with ScalaModule{ - def scalaVersion = T{ "2.12.6" } + def scalaVersion = T{ "2.12.8" } def ivyDeps = Agg( ivy"org.scala-lang:scala-compiler:${scalaVersion()}", ivy"com.lihaoyi::sourcecode:0.1.4", -- cgit v1.2.3 From d2dbfbf4955082d6ac893c8112d92d57b932ecb6 Mon Sep 17 00:00:00 2001 From: Tobias Roeser Date: Fri, 14 Dec 2018 23:18:16 +0100 Subject: Added doc to version target --- main/src/main/MainModule.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/main/src/main/MainModule.scala b/main/src/main/MainModule.scala index 34145668..dbe92cc2 100644 --- a/main/src/main/MainModule.scala +++ b/main/src/main/MainModule.scala @@ -32,6 +32,10 @@ trait MainModule extends mill.Module{ implicit def millDiscover: mill.define.Discover[_] implicit def millScoptTasksReads[T] = new mill.main.Tasks.Scopt[T]() implicit def millScoptEvaluatorReads[T] = new mill.main.EvaluatorScopt[T]() + + /** + * Show the mill version. 
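 * (For illustration, assuming the standard command-line invocation: running `mill version`
 * prints the `MILL_VERSION` system property.)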
+ */ def version() = mill.T.command { val res = System.getProperty("MILL_VERSION") println(res) -- cgit v1.2.3 From 293528734bb901d9d240782cde9e5f86221713a2 Mon Sep 17 00:00:00 2001 From: Tobias Roeser Date: Fri, 14 Dec 2018 23:18:47 +0100 Subject: Fixed plugin usage docs for mill-osgi --- docs/pages/9 - Contrib Modules.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/pages/9 - Contrib Modules.md b/docs/pages/9 - Contrib Modules.md index 4d811cde..cf63039a 100644 --- a/docs/pages/9 - Contrib Modules.md +++ b/docs/pages/9 - Contrib Modules.md @@ -276,18 +276,21 @@ Project home: https://github.com/lefou/mill-osgi #### Quickstart ```scala -import $ivy.`de.tototec::de.tobiasroeser.mill.osgi:0.0.2` +import mill._, mill.scalalib._ +import $ivy.`de.tototec::de.tobiasroeser.mill.osgi:0.0.5` import de.tobiasroeser.mill.osgi._ object project extends ScalaModule with OsgiBundleModule { def bundleSymbolicName = "com.example.project" - def osgiHeaders = T{ osgiHeaders().copy( + def osgiHeaders = T{ super.osgiHeaders().copy( `Export-Package` = Seq("com.example.api"), `Bundle-Activator` = Some("com.example.internal.Activator") )} + // other settings ... + } ``` -- cgit v1.2.3 From 9cbcfa9c1fb89efd0b4cdafdc4246ce027f74ebb Mon Sep 17 00:00:00 2001 From: Gregor Uhlenheuer Date: Sat, 15 Dec 2018 01:14:31 +0100 Subject: Discover - break overridesRoutes into fixed size chunks (#509) * Discover - break overridesRoutes into fixed size chunks * Discover - simplify lambda creation * add LargeProjectTests * LargeProjectTests: remove Ydelambdafy --- integration/test/resources/large-project/build.sc | 293 +++++++++++++++++++++ .../one/src/main/scala/foo/common/one/Main.scala | 5 + integration/test/src/LargeProjectTests.scala | 19 ++ integration/test/src/forked/Tests.scala | 1 + integration/test/src/local/Tests.scala | 3 +- main/core/src/define/Discover.scala | 7 +- 6 files changed, 325 insertions(+), 3 deletions(-) create mode 100644 integration/test/resources/large-project/build.sc create mode 100644 integration/test/resources/large-project/foo/common/one/src/main/scala/foo/common/one/Main.scala create mode 100644 integration/test/src/LargeProjectTests.scala diff --git a/integration/test/resources/large-project/build.sc b/integration/test/resources/large-project/build.sc new file mode 100644 index 00000000..27844c90 --- /dev/null +++ b/integration/test/resources/large-project/build.sc @@ -0,0 +1,293 @@ +import mill._, scalalib._ + +trait TModule extends SbtModule { + def scalaVersion = "2.12.7" +} + +object foo extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq(foo.common.one) + } + object three extends TModule { + def moduleDeps = Seq(foo.common.two) + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq(foo.common.one) + } + object two extends TModule { + def moduleDeps = Seq(foo.domain.one) + } + object three extends TModule { + def moduleDeps = Seq(foo.domain.two) + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq(foo.domain.three) + } + object two extends TModule { + def moduleDeps = Seq(foo.server.one) + } + object three extends TModule { + def moduleDeps = Seq(foo.server.two) + } + } +} + +object bar extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq(foo.common.three) + } + object two extends TModule { + def moduleDeps = 
Seq(bar.common.one) + } + object three extends TModule { + def moduleDeps = Seq(bar.common.two) + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq(foo.domain.three) + } + object two extends TModule { + def moduleDeps = Seq(bar.domain.one) + } + object three extends TModule { + def moduleDeps = Seq(bar.domain.two) + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq(foo.server.one) + } + object two extends TModule { + def moduleDeps = Seq(bar.server.one) + } + object three extends TModule { + def moduleDeps = Seq(bar.server.two) + } + } +} + +object ham extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq(bar.common.one) + } + object two extends TModule { + def moduleDeps = Seq(bar.common.two) + } + object three extends TModule { + def moduleDeps = Seq(bar.common.three) + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq(bar.domain.three) + } + object two extends TModule { + def moduleDeps = Seq(bar.domain.two, ham.common.three) + } + object three extends TModule { + def moduleDeps = Seq(bar.domain.two) + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} + +object eggs extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} + +object salt extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} + +object pepper extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} + +object oregano extends Module { + object 
common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} + +object rosmarin extends Module { + object common extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object domain extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } + object server extends Module { + object one extends TModule { + def moduleDeps = Seq() + } + object two extends TModule { + def moduleDeps = Seq() + } + object three extends TModule { + def moduleDeps = Seq() + } + } +} diff --git a/integration/test/resources/large-project/foo/common/one/src/main/scala/foo/common/one/Main.scala b/integration/test/resources/large-project/foo/common/one/src/main/scala/foo/common/one/Main.scala new file mode 100644 index 00000000..4fd74836 --- /dev/null +++ b/integration/test/resources/large-project/foo/common/one/src/main/scala/foo/common/one/Main.scala @@ -0,0 +1,5 @@ +package foo.common.one + +object Main extends App { + println("large-project") +} diff --git a/integration/test/src/LargeProjectTests.scala b/integration/test/src/LargeProjectTests.scala new file mode 100644 index 00000000..e20bc932 --- /dev/null +++ b/integration/test/src/LargeProjectTests.scala @@ -0,0 +1,19 @@ +package mill.integration + +import mill.util.ScriptTestSuite +import utest._ + +class LargeProjectTests(fork: Boolean) + extends ScriptTestSuite(fork) { + def workspaceSlug: String = "large-project" + def scriptSourcePath: os.Path = os.pwd / 'integration / 'test / 'resources / workspaceSlug + + val tests = Tests{ + initWorkspace() + 'test - { + + assert(eval("foo.common.one.compile")) + } + + } +} diff --git a/integration/test/src/forked/Tests.scala b/integration/test/src/forked/Tests.scala index 41844b58..b31042bb 100644 --- a/integration/test/src/forked/Tests.scala +++ b/integration/test/src/forked/Tests.scala @@ -3,6 +3,7 @@ package mill.integration.forked object AcyclicTests extends mill.integration.AcyclicTests(fork = true) object AmmoniteTests extends mill.integration.AmmoniteTests(fork = true) object BetterFilesTests extends mill.integration.BetterFilesTests(fork = true) +object LargeProjectTests extends mill.integration.LargeProjectTests(fork = true) object JawnTests extends mill.integration.JawnTests(fork = true) object UpickleTests extends mill.integration.UpickleTests(fork = true) object PlayJsonTests extends mill.integration.PlayJsonTests(fork = true) diff --git a/integration/test/src/local/Tests.scala b/integration/test/src/local/Tests.scala index e95aac54..efc45b94 100644 --- a/integration/test/src/local/Tests.scala +++ b/integration/test/src/local/Tests.scala @@ -3,8 +3,9 @@ package mill.integration.local object AcyclicTests extends 
mill.integration.AcyclicTests(fork = false) object AmmoniteTests extends mill.integration.AmmoniteTests(fork = false) object BetterFilesTests extends mill.integration.BetterFilesTests(fork = false) +object LargeProjectTests extends mill.integration.LargeProjectTests(fork = false) object JawnTests extends mill.integration.JawnTests(fork = false) object UpickleTests extends mill.integration.UpickleTests(fork = false) object PlayJsonTests extends mill.integration.PlayJsonTests(fork = false) object CaffeineTests extends mill.integration.CaffeineTests(fork = false) -object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = false) \ No newline at end of file +object DocAnnotationsTests extends mill.integration.DocAnnotationsTests(fork = false) diff --git a/main/core/src/define/Discover.scala b/main/core/src/define/Discover.scala index f0c668e6..c7dab54c 100644 --- a/main/core/src/define/Discover.scala +++ b/main/core/src/define/Discover.scala @@ -79,9 +79,12 @@ object Discover { } if overridesRoutes.nonEmpty } yield { + // by wrapping the `overridesRoutes` in a lambda function we kind of work around + // the problem of generating a *huge* macro method body that finally exceeds the + // JVM's maximum allowed method size + val overridesLambda = q"(() => $overridesRoutes)()" val lhs = q"classOf[${discoveredModuleType.typeSymbol.asClass}]" - val rhs = q"scala.Seq[(Int, mill.util.Router.EntryPoint[_])](..$overridesRoutes)" - q"$lhs -> $rhs" + q"$lhs -> $overridesLambda" } c.Expr[Discover[T]](q"mill.define.Discover(scala.collection.immutable.Map(..$mapping))") -- cgit v1.2.3 From ea36ea3da18d3720e124b60235e1153f6c31518c Mon Sep 17 00:00:00 2001 From: dohrayme <960977+dohrayme@users.noreply.github.com> Date: Sat, 15 Dec 2018 02:34:31 +0000 Subject: fix GenIdea to create required folders (#510) --- scalalib/src/GenIdeaImpl.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scalalib/src/GenIdeaImpl.scala b/scalalib/src/GenIdeaImpl.scala index 2d76d804..548b8d4e 100644 --- a/scalalib/src/GenIdeaImpl.scala +++ b/scalalib/src/GenIdeaImpl.scala @@ -43,7 +43,7 @@ object GenIdeaImpl { val evaluator = new Evaluator(ctx.home, os.pwd / 'out, os.pwd / 'out, rootModule, ctx.log) for((relPath, xml) <- xmlFileLayout(evaluator, rootModule, jdkInfo)){ - os.write.over(os.pwd/relPath, pp.format(xml)) + os.write.over(os.pwd/relPath, pp.format(xml), createFolders = true) } } -- cgit v1.2.3 From de175e69977082e35539097a54d381e465dddf8e Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Tue, 18 Dec 2018 21:11:47 -0800 Subject: Generalize Zinc Worker (#514) * Generalize Zinc worker - Compiler bridges can now be either pre-compiled or on-demand-compiled - Scala library/compiler jar discovery is now configurable - Zinc compiler cache is now configurable, rather than being hardcoded at n=1 * . 
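An illustrative sketch of the resulting wiring, with `ctx`, `compilerBridgeClasspath` and `bridgeSourceJar` as placeholder names; the grep helpers and the cache come from the ZincWorkerModule changes further down:

    new ZincWorkerImpl(
      // bridge strategy: Left(...) compiles the compiler bridge on demand from its source jar,
      // while Right(scalaVersion => precompiledBridgeDir) would supply a pre-compiled bridge
      Left((ctx, compilerBridgeClasspath, bridgeSourceJar)),
      // configurable discovery of the scala-library / scala-compiler jars
      mill.scalalib.api.Util.grepJar(_, "scala-library", _, sources = false),
      mill.scalalib.api.Util.grepJar(_, "scala-compiler", _, sources = false),
      // configurable compiler cache; previously a single cached instance was hardcoded
      new KeyedLockedCache.RandomBoundedCache(hotParallelism = 1, coldCacheSize = 1)
    )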
* update constructor args * remove duplicate util/AggWrapper.scala file * fix * fix * fix * cleanup --- ci/shared.sc | 2 +- contrib/scalapblib/src/ScalaPBModule.scala | 2 +- contrib/twirllib/src/TwirlModule.scala | 2 +- main/api/src/mill/api/KeyedLockedCache.scala | 44 +++++ main/core/src/define/Graph.scala | 2 +- main/core/src/eval/Evaluator.scala | 2 +- main/core/src/util/AggWrapper.scala | 119 ------------ main/core/src/util/MultiBiMap.scala | 2 +- main/src/main/ReplApplyHandler.scala | 2 +- main/src/main/RunScript.scala | 2 +- main/src/modules/Jvm.scala | 2 +- main/src/modules/Util.scala | 5 +- main/src/package.scala | 4 +- main/test/resources/examples/javac/build.sc | 2 +- main/test/src/define/CacherTests.scala | 2 +- main/test/src/define/GraphTests.scala | 2 +- main/test/src/eval/EvaluationTests.scala | 2 +- main/test/src/eval/JavaCompileJarTests.scala | 7 +- main/test/src/util/TestEvaluator.scala | 2 +- main/test/src/util/TestUtil.scala | 2 +- scalajslib/src/ScalaJSModule.scala | 3 +- scalalib/api/src/ZincWorkerApi.scala | 16 +- scalalib/src/GenIdeaImpl.scala | 4 +- scalalib/src/JavaModule.scala | 2 +- scalalib/src/MiscModule.scala | 2 +- scalalib/src/ScalaModule.scala | 35 +--- scalalib/src/ZincWorkerModule.scala | 59 +++++- .../src/dependency/versions/VersionsFinder.scala | 2 +- scalalib/src/publish/Ivy.scala | 2 +- scalalib/src/publish/Pom.scala | 2 +- scalalib/test/src/ResolveDepsTests.scala | 2 +- scalalib/worker/src/ZincWorkerImpl.scala | 199 +++++++++++---------- scalanativelib/src/ScalaNativeModule.scala | 2 +- 33 files changed, 248 insertions(+), 291 deletions(-) create mode 100644 main/api/src/mill/api/KeyedLockedCache.scala delete mode 100644 main/core/src/util/AggWrapper.scala diff --git a/ci/shared.sc b/ci/shared.sc index 2f133486..a496fd1f 100644 --- a/ci/shared.sc +++ b/ci/shared.sc @@ -74,7 +74,7 @@ def generateEval(dir: Path) = { s"""package mill.main |import mill.eval.Evaluator |import mill.define.Task - |import mill.util.Strict.Agg + |import mill.api.Strict.Agg |class EvalGenerated(evaluator: Evaluator) { | type TT[+X] = Task[X] | ${(1 to 22).map(generate).mkString("\n")} diff --git a/contrib/scalapblib/src/ScalaPBModule.scala b/contrib/scalapblib/src/ScalaPBModule.scala index db5c5c8b..57bfdd40 100644 --- a/contrib/scalapblib/src/ScalaPBModule.scala +++ b/contrib/scalapblib/src/ScalaPBModule.scala @@ -7,7 +7,7 @@ import mill.define.Sources import mill.api.PathRef import mill.scalalib.Lib.resolveDependencies import mill.scalalib._ -import mill.util.Loose +import mill.api.Loose trait ScalaPBModule extends ScalaModule { diff --git a/contrib/twirllib/src/TwirlModule.scala b/contrib/twirllib/src/TwirlModule.scala index 328afc47..985765fc 100644 --- a/contrib/twirllib/src/TwirlModule.scala +++ b/contrib/twirllib/src/TwirlModule.scala @@ -6,7 +6,7 @@ import mill.define.Sources import mill.api.PathRef import mill.scalalib.Lib.resolveDependencies import mill.scalalib._ -import mill.util.Loose +import mill.api.Loose import scala.io.Codec import scala.util.Properties diff --git a/main/api/src/mill/api/KeyedLockedCache.scala b/main/api/src/mill/api/KeyedLockedCache.scala new file mode 100644 index 00000000..47fdd888 --- /dev/null +++ b/main/api/src/mill/api/KeyedLockedCache.scala @@ -0,0 +1,44 @@ +package mill.api + +/** + * A combination lock & cache; users provide a key, value-factory, and a + * body function to be called with the value. [[KeyedLockedCache]] ensures that + * the body function is called with the computed/cached value sequentially. 
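 * (A hypothetical usage sketch, inferred from the signature below, with `mkCompilers` and
 * `useCompilers` as placeholder names:
 *   cache.withCachedValue(key)(mkCompilers())(cs => useCompilers(cs))
 * computes or reuses the value for `key` and then runs the body with it, one caller at a time.)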
+ */ +trait KeyedLockedCache[T]{ + def withCachedValue[V](key: Long)(f: => T)(f2: T => V): V +} + +object KeyedLockedCache{ + class RandomBoundedCache[T](hotParallelism: Int, coldCacheSize: Int) extends KeyedLockedCache[T]{ + private[this] val random = new scala.util.Random(313373) + val available = new java.util.concurrent.Semaphore(hotParallelism) + + // Awful asymptotic complexity, but our caches are tiny n < 10 so it doesn't matter + var cache = Array.fill[Option[(Long, T)]](coldCacheSize)(None) + + def withCachedValue[V](key: Long)(f: => T)(f2: T => V): V = { + available.acquire() + val pickedValue = synchronized{ + cache.indexWhere(_.exists(_._1 == key)) match { + case -1 => f + case i => + val (k, v) = cache(i).get + cache(i) = None + v + } + } + val result = f2(pickedValue) + synchronized{ + cache.indexWhere(_.isEmpty) match{ + // Random eviction #YOLO + case -1 => cache(random.nextInt(cache.length)) = Some((key, pickedValue)) + case i => cache(i) = Some((key, pickedValue)) + } + } + + available.release() + result + } + } +} diff --git a/main/core/src/define/Graph.scala b/main/core/src/define/Graph.scala index 3119f2fb..5b29bd7b 100644 --- a/main/core/src/define/Graph.scala +++ b/main/core/src/define/Graph.scala @@ -2,7 +2,7 @@ package mill.define import mill.eval.Tarjans import mill.util.MultiBiMap -import mill.util.Strict.Agg +import mill.api.Strict.Agg object Graph { diff --git a/main/core/src/eval/Evaluator.scala b/main/core/src/eval/Evaluator.scala index 8709064e..dbaf9433 100644 --- a/main/core/src/eval/Evaluator.scala +++ b/main/core/src/eval/Evaluator.scala @@ -10,7 +10,7 @@ import mill.define.{Ctx => _, _} import mill.api.Result.OuterStack import mill.util import mill.util._ -import mill.util.Strict.Agg +import mill.api.Strict.Agg import scala.collection.mutable import scala.util.control.NonFatal diff --git a/main/core/src/util/AggWrapper.scala b/main/core/src/util/AggWrapper.scala deleted file mode 100644 index 6c107875..00000000 --- a/main/core/src/util/AggWrapper.scala +++ /dev/null @@ -1,119 +0,0 @@ -package mill.util - - - -import scala.collection.mutable -object Strict extends AggWrapper(true) -object Loose extends AggWrapper(false) -sealed class AggWrapper(strictUniqueness: Boolean){ - /** - * A collection with enforced uniqueness, fast contains and deterministic - * ordering. 
Raises an exception if a duplicate is found; call - * `toSeq.distinct` if you explicitly want to make it swallow duplicates - */ - trait Agg[V] extends TraversableOnce[V]{ - def contains(v: V): Boolean - def items: Iterator[V] - def indexed: IndexedSeq[V] - def flatMap[T](f: V => TraversableOnce[T]): Agg[T] - def map[T](f: V => T): Agg[T] - def filter(f: V => Boolean): Agg[V] - def withFilter(f: V => Boolean): Agg[V] - def collect[T](f: PartialFunction[V, T]): Agg[T] - def zipWithIndex: Agg[(V, Int)] - def reverse: Agg[V] - def zip[T](other: Agg[T]): Agg[(V, T)] - def ++[T >: V](other: TraversableOnce[T]): Agg[T] - def length: Int - } - - object Agg{ - def empty[V]: Agg[V] = new Agg.Mutable[V] - implicit def jsonFormat[T: upickle.default.ReadWriter]: upickle.default.ReadWriter[Agg[T]] = - upickle.default.readwriter[Seq[T]].bimap[Agg[T]]( - _.toList, - Agg.from(_) - ) - - def apply[V](items: V*) = from(items) - - implicit def from[V](items: TraversableOnce[V]): Agg[V] = { - val set = new Agg.Mutable[V]() - items.foreach(set.append) - set - } - - - class Mutable[V]() extends Agg[V]{ - - private[this] val set0 = mutable.LinkedHashSet.empty[V] - def contains(v: V) = set0.contains(v) - def append(v: V) = if (!contains(v)){ - set0.add(v) - - }else if (strictUniqueness){ - throw new Exception("Duplicated item inserted into OrderedSet: " + v) - } - def appendAll(vs: Seq[V]) = vs.foreach(append) - def items = set0.iterator - def indexed: IndexedSeq[V] = items.toIndexedSeq - def set: collection.Set[V] = set0 - - def map[T](f: V => T): Agg[T] = { - val output = new Agg.Mutable[T] - for(i <- items) output.append(f(i)) - output - } - def flatMap[T](f: V => TraversableOnce[T]): Agg[T] = { - val output = new Agg.Mutable[T] - for(i <- items) for(i0 <- f(i)) output.append(i0) - output - } - def filter(f: V => Boolean): Agg[V] = { - val output = new Agg.Mutable[V] - for(i <- items) if (f(i)) output.append(i) - output - } - def withFilter(f: V => Boolean): Agg[V] = filter(f) - - def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x)) - - def zipWithIndex = { - var i = 0 - this.map{ x => - i += 1 - (x, i-1) - } - } - - def reverse = Agg.from(indexed.reverseIterator) - - def zip[T](other: Agg[T]) = Agg.from(items.zip(other.items)) - def ++[T >: V](other: TraversableOnce[T]) = Agg.from(items ++ other) - def length: Int = set0.size - - // Members declared in scala.collection.GenTraversableOnce - def isTraversableAgain: Boolean = items.isTraversableAgain - def toIterator: Iterator[V] = items.toIterator - def toStream: Stream[V] = items.toStream - - // Members declared in scala.collection.TraversableOnce - def copyToArray[B >: V](xs: Array[B], start: Int,len: Int): Unit = items.copyToArray(xs, start, len) - def exists(p: V => Boolean): Boolean = items.exists(p) - def find(p: V => Boolean): Option[V] = items.find(p) - def forall(p: V => Boolean): Boolean = items.forall(p) - def foreach[U](f: V => U): Unit = items.foreach(f) - def hasDefiniteSize: Boolean = items.hasDefiniteSize - def isEmpty: Boolean = items.isEmpty - def seq: scala.collection.TraversableOnce[V] = items - def toTraversable: Traversable[V] = items.toTraversable - - override def hashCode() = items.map(_.hashCode()).sum - override def equals(other: Any) = other match{ - case s: Agg[_] => items.sameElements(s.items) - case _ => super.equals(other) - } - override def toString = items.mkString("Agg(", ", ", ")") - } - } -} diff --git a/main/core/src/util/MultiBiMap.scala b/main/core/src/util/MultiBiMap.scala index 
73bb42c4..51ea63f2 100644 --- a/main/core/src/util/MultiBiMap.scala +++ b/main/core/src/util/MultiBiMap.scala @@ -1,7 +1,7 @@ package mill.util import scala.collection.mutable -import Strict.Agg +import mill.api.Strict.Agg /** * A map from keys to collections of values: you can assign multiple values diff --git a/main/src/main/ReplApplyHandler.scala b/main/src/main/ReplApplyHandler.scala index a8e467d4..786a1409 100644 --- a/main/src/main/ReplApplyHandler.scala +++ b/main/src/main/ReplApplyHandler.scala @@ -6,7 +6,7 @@ import mill.define.Segment.Label import mill.define._ import mill.eval.{Evaluator, Result} -import mill.util.Strict.Agg +import mill.api.Strict.Agg import scala.collection.mutable object ReplApplyHandler{ diff --git a/main/src/main/RunScript.scala b/main/src/main/RunScript.scala index 47526631..b858c8b9 100644 --- a/main/src/main/RunScript.scala +++ b/main/src/main/RunScript.scala @@ -11,7 +11,7 @@ import mill.define._ import mill.eval.{Evaluator, PathRef, Result} import mill.util.{EitherOps, ParseArgs, Watched} import mill.api.Logger -import mill.util.Strict.Agg +import mill.api.Strict.Agg import scala.collection.mutable import scala.reflect.ClassTag diff --git a/main/src/modules/Jvm.scala b/main/src/modules/Jvm.scala index 1a51ed8b..e17631e3 100644 --- a/main/src/modules/Jvm.scala +++ b/main/src/modules/Jvm.scala @@ -15,7 +15,7 @@ import mill.main.client.InputPumper import mill.eval.{PathRef, Result} import mill.util.Ctx import mill.api.IO -import mill.util.Loose.Agg +import mill.api.Loose.Agg import scala.collection.mutable import scala.collection.JavaConverters._ diff --git a/main/src/modules/Util.scala b/main/src/modules/Util.scala index 2b98a304..029626fe 100644 --- a/main/src/modules/Util.scala +++ b/main/src/modules/Util.scala @@ -3,7 +3,8 @@ package mill.modules import coursier.Repository import mill.api.{PathRef, IO} -import mill.util.{Ctx, Loose} +import mill.util.Ctx +import mill.api.Loose object Util { @@ -55,7 +56,7 @@ object Util { val localPath = sys.props(key) if (localPath != null) { mill.api.Result.Success( - Loose.Agg.from(localPath.split(',').map(p => PathRef(os.Path(p), quick = true))) + mill.api.Loose.Agg.from(localPath.split(',').map(p => PathRef(os.Path(p), quick = true))) ) } else { mill.modules.Jvm.resolveDependencies( diff --git a/main/src/package.scala b/main/src/package.scala index 0ccd094f..6bcb1bdf 100644 --- a/main/src/package.scala +++ b/main/src/package.scala @@ -7,6 +7,6 @@ package object mill extends JsonFormatters{ type PathRef = mill.api.PathRef type Module = define.Module type Cross[T] = define.Cross[T] - type Agg[T] = util.Loose.Agg[T] - val Agg = util.Loose.Agg + type Agg[T] = mill.api.Loose.Agg[T] + val Agg = mill.api.Loose.Agg } diff --git a/main/test/resources/examples/javac/build.sc b/main/test/resources/examples/javac/build.sc index 2ed9f915..17366219 100644 --- a/main/test/resources/examples/javac/build.sc +++ b/main/test/resources/examples/javac/build.sc @@ -2,7 +2,7 @@ import mill.T import mill.eval.JavaCompileJarTests.compileAll import mill.api.PathRef import mill.modules.Jvm -import mill.util.Loose +import mill.api.Loose def sourceRootPath = millSourcePath / 'src def resourceRootPath = millSourcePath / 'resources diff --git a/main/test/src/define/CacherTests.scala b/main/test/src/define/CacherTests.scala index 59ebf3f6..1524e5c1 100644 --- a/main/test/src/define/CacherTests.scala +++ b/main/test/src/define/CacherTests.scala @@ -1,7 +1,7 @@ package mill.define import mill.util.{DummyLogger, TestEvaluator, TestUtil} -import 
mill.util.Strict.Agg +import mill.api.Strict.Agg import mill.T import mill.api.Result.Success import utest._ diff --git a/main/test/src/define/GraphTests.scala b/main/test/src/define/GraphTests.scala index 224ce59f..b36dbf95 100644 --- a/main/test/src/define/GraphTests.scala +++ b/main/test/src/define/GraphTests.scala @@ -4,7 +4,7 @@ package mill.define import mill.eval.Evaluator import mill.util.{TestGraphs, TestUtil} import utest._ -import mill.util.Strict.Agg +import mill.api.Strict.Agg object GraphTests extends TestSuite{ val tests = Tests{ diff --git a/main/test/src/eval/EvaluationTests.scala b/main/test/src/eval/EvaluationTests.scala index 74f9088c..7f924db2 100644 --- a/main/test/src/eval/EvaluationTests.scala +++ b/main/test/src/eval/EvaluationTests.scala @@ -5,7 +5,7 @@ import mill.util.TestUtil.{Test, test} import mill.define.{Discover, Graph, Target, Task} import mill.{Module, T} import mill.util.{DummyLogger, TestEvaluator, TestGraphs, TestUtil} -import mill.util.Strict.Agg +import mill.api.Strict.Agg import utest._ import utest.framework.TestPath diff --git a/main/test/src/eval/JavaCompileJarTests.scala b/main/test/src/eval/JavaCompileJarTests.scala index 426c6ea6..0f9002df 100644 --- a/main/test/src/eval/JavaCompileJarTests.scala +++ b/main/test/src/eval/JavaCompileJarTests.scala @@ -4,12 +4,13 @@ import mill.define.{Discover, Input, Target, Task} import mill.modules.Jvm import mill.api.Ctx.Dest import mill.{Module, T} -import mill.util.{DummyLogger, Loose, TestEvaluator, TestUtil} -import mill.util.Strict.Agg +import mill.util.{DummyLogger, TestEvaluator, TestUtil} +import mill.api.Strict.Agg +import mill.api.Loose import utest._ import mill._ object JavaCompileJarTests extends TestSuite{ - def compileAll(sources: mill.util.Loose.Agg[PathRef])(implicit ctx: Dest) = { + def compileAll(sources: mill.api.Loose.Agg[PathRef])(implicit ctx: Dest) = { os.makeDir.all(ctx.dest) os.proc("javac", sources.map(_.path.toString()).toSeq, "-d", ctx.dest).call(ctx.dest) diff --git a/main/test/src/util/TestEvaluator.scala b/main/test/src/util/TestEvaluator.scala index 9a235679..81c8fe12 100644 --- a/main/test/src/util/TestEvaluator.scala +++ b/main/test/src/util/TestEvaluator.scala @@ -3,7 +3,7 @@ package mill.util import mill.define.{Input, Target, Task} import mill.api.Result.OuterStack import mill.eval.{Evaluator, Result} -import mill.util.Strict.Agg +import mill.api.Strict.Agg import utest.assert import utest.framework.TestPath diff --git a/main/test/src/util/TestUtil.scala b/main/test/src/util/TestUtil.scala index baab2992..462b7f5c 100644 --- a/main/test/src/util/TestUtil.scala +++ b/main/test/src/util/TestUtil.scala @@ -5,7 +5,7 @@ import mill.define._ import mill.api.Result import mill.api.Result.OuterStack import utest.assert -import mill.util.Strict.Agg +import mill.api.Strict.Agg import utest.framework.TestPath import scala.collection.mutable diff --git a/scalajslib/src/ScalaJSModule.scala b/scalajslib/src/ScalaJSModule.scala index 8568c39b..137e8ee2 100644 --- a/scalajslib/src/ScalaJSModule.scala +++ b/scalajslib/src/ScalaJSModule.scala @@ -7,7 +7,8 @@ import mill.eval.{PathRef, Result} import mill.api.Result.Success import mill.scalalib.Lib.resolveDependencies import mill.scalalib.{DepSyntax, Lib, TestModule, TestRunner} -import mill.util.{Ctx, Loose} +import mill.util.Ctx +import mill.api.Loose import mill.scalajslib.api._ trait ScalaJSModule extends scalalib.ScalaModule { outer => diff --git a/scalalib/api/src/ZincWorkerApi.scala b/scalalib/api/src/ZincWorkerApi.scala index 
c5230ec5..d42be9f3 100644 --- a/scalalib/api/src/ZincWorkerApi.scala +++ b/scalalib/api/src/ZincWorkerApi.scala @@ -3,14 +3,16 @@ package mill.scalalib.api import mill.api.Loose.Agg import mill.api.PathRef import mill.api.JsonFormatters._ - +object ZincWorkerApi{ + type Ctx = mill.api.Ctx.Dest with mill.api.Ctx.Log with mill.api.Ctx.Home +} trait ZincWorkerApi { /** Compile a Java-only project */ def compileJava(upstreamCompileOutput: Seq[CompilationResult], sources: Agg[os.Path], compileClasspath: Agg[os.Path], javacOptions: Seq[String]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] + (implicit ctx: ZincWorkerApi.Ctx): mill.api.Result[CompilationResult] /** Compile a mixed Scala/Java or Scala-only project */ def compileMixed(upstreamCompileOutput: Seq[CompilationResult], @@ -18,21 +20,21 @@ trait ZincWorkerApi { compileClasspath: Agg[os.Path], javacOptions: Seq[String], scalaVersion: String, + scalaOrganization: String, scalacOptions: Seq[String], - compilerBridgeSources: os.Path, compilerClasspath: Agg[os.Path], scalacPluginClasspath: Agg[os.Path]) - (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] + (implicit ctx: ZincWorkerApi.Ctx): mill.api.Result[CompilationResult] def discoverMainClasses(compilationResult: CompilationResult) - (implicit ctx: mill.api.Ctx): Seq[String] + (implicit ctx: ZincWorkerApi.Ctx): Seq[String] def docJar(scalaVersion: String, - compilerBridgeSources: os.Path, + scalaOrganization: String, compilerClasspath: Agg[os.Path], scalacPluginClasspath: Agg[os.Path], args: Seq[String]) - (implicit ctx: mill.api.Ctx): Boolean + (implicit ctx: ZincWorkerApi.Ctx): Boolean } diff --git a/scalalib/src/GenIdeaImpl.scala b/scalalib/src/GenIdeaImpl.scala index 548b8d4e..b8f9d35e 100644 --- a/scalalib/src/GenIdeaImpl.scala +++ b/scalalib/src/GenIdeaImpl.scala @@ -5,8 +5,8 @@ import coursier.{Cache, CoursierPaths, Repository} import mill.define._ import mill.eval.{Evaluator, PathRef, Result} import mill.api.Ctx.{Home, Log} -import mill.util.Strict.Agg -import mill.util.{Loose, Strict} +import mill.api.Strict.Agg +import mill.api.{Loose, Strict} import mill.{T, scalalib} import scala.util.Try diff --git a/scalalib/src/JavaModule.scala b/scalalib/src/JavaModule.scala index 78be8893..72c0a5a6 100644 --- a/scalalib/src/JavaModule.scala +++ b/scalalib/src/JavaModule.scala @@ -9,7 +9,7 @@ import mill.modules.{Assembly, Jvm} import mill.modules.Jvm.{createAssembly, createJar} import Lib._ import mill.scalalib.publish.{Artifact, Scope} -import mill.util.Loose.Agg +import mill.api.Loose.Agg /** * Core configuration required to compile a single Scala compilation target diff --git a/scalalib/src/MiscModule.scala b/scalalib/src/MiscModule.scala index c6449d6e..bf64f1f3 100644 --- a/scalalib/src/MiscModule.scala +++ b/scalalib/src/MiscModule.scala @@ -4,7 +4,7 @@ package scalalib import mill.define.Cross.Resolver import mill.define.{Cross, Task} import mill.eval.{PathRef, Result} -import mill.util.Loose.Agg +import mill.api.Loose.Agg object CrossModuleBase{ def scalaVersionPaths(scalaVersion: String, f: String => os.Path) = { for(segments <- scalaVersion.split('.').inits.filter(_.nonEmpty)) diff --git a/scalalib/src/ScalaModule.scala b/scalalib/src/ScalaModule.scala index 9d669bf4..5fad1664 100644 --- a/scalalib/src/ScalaModule.scala +++ b/scalalib/src/ScalaModule.scala @@ -8,7 +8,7 @@ import mill.modules.Jvm import mill.modules.Jvm.createJar import mill.scalalib.api.Util.isDotty import Lib._ -import mill.util.Loose.Agg +import mill.api.Loose.Agg import 
     mill.api.DummyInputStream
 
   /**
@@ -79,36 +79,7 @@ trait ScalaModule extends JavaModule { outer =>
 
   def scalaDocOptions = T{ scalacOptions() }
 
-  private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r
-  def scalaCompilerBridgeSources = T {
-    val (scalaVersion0, scalaBinaryVersion0) = scalaVersion() match {
-      case Milestone213(_, _) => ("2.13.0-M2", "2.13.0-M2")
-      case _ => (scalaVersion(), mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion()))
-    }
-
-    val (bridgeDep, bridgeName, bridgeVersion) =
-      if (isDotty(scalaVersion0)) {
-        val org = scalaOrganization()
-        val name = "dotty-sbt-bridge"
-        val version = scalaVersion()
-        (ivy"$org:$name:$version", name, version)
-      } else {
-        val org = "org.scala-sbt"
-        val name = "compiler-bridge"
-        val version = Versions.zinc
-        (ivy"$org::$name:$version", s"${name}_$scalaBinaryVersion0", version)
-      }
-
-    resolveDependencies(
-      repositories,
-      Lib.depToDependency(_, scalaVersion0, platformSuffix()),
-      Seq(bridgeDep),
-      sources = true
-    ).map(deps =>
-      mill.scalalib.api.Util.grepJar(deps.map(_.path), bridgeName, bridgeVersion, sources = true)
-    )
-  }
 
   /**
    * The local classpath of Scala compiler plugins on-disk; you can add
@@ -159,8 +130,8 @@ trait ScalaModule extends JavaModule { outer =>
       compileClasspath().map(_.path),
       javacOptions(),
       scalaVersion(),
+      scalaOrganization(),
       scalacOptions(),
-      scalaCompilerBridgeSources(),
       scalaCompilerClasspath().map(_.path),
       scalacPluginClasspath().map(_.path),
     )
@@ -187,7 +158,7 @@ trait ScalaModule extends JavaModule { outer =>
     else {
       zincWorker.worker().docJar(
         scalaVersion(),
-        scalaCompilerBridgeSources(),
+        scalaOrganization(),
         scalaCompilerClasspath().map(_.path),
         scalacPluginClasspath().map(_.path),
         files ++ options
diff --git a/scalalib/src/ZincWorkerModule.scala b/scalalib/src/ZincWorkerModule.scala
index 5ca824ce..97d84aaf 100644
--- a/scalalib/src/ZincWorkerModule.scala
+++ b/scalalib/src/ZincWorkerModule.scala
@@ -4,9 +4,12 @@ import coursier.Cache
 import coursier.maven.MavenRepository
 import mill.Agg
 import mill.T
+import mill.api.KeyedLockedCache
 import mill.define.{Discover, Worker}
 import mill.scalalib.Lib.resolveDependencies
-import mill.util.Loose
+import mill.scalalib.api.Util.isDotty
+import mill.scalalib.api.ZincWorkerApi
+import mill.api.Loose
 import mill.util.JsonFormatters._
 
 object ZincWorkerModule extends mill.define.ExternalModule with ZincWorkerModule{
@@ -40,11 +43,61 @@ trait ZincWorkerModule extends mill.Module{
       getClass.getClassLoader
     )
     val cls = cl.loadClass("mill.scalalib.worker.ZincWorkerImpl")
-    val instance = cls.getConstructor(classOf[mill.api.Ctx], classOf[Array[String]])
-      .newInstance(T.ctx(), compilerInterfaceClasspath().map(_.path.toString).toArray[String])
+    val instance = cls.getConstructor(
+      classOf[
+        Either[
+          (ZincWorkerApi.Ctx, Array[os.Path], (String, String) => os.Path),
+          String => os.Path
+        ]
+      ],
+      classOf[(Agg[os.Path], String) => os.Path],
+      classOf[(Agg[os.Path], String) => os.Path],
+      classOf[KeyedLockedCache[_]]
+    )
+      .newInstance(
+        Left((
+          T.ctx(),
+          compilerInterfaceClasspath().map(_.path).toArray,
+          (x: String, y: String) => scalaCompilerBridgeSourceJar(x, y).asSuccess.get.value
+        )),
+        mill.scalalib.api.Util.grepJar(_, "scala-library", _, sources = false),
+        mill.scalalib.api.Util.grepJar(_, "scala-compiler", _, sources = false),
+        new KeyedLockedCache.RandomBoundedCache(1, 1)
+      )
     instance.asInstanceOf[mill.scalalib.api.ZincWorkerApi]
   }
 
+  private val Milestone213 = raw"""2.13.(\d+)-M(\d+)""".r
+  def scalaCompilerBridgeSourceJar(scalaVersion: String,
+                                   scalaOrganization: String) = {
+    val (scalaVersion0, scalaBinaryVersion0) = scalaVersion match {
+      case Milestone213(_, _) => ("2.13.0-M2", "2.13.0-M2")
+      case _ => (scalaVersion, mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion))
+    }
+
+    val (bridgeDep, bridgeName, bridgeVersion) =
+      if (isDotty(scalaVersion0)) {
+        val org = scalaOrganization
+        val name = "dotty-sbt-bridge"
+        val version = scalaVersion
+        (ivy"$org:$name:$version", name, version)
+      } else {
+        val org = "org.scala-sbt"
+        val name = "compiler-bridge"
+        val version = Versions.zinc
+        (ivy"$org::$name:$version", s"${name}_$scalaBinaryVersion0", version)
+      }
+
+    resolveDependencies(
+      repositories,
+      Lib.depToDependency(_, scalaVersion0, ""),
+      Seq(bridgeDep),
+      sources = true
+    ).map(deps =>
+      mill.scalalib.api.Util.grepJar(deps.map(_.path), bridgeName, bridgeVersion, sources = true)
+    )
+  }
+
   def compilerInterfaceClasspath = T{
     resolveDependencies(
       repositories,
diff --git a/scalalib/src/dependency/versions/VersionsFinder.scala b/scalalib/src/dependency/versions/VersionsFinder.scala
index a831ffc3..a9ecc763 100644
--- a/scalalib/src/dependency/versions/VersionsFinder.scala
+++ b/scalalib/src/dependency/versions/VersionsFinder.scala
@@ -5,7 +5,7 @@ import mill.eval.Evaluator
 import mill.scalalib.dependency.metadata.MetadataLoaderFactory
 import mill.scalalib.{Dep, JavaModule, Lib}
 import mill.api.Ctx.{Home, Log}
-import mill.util.{Loose, Strict}
+import mill.api.{Loose, Strict}
 
 private[dependency] object VersionsFinder {
 
diff --git a/scalalib/src/publish/Ivy.scala b/scalalib/src/publish/Ivy.scala
index 22e26ff6..e06efadd 100644
--- a/scalalib/src/publish/Ivy.scala
+++ b/scalalib/src/publish/Ivy.scala
@@ -1,6 +1,6 @@
 package mill.scalalib.publish
 
-import mill.util.Loose.Agg
+import mill.api.Loose.Agg
 
 import scala.xml.PrettyPrinter
 
diff --git a/scalalib/src/publish/Pom.scala b/scalalib/src/publish/Pom.scala
index 57a0e196..a7f1f6fc 100644
--- a/scalalib/src/publish/Pom.scala
+++ b/scalalib/src/publish/Pom.scala
@@ -1,6 +1,6 @@
 package mill.scalalib.publish
 
-import mill.util.Loose.Agg
+import mill.api.Loose.Agg
 
 import scala.xml.{Atom, Elem, NodeSeq, PrettyPrinter}
 
diff --git a/scalalib/test/src/ResolveDepsTests.scala b/scalalib/test/src/ResolveDepsTests.scala
index 78361625..ce905907 100644
--- a/scalalib/test/src/ResolveDepsTests.scala
+++ b/scalalib/test/src/ResolveDepsTests.scala
@@ -4,7 +4,7 @@ import coursier.Cache
 import coursier.maven.MavenRepository
 import mill.api.Result.{Failure, Success}
 import mill.eval.{PathRef, Result}
-import mill.util.Loose.Agg
+import mill.api.Loose.Agg
 import utest._
 
 object ResolveDepsTests extends TestSuite {
diff --git a/scalalib/worker/src/ZincWorkerImpl.scala b/scalalib/worker/src/ZincWorkerImpl.scala
index 705d4682..c37ef162 100644
--- a/scalalib/worker/src/ZincWorkerImpl.scala
+++ b/scalalib/worker/src/ZincWorkerImpl.scala
@@ -4,13 +4,13 @@ import java.io.File
 import java.util.Optional
 
 import mill.api.Loose.Agg
-import mill.api.PathRef
+import mill.api.{KeyedLockedCache, PathRef}
 import xsbti.compile.{CompilerCache => _, FileAnalysisStore => _, ScalaInstance => _, _}
-import mill.scalalib.api.Util.{isDotty, grepJar, scalaBinaryVersion}
+import mill.scalalib.api.Util.{grepJar, isDotty, scalaBinaryVersion}
 import sbt.internal.inc._
 import sbt.internal.util.{ConsoleOut, MainAppender}
 import sbt.util.LogExchange
-import mill.scalalib.api.CompilationResult
+import mill.scalalib.api.{CompilationResult, ZincWorkerApi}
 
 case class MockedLookup(am: File => Optional[CompileAnalysis]) extends PerClasspathEntryLookup {
   override def analysis(classpathEntry: File): Optional[CompileAnalysis] = am(classpathEntry)
@@ -19,10 +19,16 @@ case class MockedLookup(am: File => Optional[CompileAnalysis]) extends PerClassp
     Locate.definesClass(classpathEntry)
 }
 
-class ZincWorkerImpl(ctx0: mill.api.Ctx,
-                     compilerBridgeClasspath: Array[String]) extends mill.scalalib.api.ZincWorkerApi{
+class ZincWorkerImpl(compilerBridge: Either[
+                       (ZincWorkerApi.Ctx, Array[os.Path], (String, String) => os.Path),
+                       String => os.Path
+                     ],
+                     libraryJarNameGrep: (Agg[os.Path], String) => os.Path,
+                     compilerJarNameGrep: (Agg[os.Path], String) => os.Path,
+                     compilerCache: KeyedLockedCache[Compilers])
+  extends ZincWorkerApi{
   private val ic = new sbt.internal.inc.IncrementalCompilerImpl()
 
-  val javaOnlyCompilers = {
+  lazy val javaOnlyCompilers = {
     // Keep the classpath as written by the user
     val classpathOptions = ClasspathOptions.of(false, false, false, false, false)
@@ -42,68 +48,68 @@ class ZincWorkerImpl(ctx0: mill.api.Ctx,
     )
   }
 
-  @volatile var mixedCompilersCache = Option.empty[(Long, Compilers)]
-
   def docJar(scalaVersion: String,
-             compilerBridgeSources: os.Path,
+             scalaOrganization: String,
             compilerClasspath: Agg[os.Path],
             scalacPluginClasspath: Agg[os.Path],
             args: Seq[String])
-            (implicit ctx: mill.api.Ctx): Boolean = {
-    val compilers: Compilers = prepareCompilers(
+            (implicit ctx: ZincWorkerApi.Ctx): Boolean = {
+    withCompilers(
       scalaVersion,
-      compilerBridgeSources,
+      scalaOrganization,
      compilerClasspath,
-      scalacPluginClasspath
-    )
-    val scaladocClass = compilers.scalac().scalaInstance().loader().loadClass("scala.tools.nsc.ScalaDoc")
-    val scaladocMethod = scaladocClass.getMethod("process", classOf[Array[String]])
-    scaladocMethod.invoke(scaladocClass.newInstance(), args.toArray).asInstanceOf[Boolean]
+      scalacPluginClasspath,
+    ) { compilers: Compilers =>
+      val scaladocClass = compilers.scalac().scalaInstance().loader().loadClass("scala.tools.nsc.ScalaDoc")
+      val scaladocMethod = scaladocClass.getMethod("process", classOf[Array[String]])
+      scaladocMethod.invoke(scaladocClass.newInstance(), args.toArray).asInstanceOf[Boolean]
+    }
   }
 
   /** Compile the bridge if it doesn't exist yet and return the output directory.
-   * TODO: Proper invalidation, see #389
-   */
-  def compileZincBridgeIfNeeded(scalaVersion: String,
-                                sourcesJar: os.Path,
-                                compilerJars: Array[File]): os.Path = {
-    val workingDir = ctx0.dest / scalaVersion
-    val compiledDest = workingDir / 'compiled
-    if (!os.exists(workingDir)) {
-
-      ctx0.log.info("Compiling compiler interface...")
-
-      os.makeDir.all(workingDir)
-      os.makeDir.all(compiledDest)
-
-      val sourceFolder = mill.api.IO.unpackZip(sourcesJar)(workingDir)
-      val classloader = mill.api.ClassLoader.create(compilerJars.map(_.toURI.toURL), null)(ctx0)
-      val compilerMain = classloader.loadClass(
-        if (isDotty(scalaVersion))
-          "dotty.tools.dotc.Main"
-        else
-          "scala.tools.nsc.Main"
-      )
-      val argsArray = Array[String](
-        "-d", compiledDest.toString,
-        "-classpath", (compilerJars ++ compilerBridgeClasspath).mkString(File.pathSeparator)
-      ) ++ os.walk(sourceFolder.path).filter(_.ext == "scala").map(_.toString)
-
-      compilerMain.getMethod("process", classOf[Array[String]])
-        .invoke(null, argsArray)
+    * TODO: Proper invalidation, see #389
+    */
+  def compileZincBridgeIfNeeded(scalaVersion: String, scalaOrganization: String, compilerJars: Array[File]): os.Path = {
+    compilerBridge match{
+      case Right(compiled) => compiled(scalaVersion)
+      case Left((ctx0, compilerBridgeClasspath, srcJars)) =>
+        val workingDir = ctx0.dest / scalaVersion
+        val compiledDest = workingDir / 'compiled
+        if (!os.exists(workingDir)) {
+          ctx0.log.info("Compiling compiler interface...")
+
+          os.makeDir.all(workingDir)
+          os.makeDir.all(compiledDest)
+
+          val sourceFolder = mill.api.IO.unpackZip(srcJars(scalaVersion, scalaOrganization))(workingDir)
+          val classloader = mill.api.ClassLoader.create(compilerJars.map(_.toURI.toURL), null)(ctx0)
+          val compilerMain = classloader.loadClass(
+            if (isDotty(scalaVersion)) "dotty.tools.dotc.Main"
+            else "scala.tools.nsc.Main"
+          )
+          val argsArray = Array[String](
+            "-d", compiledDest.toString,
+            "-classpath", (compilerJars ++ compilerBridgeClasspath).mkString(File.pathSeparator)
+          ) ++ os.walk(sourceFolder.path).filter(_.ext == "scala").map(_.toString)
+
+          compilerMain.getMethod("process", classOf[Array[String]])
+            .invoke(null, argsArray)
+        }
+        compiledDest
     }
-    compiledDest
+
   }
 
-  def discoverMainClasses(compilationResult: CompilationResult)(implicit ctx: mill.api.Ctx): Seq[String] = {
+  def discoverMainClasses(compilationResult: CompilationResult)
+                         (implicit ctx: ZincWorkerApi.Ctx): Seq[String] = {
     def toScala[A](o: Optional[A]): Option[A] = if (o.isPresent) Some(o.get) else None
 
     toScala(FileAnalysisStore.binary(compilationResult.analysisFile.toIO).get())
       .map(_.getAnalysis)
       .flatMap{
         case analysis: Analysis =>
-          Some(analysis.infos.allInfos.values.map(_.getMainClasses).flatten.toSeq.sorted)
+          Some(analysis.infos.allInfos.values.flatMap(_.getMainClasses).toSeq.sorted)
         case _ => None
       }
@@ -114,7 +120,7 @@ class ZincWorkerImpl(ctx0: mill.api.Ctx,
                   sources: Agg[os.Path],
                   compileClasspath: Agg[os.Path],
                   javacOptions: Seq[String])
-                 (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = {
+                 (implicit ctx: ZincWorkerApi.Ctx): mill.api.Result[CompilationResult] = {
     compileInternal(
       upstreamCompileOutput,
       sources,
@@ -130,73 +136,70 @@ class ZincWorkerImpl(ctx0: mill.api.Ctx,
                    compileClasspath: Agg[os.Path],
                    javacOptions: Seq[String],
                    scalaVersion: String,
+                   scalaOrganization: String,
                    scalacOptions: Seq[String],
-                   compilerBridgeSources: os.Path,
                    compilerClasspath: Agg[os.Path],
                    scalacPluginClasspath: Agg[os.Path])
-                  (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = {
-    val compilers: Compilers = prepareCompilers(
+                  (implicit ctx: ZincWorkerApi.Ctx): mill.api.Result[CompilationResult] = {
+    withCompilers(
      scalaVersion,
-      compilerBridgeSources,
+      scalaOrganization,
      compilerClasspath,
-      scalacPluginClasspath
-    )
-
-    compileInternal(
-      upstreamCompileOutput,
-      sources,
-      compileClasspath,
-      javacOptions,
-      scalacOptions = scalacPluginClasspath.map(jar => s"-Xplugin:${jar}").toSeq ++ scalacOptions,
-      compilers
-    )
+      scalacPluginClasspath,
+    ) {compilers: Compilers =>
+      compileInternal(
+        upstreamCompileOutput,
+        sources,
+        compileClasspath,
+        javacOptions,
+        scalacOptions = scalacPluginClasspath.map(jar => s"-Xplugin:$jar").toSeq ++ scalacOptions,
+        compilers
+      )
+    }
   }
 
-  private def prepareCompilers(scalaVersion: String,
-                               compilerBridgeSources: os.Path,
+  private def withCompilers[T](scalaVersion: String,
+                               scalaOrganization: String,
                                compilerClasspath: Agg[os.Path],
                                scalacPluginClasspath: Agg[os.Path])
-                              (implicit ctx: mill.api.Ctx)= {
+                              (f: Compilers => T)
+                              (implicit ctx: ZincWorkerApi.Ctx)= {
     val combinedCompilerClasspath = compilerClasspath ++ scalacPluginClasspath
     val combinedCompilerJars = combinedCompilerClasspath.toArray.map(_.toIO)
 
-    val compilerBridge = compileZincBridgeIfNeeded(
+    val compiledCompilerBridge = compileZincBridgeIfNeeded(
       scalaVersion,
-      compilerBridgeSources,
+      scalaOrganization,
       compilerClasspath.toArray.map(_.toIO)
     )
-    val compilerBridgeSig = os.mtime(compilerBridge)
+
+    val compilerBridgeSig = os.mtime(compiledCompilerBridge)
 
     val compilersSig = compilerBridgeSig + combinedCompilerClasspath.map(p => p.toString().hashCode + os.mtime(p)).sum
-    val compilers = mixedCompilersCache match {
-      case Some((k, v)) if k == compilersSig => v
-      case _ =>
-        val compilerName =
-          if (isDotty(scalaVersion))
-            s"dotty-compiler_${scalaBinaryVersion(scalaVersion)}"
-          else
-            "scala-compiler"
-        val scalaInstance = new ScalaInstance(
-          version = scalaVersion,
-          loader = mill.api.ClassLoader.create(combinedCompilerJars.map(_.toURI.toURL), null),
-          libraryJar = grepJar(compilerClasspath, "scala-library", scalaVersion).toIO,
-          compilerJar = grepJar(compilerClasspath, compilerName, scalaVersion).toIO,
-          allJars = combinedCompilerJars,
-          explicitActual = None
-        )
-        val compilers = ic.compilers(
-          scalaInstance,
-          ClasspathOptionsUtil.boot,
-          None,
-          ZincUtil.scalaCompiler(scalaInstance, compilerBridge.toIO)
-        )
-        mixedCompilersCache = Some((compilersSig, compilers))
-        compilers
-    }
-    compilers
+    compilerCache.withCachedValue(compilersSig){
+      val compilerJar =
+        if (isDotty(scalaVersion))
+          grepJar(compilerClasspath, s"dotty-compiler_${scalaBinaryVersion(scalaVersion)}", scalaVersion)
+        else
+          compilerJarNameGrep(compilerClasspath, scalaVersion)
+      val scalaInstance = new ScalaInstance(
+        version = scalaVersion,
+        loader = mill.api.ClassLoader.create(combinedCompilerJars.map(_.toURI.toURL), null),
+        libraryJar = libraryJarNameGrep(compilerClasspath, scalaVersion).toIO,
+        compilerJar = compilerJar.toIO,
+        allJars = combinedCompilerJars,
+        explicitActual = None
+      )
+      ic.compilers(
+        scalaInstance,
+        ClasspathOptionsUtil.boot,
+        None,
+        ZincUtil.scalaCompiler(scalaInstance, compiledCompilerBridge.toIO)
+      )
+    }(f)
   }
 
   private def compileInternal(upstreamCompileOutput: Seq[CompilationResult],
@@ -205,7 +208,7 @@ class ZincWorkerImpl(ctx0: mill.api.Ctx,
                               javacOptions: Seq[String],
                               scalacOptions: Seq[String],
                               compilers: Compilers)
-                             (implicit ctx: mill.api.Ctx): mill.api.Result[CompilationResult] = {
+                             (implicit ctx: ZincWorkerApi.Ctx): mill.api.Result[CompilationResult] = {
     os.makeDir.all(ctx.dest)
 
     val logger = {
diff --git a/scalanativelib/src/ScalaNativeModule.scala b/scalanativelib/src/ScalaNativeModule.scala
index a7a2b96b..289ba759 100644
--- a/scalanativelib/src/ScalaNativeModule.scala
+++ b/scalanativelib/src/ScalaNativeModule.scala
@@ -9,7 +9,7 @@ import mill.define.{Target, Task}
 import mill.api.Result
 import mill.modules.Jvm
 import mill.scalalib.{Dep, DepSyntax, Lib, SbtModule, ScalaModule, TestModule, TestRunner}
-import mill.util.Loose.Agg
+import mill.api.Loose.Agg
 import sbt.testing.{AnnotatedFingerprint, SubclassFingerprint}
 import sbt.testing.Fingerprint
 import upickle.default.{ReadWriter => RW, macroRW}
-- 
cgit v1.2.3