From b9b2806c1faf208d96cfe9c610e5461ec0e74471 Mon Sep 17 00:00:00 2001 From: Ammonite Travis Bot Date: Tue, 24 Sep 2019 12:25:17 +0000 Subject: first commit --- VisualizeCompile.svg | 361 ++++++++++++++++++ VisualizeCore.svg | 805 +++++++++++++++++++++++++++++++++++++++ VisualizePlan.svg | 355 +++++++++++++++++ example-1.zip | Bin 0 -> 1039 bytes example-2.zip | Bin 0 -> 1219 bytes favicon.ico | Bin 0 -> 206 bytes index.html | 628 ++++++++++++++++++++++++++++++ logo-white.svg | 1 + page/common-project-layouts.html | 355 +++++++++++++++++ page/configuring-mill.html | 447 ++++++++++++++++++++++ page/contrib-modules.html | 649 +++++++++++++++++++++++++++++++ page/cross-builds.html | 220 +++++++++++ page/extending-mill.html | 210 ++++++++++ page/mill-internals.html | 264 +++++++++++++ page/modules.html | 221 +++++++++++ page/tasks.html | 324 ++++++++++++++++ page/thirdparty-modules.html | 351 +++++++++++++++++ 17 files changed, 5191 insertions(+) create mode 100644 VisualizeCompile.svg create mode 100644 VisualizeCore.svg create mode 100644 VisualizePlan.svg create mode 100644 example-1.zip create mode 100644 example-2.zip create mode 100644 favicon.ico create mode 100644 index.html create mode 100644 logo-white.svg create mode 100644 page/common-project-layouts.html create mode 100644 page/configuring-mill.html create mode 100644 page/contrib-modules.html create mode 100644 page/cross-builds.html create mode 100644 page/extending-mill.html create mode 100644 page/mill-internals.html create mode 100644 page/modules.html create mode 100644 page/tasks.html create mode 100644 page/thirdparty-modules.html diff --git a/VisualizeCompile.svg b/VisualizeCompile.svg new file mode 100644 index 00000000..a74cdb42 --- /dev/null +++ b/VisualizeCompile.svg @@ -0,0 +1,361 @@ + + +example1 + + + +moduledefs.compile + +moduledefs.compile + + + +main.client.compile + +main.client.compile + + + +testng.compile + +testng.compile + + + +core.compile + +core.compile + + + 
+core.compile->moduledefs.compile + + + + + +main.compile + +main.compile + + + +main.compile->main.client.compile + + + + + +main.compile->core.compile + + + + + +main.test.compile + +main.test.compile + + + +main.test.compile->main.compile + + + + + +main.client.test.compile + +main.client.test.compile + + + +main.client.test.compile->main.client.compile + + + + + +scalalib.compile + +scalalib.compile + + + +scalalib.compile->main.compile + + + + + +main.graphviz.compile + +main.graphviz.compile + + + +main.graphviz.compile->scalalib.compile + + + + + +main.graphviz.test.compile + +main.graphviz.test.compile + + + +main.graphviz.test.compile->main.test.compile + + + + + +main.graphviz.test.compile->main.graphviz.compile + + + + + +scalalib.test.compile + +scalalib.test.compile + + + +scalalib.test.compile->main.test.compile + + + + + +scalalib.test.compile->scalalib.compile + + + + + +scalalib.worker.compile + +scalalib.worker.compile + + + +scalalib.worker.compile->scalalib.compile + + + + + +scalalib.worker.test.compile + +scalalib.worker.test.compile + + + +scalalib.worker.test.compile->main.test.compile + + + + + +scalalib.worker.test.compile->scalalib.worker.compile + + + + + +core.test.compile + +core.test.compile + + + +core.test.compile->main.test.compile + + + + + +scalajslib.compile + +scalajslib.compile + + + +scalajslib.compile->scalalib.compile + + + + + +dev.compile + +dev.compile + + + +dev.compile->scalajslib.compile + + + + + +dev.test.compile + +dev.test.compile + + + +dev.test.compile->main.test.compile + + + + + +dev.test.compile->dev.compile + + + + + +scalajslib.test.compile + +scalajslib.test.compile + + + +scalajslib.test.compile->main.test.compile + + + + + +scalajslib.test.compile->scalajslib.compile + + + + + +scalajslib.worker[0.6].compile + +scalajslib.worker[0.6].compile + + + +scalajslib.worker[0.6].compile->scalajslib.compile + + + + + +scalajslib.worker[0.6].test.compile + +scalajslib.worker[0.6].test.compile + + + 
+scalajslib.worker[0.6].test.compile->main.test.compile + + + + + +scalajslib.worker[0.6].test.compile->scalajslib.worker[0.6].compile + + + + + +scalajslib.worker[1.0].compile + +scalajslib.worker[1.0].compile + + + +scalajslib.worker[1.0].compile->scalajslib.compile + + + + + +scalajslib.worker[1.0].test.compile + +scalajslib.worker[1.0].test.compile + + + +scalajslib.worker[1.0].test.compile->main.test.compile + + + + + +scalajslib.worker[1.0].test.compile->scalajslib.worker[1.0].compile + + + + + +twirllib.compile + +twirllib.compile + + + +twirllib.compile->scalalib.compile + + + + + +twirllib.test.compile + +twirllib.test.compile + + + +twirllib.test.compile->main.test.compile + + + + + +twirllib.test.compile->twirllib.compile + + + + + +integration.compile + +integration.compile + + + +integration.compile->scalajslib.compile + + + + + +integration.test.compile + +integration.test.compile + + + +integration.test.compile->main.test.compile + + + + + +integration.test.compile->integration.compile + + + + + diff --git a/VisualizeCore.svg b/VisualizeCore.svg new file mode 100644 index 00000000..272fe080 --- /dev/null +++ b/VisualizeCore.svg @@ -0,0 +1,805 @@ + + +example1 + + + +core.scalaVersion + +core.scalaVersion + + + +core.sources + +core.sources + + + +core.generatedSources + +core.generatedSources + + + +core.platformSuffix + +core.platformSuffix + + + +core.transitiveLocalClasspath + +core.transitiveLocalClasspath + + + +core.resources + +core.resources + + + +core.unmanagedClasspath + +core.unmanagedClasspath + + + +core.ivyDeps + +core.ivyDeps + + + +core.scalacOptions + +core.scalacOptions + + + +core.scalacPluginIvyDeps + +core.scalacPluginIvyDeps + + + +core.javacOptions + +core.javacOptions + + + +core.upstreamCompileOutput + +core.upstreamCompileOutput + + + +core.mainClass + +core.mainClass + + + +core.artifactName + +core.artifactName + + + +core.crossFullScalaVersion + +core.crossFullScalaVersion + + + +core.publishVersion + 
+core.publishVersion + + + +core.runIvyDeps + +core.runIvyDeps + + + +core.forkArgs + +core.forkArgs + + + +core.forkEnv + +core.forkEnv + + + +core.testArgs + +core.testArgs + + + +core.allSources + +core.allSources + + + +core.allSources->core.sources + + + + + +core.allSources->core.generatedSources + + + + + +core.allSourceFiles + +core.allSourceFiles + + + +core.allSourceFiles->core.allSources + + + + + +core.scalaCompilerBridgeSources + +core.scalaCompilerBridgeSources + + + +core.scalaCompilerBridgeSources->core.scalaVersion + + + + + +core.scalaCompilerBridgeSources->core.platformSuffix + + + + + +core.compileIvyDeps + +core.compileIvyDeps + + + +core.compileIvyDeps->core.scalaVersion + + + + + +core.scalaLibraryIvyDeps + +core.scalaLibraryIvyDeps + + + +core.scalaLibraryIvyDeps->core.scalaVersion + + + + + +core.transitiveIvyDeps + +core.transitiveIvyDeps + + + +core.transitiveIvyDeps->core.ivyDeps + + + + + +core.compileClasspath + +core.compileClasspath + + + +core.compileClasspath->core.platformSuffix + + + + + +core.compileClasspath->core.transitiveLocalClasspath + + + + + +core.compileClasspath->core.resources + + + + + +core.compileClasspath->core.unmanagedClasspath + + + + + +core.compileClasspath->core.compileIvyDeps + + + + + +core.compileClasspath->core.scalaLibraryIvyDeps + + + + + +core.compileClasspath->core.transitiveIvyDeps + + + + + +core.scalaCompilerClasspath + +core.scalaCompilerClasspath + + + +core.scalaCompilerClasspath->core.scalaVersion + + + + + +core.scalaCompilerClasspath->core.platformSuffix + + + + + +core.scalacPluginClasspath + +core.scalacPluginClasspath + + + +core.scalacPluginClasspath->core.scalaVersion + + + + + +core.scalacPluginClasspath->core.platformSuffix + + + + + +core.scalacPluginClasspath->core.scalacPluginIvyDeps + + + + + +core.compile + +core.compile + + + +core.compile->core.scalacOptions + + + + + +core.compile->core.javacOptions + + + + + +core.compile->core.upstreamCompileOutput + + + + + 
+core.compile->core.allSourceFiles + + + + + +core.compile->core.scalaCompilerBridgeSources + + + + + +core.compile->core.compileClasspath + + + + + +core.compile->core.scalaCompilerClasspath + + + + + +core.compile->core.scalacPluginClasspath + + + + + +core.localClasspath + +core.localClasspath + + + +core.localClasspath->core.compile + + + + + +core.jar + +core.jar + + + +core.jar->core.mainClass + + + + + +core.jar->core.localClasspath + + + + + +core.pomSettings + +core.pomSettings + + + +core.pomSettings->core.artifactName + + + + + +core.artifactScalaVersion + +core.artifactScalaVersion + + + +core.artifactScalaVersion->core.scalaVersion + + + + + +core.artifactScalaVersion->core.crossFullScalaVersion + + + + + +core.artifactSuffix + +core.artifactSuffix + + + +core.artifactSuffix->core.artifactScalaVersion + + + + + +core.artifactId + +core.artifactId + + + +core.artifactId->core.artifactName + + + + + +core.artifactId->core.artifactSuffix + + + + + +core.artifactMetadata + +core.artifactMetadata + + + +core.artifactMetadata->core.publishVersion + + + + + +core.artifactMetadata->core.pomSettings + + + + + +core.artifactMetadata->core.artifactId + + + + + +core.pom + +core.pom + + + +core.pom->core.ivyDeps + + + + + +core.pom->core.artifactMetadata + + + + + +core.ivy + +core.ivy + + + +core.ivy->core.ivyDeps + + + + + +core.ivy->core.artifactMetadata + + + + + +core.upstreamAssemblyClasspath + +core.upstreamAssemblyClasspath + + + +core.upstreamAssemblyClasspath->core.platformSuffix + + + + + +core.upstreamAssemblyClasspath->core.transitiveLocalClasspath + + + + + +core.upstreamAssemblyClasspath->core.unmanagedClasspath + + + + + +core.upstreamAssemblyClasspath->core.runIvyDeps + + + + + +core.upstreamAssemblyClasspath->core.scalaLibraryIvyDeps + + + + + +core.upstreamAssemblyClasspath->core.transitiveIvyDeps + + + + + +core.runClasspath + +core.runClasspath + + + +core.runClasspath->core.localClasspath + + + + + 
+core.runClasspath->core.upstreamAssemblyClasspath + + + + + +core.finalMainClassOpt + +core.finalMainClassOpt + + + +core.finalMainClassOpt->core.mainClass + + + + + +core.finalMainClassOpt->core.compile + + + + + +core.docJar + +core.docJar + + + +core.docJar->core.scalacOptions + + + + + +core.docJar->core.allSources + + + + + +core.docJar->core.compileClasspath + + + + + +core.docJar->core.scalaCompilerClasspath + + + + + +core.docJar->core.scalacPluginClasspath + + + + + +core.ammoniteReplClasspath + +core.ammoniteReplClasspath + + + +core.ammoniteReplClasspath->core.runIvyDeps + + + + + +core.ammoniteReplClasspath->core.localClasspath + + + + + +core.publishSelfDependency + +core.publishSelfDependency + + + +core.publishSelfDependency->core.publishVersion + + + + + +core.publishSelfDependency->core.pomSettings + + + + + +core.publishSelfDependency->core.artifactId + + + + + +core.sourceJar + +core.sourceJar + + + +core.sourceJar->core.resources + + + + + +core.sourceJar->core.allSources + + + + + +core.publishArtifacts + +core.publishArtifacts + + + +core.publishArtifacts->core.jar + + + + + +core.publishArtifacts->core.pom + + + + + +core.publishArtifacts->core.docJar + + + + + +core.publishArtifacts->core.sourceJar + + + + + +core.finalMainClass + +core.finalMainClass + + + +core.finalMainClass->core.finalMainClassOpt + + + + + +core.prependShellScript + +core.prependShellScript + + + +core.prependShellScript->core.mainClass + + + + + +core.prependShellScript->core.forkArgs + + + + + +core.upstreamAssembly + +core.upstreamAssembly + + + +core.upstreamAssembly->core.mainClass + + + + + +core.upstreamAssembly->core.upstreamAssemblyClasspath + + + + + +core.assembly + +core.assembly + + + +core.assembly->core.localClasspath + + + + + +core.assembly->core.prependShellScript + + + + + +core.assembly->core.upstreamAssembly + + + + + +core.launcher + +core.launcher + + + +core.launcher->core.forkArgs + + + + + +core.launcher->core.runClasspath + + + + + 
+core.launcher->core.finalMainClass + + + + + diff --git a/VisualizePlan.svg b/VisualizePlan.svg new file mode 100644 index 00000000..9f13384f --- /dev/null +++ b/VisualizePlan.svg @@ -0,0 +1,355 @@ + + +example1 + + + +mill.scalalib.ZincWorkerModule.classpath + +mill.scalalib.ZincWorkerModule.classpath + + + +mill.scalalib.ZincWorkerModule.compilerInterfaceClasspath + +mill.scalalib.ZincWorkerModule.compilerInterfaceClasspath + + + +mill.scalalib.ZincWorkerModule.worker + +mill.scalalib.ZincWorkerModule.worker + + + +mill.scalalib.ZincWorkerModule.worker->mill.scalalib.ZincWorkerModule.classpath + + + + + +mill.scalalib.ZincWorkerModule.worker->mill.scalalib.ZincWorkerModule.compilerInterfaceClasspath + + + + + +moduledefs.scalaVersion + +moduledefs.scalaVersion + + + +moduledefs.sources + +moduledefs.sources + + + +moduledefs.generatedSources + +moduledefs.generatedSources + + + +moduledefs.allSources + +moduledefs.allSources + + + +moduledefs.allSources->moduledefs.sources + + + + + +moduledefs.allSources->moduledefs.generatedSources + + + + + +moduledefs.allSourceFiles + +moduledefs.allSourceFiles + + + +moduledefs.allSourceFiles->moduledefs.allSources + + + + + +moduledefs.scalaOrganization + +moduledefs.scalaOrganization + + + +moduledefs.scalaOrganization->moduledefs.scalaVersion + + + + + +moduledefs.platformSuffix + +moduledefs.platformSuffix + + + +moduledefs.scalaCompilerBridgeSources + +moduledefs.scalaCompilerBridgeSources + + + +moduledefs.scalaCompilerBridgeSources->moduledefs.scalaOrganization + + + + + +moduledefs.scalaCompilerBridgeSources->moduledefs.platformSuffix + + + + + +moduledefs.transitiveLocalClasspath + +moduledefs.transitiveLocalClasspath + + + +moduledefs.resources + +moduledefs.resources + + + +moduledefs.unmanagedClasspath + +moduledefs.unmanagedClasspath + + + +moduledefs.compileIvyDeps + +moduledefs.compileIvyDeps + + + +moduledefs.scalaLibraryIvyDeps + +moduledefs.scalaLibraryIvyDeps + + + 
+moduledefs.scalaLibraryIvyDeps->moduledefs.scalaOrganization + + + + + +moduledefs.ivyDeps + +moduledefs.ivyDeps + + + +moduledefs.ivyDeps->moduledefs.scalaVersion + + + + + +moduledefs.transitiveIvyDeps + +moduledefs.transitiveIvyDeps + + + +moduledefs.transitiveIvyDeps->moduledefs.ivyDeps + + + + + +moduledefs.compileClasspath + +moduledefs.compileClasspath + + + +moduledefs.compileClasspath->moduledefs.platformSuffix + + + + + +moduledefs.compileClasspath->moduledefs.transitiveLocalClasspath + + + + + +moduledefs.compileClasspath->moduledefs.resources + + + + + +moduledefs.compileClasspath->moduledefs.unmanagedClasspath + + + + + +moduledefs.compileClasspath->moduledefs.compileIvyDeps + + + + + +moduledefs.compileClasspath->moduledefs.scalaLibraryIvyDeps + + + + + +moduledefs.compileClasspath->moduledefs.transitiveIvyDeps + + + + + +moduledefs.scalaCompilerClasspath + +moduledefs.scalaCompilerClasspath + + + +moduledefs.scalaCompilerClasspath->moduledefs.scalaOrganization + + + + + +moduledefs.scalaCompilerClasspath->moduledefs.platformSuffix + + + + + +moduledefs.scalacOptions + +moduledefs.scalacOptions + + + +moduledefs.scalacPluginIvyDeps + +moduledefs.scalacPluginIvyDeps + + + +moduledefs.scalacPluginClasspath + +moduledefs.scalacPluginClasspath + + + +moduledefs.scalacPluginClasspath->moduledefs.scalaOrganization + + + + + +moduledefs.scalacPluginClasspath->moduledefs.platformSuffix + + + + + +moduledefs.scalacPluginClasspath->moduledefs.scalacPluginIvyDeps + + + + + +moduledefs.javacOptions + +moduledefs.javacOptions + + + +moduledefs.upstreamCompileOutput + +moduledefs.upstreamCompileOutput + + + +moduledefs.compile + +moduledefs.compile + + + +moduledefs.compile->mill.scalalib.ZincWorkerModule.worker + + + + + +moduledefs.compile->moduledefs.allSourceFiles + + + + + +moduledefs.compile->moduledefs.scalaCompilerBridgeSources + + + + + +moduledefs.compile->moduledefs.compileClasspath + + + + + +moduledefs.compile->moduledefs.scalaCompilerClasspath + + + 
+ + +moduledefs.compile->moduledefs.scalacOptions + + + + + +moduledefs.compile->moduledefs.scalacPluginClasspath + + + + + +moduledefs.compile->moduledefs.javacOptions + + + + + +moduledefs.compile->moduledefs.upstreamCompileOutput + + + + + diff --git a/example-1.zip b/example-1.zip new file mode 100644 index 00000000..4048adb7 Binary files /dev/null and b/example-1.zip differ diff --git a/example-2.zip b/example-2.zip new file mode 100644 index 00000000..7a640469 Binary files /dev/null and b/example-2.zip differ diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 00000000..82430a78 Binary files /dev/null and b/favicon.ico differ diff --git a/index.html b/index.html new file mode 100644 index 00000000..84e98fad --- /dev/null +++ b/index.html @@ -0,0 +1,628 @@ +Intro to Mill

Intro to Mill

Configuring Mill

Mill is your shiny new Java/Scala build tool! Scared of SBT? Melancholy over Maven? Grumbling about Gradle? Baffled by Bazel? Give Mill a try!

+

Mill aims for simplicity by re-using concepts you are already familiar with, borrowing ideas from modern tools like Bazel, to let you build your projects in a way that's simple, fast, and predictable.

+

Mill has built in support for the Scala programming language, and can serve as a replacement for SBT, but can also be extended to support any other language or platform via modules (written in Java or Scala) or through external subprocesses.

Installation

OS X

+

Installation via homebrew:

+
brew install mill
+

Arch Linux

+

Arch Linux has an AUR package for mill:

+
pacaur -S mill
+

FreeBSD

+

Installation via pkg(8):

+
pkg install mill
+

Windows

+

To get started, download Mill from: https://github.com/lihaoyi/mill/releases/download/0.5.1/0.5.1-assembly, and save it as mill.bat.

+

If you're using Scoop you can install Mill via

+
scoop install mill
+
+

Mill also works in an sh environment on Windows (e.g., MSYS2, Cygwin, Git-Bash, WSL); to get started, follow the instructions in the manual section below. Note that:

+ +
sed -i '0,/-cp "\$0"/{s/-cp "\$0"/-cp `cygpath -w "\$0"`/}; 0,/-cp "\$0"/{s/-cp "\$0"/-cp `cygpath -w "\$0"`/}' /usr/local/bin/mill
+

Docker

+

You can download and run a Docker image containing OpenJDK, Scala and Mill using

+
docker pull nightscape/scala-mill
+docker run -it nightscape/scala-mill
+

Manual

+

To get started, download Mill and install it into your system via the following curl/chmod command:

+
sudo curl -L https://github.com/lihaoyi/mill/releases/download/0.5.1/0.5.1 > /usr/local/bin/mill && sudo chmod +x /usr/local/bin/mill
+

Bootstrap Scripts (Linux/OS-X Only)

+

If you are using Mill in a codebase, you can commit the bootstrap launcher as a ./mill script in the project folder:

+
curl -L https://github.com/lihaoyi/mill/releases/download/0.5.1/0.5.1 > mill && chmod +x mill
+
+

Now, anyone who wants to work with the project can simply use the ./mill script directly:

+
./mill version
+./mill __.compile
+
+

The mill command will automatically use the version specified by the bootstrap script, even if you installed it via other means. The ./mill file has a version number embedded within it, which you can update simply by editing the script. Note this only works for versions 0.5.0 and above.

+

Bootstrap scripts are also useful for running Mill in CI, ensuring that your Jenkins/Travis/etc. box has the correct version of Mill present to build/compile/test your code.

Getting Started

+

The simplest Mill build for a Java project looks as follows:

+
// build.sc
+import mill._, scalalib._
+
+object foo extends JavaModule {
+
+}
+
+

The simplest Mill build for a Scala project looks as follows:

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+}
+
+

Both of these would build a project laid out as follows:

+
build.sc
+foo/
+    src/
+        FileA.java
+        FileB.scala
+    resources/
+        ...
+out/
+    foo/
+        ... 
+
+

You can download an example project with this layout here:

+ +

The source code for this module would live in the foo/src/ folder, matching the name you assigned to the module. Output for this module (compiled files, resolved dependency lists, ...) would live in out/foo/.

+

This can be run from the Bash shell via:

+
$ mill foo.compile                 # compile sources into classfiles
+
+$ mill foo.run                     # run the main method, if any
+
+$ mill foo.runBackground           # run the main method in the background
+
+$ mill foo.launcher                # prepares a foo/launcher/dest/run you can run later
+
+$ mill foo.jar                     # bundle the classfiles into a jar
+
+$ mill foo.assembly                # bundle classfiles and all dependencies into a jar
+
+$ mill -i foo.console              # start a Scala console within your project (in interactive mode: "-i")
+ 
+$ mill -i foo.repl                 # start an Ammonite REPL within your project (in interactive mode: "-i")
+
+

You can run mill resolve __ to see a full list of the different tasks that are available, mill resolve foo._ to see the tasks within foo, mill inspect +foo.compile to inspect a task's doc-comment documentation or what it depends on, or mill show foo.scalaVersion to show the output of any task.

+

The most common tasks that Mill can run are cached targets, such as compile, and un-cached commands such as foo.run. Targets do not re-evaluate unless one of their inputs changes, where-as commands re-run every time.

Output

+

Mill puts all its output in the top-level out/ folder. The above commands would end up in:

+
out/
+    foo/
+        compile/
+        run/
+        jar/
+        assembly/
+
+

Within the output folder for each task, there's a meta.json file containing the metadata returned by that task, and a dest/ folder containing any files that the task generates. For example, out/foo/compile/dest/ contains the compiled classfiles, while out/foo/assembly/dest/ contains the self-contained assembly with the project's classfiles jar-ed up with all its dependencies.

+

Given a task foo.bar, all its output and results can be found within its respective out/foo/bar/ folder.

Multiple Modules

Java Example

+
// build.sc
+import mill._, scalalib._
+
+object foo extends JavaModule
+object bar extends JavaModule {
+  def moduleDeps = Seq(foo)
+}
+

Scala Example

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+}
+object bar extends ScalaModule {
+  def moduleDeps = Seq(foo)
+  def scalaVersion = "2.12.4"
+}
+
+

You can define multiple modules the same way you define a single module, using def moduleDeps to define the relationship between them. The above builds expect the following project layout:

+
build.sc
+foo/
+    src/
+        Main.scala
+    resources/
+        ...
+bar/
+    src/
+        Main2.scala
+    resources/
+        ...
+out/
+    foo/
+        ... 
+    bar/
+        ... 
+
+

And can be built/run using:

+
$ mill foo.compile        
+$ mill bar.compile        
+
+$ mill foo.run            
+$ mill bar.run            
+
+$ mill foo.jar            
+$ mill bar.jar            
+
+$ mill foo.assembly        
+$ mill bar.assembly        
+
+

Mill's evaluator will ensure that the modules are compiled in the right order, and re-compiled as necessary when source code in each module changes.

+

Modules can also be nested:

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  object bar extends ScalaModule {
+    def moduleDeps = Seq(foo)
+    def scalaVersion = "2.12.4"
+  }
+}
+
+

Which would result in a similarly nested project layout:

+
build.sc
+foo/
+    src/
+        Main.scala
+    resources/
+        ...
+    bar/
+        src/
+            Main2.scala
+        resources/
+            ...
+out/
+    foo/
+        ...
+        bar/
+            ...
+
+

Where the nested modules can be run via:

+
$ mill foo.compile        
+$ mill foo.bar.compile        
+
+$ mill foo.run            
+$ mill foo.bar.run            
+
+$ mill foo.jar            
+$ mill foo.bar.jar            
+
+$ mill foo.assembly        
+$ mill foo.bar.assembly        
+

Watch and Re-evaluate

+

You can use the --watch flag to make Mill watch a task's inputs, re-evaluating the task as necessary when the inputs change:

+
$ mill --watch foo.compile 
+$ mill --watch foo.run 
+$ mill -w foo.compile 
+$ mill -w foo.run 
+
+

Mill's --watch flag watches both the files you are building using Mill, as well as Mill's own build.sc file and anything it imports, so any changes to your build.sc will automatically get picked up.

+

For long-running processes like web-servers, you can use .runBackground to make sure they re-compile and re-start when code changes, forcefully terminating the previous process even though it may be still alive:

+
$ mill -w foo.compile 
+$ mill -w foo.runBackground 
+

Command-line Tools

+

Mill comes built in with a small number of useful command-line utilities:

all

+
mill all foo.{compile,run}
+mill all "foo.{compile,run}"
+mill all foo.compile foo.run
+mill all _.compile # run compile for every top-level module
+mill all __.compile  # run compile for every module
+
+

all runs multiple tasks in a single command

resolve

+
$ mill resolve _
+main
+moduledefs
+core
+scalaworker
+scalalib
+scalajslib
+integration
+testRepos
+...
+
+$ mill resolve _.compile
+
+main.compile
+moduledefs.compile
+core.compile
+scalalib.worker.compile
+scalalib.compile
+scalajslib.compile
+integration.compile
+
+$ mill resolve core._
+
+core.test
+core.compile
+core.publishVersion
+core.runClasspath
+core.testArgs
+core.sources
+...
+
+

resolve lists the tasks that match a particular query, without running them. This is useful for "dry running" a mill all command to see what would be run before you run it, or to explore what modules or tasks are available from the command line using resolve _, resolve foo._, etc.

+
mill resolve foo.{compile,run}
+mill resolve "foo.{compile,run}"
+mill resolve foo.compile foo.run
+mill resolve _.compile          # list the compile tasks for every top-level module
+mill resolve __.compile         # list the compile tasks for every module
+mill resolve _                  # list every top level module or task
+mill resolve foo._              # list every task directly within the foo module
+mill resolve __                 # list every module or task recursively
+mill resolve foo.__             # list every task recursively within the foo module
+

inspect

+
$ mill inspect core.run
+
+core.run(ScalaModule.scala:211)
+Inputs:
+    core.mainClass
+    core.runClasspath
+    core.forkArgs
+    core.forkEnv
+
+

inspect is a more verbose version of resolve. In addition to printing out the name of one-or-more tasks, it also displays its source location and a list of input tasks. This is very useful for debugging and interactively exploring the structure of your build from the command line.

+

inspect also works with the same _/__ wildcard/query syntaxes that all/resolve do:

+
mill inspect foo.compile
+mill inspect foo.{compile,run}
+mill inspect "foo.{compile,run}"
+mill inspect foo.compile foo.run
+mill inspect _.compile
+mill inspect __.compile
+mill inspect _
+mill inspect foo._
+mill inspect __
+mill inspect foo._
+

show

+
$ mill show core.scalaVersion
+"2.12.4"
+
+

By default, Mill does not print out the metadata from evaluating a task. Most people would not be interested in e.g. viewing the metadata related to incremental compilation: they just want to compile their code! However, if you want to inspect the build to debug problems, you can make Mill show you the metadata output for a task using the show command:

+

All tasks return values that can be shown, not just configuration values. e.g. compile returns that path to the classes and analysisFile that are produced by the compilation:

+
$ mill show foo.compile
+{
+    "analysisFile": "/Users/lihaoyi/Dropbox/Github/test/out/foo/compile/dest/zinc",
+    "classes": {
+        "path": "/Users/lihaoyi/Dropbox/Github/test/out/foo/compile/dest/classes"
+    }
+}
+
+

show is generally useful as a debugging tool, to see what is going on in your build:

+
$ mill show foo.sources
+[
+    "/Users/lihaoyi/Dropbox/Github/test/foo/src"
+]
+
+$ mill show foo.compileDepClasspath
+[
+    ".../org/scala-lang/scala-compiler/2.12.4/scala-compiler-2.12.4.jar",
+    ".../org/scala-lang/scala-library/2.12.4/scala-library-2.12.4.jar",
+    ".../org/scala-lang/scala-reflect/2.12.4/scala-reflect-2.12.4.jar",
+    ".../org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar"
+]
+
+

show is also useful for interacting with Mill from external tools, since the JSON it outputs is structured and easily parsed & manipulated.

path

+
$ mill path core.assembly core.sources
+core.sources
+core.allSources
+core.allSourceFiles
+core.compile
+core.localClasspath
+core.assembly
+
+

mill path prints out a dependency chain between the first task and the second. It is very useful for exploring the build graph and trying to figure out how data gets from one task to another. If there are multiple possible dependency chains, one of them is picked arbitrarily.

plan

+
$ mill plan moduledefs.compileClasspath
+moduledefs.transitiveLocalClasspath
+moduledefs.resources
+moduledefs.unmanagedClasspath
+moduledefs.scalaVersion
+moduledefs.platformSuffix
+moduledefs.compileIvyDeps
+moduledefs.scalaLibraryIvyDeps
+moduledefs.ivyDeps
+moduledefs.transitiveIvyDeps
+moduledefs.compileClasspath
+
+

mill plan foo prints out what tasks would be evaluated, in what order, if you ran mill foo, but without actually running them. This is a useful tool for debugging your build: e.g. if you suspect a task foo is running things that it shouldn't be running, a quick mill plan will list out all the upstream tasks that foo needs to run, and you can then follow up with mill path on any individual upstream task to see exactly how foo depends on it.

visualize

+
$ mill show visualize core._
+[
+    ".../out/visualize/dest/out.txt",
+    ".../out/visualize/dest/out.dot",
+    ".../out/visualize/dest/out.json",
+    ".../out/visualize/dest/out.png",
+    ".../out/visualize/dest/out.svg"
+]
+
+

mill show visualize takes a subset of the Mill build graph (e.g. core._ is every task directly under the core module) and draws out their relationships in .svg and .png form for you to inspect. It also generates .txt, .dot and .json for easy processing by downstream tools.

+

The above command generates the following diagram:

+

VisualizeCore.svg

+

Another use case is to view the relationships between modules:

+
$ mill show visualize __.compile
+
+

This command diagrams the relationships between the compile tasks of each module, which illustrates which module depends on which other module's compilation output:

+

VisualizeCompile.svg

visualizePlan

+
$ mill show visualizePlan moduledefs.compile
+[
+    ".../out/visualizePlan/dest/out.txt",
+    ".../out/visualizePlan/dest/out.dot",
+    ".../out/visualizePlan/dest/out.json",
+    ".../out/visualizePlan/dest/out.png",
+    ".../out/visualizePlan/dest/out.svg"
+]
+
+

mill show visualizePlan is similar to mill show visualize except that it shows a graph of the entire build plan, including tasks not directly resolved by the query. Tasks directly resolved are shown with a solid border, and dependencies are shown with a dotted border.

+

The above command generates the following diagram:

+

VisualizePlan.svg

clean

+
$ mill clean
+
+

clean deletes all the cached outputs of previously executed tasks. It can apply to the entire project, entire modules, or specific tasks.

+
mill clean                     # clean all outputs
+mill clean foo                 # clean all outputs for module 'foo' (including nested modules)
+mill clean foo.compile         # only clean outputs for task 'compile' in module 'foo'
+mill clean foo.{compile,run}
+mill clean "foo.{compile,run}"
+mill clean foo.compile foo.run
+mill clean _.compile
+mill clean __.compile
+

Search for dependency updates

+
$ mill mill.scalalib.Dependency/updates
+
+

Mill can search for updated versions of your project's dependencies, if available from your project's configured repositories. Note that it uses heuristics based on common versioning schemes, so it may not work as expected for dependencies with particularly weird version numbers.

+

Current limitations: - Only works for JavaModules (including ScalaModules, CrossScalaModules, etc.) and Maven repositories. - Always applies to all modules in the build. - Doesn't apply to $ivy dependencies used in the build definition itself.

+
mill mill.scalalib.Dependency/updates
+mill mill.scalalib.Dependency/updates --allowPreRelease true # also show pre-release versions
+

IntelliJ Support

+

Mill supports IntelliJ by default. Use mill mill.scalalib.GenIdea/idea to generate an IntelliJ project config for your build.

+

This also configures IntelliJ to allow easy navigation & code-completion within your build file itself.

The Build Repl

+
$ mill -i
+Loading...
+@ foo
+res1: foo.type = ammonite.predef.build#foo:2
+Commands:
+    .runLocal(args: String*)()
+    .run(args: String*)()
+    .runMainLocal(mainClass: String, args: String*)()
+    .runMain(mainClass: String, args: String*)()
+    .console()()
+Targets:
+    .allSources()
+    .artifactId()
+    .artifactName()
+...
+
+@ foo.compile
+res3: mill.package.T[mill.scalalib.CompilationResult] = mill.scalalib.ScalaModule#compile:152
+Inputs:
+    foo.scalaVersion
+    foo.allSources
+    foo.compileDepClasspath
+...
+    
+@ foo.compile()
+res2: mill.scalalib.CompilationResult = CompilationResult(
+  root/'Users/'lihaoyi/'Dropbox/'Github/'test/'out/'foo/'compile/'dest/'zinc,
+  PathRef(root/'Users/'lihaoyi/'Dropbox/'Github/'test/'out/'foo/'compile/'dest/'classes, false)
+)
+
+

You can run mill -i to open a build REPL; this is a Scala console with your build.sc loaded, which lets you run tasks interactively. The task-running syntax is slightly different from the command-line, but more in-line with how you would depend on tasks from within your build file.

+

You can use this REPL to interactively explore your build to see what is available.

Deploying your code

+

The two most common things to do once your code is complete are to make an assembly (e.g. for deployment/installation) or to publish it (e.g. to Maven Central). Mill comes with both capabilities built in.

+

Mill comes built-in with the ability to make assemblies. Given a simple Mill build:

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+}
+
+

You can make a self-contained assembly via:

+
$ mill foo.assembly
+
+$ ls -lh out/foo/assembly/dest/out.jar
+-rw-r--r--  1 lihaoyi  staff   5.0M Feb 17 11:14 out/foo/assembly/dest/out.jar
+
+

You can then move the out.jar file anywhere you would like, and run it standalone using java:

+
$ java -cp out/foo/assembly/dest/out.jar foo.Example
+Hello World!
+
+

To publish to Maven Central, you need to make foo also extend Mill's PublishModule trait:

+
// build.sc
+import mill._, scalalib._, publish._
+
+object foo extends ScalaModule with PublishModule {
+  def scalaVersion = "2.12.4"
+  def publishVersion = "0.0.1"
+
+  def pomSettings = PomSettings(
+    description = "Hello",
+    organization = "com.lihaoyi",
+    url = "https://github.com/lihaoyi/example",
+    licenses = Seq(License.MIT),
+    versionControl = VersionControl.github("lihaoyi", "example"),
+    developers = Seq(
+      Developer("lihaoyi", "Li Haoyi","https://github.com/lihaoyi")
+    )
+  )
+}
+
+

You can change the name of the published artifact (artifactId in the Maven POM) by overriding artifactName in the module you want to publish.

+

You can download an example project with this layout here:

+ +

Which you can then publish using the mill foo.publish command, which takes your sonatype credentials (e.g. lihaoyi:foobarbaz) and GPG password as inputs:

+
$ mill foo.publish
+Missing arguments: (--sonatypeCreds: String, --release: Boolean)
+
+Arguments provided did not match expected signature:
+
+publish
+  --sonatypeCreds  String (format: "username:password")
+  --gpgPassphrase  String (default null)
+  --gpgKeyName     String (default null)
+  --signed         Boolean (default true)
+  --release        Boolean
+
+

You also need to specify release as true or false, depending on whether you just want to stage your module on oss.sonatype.org or you want Mill to complete the release process to Maven Central.

+

If you are publishing multiple artifacts, you can also use mill mill.scalalib.PublishModule/publishAll as described here

Structure of the out/ folder

+

The out/ folder contains all the generated files & metadata for your build. It is structured with one folder per Target/Command, that is run, e.g.:

+ +

There are also top-level build-related files in the out/ folder, prefixed as mill-*. The most useful is mill-profile.json, which logs the tasks run and time taken for the last Mill command you executed. This is very useful if you want to find out exactly what tasks are being run and Mill is being slow.

+

Each folder currently contains the following files:

+ +

The out/ folder is intentionally kept simplistic and user-readable. If your build is not behaving as you would expect, feel free to poke around the various dest/ folders to see what files are being created, or the meta.json files to see what is being returned by a particular task. You can also simply delete folders within out/ if you want to force portions of your project to be re-built, e.g. deleting the out/main/ or out/main/test/compile/ folders.

Overriding Mill Versions

+

Apart from downloading and installing new versions of Mill globally, there are a few ways of selecting/updating your Mill version:

+ +
echo "0.5.0" > .mill-version
+
+

.mill-version takes precedence over the version of Mill specified in the ./mill script.

+ +
MILL_VERSION=0.5.0-3-4faefb mill __.compile
+
+
+or
+
+

MILL_VERSION=0.5.0-3-4faefb ./mill __.compile

+

to override the Mill version manually. This takes precedence over the version specified in ./mill or .mill-version

+

Note that both of these overrides only work for versions 0.5.0 and above.

Development Releases

+

In case you want to try out the latest features and improvements that are currently in master, unstable versions of Mill are available as binaries named #.#.#-n-hash linked to the latest tag. Installing the latest unstable release is recommended for bootstrapping mill.

+

The easiest way to use a development release is by updating the Bootstrap Script, or Overriding Mill Versions via an environment variable or .mill-version file.

+

Come by our Gitter Channel if you want to ask questions or say hi!


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Configuring Mill
\ No newline at end of file diff --git a/logo-white.svg b/logo-white.svg new file mode 100644 index 00000000..a681aa9f --- /dev/null +++ b/logo-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/page/common-project-layouts.html b/page/common-project-layouts.html new file mode 100644 index 00000000..d816d363 --- /dev/null +++ b/page/common-project-layouts.html @@ -0,0 +1,355 @@ +Common Project Layouts

Common Project Layouts

Configuring MillTasks

Common Project Layouts

+

Earlier, we have shown how to work with the Mill default Scala module layout. Here we will explore some other common project layouts that you may want in your Scala build:

Java Project with Test Suite

+
trait JUnitTests extends TestModule {
+  def testFrameworks = Seq("com.novocode.junit.JUnitFramework")
+  def ivyDeps = Agg(ivy"com.novocode:junit-interface:0.11")
+}
+
+object core extends JavaModule {
+  object test extends Tests with JUnitTests
+}
+object app extends JavaModule {
+  def moduleDeps = Seq(core)
+  object test extends Tests with JUnitTests
+}
+
+

This build is a two-module Java project with junit test suites. It expects the following filesystem layout:

+
build.sc
+app/
+    src/hello/
+        Main.java
+    test/src/hello/
+            MyAppTests.java
+core/
+    src/hello/
+        Core.java
+    test/src/hello/
+            MyCoreTests.java
+
+

You can then run the junit tests using mill app.test or mill core.test, and configure which exact tests you want to run using the flags defined on the JUnit Test Interface.

+

For a more complex, real-world example of a Java build, check out our example build for the popular Caffeine project:

+

Cross Scala-Version Modules

+
import mill._
+import mill.scalalib._
+object foo extends Cross[FooModule]("2.10.6", "2.11.11", "2.12.4")
+class FooModule(val crossScalaVersion: String) extends CrossScalaModule {
+   ...
+   object test extends Tests {
+     ...
+   }
+}
+
+

Mill provides a CrossScalaModule template, which can be used with Cross to cross-build Scala modules across different versions of Scala. The default configuration for CrossScalaModule expects a filesystem layout as follows:

+
build.sc
+foo/
+    src/
+    src-2.10/
+    src-2.11/
+    src-2.12/
+    test/
+        src/
+        src-2.10/
+        src-2.11/
+        src-2.12/
+
+

Code common to all Scala versions lives in src, while code specific to one version lives in src-x.y.

Scala.js Modules

+
import mill._
+import mill.scalajslib._
+
+object foo extends ScalaJSModule {
+  def scalaVersion = "2.12.4"
+  def scalaJSVersion = "0.6.22"
+}
+
+

ScalaJSModule is a variant of ScalaModule that builds your code using Scala.js. In addition to the standard foo.compile and foo.run commands (the latter of which runs your code on Node.js, which must be pre-installed) ScalaJSModule also exposes the foo.fastOpt and foo.fullOpt tasks for generating the optimized Javascript file.

Scala Native Modules

+
import mill._, scalalib._, scalanativelib._
+
+object hello extends ScalaNativeModule {
+  def scalaVersion = "2.11.12"
+  def scalaNativeVersion = "0.3.8"
+  def logLevel = NativeLogLevel.Info // optional
+  def releaseMode = ReleaseMode.Debug // optional
+}
+
+
.
+├── build.sc
+└── hello
+    ├── src
+    │   └── hello
+    │       └── Hello.scala
+
+
// hello/src/hello/Hello.scala
+package hello
+import scalatags.Text.all._
+object Hello{
+  def main(args: Array[String]): Unit = {
+    println("Hello! " + args.toList)
+    println(div("one"))
+  }
+}
+
+

The normal commands mill hello.compile, mill hello.run, all work. If you want to build a standalone executable, you can use mill show hello.nativeLink to create it.

+

ScalaNativeModule builds scala sources to executable binaries using Scala Native. You will need to have the relevant parts of the LLVM toolchain installed on your system. Optimized binaries can be built by setting releaseMode (see above) and more verbose logging can be enabled using logLevel. Currently two test frameworks are supported utest and scalatest. Support for scalacheck should be possible when the relevant artifacts have been published for scala native.

+

Here's a slightly larger example, demonstrating how to use third party dependencies (note the two sets of double-colons :: necessary) and a test suite:

+
import mill._, scalalib._, scalanativelib._
+
+object hello extends ScalaNativeModule {
+  def scalaNativeVersion = "0.3.8"
+  def scalaVersion = "2.11.12"
+  def ivyDeps = Agg(ivy"com.lihaoyi::scalatags::0.6.7")
+  object test extends Tests{
+    def ivyDeps = Agg(ivy"com.lihaoyi::utest::0.6.3")
+    def testFrameworks = Seq("utest.runner.Framework")
+  }
+}
+
+
.
+├── build.sc
+└── hello
+    ├── src
+    │   └── hello
+    │       └── Hello.scala
+    └── test
+        └── src
+            └── HelloTests.scala
+
+
// hello/test/src/HelloTests.scala
+package hello
+import utest._
+import scalatags.Text.all._
+object HelloTests extends TestSuite{
+  val tests = Tests{
+    'pass - {
+      assert(div("1").toString == "<div>1</div>")
+    }
+    'fail - {
+      assert(123 == 1243)
+    }
+  }
+}
+
+

The same mill hello.compile or mill hello.run still work, as does mill
+hello.test to run the test suite defined here.
SBT-Compatible Modules

+
import mill._
+import mill.scalalib._
+
+object foo extends SbtModule {
+  def scalaVersion = "2.12.4"
+}
+
+

These are basically the same as normal ScalaModules, but configured to follow the SBT project layout:

+
build.sc
+foo/
+    src/
+        main/
+            scala/
+        test/
+            scala/
+
+

Useful if you want to migrate an existing project built with SBT without having to re-organize all your files

SBT-Compatible Cross Scala-Version Modules

+
import mill._
+import mill.scalalib._
+object foo extends Cross[FooModule]("2.10.6", "2.11.11", "2.12.4")
+class FooModule(val crossScalaVersion: String) extends CrossSbtModule {
+   ...
+   object test extends Tests {
+     ...
+   }
+}
+
+

A CrossSbtModule is a version of CrossScalaModule configured with the SBT project layout:

+
build.sc
+foo/
+    src/
+        main/
+            scala/
+            scala-2.10/
+            scala-2.11/
+            scala-2.12/
+        test/
+            scala/
+            scala-2.10/
+            scala-2.11/
+            scala-2.12/
+

Publishing

+
import mill._
+import mill.scalalib._
+import mill.scalalib.publish._
+object foo extends ScalaModule with PublishModule {
+  def scalaVersion = "2.12.4"
+  def publishVersion = "0.0.1"
+  def pomSettings = PomSettings(
+    description = "My first library",
+    organization = "com.lihaoyi",
+    url = "https://github.com/lihaoyi/mill",
+    licenses = Seq(License.MIT),
+    versionControl = VersionControl.github("lihaoyi", "mill"),
+    developers = Seq(
+      Developer("lihaoyi", "Li Haoyi","https://github.com/lihaoyi")
+    )
+  )
+}
+
+

You can make a module publishable by extending PublishModule.

+

PublishModule then needs you to define a publishVersion and pomSettings. The artifactName defaults to the name of your module (in this case foo) but can be overridden. The organization is defined in pomSettings.

+

Once you've mixed in PublishModule, you can publish your libraries to maven central via:

+
mill mill.scalalib.PublishModule/publishAll \
+        lihaoyi:$SONATYPE_PASSWORD \
+        $GPG_PASSWORD \ 
+        foo.publishArtifacts
+
+

This uploads them to oss.sonatype.org where you can log-in and stage/release them manually. You can also pass in the --release true flag to perform the staging/release automatically:

+
mill mill.scalalib.PublishModule/publishAll \
+        lihaoyi:$SONATYPE_PASSWORD \
+        $GPG_PASSWORD \ 
+        foo.publishArtifacts \
+        --release true
+
+

If you want to publish/release multiple modules, you can use the _ or __ wildcard syntax:

+
mill mill.scalalib.PublishModule/publishAll \
+        lihaoyi:$SONATYPE_PASSWORD \
+        $GPG_PASSWORD \ 
+        __.publishArtifacts \
+        --release true
+

Example Builds

+

Mill comes bundled with example builds for existing open-source projects, as integration tests and examples:

Acyclic

+ +

A small single-module cross-build, with few sources, minimal dependencies, and wired up for publishing to Maven Central.

Better-Files

+ +

A collection of small modules compiled for a single Scala version.

+

Also demonstrates how to define shared configuration in a trait, enable Scala compiler flags, and download artifacts as part of the build.

Jawn

+ +

A collection of relatively small modules, all cross-built across the same few versions of Scala.

Upickle

+ +

A single cross-platform Scala.js/Scala-JVM module cross-built against multiple versions of Scala, including the setup necessary for publishing to Maven Central.

Ammonite

+ +

A relatively complex build with numerous submodules, some cross-built across Scala major versions while others are cross-built against Scala minor versions.

+

Also demonstrates how to pass one module's compiled artifacts to the run/test commands of another, via their forkEnv.


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Configuring MillTasks
\ No newline at end of file diff --git a/page/configuring-mill.html b/page/configuring-mill.html new file mode 100644 index 00000000..70cff6f3 --- /dev/null +++ b/page/configuring-mill.html @@ -0,0 +1,447 @@ +Configuring Mill

Configuring Mill

Intro to MillCommon Project Layouts

You can configure your Mill build in a number of ways:

Compilation & Execution Flags

+
import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  
+  def scalacOptions = Seq("-Ydelambdafy:inline")
+  
+  def forkArgs = Seq("-Xmx4g")
+  
+  def forkEnv = Map("HELLO_MY_ENV_VAR" -> "WORLD")
+}
+
+

You can pass flags to the Scala compiler via scalacOptions. By default, foo.run runs the compiled code in a subprocess, and you can pass in JVM flags via forkArgs or environment-variables via forkEnv.

+

You can also run your code via

+
mill foo.runLocal
+
+

Which runs it in-process within an isolated classloader. This may be faster since you avoid the JVM startup, but does not support forkArgs or forkEnv.

+

If you want to pass main-method arguments to run or runLocal, simply pass them after the foo.run/foo.runLocal:

+
mill foo.run arg1 arg2 arg3
+mill foo.runLocal arg1 arg2 arg3
+

Adding Ivy Dependencies

+
import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def ivyDeps = Agg(
+    ivy"com.lihaoyi::upickle:0.5.1",
+    ivy"com.lihaoyi::pprint:0.5.2",
+    ivy"com.lihaoyi::fansi:0.2.4",
+    ivy"${scalaOrganization()}:scala-reflect:${scalaVersion()}"
+  )
+}
+
+

You can define the ivyDeps field to add ivy dependencies to your module. The ivy"com.lihaoyi::upickle:0.5.1" syntax (with ::) represents Scala dependencies; for Java dependencies you would use a single : e.g. ivy"com.lihaoyi:upickle:0.5.1". If you have dependencies cross-published against the full Scala version (eg. 2.12.4 instead of just 2.12), you can use ::: as in ivy"org.scalamacros:::paradise:2.1.1".

+

To select the test-jars from a dependency use the following syntax: ivy"org.apache.spark::spark-sql:2.4.0;classifier=tests".

+

By default these are resolved from maven central, but you can add your own resolvers by overriding the repositories definition in the module:

+
import coursier.maven.MavenRepository
+
+def repositories = super.repositories ++ Seq(
+  MavenRepository("https://oss.sonatype.org/content/repositories/releases")
+)
+
+

To add custom resolvers to the initial bootstrap of the build, you can create a custom ZincWorkerModule, and override the zincWorker method in your ScalaModule by pointing it to that custom object:

+
import coursier.maven.MavenRepository
+
+object CustomZincWorkerModule extends ZincWorkerModule {
+  def repositories() = super.repositories ++ Seq(
+    MavenRepository("https://oss.sonatype.org/content/repositories/releases")
+  )  
+}
+
+object YourBuild extends ScalaModule {
+  def zincWorker = CustomZincWorkerModule
+  // ... rest of your build definitions
+}
+

Adding a Test Suite

+
import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+
+  object test extends Tests { 
+    def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.6.0")
+    def testFrameworks = Seq("utest.runner.Framework")
+  }
+}
+
+

You can define a test suite by creating a nested module extending Tests, and specifying the ivy coordinates and name of your test framework. This expects the tests to be laid out as follows:

+
build.sc
+foo/
+    src/
+        Main.scala
+    resources/
+        ...
+    test/
+        src/
+            MainTest.scala
+        resources/
+            ...
+out/
+    foo/
+        ...
+        test/
+            ...
+
+

The above example can be run via

+
mill foo.test
+
+

By default, tests are run in a subprocess, and forkArgs and forkEnv can be overridden to pass JVM flags & environment variables. You can also use

+
mill foo.test.testLocal
+
+

To run tests in-process in an isolated classloader.

+

If you want to pass any arguments to the test framework, simply put them after foo.test in the command line. e.g. uTest lets you pass in a selector to decide which test to run, which in Mill would be:

+
mill foo.test foo.MyTestSuite.testCaseName
+
+

You can define multiple test suites if you want, e.g.:

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+
+  object test extends Tests { 
+    def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.6.0")
+    def testFrameworks = Seq("utest.runner.Framework")
+  }
+  object integration extends Tests { 
+    def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.6.0")
+    def testFrameworks = Seq("utest.runner.Framework")
+  }
+}
+
+

Each of which will expect their sources to be in their respective foo/test and foo/integration folder.

+

Tests modules are ScalaModules like any other, and all the same configuration options apply.

Custom Test Frameworks

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def ivyDeps = Agg(ivy"org.scalatest::scalatest:3.0.4")
+  def testFrameworks = Seq("org.scalatest.tools.Framework")
+}
+
+

Integrating with test frameworks like Scalatest is simply a matter of adding it to ivyDeps and specifying the testFrameworks you want to use. After that you can add a test suite and mill foo.test as usual, passing args to the test suite via mill foo.test arg1 arg2 arg3

Scala Compiler Plugins

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  
+  def compileIvyDeps = Agg(ivy"com.lihaoyi::acyclic:0.1.7")
+  def scalacOptions = Seq("-P:acyclic:force")
+  def scalacPluginIvyDeps = Agg(ivy"com.lihaoyi::acyclic:0.1.7")
+}
+
+

You can use Scala compiler plugins by setting scalacPluginIvyDeps. The above example also adds the plugin to compileIvyDeps, since that plugin's artifact is needed on the compilation classpath (though not at runtime).

Reformatting your code

+

Mill supports code formatting via scalafmt out of the box.

+

To configure formatting per-module you need to make your module extend mill.scalalib.scalafmt.ScalafmtModule:

+
// build.sc
+import mill._, scalalib._, scalafmt._
+
+object foo extends ScalaModule with ScalafmtModule {
+  def scalaVersion = "2.12.4"
+}
+
+

Now you can reformat code with mill foo.reformat command.

+

You can also reformat your project's code globally with mill mill.scalalib.scalafmt.ScalafmtModule/reformatAll __.sources command. It will reformat all sources that match the __.sources query.

+

If you add a .scalafmt.conf file at the root of you project, it will be used to configure formatting. It can contain a version key to specify the scalafmt version used to format your code. See the scalafmt configuration documentation for details.

Common Configuration

+
// build.sc
+import mill._, scalalib._
+
+trait CommonModule extends ScalaModule {
+  def scalaVersion = "2.12.4"
+}
+ 
+object foo extends CommonModule
+object bar extends CommonModule {
+  def moduleDeps = Seq(foo)
+}
+
+

You can extract out configuration common to multiple modules into a trait that those modules extend. This is useful for providing convenience & ensuring consistent configuration: every module often has the same scala-version, uses the same testing framework, etc. and all that can be extracted out into the trait.

Global configuration

+

Mill builds on ammonite which allows you to define global configuration. Depending on how you start mill 2 different files will be loaded. For interactive mode it's ~/.mill/ammonite/predef.sc and from the command line it's ~/.mill/ammonite/predefScript.sc. You might want to create a symlink from one to the other to avoid duplication.

+

Example ~/.mill/ammonite/predef.sc

+
val nexusUser = "myuser"
+val nexusPassword = "mysecret"
+
+

Everything declared in the above file will be available to any build you run.

+
  def repositories = super.repositories ++ Seq(
+    // login and pass are globally configured
+    MavenRepository("https://nexus.mycompany.com/repository/maven-releases", authentication = Some(coursier.core.Authentication(nexusUser, nexusPassword)))
+  )
+

Custom Tasks

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+}
+
+def lineCount = T {
+  
+  foo.sources().flatMap(ref => os.walk(ref.path)).filter(_.isFile).flatMap(read.lines).size
+}
+
+def printLineCount() = T.command {
+  println(lineCount())
+}
+
+

You can define new cached Targets using the T {...} syntax, depending on existing Targets e.g. foo.sources via the foo.sources() syntax to extract their current value, as shown in lineCount above. The return-type of a Target has to be JSON-serializable (using uPickle) and the Target is cached when first run until its inputs change (in this case, if someone edits the foo.sources files which live in foo/src). Cached Targets cannot take parameters.

+

You can print the value of your custom target using show, e.g.

+
mill show lineCount
+
+

You can define new un-cached Commands using the T.command {...} syntax. These are un-cached and re-evaluate every time you run them, but can take parameters. Their return type needs to be JSON-writable as well, or (): Unit if you want to return nothing.

+

Your custom targets can depend on each other using the def bar = T {... foo() +...} syntax, and you can create arbitrarily long chains of dependent targets. Mill will handle the re-evaluation and caching of the targets' output for you, and will provide you a T.ctx().dest folder for you to use as scratch space or to store files you want to return.

+

Custom targets and commands can contain arbitrary code. Whether you want to download files (e.g. using mill.modules.Util.download), shell-out to Webpack to compile some Javascript, generate sources to feed into a compiler, or create some custom jar/zip assembly with the files you want (e.g. using mill.modules.Jvm.createJar), all of these can simply be custom targets with your code running in the T {...} block.

Custom Modules

+
// build.sc
+import mill._, scalalib._
+
+object qux extends Module {
+  object foo extends ScalaModule {
+    def scalaVersion = "2.12.4"
+  }
+  object bar extends ScalaModule {
+    def moduleDeps = Seq(foo)
+    def scalaVersion = "2.12.4"
+  }
+}
+
+

Not every Module needs to be a ScalaModule; sometimes you just want to group things together for neatness. In the above example, you can run foo and bar namespaced inside qux:

+
mill qux.foo.compile
+mill qux.bar.run
+
+

You can also define your own module traits, with their own set of custom tasks, to represent other things e.g. Javascript bundles, docker image building:

+
// build.sc
+trait MySpecialModule extends Module {
+  ...
+}
+object foo extends MySpecialModule
+object bar extends MySpecialModule
+

Module/Task Names

+
// build.sc
+import mill._
+import mill.scalalib._
+
+object `hyphenated-module` extends Module {
+  def `hyphenated-target` = T{
+    println("This is a hyphenated target in a hyphenated module.")
+  }
+}
+
+object unhyphenatedModule extends Module {
+  def unhyphenated_target = T{
+    println("This is an unhyphenated target in an unhyphenated module.")
+  }
+  def unhyphenated_target2 = T{
+    println("This is the second unhyphenated target in an unhyphenated module.")
+  }
+}
+
+

Mill modules and tasks may be composed of any of the following characters types:

+ +

Due to Scala naming restrictions, module and task names with hyphens must be surrounded by back-ticks (`).

+

Using hyphenated names at the command line is unaffected by these restrictions.

+
mill hyphenated-module.hyphenated-target
+mill unhyphenatedModule.unhyphenated_target
+mill unhyphenatedModule.unhyphenated_target2
+

Overriding Tasks

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def compile = T {
+    println("Compiling...")
+    super.compile()
+  }
+  def run(args: String*) = T.command {
+    println("Running..." + args.mkString(" "))
+    super.run(args:_*)
+  }
+}
+
+

You can re-define targets and commands to override them, and use super if you want to refer to the originally defined task. The above example shows how to override compile and run to add additional logging messages, but you can also override ScalaModule#generatedSources to feed generated code to your compiler, ScalaModule#prependShellScript to make your assemblies executable, or ScalaModule#console to use the Ammonite REPL instead of the normal Scala REPL.

+

In Mill builds the override keyword is optional.

Unmanaged Jars

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def unmanagedClasspath = T {
+    if (!ammonite.ops.exists(millSourcePath / "lib")) Agg()
+    else Agg.from(ammonite.ops.ls(millSourcePath / "lib").map(PathRef(_)))
+  }
+}
+
+

You can override unmanagedClasspath to point it at any jars you place on the filesystem, e.g. in the above snippet any jars that happen to live in the foo/lib/ folder.

Defining a Main Class

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def mainClass = Some("foo.bar.Baz")
+}
+
+

Mill's foo.run by default will discover which main class to run from your compilation output, but if there is more than one or the main class comes from some library you can explicitly specify which one to use. This also adds the main class to your foo.jar and foo.assembly jars.

Merge/exclude files from assembly

+

When you make a runnable jar of your project with assembly command, you may want to exclude some files from a final jar (like signature files, and manifest files from library jars), and merge duplicated files (for instance reference.conf files from library dependencies).

+

By default mill excludes all *.sf, *.dsa, *.rsa, and META-INF/MANIFEST.MF files from assembly, and concatenates all reference.conf files. You can also define your own merge/exclude rules.

+
// build.sc
+import mill._, scalalib._
+import mill.modules.Assembly._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def assemblyRules = Seq(
+    Rule.Append("application.conf"), // all application.conf files will be concatenated into single file
+    Rule.AppendPattern(".*\\.conf"), // all *.conf files will be concatenated into single file
+    Rule.ExcludePattern("*.temp") // all *.temp files will be excluded from a final jar
+  )
+}
+
+

To exclude Scala library from assembly

+
// build.sc
+import mill._, scalalib._
+import mill.modules.Assembly._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+
+  def scalaLibraryIvyDeps = T { Agg.empty }
+}
+

Downloading Non-Maven Jars

+
// build.sc
+import mill._, scalalib._
+
+object foo extends ScalaModule {
+  def scalaVersion = "2.12.4"
+  def unmanagedClasspath = Agg(
+    mill.modules.Util.download(
+      "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar",
+      "fastjavaio.jar"
+    )
+  )
+}
+
+

You can also override unmanagedClasspath to point it at jars that you want to download from arbitrary URLs. Note that targets like unmanagedClasspath are cached, so your jar is downloaded only once and re-used indefinitely after that.


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Intro to MillCommon Project Layouts
\ No newline at end of file diff --git a/page/contrib-modules.html b/page/contrib-modules.html new file mode 100644 index 00000000..22abad5e --- /dev/null +++ b/page/contrib-modules.html @@ -0,0 +1,649 @@ +Contrib Modules

Contrib Modules

Mill InternalsThirdparty Modules

The plugins in this section are developed/maintained in the mill git tree.

+

When using one of these, it is important that the versions you load match your mill version. To facilitate this, Mill will automatically replace the $MILL_VERSION literal in your ivy imports with the correct value.

+

For instance :

+
import $ivy.`com.lihaoyi::mill-contrib-bloop:$MILL_VERSION`
+

Bloop

+

This plugin generates bloop configuration from your build file, which lets you use the bloop CLI for compiling, and makes your scala code editable in Metals

Quickstart

+
// build.sc (or any other .sc file it depends on, including predef)
+import $ivy.`com.lihaoyi::mill-contrib-bloop:$MILL_VERSION`
+
+

Then in your terminal :

+
> mill mill.contrib.Bloop/install
+
+

It generates correct bloop config for any JavaModule, ScalaModule, ScalaJsModule or ScalaNativeModule under the .bloop folder

Mix-in

+

You can mix-in the Bloop.Module trait with any JavaModule to quickly access the deserialised configuration for that particular module:

+
// build.sc
+import mill._
+import mill.scalalib._
+import mill.contrib.Bloop
+
+object MyModule extends ScalaModule with Bloop.Module {
+  def myTask = T { bloop.config() }
+}
+

Note regarding metals

+

Generating the bloop config should be enough for metals to pick it up and for features to start working in vscode (or the bunch of other editors metals supports). However, note that this applies only to your project sources. Your mill/ammonite related .sc files are not yet supported by metals.

+

The generated bloop config references the semanticDB compiler plugin required by metals to function. If need be, the version of semanticDB can be overridden by extending mill.contrib.bloop.BloopImpl in your own space.

Note regarding current mill support in bloop

+

The mill-bloop integration currently present in the bloop codebase will be deprecated in favour of this implementation.

BuildInfo

+

Generate scala code from your buildfile. This plugin generates a single object containing information from your build.

+

To declare a module that uses BuildInfo you must extend the mill.contrib.buildinfo.BuildInfo trait when defining your module.

+

Quickstart:

+
// build.sc
+import $ivy.`com.lihaoyi::mill-contrib-buildinfo:$MILL_VERSION`
+import mill.contrib.buildinfo.BuildInfo
+
+object project extends BuildInfo {
+  val name = "project-name"
+  def buildInfoMembers: T[Map[String, String]] = T {
+    Map(
+      "name" -> name,
+      "scalaVersion" -> scalaVersion()
+    )
+  }
+}
+

Configuration options

+

Docker

+

Automatically build docker images from your mill project.

+

Requires the docker CLI to be installed.

+

In the simplest configuration just extend DockerModule and declare a DockerConfig object.

+
import mill._, scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-docker:$MILL_VERSION`
+import contrib.docker.DockerModule
+
+object foo extends JavaModule with DockerModule {
+  object docker extends DockerConfig
+}
+
+

Then

+
$ mill foo.docker.build
+$ docker run foo
+

Configuration

+

Configure the image by overriding tasks in the DockerConfig object

+
object docker extends DockerConfig {
+  // Override tags to set the output image name
+  def tags = List("aws_account_id.dkr.ecr.region.amazonaws.com/hello-repository")
+
+  def baseImage = "openjdk:11"
+
+  // Configure whether the docker build should check the remote registry for a new version of the base image before building.
+  // By default this is true if the base image is using a latest tag
+  def pullBaseImage = true
+}
+
+

Run mill in interactive mode to see the docker client output, like mill -i foo.docker.build.

Flyway

+

Enables you to configure and run Flyway commands from your mill build file. The flyway module currently supports the most common flyway use cases with file based migrations.

+

Configure flyway by overriding settings in your module. For example

+
// build.sc
+
+import mill._, scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-flyway:$MILL_VERSION`
+import contrib.flyway.FlywayModule
+
+object foo extends ScalaModule with FlywayModule {
+  def scalaVersion = "2.12.8"
+
+  //region flyway
+  def flywayUrl = "jdbc:postgresql:myDb" // required
+  def flywayDriverDeps = Agg(ivy"org.postgresql:postgresql:42.2.5") // required
+  def flywayUser = "postgres" // optional
+  // def flywayPassword = "" // optional
+  //endregion
+}
+
+

Flyway will look for migration files in db/migration in all resources folders by default. This should work regardless of if you are using a mill or sbt project layout.

+

You can then run common flyway commands like

+
mill foo.flywayClean
+mill foo.flywayInfo
+mill foo.flywayMigrate
+
+
+

REMINDER: You should never hard-code credentials or check them into a version control system. You should write some code to populate the settings for flyway instead. For example def flywayPassword = T.input(T.ctx().env("FLYWAY_PASSWORD"))

+

Play Framework

+

This module adds basic Play Framework support to mill:

+ +

There is no specific Play Java support, building a Play Java application will require a bit of customization (mostly adding the proper dependencies).

Using the plugin

+

There are 2 base modules and 2 helper traits in this plugin, all of which can be found in mill.playlib.

+

The base modules:

+ +

The two helper traits:

+

Using PlayModule

+

In order to use the PlayModule for your application, you need to provide the scala, Play and Twirl versions. You also need to define your own test object which extends the provided PlayTests trait.

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object core extends PlayModule {
+    //config
+    override def scalaVersion= T{"2.12.8"}
+    override def playVersion= T{"2.7.0"}
+    override def twirlVersion= T{"1.4.0"}
+
+    object test extends PlayTests
+}
+
+

Using the above definition, your build will be configured to use the default Play layout:

+
.
+├── build.sc
+└── core
+    ├── app
+    │   ├── controllers
+    │   └── views
+    ├── conf
+    │   └── application.conf
+    │   └── routes
+    │   └── ...
+    ├── logs
+    ├── public
+    │   ├── images
+    │   ├── javascripts
+    │   └── stylesheets
+    └── test
+        └── controllers
+
+

The following compile dependencies will automatically be added to your build:

+
ivy"com.typesafe.play::play:${playVersion()}",
+ivy"com.typesafe.play::play-guice:${playVersion()}",
+ivy"com.typesafe.play::play-server:${playVersion()}",
+ivy"com.typesafe.play::play-logback:${playVersion()}"
+
+

Scala test will be setup as the default test framework and the following test dependencies will be added (the actual version depends on the version of Play you are pulling 2.6.x or 2.7.x):

+
ivy"org.scalatestplus.play::scalatestplus-play::4.0.1"
+
+

In order to have a working start command the following runtime dependency is also added:

+
ivy"com.typesafe.play::play-akka-http-server:${playVersion()}"
+

Using PlayApiModule

+

The PlayApiModule trait behaves the same as the PlayModule trait but it won't process .scala.html (Twirl) files and you don't need to define the twirlVersion:

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object core extends PlayApiModule {
+    //config
+    override def scalaVersion= T{"2.12.8"}
+    override def playVersion= T{"2.7.0"}
+
+    object test extends PlayTests
+}
+

Play configuration options

+

The Play modules themselves don't have specific configuration options at this point but the router module configuration options and the Twirl module configuration options are applicable.

Additional play libraries

+

The following helpers are available to provide additional Play Framework dependencies:

+ +

If you want to add an optional library using the helper you can do so by overriding ivyDeps like in the following example build:

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object core extends PlayApiModule {
+    //config
+    override def scalaVersion= T{"2.12.8"}
+    override def playVersion= T{"2.7.0"}
+
+    object test extends PlayTests
+
+    override def ivyDeps = T{ super.ivyDeps() ++ Agg(ws(), filters()) }
+}
+

Commands equivalence

+

Mill commands are targets on a named build. For example if your build is called core:

+

Using SingleModule

+

The SingleModule trait allows you to have the build descriptor at the same level as the source code on the filesystem. You can move from there to a multi-module build either by refactoring your directory layout into multiple subdirectories or by using mill's nested modules feature.

+

Looking back at the sample build definition in Using PlayModule:

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object core extends PlayModule {
+    //config
+    override def scalaVersion= T{"2.12.8"}
+    override def playVersion= T{"2.7.0"}
+    override def twirlVersion= T{"1.4.0"}
+
+    object test extends PlayTests
+}
+
+

The directory layout was:

+
.
+├── build.sc
+└── core
+    ├── app
+    │   ├── controllers
+    │   └── views
+    ├── conf
+    │   └── application.conf
+    │   └── routes
+    │   └── ...
+    ├── logs
+    ├── public
+    │   ├── images
+    │   ├── javascripts
+    │   └── stylesheets
+    └── test
+        └── controllers
+
+

by mixing in the SingleModule trait in your build:

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object core extends PlayModule with SingleModule {
+	//config
+	override def scalaVersion= T{"2.12.8"}
+	override def playVersion= T{"2.7.0"}
+	override def twirlVersion= T{"1.4.0"}
+
+	object test extends PlayTests
+}
+
+

the layout becomes:

+
.
+└── core
+    ├── build.sc
+    ├── app
+    │   ├── controllers
+    │   └── views
+    ├── conf
+    │   └── application.conf
+    │   └── routes
+    │   └── ...
+    ├── logs
+    ├── public
+    │   ├── images
+    │   ├── javascripts
+    │   └── stylesheets
+    └── test
+        └── controllers
+

Using the router module directly

+

If you want to use the router module in a project which doesn't use the default Play layout, you can mix-in the mill.playlib.RouterModule trait directly when defining your module. Your app must define playVersion and scalaVersion.

+
// build.sc
+import mill._
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+
+object app extends ScalaModule with RouterModule {
+  def playVersion= T{"2.7.0"}
+  def scalaVersion= T{"2.12.8"}
+}
+
Router Configuration options
+
Details
+

The following filesystem layout is expected by default:

+
.
+├── app
+│   └── routes
+│       └── routes
+└── build.sc
+
+

RouterModule adds the compileRouter task to the module:

+
mill app.compileRouter
+
+

(it will be automatically run whenever you compile your module)

+

This task will compile routes templates into the out/app/compileRouter/dest directory. This directory must be added to the generated sources of the module to be compiled and made accessible from the rest of the code. This is done by default in the trait, but if you need to have a custom override for generatedSources you can get the list of files from routerClasses

+

To add additional imports to all of the routes:

+
// build.sc
+import mill.scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-playlib:$MILL_VERSION`,  mill.playlib._
+
+object app extends ScalaModule with RouterModule {
+  def playVersion = "2.7.0"
+  override def routesAdditionalImport = Seq("my.additional.stuff._", "my.other.stuff._")
+}
+

ScalaPB

+

This module allows ScalaPB to be used in Mill builds. ScalaPB is a Protocol Buffers compiler plugin that generates Scala case classes, encoders and decoders for protobuf messages.

+

To declare a module that uses ScalaPB you can extend the mill.contrib.scalapblib.ScalaPBModule trait when defining your module.

+

This creates a Scala module which compiles .proto files in the protobuf folder of the module with ScalaPB and adds the resulting .scala sources to your module's generatedSources.

+
// build.sc
+
+import $ivy.`com.lihaoyi::mill-contrib-scalapblib:$MILL_VERSION`
+import contrib.scalapblib._
+
+object example extends ScalaPBModule {
+  def scalaVersion = "2.12.6"
+  def scalaPBVersion = "0.7.4"
+}
+
+

This defines a project with the following layout:

+
build.sc
+example/
+    src/
+    protobuf/
+    resources/
+

Configuration options

+ +

If you'd like to configure the options that are passed to the ScalaPB compiler directly, you can override the scalaPBOptions task, for example:

+
// build.sc
+
+import $ivy.`com.lihaoyi::mill-contrib-scalapblib:$MILL_VERSION`
+import contrib.scalapblib._
+
+object example extends ScalaPBModule {
+  def scalaVersion = "2.12.6"
+  def scalaPBVersion = "0.7.4"
+  override def scalaPBOptions = "flat_package,java_conversions"
+}
+

Scoverage

+

This module allows you to generate code coverage reports for Scala projects with Scoverage via the scalac-scoverage-plugin.

+

To declare a module for which you want to generate coverage reports you can extend the mill.contrib.scoverage.ScoverageModule trait when defining your module. Additionally, you must define a submodule that extends the ScoverageTests trait that belongs to your instance of ScoverageModule.

+
import $ivy.`com.lihaoyi::mill-contrib-scoverage:$MILL_VERSION`
+import mill.contrib.scoverage.ScoverageModule
+
+object foo extends ScoverageModule  {
+  def scalaVersion = "2.12.9"
+  def scoverageVersion = "1.4.0"
+
+  object test extends ScoverageTests {
+    def ivyDeps = Agg(ivy"org.scalatest::scalatest:3.0.8")
+    def testFrameworks = Seq("org.scalatest.tools.Framework")
+  }
+}
+
+

In addition to the normal tasks available to your Scala module, Scoverage modules introduce a few new tasks and changes the behavior of an existing one.

+
mill foo.scoverage.compile      # compiles your module with test instrumentation
+                                # (you don't have to run this manually, running the test task will force its invocation)
+
+mill foo.test                   # tests your project and collects metrics on code coverage
+mill foo.scoverage.htmlReport   # uses the metrics collected by a previous test run to generate a coverage report in html format
+mill foo.scoverage.xmlReport    # uses the metrics collected by a previous test run to generate a coverage report in xml format
+
+

The measurement data is available at out/foo/scoverage/data/, the html report is saved in out/foo/scoverage/htmlReport/, and the xml report is saved in out/foo/scoverage/xmlReport/.

TestNG

+

Provides support for TestNG.

+

To use TestNG as test framework, you need to add it to the TestModule.testFrameworks property.

+
// build.sc
+import mill.scalalib._
+
+object project extends ScalaModule {
+  object test extends Tests{
+    def testFrameworks = Seq("mill.testng.TestNGFramework")
+  }
+}
+

Tut

+

This module allows Tut to be used in Mill builds. Tut is a documentation tool which compiles and evaluates Scala code in documentation files and provides various options for configuring how the results will be displayed in the compiled documentation.

+

To declare a module that uses Tut you can extend the mill.contrib.tut.TutModule trait when defining your module.

+

This creates a Scala module which compiles markdown, HTML and .txt files in the tut folder of the module with Tut.

+

By default the resulting documents are simply placed in the Mill build output folder but they can be placed elsewhere by overriding the tutTargetDirectory task.

+
// build.sc
+
+import $ivy.`com.lihaoyi::mill-contrib-tut:$MILL_VERSION`
+import contrib.tut._
+
+object example extends TutModule {
+  def scalaVersion = "2.12.6"
+  def tutVersion = "0.6.7"
+}
+
+

This defines a project with the following layout:

+
build.sc
+example/
+    src/
+    tut/
+    resources/
+
+

In order to compile documentation we can execute the tut task in the module:

+
sh> mill example.tut
+

Configuration options

+

Twirl

+

Twirl templates support.

+

To declare a module that needs to compile twirl templates you must extend the mill.twirllib.TwirlModule trait when defining your module. Also note that twirl templates get compiled into scala code, so you also need to extend ScalaModule.

+
// build.sc
+import mill.scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-twirllib:$MILL_VERSION`,  mill.twirllib._
+
+object app extends ScalaModule with TwirlModule {
+// ...
+}
+

Twirl configuration options

+

Details

+

The following filesystem layout is expected:

+
build.sc
+app/
+    views/
+        view1.scala.html
+        view2.scala.html
+
+

TwirlModule adds the compileTwirl task to the module:

+
mill app.compileTwirl
+
+

(it will be automatically run whenever you compile your module)

+

This task will compile *.scala.html templates (and others, like *.scala.txt) into the out/app/compileTwirl/dest directory. This directory must be added to the generated sources of the module to be compiled and made accessible from the rest of the code:

+
// build.sc
+import mill.scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-twirllib:$MILL_VERSION`,  mill.twirllib._
+
+object app extends ScalaModule with TwirlModule {
+  def twirlVersion = "1.3.15"
+  def generatedSources = T{ Seq(compileTwirl().classes) }
+}
+
+

To add additional imports to all of the twirl templates:

+
// build.sc
+import mill.scalalib._
+
+import $ivy.`com.lihaoyi::mill-contrib-twirllib:$MILL_VERSION`,  mill.twirllib._
+
+object app extends ScalaModule with TwirlModule {
+  def twirlVersion = "1.3.15"
+  override def twirlAdditionalImports = Seq("my.additional.stuff._", "my.other.stuff._")
+  def generatedSources = T{ Seq(compileTwirl().classes) }
+}
+
+

as the result all templates will get this line at the top:

+
@import "my.additional.stuff._"
+@import "my.other.stuff._"
+
+

Besides that, twirl compiler has default imports, at the moment these:

+
Seq(
+    "_root_.play.twirl.api.TwirlFeatureImports._",
+    "_root_.play.twirl.api.TwirlHelperImports._",
+    "_root_.play.twirl.api.Html",
+    "_root_.play.twirl.api.JavaScript",
+    "_root_.play.twirl.api.Txt",
+    "_root_.play.twirl.api.Xml"
+)
+
+

These imports will always be added to every template. You don't need to list them if you override twirlAdditionalImports.

Example

+

There's an example project


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Mill InternalsThirdparty Modules
\ No newline at end of file diff --git a/page/cross-builds.html b/page/cross-builds.html new file mode 100644 index 00000000..52e2cac3 --- /dev/null +++ b/page/cross-builds.html @@ -0,0 +1,220 @@ +Cross Builds

Cross Builds

ModulesExtending Mill

Mill handles cross-building of all sorts via the Cross[T] module.

Defining Cross Modules

+

You can use this as follows:

+
object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12")
+class FooModule(crossVersion: String) extends Module {
+  def suffix = T { crossVersion }
+  def bigSuffix = T { suffix().toUpperCase() }
+}
+
+

This defines three copies of FooModule: "2.10", "2.11" and "2.12", each of which has its own suffix target. You can then run them via

+
mill show foo[2.10].suffix
+mill show foo[2.10].bigSuffix
+mill show foo[2.11].suffix
+mill show foo[2.11].bigSuffix
+mill show foo[2.12].suffix
+mill show foo[2.12].bigSuffix
+
+

The modules each also have a millSourcePath of

+
foo/2.10
+foo/2.11
+foo/2.12
+
+

And the suffix targets will have the corresponding output paths for their metadata and files:

+
foo/2.10/suffix
+foo/2.10/bigSuffix
+foo/2.11/suffix
+foo/2.11/bigSuffix
+foo/2.12/suffix
+foo/2.12/bigSuffix
+
+

You can also have a cross-build with multiple inputs:

+
val crossMatrix = for {
+  crossVersion <- Seq("210", "211", "212")
+  platform <- Seq("jvm", "js", "native")
+  if !(platform == "native" && crossVersion != "212")
+} yield (crossVersion, platform)
+
+object foo extends mill.Cross[FooModule](crossMatrix:_*)
+class FooModule(crossVersion: String, platform: String) extends Module {
+  def suffix = T { crossVersion + "_" + platform }
+}
+
+

Here, we define our cross-values programmatically using a for-loop that spits out tuples instead of individual values. Our FooModule template class then takes two parameters instead of one. This creates the following modules each with their own suffix target:

+
mill show foo[210,jvm].suffix
+mill show foo[211,jvm].suffix
+mill show foo[212,jvm].suffix
+mill show foo[210,js].suffix
+mill show foo[211,js].suffix
+mill show foo[212,js].suffix
+mill show foo[212,native].suffix
+

Using Cross Modules from Outside

+

You can refer to targets defined in cross-modules as follows:

+
object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12")
+class FooModule(crossVersion: String) extends Module {
+  def suffix = T { crossVersion }
+}
+
+def bar = T { "hello " + foo("2.10").suffix } 
+
+

Here, foo("2.10") references the "2.10" instance of FooModule. You can refer to whatever versions of the cross-module you want, even using multiple versions of the cross-module in the same target:

+
object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12")
+class FooModule(crossVersion: String) extends Module {
+  def suffix = T { crossVersion }
+}
+
+def bar = T { "hello " + foo("2.10").suffix + " world " + foo("2.12").suffix }
+

Using Cross Modules from other Cross Modules

+

Targets in cross-modules can depend on one another the same way that external targets do:

+
object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12")
+class FooModule(crossVersion: String) extends Module {
+  def suffix = T { crossVersion }
+}
+
+object bar extends mill.Cross[BarModule]("2.10", "2.11", "2.12")
+class BarModule(crossVersion: String) extends Module {
+  def bigSuffix = T { foo(crossVersion).suffix().toUpperCase() }
+}
+
+

Here, you can run:

+
mill show foo[2.10].suffix
+mill show foo[2.11].suffix
+mill show foo[2.12].suffix
+mill show bar[2.10].bigSuffix
+mill show bar[2.11].bigSuffix
+mill show bar[2.12].bigSuffix
+

Cross Resolvers

+

You can define an implicit mill.define.Cross.Resolver within your cross-modules, which would let you use a shorthand foo() syntax when referring to other cross-modules with an identical set of cross values:

+
trait MyModule extends Module {
+  def crossVersion: String
+  implicit object resolver extends mill.define.Cross.Resolver[MyModule] {
+    def resolve[V <: MyModule](c: Cross[V]): V = c.itemMap(List(crossVersion))
+  }
+}
+
+object foo extends mill.Cross[FooModule]("2.10", "2.11", "2.12")
+class FooModule(val crossVersion: String) extends MyModule {
+  def suffix = T { crossVersion }
+}
+
+object bar extends mill.Cross[BarModule]("2.10", "2.11", "2.12")
+class BarModule(val crossVersion: String) extends MyModule {
+  def longSuffix = T { "_" + foo().suffix() }
+}
+
+

While the example resolver simply looks up the target Cross value for the cross-module instance with the same crossVersion, you can make the resolver arbitrarily complex. E.g. the resolver for mill.scalalib.CrossSbtModule looks for a cross-module instance whose scalaVersion is binary compatible (e.g. 2.10.5 is compatible with 2.10.3) with the current cross-module.


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


ModulesExtending Mill
\ No newline at end of file diff --git a/page/extending-mill.html b/page/extending-mill.html new file mode 100644 index 00000000..f6c7d0e7 --- /dev/null +++ b/page/extending-mill.html @@ -0,0 +1,210 @@ +Extending Mill

Extending Mill

Cross BuildsMill Internals

There are many different ways of extending Mill, depending on how much customization and flexibility you need. This page will go through your options from the easiest/least-flexible to the hardest/most-flexible.

Custom Targets & Commands

+

The simplest way of adding custom functionality to Mill is to define a custom Target or Command:

+
def foo = T { ... }
+def bar(x: Int, s: String) = T.command { ... }
+
+

These can depend on other Targets, contain arbitrary code, and be placed top-level or within any module. If you have something you just want to do that isn't covered by the built-in ScalaModules/ScalaJSModules, simply write a custom Target (for cached computations) or Command (for un-cached actions) and you're done.

+

For subprocess/filesystem operations, you can use the Ammonite-Ops library that comes bundled with Mill, or even plain java.nio/java.lang.Process. Each target gets its own T.ctx().dest folder that you can use to place files without worrying about colliding with other targets.

+

This covers use cases like:

Compile some Javascript with Webpack and put it in your runtime classpath:

+
def doWebpackStuff(sources: Seq[PathRef]): PathRef = ???
+
+def javascriptSources = T.sources { millSourcePath / "js" }
+def compiledJavascript = T { doWebpackStuff(javascriptSources()) }  
+object foo extends ScalaModule {
+  def runClasspath = T { super.runClasspath() ++ compiledJavascript() }
+}
+

Deploy your compiled assembly to AWS

+
object foo extends ScalaModule {
+
+}
+
+def deploy(assembly: PathRef, credentials: String) = ???
+
+def deployFoo(credentials: String) = T.command { deploy(foo.assembly(), credentials) }
+

Custom Workers

+

Custom Targets & Commands are re-computed from scratch each time; sometimes you want to keep values around in-memory when using --watch or the Build REPL. E.g. you may want to keep a webpack process running so webpack's own internal caches are hot and compilation is fast:

+
def webpackWorker = T.worker {
+  // Spawn a process using java.lang.Process and return it
+}
+
+def javascriptSources = T.sources { millSourcePath / "js" }
+
+def doWebpackStuff(webpackProcess: Process, sources: Seq[PathRef]): PathRef = ???
+
+def compiledJavascript = T { doWebpackStuff(webpackWorker(), javascriptSources()) }
+
+

Mill itself uses T.workers for its built-in Scala support: we keep the Scala compiler in memory between compilations, rather than discarding it each time, in order to improve performance.

Custom Modules

+
trait FooModule extends mill.Module {
+  def bar = T { "hello" }
+  def baz = T { "world" }
+}
+
+

Custom modules are useful if you have a common set of tasks that you want to re-used across different parts of your build. You simply define a trait inheriting from mill.Module, and then use that trait as many times as you want in various objects:

+
object foo1 extends FooModule
+object foo2 extends FooModule {
+  def qux = T { "I am Cow" }
+}  
+
+

You can also define a trait extending the built-in ScalaModule if you have common configuration you want to apply to all your ScalaModules:

+
trait FooModule extends ScalaModule {
+  def scalaVersion = "2.11.11"
+  object test extends Tests {
+    def ivyDeps = Agg(ivy"org.scalatest::scalatest:3.0.4")
+    def testFrameworks = Seq("org.scalatest.tools.Framework")
+  }
+}
+

import $file

+

If you want to define some functionality that can be used both inside and outside the build, you can create a new foo.sc file next to your build.sc, import $file.foo, and use it in your build.sc file:

+
// foo.sc
+def fooValue() = 31337 
+
+
// build.sc
+import $file.foo
+def printFoo() = T.command { println(foo.fooValue()) }
+
+

Mill's import $file syntax supports the full functionality of Ammonite Scripts

import $ivy

+

If you want to pull in artifacts from the public repositories (e.g. Maven Central) for use in your build, you can simply use import $ivy:

+
// build.sc
+import $ivy.`com.lihaoyi::scalatags:0.6.2`
+
+def generatedHtml = T {
+  import scalatags.Text.all._
+  html(
+    head(),
+    body(
+      h1("Hello"),
+      p("World")
+    )
+  ).render  
+}
+
+

This creates the generatedHtml target which can then be used however you would like: written to a file, further processed, etc.

+

If you want to publish re-usable libraries that other people can use in their builds, simply publish your code as a library to maven central.

+

For more information, see Ammonite's Ivy Dependencies documentation.

Evaluator Commands (experimental)

+

Evaluator Command are experimental and suspected to change. See issue #502 for details.

+

You can define a command that takes in the current Evaluator as an argument, which you can use to inspect the entire build, or run arbitrary tasks. For example, here is the mill.scalalib.GenIdea/idea command which uses this to traverse the module-tree and generate an Intellij project config for your build.

+
def idea(ev: Evaluator) = T.command {
+  mill.scalalib.GenIdea(
+    implicitly,
+    ev.rootModule,
+    ev.discover
+  )
+}
+
+

Many built-in tools are implemented as custom evaluator commands: all, inspect, resolve, show. If you want a way to run Mill commands and programmatically manipulate the tasks and outputs, you do so with your own evaluator command.


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Cross BuildsMill Internals
\ No newline at end of file diff --git a/page/mill-internals.html b/page/mill-internals.html new file mode 100644 index 00000000..acaccbf9 --- /dev/null +++ b/page/mill-internals.html @@ -0,0 +1,264 @@ +Mill Internals

Mill Internals

Extending MillContrib Modules

Mill Design Principles

+

A lot of Mill's design principles are intended to fix SBT's flaws, as described in the blog post What's wrong with SBT, building on the best ideas from tools like CBT and Bazel, and the ideas from my blog post Build Tools as Pure Functional Programs. Before working on Mill, read through that post to understand where it is coming from!

Dependency graph first

+

Mill's most important abstraction is the dependency graph of Tasks. Constructed using the T {...} T.task {...} T.command {...} syntax, these track the dependencies between steps of a build, so those steps can be executed in the correct order, queried, or parallelized.

+

While Mill provides helpers like ScalaModule and other things you can use to quickly instantiate a bunch of related tasks (resolve dependencies, find sources, compile, package into jar, ...) these are secondary. When Mill executes, the dependency graph is what matters: any other mode of organization (hierarchies, modules, inheritance, etc.) is only important to create this dependency graph of Tasks.

Builds are hierarchical

+

The syntax for running targets from the command line mill Foo.bar.baz is the same as referencing a target in Scala code, Foo.bar.baz

+

Everything that you can run from the command line lives in an object hierarchy in your build.sc file. Different parts of the hierarchy can have different Targets available: just add a new def foo = T {...} somewhere and you'll be able to run it.

+

Cross builds, using the Cross data structure, are just another kind of node in the object hierarchy. The only difference is syntax: from the command line you'd run something via mill core.cross[a].printIt while from code you use core.cross("a").printIt due to different restrictions in Scala/Bash syntax.

Caching by default

+

Every Target in a build, defined by def foo = T {...}, is cached by default. Currently this is done using a foo/meta.json file in the out/ folder. The Target is also provided a foo/ path on the filesystem dedicated to it, for it to store output files etc.

+

This happens whether you want it to or not. Every Target is cached, not just the "slow" ones like compile or assembly.

+

Caching is keyed on the .hashCode of the returned value. For Targets returning the contents of a file/folder on disk, they return PathRef instances whose hashcode is based on the hash of the disk contents. Serialization of the returned values is tentatively done using uPickle.

Short-lived build processes

+

The Mill build process is meant to be run over and over, not only as a long-lived daemon/console. That means we must minimize the startup time of the process, and that a new process must be able to re-construct the in-memory data structures where a previous process left off, in order to continue the build.

+

Re-construction is done via the hierarchical nature of the build: each Target foo.bar.baz has a fixed position in the build hierarchy, and thus a fixed position on disk out/foo/bar/baz/meta.json. When the old process dies and a new process starts, there will be a new instance of Target with the same implementation code and same position in the build hierarchy: this new Target can then load the out/foo/bar/baz/meta.json file and pick up where the previous process left off.

+

Minimizing startup time means aggressive caching, as well as minimizing the total amount of bytecode used: Mill's current 1-2s startup time is dominated by JVM classloading. In future, we may have a long lived console or nailgun/drip-based server/client models to speed up interactive usage, but we should always keep "cold" startup as fast as possible.

Static dependency graph and Applicative tasks

+

Tasks are Applicative, not Monadic. There is .map, .zip, but no .flatMap operation. That means that we can know the structure of the entire dependency graph before we start executing Tasks. This lets us perform all sorts of useful operations on the graph before running it:

+ +

In order to avoid making people using .map and .zip all over the place when defining their Tasks, we use the T {...}/T.task {...}/T.command {...} macros which allow you to use Task#apply() within the block to "extract" a value.

+
def test() = T.command {
+  TestRunner.apply(
+   "mill.UTestFramework",
+   runDepClasspath().map(_.path) :+ compile().path,
+   Seq(compile().path)
+  )
+}
+
+

This is roughly equivalent to the following:

+
def test() = T.command { T.zipMap(runDepClasspath, compile, compile) { 
+  (runDepClasspath1, compile2, compile3) =>
+  TestRunner.apply(
+    "mill.UTestFramework",
+    runDepClasspath1.map(_.path) :+ compile2.path,
+    Seq(compile3.path)
+  )
+  }
+}
+
+

This is similar to SBT's :=/.value macros, or scala-async's async/await. Like those, the T {...} macro should let users program most of their code in a "direct" style and have it "automatically" lifted into a graph of Tasks.

How Mill aims for Simple

+

Why should you expect that the Mill build tool can achieve simple, easy & flexible, where other build tools in the past have failed?

+

Build tools inherently encompass a huge number of different concepts:

+ +

These are a lot of questions to answer, and we haven't even started talking about actually compiling/running any code yet! If each such facet of a build was modelled separately, it's easy to have an explosion of different concepts that would make a build tool hard to understand.

+

Before you continue, take a moment to think: how would you answer each of those questions using an existing build tool you are familiar with? Different tools like SBT, Fake, Gradle or Grunt have very different answers.

+

Mill aims to provide the answer to these questions using as few, as familiar core concepts as possible. The entire Mill build is oriented around a few concepts:

+ +

These concepts are already familiar to anyone experienced in Scala (or any other programming language...), but are enough to answer all of the complicated build-related questions listed above.

The Object Hierarchy

+

The module hierarchy is the graph of objects, starting from the root of the build.sc file, that extend mill.Module. At the leaves of the hierarchy are the Targets you can run.

+

A Target's position in the module hierarchy tells you many things. For example, a Target at position core.test.compile would:

+ +

From the position of any Target within the object hierarchy, you immediately know how to run it, find its output files, find any caches, or refer to it from other Targets. You know up-front where the Target's data "lives" on disk, and are sure that it will never clash with any other Target's data.

The Call Graph

+

The Scala call graph of "which target references which other target" is core to how Mill operates. This graph is reified via the T {...} macro to make it available to the Mill execution engine at runtime. The call graph tells you:

+ +

The call graph within your Scala code is essentially a data-flow graph: by defining a snippet of code:

+
val b = ...
+val c = ...
+val d = ...
+val a = f(b, c, d)
+
+

you are telling everyone that the value a depends on the values of b c and d, processed by f. A build tool needs exactly the same data structure: knowing what Target depends on what other Targets, and what processing it does on its inputs!

+

With Mill, you can take the Scala call graph, wrap everything in the T {...} macro, and get a Target-dependency graph that matches exactly the call-graph you already had:

+
val b = T { ... }
+val c = T { ... }
+val d = T { ... }
+val a = T { f(b(), c(), d()) }
+
+

Thus, if you are familiar with how data flows through a normal Scala program, you already know how data flows through a Mill build! The Mill build evaluation may be incremental, it may cache things, it may read and write from disk, but the fundamental syntax, and the data-flow that syntax represents, is unchanged from your normal Scala code.

Instantiating Traits & Classes

+

Classes and traits are a common way of re-using common data structures in Scala: if you have a bunch of fields which are related and you want to make multiple copies of those fields, you put them in a class/trait and instantiate it over and over.

+

In Mill, inheriting from traits is the primary way for re-using common parts of a build:

+ +

In normal Scala, you bundle up common fields & functionality into a class you can instantiate over and over, and you can override the things you want to customize. Similarly, in Mill, you bundle up common parts of a build into traits you can instantiate over and over, and you can override the things you want to customize. "Subprojects", "cross-builds", and many other concepts are reduced to simply instantiating a trait over and over, with tweaks.

Prior Work

SBT

+

Mill is built as a substitute for SBT, whose problems are described here. Nevertheless, Mill takes on some parts of SBT (builds written in Scala, Task graph with an Applicative "idiom bracket" macro) where it makes sense.

Bazel

+

Mill is largely inspired by Bazel. In particular, the single-build-hierarchy, where every Target has an on-disk-cache/output-folder according to their position in the hierarchy, comes from Bazel.

+

Bazel is a bit odd in its own right. The underlying data model is good (hierarchy + cached dependency graph) but getting there is hell. It (like SBT) is also a 3-layer interpretation model, but layers 1 & 2 are almost exactly the same: mutable python which performs global side effects (layer 3 is the same dependency-graph evaluator as SBT/mill).

+

You end up having to deal with a non-trivial python codebase where everything happens via:

+
do_something(name="blah")
+
+

or

+
do_other_thing(dependencies=["blah"])
+
+
+

where "blah" is a global identifier that is often constructed programmatically via string concatenation and passed around. This is quite challenging.

+

Having the two layers be “just python” is great since people know python, but I think it unnecessary to have two layers ("evaluating macros" and "evaluating rule impls") that are almost exactly the same, and I think making them interact via return values rather than via a global namespace of programmatically-constructed strings would make it easier to follow.

+

With Mill, I’m trying to collapse Bazel’s Python layer 1 & 2 into just 1 layer of Scala, and have it define its dependency graph/hierarchy by returning values, rather than by calling global-side-effecting APIs. I've had trouble trying to teach people how-to-bazel at work, and am pretty sure we can make something that's easier to use.

Scala.Rx

+

Mill's "direct-style" applicative syntax is inspired by my old Scala.Rx project. While there are differences (Mill captures the dependency graph lexically using Macros, Scala.Rx captures it at runtime), they are pretty similar.

+

The end-goal is the same: to write code in a "direct style" and have it automatically "lifted" into a dependency graph, which you can introspect and use for incremental updates at runtime.

+

Scala.Rx is itself build upon the 2010 paper Deprecating the Observer Pattern.

CBT

+

Mill looks a lot like CBT. The inheritance based model for customizing Modules/ScalaModules comes straight from there, as does the "command line path matches Scala selector path" idea. Most other things are different though: the reified dependency graph, the execution model, the caching module all follow Bazel more than they do CBT


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Extending MillContrib Modules
\ No newline at end of file diff --git a/page/modules.html b/page/modules.html new file mode 100644 index 00000000..e708f9b6 --- /dev/null +++ b/page/modules.html @@ -0,0 +1,221 @@ +Modules

Modules

TasksCross Builds

Mill modules are objects extending mill.Module, and let you group related tasks together to keep things neat and organized. Mill's comes with built in modules such as mill.scalalib.ScalaModule and mill.scalalib.CrossSbtModule, but you can use modules for other purposes as well.

Using Modules

+

The path to a Mill module from the root of your build file corresponds to the path you would use to run tasks within that module from the command line. e.g. for the following build:

+
object foo extends mill.Module {
+  def bar = T { "hello" }
+  object baz extends mill.Module {
+    def qux = T { "world" } 
+  } 
+}
+
+

You would be able to run the two targets via mill foo.bar or mill foo.baz.qux. You can use mill show foo.bar or mill show foo.baz.qux to make Mill echo out the string value being returned by each Target. The two targets will store their output metadata & files at ./out/foo/bar and ./out/foo/baz/qux respectively.

+

Modules also provide a way to define and re-use common collections of tasks, via Scala traits. For example, you can define your own FooModule trait:

+
trait FooModule extends mill.Module {
+  def bar = T { "hello" }
+  def baz = T { "world" }
+}
+
+

And use it to define multiple modules with the same bar and baz targets, along with any other customizations such as qux:

+
object foo1 extends FooModule
+object foo2 extends FooModule {
+  def qux = T { "I am Cow" }
+}  
+
+

This would make the following targets available from the command line

+ +

The built-in mill.scalalib package uses this to define mill.scalalib.ScalaModule, mill.scalalib.SbtModule and mill.scalalib.TestScalaModule, all of which contain a set of "standard" operations such as compile, jar or assembly that you may expect from a typical Scala module.

+

When defining your own module abstractions, in general you should use traits and not classes, except in the case of Cross Builds.

Overriding Targets

+
trait BaseModule extends Module {
+  def foo = T { Seq("base") }
+  def cmd(i: Int) = T.command { Seq("base" + i) }
+}
+
+object canOverrideSuper extends BaseModule {
+  def foo = T { super.foo() ++ Seq("object") }
+  def cmd(i: Int) = T.command { super.cmd(i)() ++ Seq("object" + i) }
+}
+
+

You can override targets and commands to customize them or change what they do. The overridden version is available via super. You can omit the override keyword in Mill builds.

millSourcePath

+

Each Module has a millSourcePath field that corresponds to the path that module expects its input files to be on disk. Re-visiting our examples above:

+
object foo extends mill.Module {
+  def bar = T { "hello" }
+  object baz extends mill.Module {
+    def qux = T { "world" } 
+  } 
+}
+
+

The foo module has a millSourcePath of ./foo, while the foo.baz module has a millSourcePath of ./foo/baz.

+

You can use millSourcePath to automatically set the source folders of your modules to match the build structure. You are not forced to rigidly use millSourcePath to define the source folders of all your code, but it can simplify the common case where you probably want your build-layout and on-disk-layout to be the same.

+

E.g. for mill.scalalib.ScalaModule, the Scala source code is assumed by default to be in millSourcePath/"src" while resources are automatically assumed to be in millSourcePath/"resources".

+

You can override millSourcePath:

+
object foo extends mill.Module {
+  def millSourcePath = super.millSourcePath / "lols"
+  def bar = T { "hello" }
+  object baz extends mill.Module {
+    def qux = T { "world" } 
+  } 
+}
+
+

And any overrides propagate down to the module's children: in the above example, module foo would have its millSourcePath be ./foo/lols while module foo.baz would have its millSourcePath be ./foo/lols/baz.

+

Note that millSourcePath is generally only used for a module's input source files. Output is always in the out/ folder and cannot be changed, e.g. even with the overridden millSourcePath the output paths are still the default ./out/foo/bar and ./out/foo/baz/qux folders.

External Modules

+

Libraries for use in Mill can define ExternalModules: Modules which are shared between all builds which use that library:

+
package foo
+import mill._
+
+object Bar extends mill.define.ExternalModule {
+  def baz = T { 1 }
+  def qux() = T.command { println(baz() + 1) }
+
+  lazy val millDiscover = mill.define.Discover[this.type]
+}
+
+

In the above example, foo.Bar is an ExternalModule living within the foo Java package, containing the baz target and qux command. Those can be run from the command line via:

+
mill foo.Bar/baz
+mill foo.Bar/qux
+
+

ExternalModules are useful for someone providing a library for use with Mill that is shared by the entire build: for example, mill.scalalib.ZincWorkerApi/zincWorker provides a shared Scala compilation service & cache that is shared between all ScalaModules, and mill.scalalib.GenIdea/idea lets you generate IntelliJ projects without needing to define your own T.command in your build.sc file

Foreign Modules

+

Mill can load other mill projects from external (or sub) folders, using Ammonite's $file magic import, allowing you to depend on foreign modules. This allows you, for instance, to depend on other projects' sources, or to split your build logic into smaller files.

+

For instance, assuming the following structure:

+
foo/
+    build.sc
+    bar/
+        build.sc 
+baz/
+    build.sc     
+
+

you can write the following in foo/build.sc:

+

+import $file.bar.build
+import $file.^.baz.build
+import mill._ 
+
+def someFoo = T {
+
+    ^.baz.build.someBaz(...)
+    bar.build.someBar(...) 
+    ...
+}
+
+

The output of the foreign tasks will be cached under foo/out/foreign-modules/.


About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


TasksCross Builds
\ No newline at end of file diff --git a/page/tasks.html b/page/tasks.html new file mode 100644 index 00000000..864870f7 --- /dev/null +++ b/page/tasks.html @@ -0,0 +1,324 @@ +Tasks

Tasks

Common Project LayoutsModules

One of Mill's core abstractions is its Task Graph: this is how Mill defines, orders and caches work it needs to do, and exists independently of any support for building Scala.

+

The following is a simple self-contained example using Mill to compile Java:

+
import ammonite.ops._, mill._
+
+// sourceRoot -> allSources -> classFiles
+//                                |
+//                                v
+//           resourceRoot ---->  jar
+
+def sourceRoot = T.sources { os.pwd / 'src }
+
+def resourceRoot = T.sources { os.pwd / 'resources }
+
+def allSources = T { sourceRoot().flatMap(p => os.walk(p.path)).map(PathRef(_)) }
+
+def classFiles = T { 
+  os.makeDir.all(T.ctx().dest)
+  
+  %("javac", allSources().map(_.path.toString()), "-d", T.ctx().dest)(wd = T.ctx().dest)
+  PathRef(T.ctx().dest) 
+}
+
+def jar = T { Jvm.createJar(Loose.Agg(classFiles().path) ++ resourceRoot().map(_.path)) }
+
+def run(mainClsName: String) = T.command {
+  os.proc('java, "-cp", classFiles().path, mainClsName).call()
+}
+
+

Here, we have two T.sources, sourceRoot and resourceRoot, which act as the roots of our task graph. allSources depends on sourceRoot by calling sourceRoot() to extract its value, classFiles depends on allSources the same way, and jar depends on both classFiles and resourceRoot.

+

Filesystem operations in Mill are done using the Ammonite-Ops library.

+

The above build defines the following task graph:

+
sourceRoot -> allSources -> classFiles
+                               |
+                               v
+          resourceRoot ---->  jar
+
+

When you first evaluate jar (e.g. via mill jar at the command line), it will evaluate all the defined targets: sourceRoot, allSources, classFiles, resourceRoot and jar.

+

Subsequent mill jars will evaluate only as much as is necessary, depending on what input sources changed:

+ +

Apart from the foo() call-sites which define what each targets depend on, the code within each T {...} wrapper is arbitrary Scala code that can compute an arbitrary result from its inputs.

Different Kinds of Tasks

+

There are three primary kinds of Tasks that you should care about:

+

Targets

+
def allSources = T { os.walk(sourceRoot().path).map(PathRef(_)) }
+
+

Targets are defined using the def foo = T {...} syntax, and dependencies on other targets are defined using foo() to extract the value from them. Apart from the foo() calls, the T {...} block contains arbitrary code that does some work and returns a result.

+

Each target, e.g. classFiles, is assigned a path on disk as scratch space & to store its output files at out/classFiles/dest/, and its returned metadata is automatically JSON-serialized and stored at out/classFiles/meta.json. The return-value of targets has to be JSON-serializable via uPickle.

+

In case you want to return your own case class (e.g. MyCaseClass), you can make it JSON-serializable by adding the following implicit def to its companion object:

+
object MyCaseClass {
+  implicit def rw: upickle.default.ReadWriter[MyCaseClass] = upickle.default.macroRW
+}
+
+

If you want to return a file or a set of files as the result of a Target, write them to disk within your T.ctx().dest available through the Task Context API and return a PathRef to the files you wrote.

+

If a target's inputs change but its output does not, e.g. someone changes a comment within the source files that doesn't affect the classfiles, then downstream targets do not re-evaluate. This is determined using the .hashCode of the Target's return value. For targets returning ammonite.ops.Paths that reference files on disk, you can wrap the Path in a PathRef (shown above) whose .hashCode() will include the hashes of all files on disk at time of creation.

+

The graph of inter-dependent targets is evaluated in topological order; that means that the body of a target will not even begin to evaluate if one of its upstream dependencies has failed. This is unlike normal Scala functions: a plain old function foo would evaluate halfway and then blow up if one of foo's dependencies throws an exception.

+

Targets cannot take parameters and must be 0-argument defs defined directly within a Module body.

Sources

+
def sourceRootPath = os.pwd / 'src
+
+def sourceRoots = T.sources { sourceRootPath }
+
+

Sources are defined using T.sources { ... }, taking one-or-more ammonite.ops.Paths as arguments. A Source is a subclass of Target[Seq[PathRef]]: this means that its build signature/hashCode depends not just on the path it refers to (e.g. foo/bar/baz) but also the MD5 hash of the filesystem tree under that path.

+

T.sources also has an overload which takes Seq[PathRef], to let you override-and-extend source lists the same way you would any other T {...} definition:

+
def additionalSources = T.sources { os.pwd / 'additionalSources }
+def sourceRoots = T.sources { super.sourceRoots() ++ additionalSources() }
+

Commands

+
def run(mainClsName: String) = T.command {
+  os.proc('java, "-cp", classFiles().path, mainClsName).call()
+}
+
+

Defined using T.command { ... } syntax, Commands can run arbitrary code, with dependencies declared using the same foo() syntax (e.g. classFiles() above). Commands can be parametrized, but their output is not cached, so they will re-evaluate every time even if none of their inputs have changed.

+

Like Targets, a command only evaluates after all its upstream dependencies have completed, and will not begin to run if any upstream dependency has failed.

+

Commands are assigned the same scratch/output folder out/run/dest/ as Targets are, and its returned metadata stored at the same out/run/meta.json path for consumption by external tools.

+

Commands can only be defined directly within a Module body.

Task Context API

+

There are several APIs available to you within the body of a T {...} or T.command {...} block to help you write the code implementing your Target or Command:

mill.api.Ctx.Dest

+ +

This is the unique out/classFiles/dest/ path or out/run/dest/ path that is assigned to every Target or Command. It is cleared before your task runs, and you can use it as a scratch space for temporary files or a place to put returned artifacts. This is guaranteed to be unique for every Target or Command, so you can be sure that you will not collide or interfere with anyone else writing to those same paths.

mill.api.Ctx.Log

+ +

This is the default logger provided for every task. While your task is running, System.out and System.in are also redirected to this logger. The logs for a task are streamed to standard out/error as you would expect, but each task's specific output is also streamed to a log file on disk, e.g. out/run/log or out/classFiles/log for you to inspect later.

+

Messages logged with log.debug appear by default only in the log files. You can use the --debug option when running mill to show them on the console too.

mill.api.Ctx.Env

+ +

Mill keeps a long-lived JVM server to avoid paying the cost of recurrent classloading. Because of this, running System.getenv in a task might not yield up to date environment variables, since it will be initialised when the server starts, rather than when the client executes. To circumvent this, mill's client sends the environment variables to the server as it sees them, and the server makes them available as a Map[String, String] via the Ctx API.

+

If the intent is to always pull the latest environment values, the call should be wrapped in an Input as such:

+
def envVar = T.input { T.ctx().env.get("ENV_VAR") }
+

Other Tasks

+

Anonymous Tasks

+
def foo(x: Int) = T.task { ... x ... bar() ... }
+
+

You can define anonymous tasks using the T.task { ... } syntax. These are not runnable from the command-line, but can be used to share common code you find yourself repeating in Targets and Commands.

+
def downstreamTarget = T { ... foo() ... } 
+def downstreamCommand = T.command { ... foo() ... } 
+
+

Anonymous task's output does not need to be JSON-serializable, their output is not cached, and they can be defined with or without arguments. Unlike Targets or Commands, anonymous tasks can be defined anywhere and passed around any way you want, until you finally make use of them within a downstream target or command.

+

While an anonymous task foo's own output is not cached, if it is used in a downstream target bar and the upstream targets baz qux haven't changed, bar's cached output will be used and foo's evaluation will be skipped altogether.

Persistent Targets

+
def foo = T.persistent { ... }
+
+

Identical to Targets, except that the dest/ folder is not cleared in between runs.

+

This is useful if you are running external incremental-compilers, such as Scala's Zinc, Javascript's WebPack, which rely on filesystem caches to speed up incremental execution of their particular build step.

+

Since Mill no longer forces a "clean slate" re-evaluation of T.persistent targets, it is up to you to ensure your code (or the third-party incremental compilers you rely on!) are deterministic. They should always converge to the same outputs for a given set of inputs, regardless of what builds and what filesystem states existed before.

Inputs

+
def foo = T.input { ... }
+
+

A generalization of Sources, T.inputs are tasks that re-evaluate every time (unlike Anonymous Tasks), containing an arbitrary block of code.

+

Inputs can be used to force re-evaluation of some external property that may affect your build. For example, if I have a Target bar that makes use of the current git version:

+
def bar = T { ... os.proc("git", "rev-parse", "HEAD").call().out.string ... }
+
+

bar will not know that git rev-parse can change, and will not know to re-evaluate when your git rev-parse HEAD does change. This means bar will continue to use any previously cached value, and bar's output will be out of date!

+

To fix this, you can wrap your git rev-parse HEAD in a T.input:

+
def foo = T.input { os.proc("git", "rev-parse", "HEAD").call().out.string }
+def bar = T { ... foo() ... }
+
+

This makes foo always re-evaluate on every build; if git rev-parse HEAD does not change, that will not invalidate bar's caches. But if git rev-parse HEAD does change, foo's output will change and bar will be correctly invalidated and re-compute using the new version of foo.

+

Note that because T.inputs re-evaluate every time, you should ensure that the code you put in T.input runs quickly. Ideally it should just be a simple check "did anything change?" and any heavy-lifting can be delegated to downstream targets.

Workers

+
def foo = T.worker { ... }
+
+

Most tasks dispose of their in-memory return-value every evaluation; in the case of Targets, this is stored on disk and loaded next time if necessary, while Commands just re-compute them each time. Even if you use --watch or the Build REPL to keep the Mill process running, all this state is still discarded and re-built every evaluation.

+

Workers are unique in that they store their in-memory return-value between evaluations. This makes them useful for storing in-memory caches or references to long-lived external worker processes that you can re-use.

+

Mill uses workers to manage long-lived instances of the Zinc Incremental Scala Compiler and the Scala.js Optimizer. This lets us keep them in-memory with warm caches and fast incremental execution.

+

Like Persistent Targets, Workers inherently involve mutable state, and it is up to the implementation to ensure that this mutable state is only used for caching/performance and does not affect the externally-visible behavior of the worker.

Cheat Sheet

+

The following table might help you make sense of the small collection of different Task types:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Target Command Source/Input Anonymous Task Persistent Target Worker
Cached on Disk X X X
Must be JSON Writable X X X
Must be JSON Readable X X
Runnable from the Command Line X X X
Can Take Arguments X X
Cached between Evaluations X

About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your Company to chip in!) via Patreon so he can continue his open-source work


Common Project LayoutsModules
\ No newline at end of file diff --git a/page/thirdparty-modules.html b/page/thirdparty-modules.html new file mode 100644 index 00000000..52c8839f --- /dev/null +++ b/page/thirdparty-modules.html @@ -0,0 +1,351 @@ +Thirdparty Modules

Thirdparty Modules

Contrib Modules

The modules (aka plugins) in this section are developed/maintained outside the mill git tree.

+

Besides the documentation provided here, we urge you to consult the respective linked plugin documentation pages. The usage examples given here are most probably incomplete and sometimes outdated.

+

If you develop or maintain a mill plugin, please create a pull request to get your plugin listed here.

AspectJ

+

AspectJ compiler support for mill.

+

Project home: https://github.com/lefou/mill-aspectj

Quickstart

+
import mill._
+import mill.scalalib._
+import mill.define._
+
+// Load the plugin from Maven Central via ivy/coursier
+import $ivy.`de.tototec::de.tobiasroeser.mill.aspectj:0.1.0`, de.tobiasroeser.mill.aspectj._
+
+object main extends AspectjModule {
+
+  // Select the AspectJ version
+  def aspectjVersion = T{ "{aspectjVersion}" }
+
+  // Set AspectJ options, e.g. the language level and annotation processor
+  // Run `mill main.ajcHelp` to get a list of supported options
+  def ajcOptions = Seq("-8", "-proc:none")
+
+}
+

Configuration

+

Your module needs to extend de.tobiasroeser.mill.aspectj.AspectjModule which itself extends mill.scalalib.JavaModule.

+

The module trait de.tobiasroeser.mill.aspectj.AspectjModule has various configuration options (over those from mill.scalalib.JavaModule).

+

The most essential targets are:

+ +

For a complete list of configuration options and more documentation, please refer to the project home page.

Bash Completion

+

Limited bash completion support.

+

Project home: https://github.com/lefou/mill-bash-completion

DGraph

+

Show transitive dependencies of your build in your browser.

+

Project home: https://github.com/ajrnz/mill-dgraph

Quickstart

+
import $ivy.`com.github.ajrnz::mill-dgraph:0.2.0`
+
+
sh> mill plugin.dgraph.browseDeps(proj)()
+

Ensime

+

Create an .ensime file for your build.

+

Project home: https://github.com/yyadavalli/mill-ensime

Quickstart

+
import $ivy.`fun.valycorp::mill-ensime:0.0.1`
+
+
sh> mill fun.valycorp.mill.GenEnsime/ensimeConfig
+

Integration Testing Mill Plugins

+

Integration testing for mill plugins.

Quickstart

+

We assume, you have a mill plugin named mill-demo

+
// build.sc
+import mill._, mill.scalalib._
+object demo extends ScalaModule with PublishModule {
+  // ...
+}
+
+

Add an new test sub-project, e.g. it.

+
import $ivy.`de.tototec::de.tobiasroeser.mill.integrationtest:0.1.0`
+import de.tobiasroeser.mill.integrationtest._
+
+object it extends MillIntegrationTest {
+
+  def millTestVersion = "{exampleMillVersion}"
+
+  def pluginsUnderTest = Seq(demo)
+
+}
+
+

Your project should now look similar to this:

+
.
++-- demo/
+|   +-- src/
+|
++-- it/
+    +-- src/
+        +-- 01-first-test/
+        |   +-- build.sc
+        |   +-- src/
+        |
+        +-- 02-second-test/
+            +-- build.sc
+
+

As the buildfiles build.sc in your test cases typically want to access the locally built plugin(s), the plugin publishes all plugins referenced under pluginsUnderTest to a temporary ivy repository, just before the test is executed. The mill version used in the integration test then uses that temporary ivy repository.

+

Instead of referring to your plugin with import $ivy.'your::plugin:version', you can use the following line, which ensures you will use the correct locally built plugins.

+
// build.sc
+import $exec.plugins
+
+

Effectively, at execution time, this line gets replaced by the content of plugins.sc, a file which was generated just before the test started to execute.

Configuration and Targets

+

The mill-integrationtest plugin provides the following targets.

Mandatory configuration

+

Optional configuration

+

Commands

+

JBake

+

Create static sites/blogs with JBake.

+

Plugin home: https://github.com/lefou/mill-jbake

+

JBake home: https://jbake.org

Quickstart

+
// build.sc
+import mill._
+import $ivy.`de.tototec::de.tobiasroeser.mill.jbake:0.1.0`
+import de.tobiasroeser.mill.jbake._
+
+object site extends JBakeModule {
+
+  def jbakeVersion = "2.6.4"
+
+}
+
+

Generate the site:

+
bash> mill site.jbake
+
+

Start a local Web-Server on Port 8820 with the generated site:

+
bash> mill site.jbakeServe
+

JBuildInfo

+

This is a mill module similar to BuildInfo but for Java. It will generate a Java class containing information from your build.

+

Project home: https://github.com/carueda/mill-jbuildinfo

+

To declare a module that uses this plugin, extend the com.github.carueda.mill.JBuildInfo trait and provide the desired information via the buildInfoMembers method:

+
// build.sc
+import $ivy.`com.github.carueda::jbuildinfo:0.1.2`
+import com.github.carueda.mill.JBuildInfo
+import mill.T
+
+object project extends JBuildInfo {
+  def buildInfoMembers: T[Map[String, String]] = T {
+    Map(
+      "name" -> "some name",
+      "version" -> "x.y.z"
+    )
+  }
+}
+
+

This will generate:

+
// BuildInfo.java
+public class BuildInfo {
+  public static final String getName() { return "some name"; }
+  public static final String getVersion() { return "x.y.z"; }
+}
+

Configuration options

+

Mill Wrapper Scripts

+

Small script to automatically fetch and execute mill build tool.

+

Project home: https://github.com/lefou/millw

How it works

+

millw is a small wrapper script around mill and works almost identical to mill. It automatically downloads a mill release into $HOME/.mill/download.

+

The mill version to be used will be determined by the following steps. The search ends after the first step that results in a version.

+ +

+ sh $ mill --mill-version 0.3.6 --disable-ticker version + 0.3.6 +

+ +

+ sh $ echo -n "0.3.6" > .mill-version + sh $ mill --disable-ticker version + 0.3.6 +

+

The values of the DEFAULT_MILL_VERSION variable inside the script will be used.

Use cases

As mill executable

+

Instead of installing mill, you can just place the script into your local $HOME/bin directory and rename it to mill.

+

If you need a specific mill version in a project directory, just place a .mill-version file containing the desired mill version. Example: setting mill 0.3.6 as the local mill version

+
sh $ echo -n "0.3.6" > .mill-version
+

As a wrapper script in your project

+

To make the start for others easier or to always have the correct mill version in your CI environment, you can just place a copy of the script as millw in your project root directory.

+

You should change the DEFAULT_MILL_VERSION variable in that script to the correct version you want to use and add the file under version control.

OSGi

+

Produce OSGi Bundles with mill.

+

Project home: https://github.com/lefou/mill-osgi

Quickstart

+
import mill._, mill.scalalib._
+import $ivy.`de.tototec::de.tobiasroeser.mill.osgi:0.0.5`
+import de.tobiasroeser.mill.osgi._
+
+object project extends ScalaModule with OsgiBundleModule {
+
+  def bundleSymbolicName = "com.example.project"
+
+  def osgiHeaders = T{ super.osgiHeaders().copy(
+    `Export-Package`   = Seq("com.example.api"),
+    `Bundle-Activator` = Some("com.example.internal.Activator")
+  )}
+
+  // other settings ...
+
+}
+

PublishM2

+

Mill plugin to publish artifacts into a local Maven repository.

+

Project home: https://github.com/lefou/mill-publishM2

Quickstart

+

Just mix-in the PublishM2Module into your project. PublishM2Module already extends mill's built-in PublishModule.

+

File: build.sc

+
import mill._, scalalib._, publish._
+
+import $ivy.`de.tototec::de.tobiasroeser.mill.publishM2:0.0.1`
+import de.tobiasroeser.mill.publishM2._
+
+object project extends PublishModule with PublishM2Module {
+  // ...
+}
+
+

Publishing to default local Maven repository

+
> mill project.publishM2Local
+[40/40] project.publishM2Local
+Publishing to /home/user/.m2/repository
+
+

Publishing to custom local Maven repository

+
> mill project.publishM2Local /tmp/m2repo
+[40/40] project.publishM2Local
+Publishing to /tmp/m2repo
+

About the Author: Haoyi is a software engineer, an early contributor to Scala.js, and the author of many open-source Scala tools such as Mill, the Ammonite REPL and FastParse.

If you've enjoyed using Mill, or enjoyed using Haoyi's other open source libraries, please chip in (or get your company to chip in!) via Patreon so he can continue his open-source work


Contrib Modules
\ No newline at end of file -- cgit v1.2.3