author      Li Haoyi <haoyi.sg@gmail.com>  2017-12-30 19:01:03 -0800
committer   Li Haoyi <haoyi.sg@gmail.com>  2017-12-30 20:35:31 -0800
commit      356dca0f92931b07e1a80013aefb025b6a7d7d42 (patch)
tree        6517cbd15943361cbd896e64a7007c058f20281d /scalalib/src/main/scala
parent      e84eff79f6f23b9a6518c74ba137ab4ce1347929 (diff)
download    mill-356dca0f92931b07e1a80013aefb025b6a7d7d42.tar.gz
            mill-356dca0f92931b07e1a80013aefb025b6a7d7d42.tar.bz2
            mill-356dca0f92931b07e1a80013aefb025b6a7d7d42.zip
`Core` -> `core`, for consistency with SBT naming schemes
`ScalaPlugin` -> `scalalib`, to avoid confusion with Scala compiler plugins
`ScalaModule` -> `module`, to be used via `scalalib.Module`, avoiding unnecessary duplication in the name prefix
`plugin` -> `moduledefs`, to more accurately describe what it does (since it includes `Cacher` as well)
Diffstat (limited to 'scalalib/src/main/scala')
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/Dep.scala | 35
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/GenIdea.scala | 186
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/Lib.scala | 226
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/Main.scala | 7
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/Module.scala | 356
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/TestRunner.scala | 170
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/package.scala | 5
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/Ivy.scala | 53
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/JsonFormatters.scala | 11
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/LocalPublisher.scala | 33
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/Pom.scala | 88
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/SonatypeHttpApi.scala | 130
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/SonatypePublisher.scala | 148
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/package.scala | 3
-rw-r--r--  scalalib/src/main/scala/mill/scalalib/publish/settings.scala | 70
15 files changed, 1521 insertions(+), 0 deletions(-)
diff --git a/scalalib/src/main/scala/mill/scalalib/Dep.scala b/scalalib/src/main/scala/mill/scalalib/Dep.scala
new file mode 100644
index 00000000..3d4c43ac
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/Dep.scala
@@ -0,0 +1,35 @@
+package mill.scalalib
+import mill.util.JsonFormatters._
+import upickle.default.{macroRW, ReadWriter => RW}
+sealed trait Dep
+object Dep{
+ def apply(org: String, name: String, version: String): Dep = {
+ this(coursier.Dependency(coursier.Module(org, name), version))
+ }
+ case class Java(dep: coursier.Dependency) extends Dep
+ object Java{
+ implicit def rw: RW[Java] = macroRW
+ def apply(org: String, name: String, version: String): Dep = {
+ Java(coursier.Dependency(coursier.Module(org, name), version))
+ }
+ }
+ implicit def default(dep: coursier.Dependency): Dep = new Java(dep)
+ def apply(dep: coursier.Dependency) = Scala(dep)
+ case class Scala(dep: coursier.Dependency) extends Dep
+ object Scala{
+ implicit def rw: RW[Scala] = macroRW
+ def apply(org: String, name: String, version: String): Dep = {
+ Scala(coursier.Dependency(coursier.Module(org, name), version))
+ }
+ }
+ case class Point(dep: coursier.Dependency) extends Dep
+ object Point{
+ implicit def rw: RW[Point] = macroRW
+ def apply(org: String, name: String, version: String): Dep = {
+ Point(coursier.Dependency(coursier.Module(org, name), version))
+ }
+ }
+ implicit def rw = RW.merge[Dep](
+ Java.rw, Scala.rw, Point.rw
+ )
+}
\ No newline at end of file
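The three `Dep` cases control how the artifact name is suffixed during resolution (see `Lib.resolveDependencies` further down): `Java` is used verbatim, `Scala` gets the binary Scala version appended, and `Point` the full version. A minimal REPL-style sketch, assuming the mill-scalalib classes from this commit are on the classpath (coordinates are illustrative):

  import mill.scalalib.Dep

  val java  = Dep.Java("org.scala-lang", "scala-library", "2.12.4")  // resolved as scala-library
  val scala = Dep.Scala("com.lihaoyi", "upickle", "0.5.1")           // resolved as upickle_<scalaBinaryVersion>, e.g. upickle_2.12
  val point = Dep.Point("com.lihaoyi", "mill-bridge", "0.1")         // resolved as mill-bridge_<scalaVersion>, e.g. mill-bridge_2.12.4

  // The bare Dep(...) constructor defaults to the Scala flavour:
  assert(Dep("com.lihaoyi", "upickle", "0.5.1").isInstanceOf[Dep.Scala])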
diff --git a/scalalib/src/main/scala/mill/scalalib/GenIdea.scala b/scalalib/src/main/scala/mill/scalalib/GenIdea.scala
new file mode 100644
index 00000000..6fd5031a
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/GenIdea.scala
@@ -0,0 +1,186 @@
+package mill.scalalib
+
+import ammonite.ops._
+import mill.define.Target
+import mill.discover.Mirror.Segment
+import mill.discover.{Discovered, Mirror}
+import mill.eval.{Evaluator, PathRef}
+import mill.util.Ctx.LoaderCtx
+import mill.util.{OSet, PrintLogger}
+
+object GenIdea {
+
+ def apply()(implicit ctx: LoaderCtx): Unit = {
+ val mapping = ctx.load(mill.discover.Discovered.Mapping)
+ val pp = new scala.xml.PrettyPrinter(999, 4)
+ rm! pwd/".idea"
+ rm! pwd/".idea_modules"
+
+ val workspacePath = pwd / 'out
+
+ val evaluator = new Evaluator(workspacePath, mapping, new PrintLogger(true))
+
+ for((relPath, xml) <- xmlFileLayout(evaluator)){
+ write.over(pwd/relPath, pp.format(xml))
+ }
+ }
+
+ def xmlFileLayout[T](evaluator: Evaluator[T]): Seq[(RelPath, scala.xml.Node)] = {
+
+ val modules = Mirror
+ .traverse(evaluator.mapping.base, evaluator.mapping.mirror){ (h, p) =>
+ h.node(evaluator.mapping.base, p.reverse.map{case Mirror.Segment.Cross(vs) => vs.toList case _ => Nil}.toList) match {
+ case m: Module => Seq(p -> m)
+ case _ => Nil
+ }
+ }
+ .map{case (p, v) => (p.reverse, v)}
+
+ val resolved = for((path, mod) <- modules) yield {
+ val Seq(resolvedCp: Seq[PathRef], resolvedSrcs: Seq[PathRef]) =
+ evaluator.evaluate(OSet(mod.externalCompileDepClasspath, mod.externalCompileDepSources))
+ .values
+
+ (path, resolvedCp.map(_.path).filter(_.ext == "jar") ++ resolvedSrcs.map(_.path), mod)
+ }
+ val moduleLabels = modules.map(_.swap).toMap
+
+ val fixedFiles = Seq(
+ Tuple2(".idea"/"misc.xml", miscXmlTemplate()),
+ Tuple2(
+ ".idea"/"modules.xml",
+ allModulesXmlTemplate(
+ for((path, mod) <- modules)
+ yield moduleName(path)
+ )
+ ),
+ Tuple2(".idea_modules"/"root.iml", rootXmlTemplate())
+ )
+
+ val allResolved = resolved.flatMap(_._2).distinct
+ val minResolvedLength = allResolved.map(_.segments.length).min
+ val commonPrefix = allResolved.map(_.segments.take(minResolvedLength))
+ .transpose
+ .takeWhile(_.distinct.length == 1)
+ .length
+
+ val pathToLibName = allResolved
+ .map{p => (p, p.segments.drop(commonPrefix).mkString("_"))}
+ .toMap
+
+ val libraries = allResolved.map{path =>
+ val url = "jar://" + path + "!/"
+ val name = pathToLibName(path)
+ Tuple2(".idea"/'libraries/s"$name.xml", libraryXmlTemplate(name, url))
+ }
+
+ val moduleFiles = resolved.map{ case (path, resolvedDeps, mod) =>
+ val Seq(sourcePath: PathRef) =
+ evaluator.evaluate(OSet(mod.sources)).values
+
+ val paths = Evaluator.resolveDestPaths(
+ evaluator.workspacePath,
+ evaluator.mapping.modules(mod.compile)
+ )
+
+ val elem = moduleXmlTemplate(
+ sourcePath.path,
+ Seq(paths.base),
+ resolvedDeps.map(pathToLibName),
+ for(m <- mod.projectDeps)
+ yield moduleName(moduleLabels(m))
+ )
+ Tuple2(".idea_modules"/s"${moduleName(path)}.iml", elem)
+ }
+ fixedFiles ++ libraries ++ moduleFiles
+ }
+
+
+ def relify(p: Path) = {
+ val r = p.relativeTo(pwd/".idea_modules")
+ (Seq.fill(r.ups)("..") ++ r.segments).mkString("/")
+ }
+
+ def moduleName(p: Seq[Mirror.Segment]) = p.foldLeft(StringBuilder.newBuilder) {
+ case (sb, Segment.Label(s)) if sb.isEmpty => sb.append(s)
+ case (sb, Segment.Cross(s)) if sb.isEmpty => sb.append(s.mkString("-"))
+ case (sb, Segment.Label(s)) => sb.append(".").append(s)
+ case (sb, Segment.Cross(s)) => sb.append("-").append(s.mkString("-"))
+ }.mkString.toLowerCase()
+
+ def miscXmlTemplate() = {
+ <project version="4">
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8 (1)" project-jdk-type="JavaSDK">
+ <output url="file://$PROJECT_DIR$/target/idea_output"/>
+ </component>
+ </project>
+ }
+
+ def allModulesXmlTemplate(selectors: Seq[String]) = {
+ <project version="4">
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/.idea_modules/root.iml" filepath="$PROJECT_DIR$/.idea_modules/root.iml" />
+ {
+ for(selector <- selectors)
+ yield {
+ val filepath = "$PROJECT_DIR$/.idea_modules/" + selector + ".iml"
+ val fileurl = "file://" + filepath
+ <module fileurl={fileurl} filepath={filepath} />
+ }
+ }
+ </modules>
+ </component>
+ </project>
+ }
+ def rootXmlTemplate() = {
+ <module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager">
+ <output url="file://$MODULE_DIR$/../out"/>
+ <content url="file://$MODULE_DIR$/.." />
+ <exclude-output/>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+ </module>
+ }
+ def libraryXmlTemplate(name: String, url: String) = {
+ <component name="libraryTable">
+ <library name={name} type={if(name.contains("org_scala-lang_scala-library_")) "Scala" else null}>
+ <CLASSES>
+ <root url={url}/>
+ </CLASSES>
+ </library>
+ </component>
+ }
+ def moduleXmlTemplate(sourcePath: Path,
+ outputPaths: Seq[Path],
+ libNames: Seq[String],
+ depNames: Seq[String]) = {
+ <module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager">
+ {
+ for(outputPath <- outputPaths)
+ yield <output url={"file://$MODULE_DIR$/" + relify(outputPath)} />
+ }
+
+ <exclude-output />
+ <content url={"file://$MODULE_DIR$/" + relify(sourcePath)}>
+ <sourceFolder url={"file://$MODULE_DIR$/" + relify(sourcePath)} isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+
+ {
+ for(name <- libNames)
+ yield <orderEntry type="library" name={name} level="project" />
+
+ }
+ {
+ for(depName <- depNames)
+ yield <orderEntry type="module" module-name={depName} exported="" />
+ }
+ </component>
+ </module>
+ }
+}
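The module naming scheme above joins label segments with "." and cross segments with "-", lower-casing the result. A REPL-style sketch of the expected output, assuming `Mirror.Segment.Cross` wraps a sequence of cross values as it is used in `xmlFileLayout`:

  import mill.discover.Mirror.Segment
  import mill.scalalib.GenIdea.moduleName

  moduleName(Seq(Segment.Label("ScalaLib"), Segment.Label("Test")))        // "scalalib.test"
  moduleName(Seq(Segment.Label("bridges"), Segment.Cross(Seq("2.12.4"))))  // "bridges-2.12.4"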
diff --git a/scalalib/src/main/scala/mill/scalalib/Lib.scala b/scalalib/src/main/scala/mill/scalalib/Lib.scala
new file mode 100644
index 00000000..55c28a06
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/Lib.scala
@@ -0,0 +1,226 @@
+package mill
+package scalalib
+
+import java.io.File
+import java.net.URLClassLoader
+import java.util.Optional
+
+import ammonite.ops._
+import coursier.{Cache, Fetch, MavenRepository, Repository, Resolution, Module => CoursierModule}
+import mill.define.Worker
+import mill.eval.{PathRef, Result}
+import mill.util.Ctx
+import sbt.internal.inc._
+import sbt.internal.util.{ConsoleOut, MainAppender}
+import sbt.util.LogExchange
+import xsbti.compile.{CompilerCache => _, FileAnalysisStore => _, ScalaInstance => _, _}
+
+object CompilationResult {
+ implicit val jsonFormatter: upickle.default.ReadWriter[CompilationResult] = upickle.default.macroRW
+}
+
+// analysisFile is represented by Path, so we won't break caches after file changes
+case class CompilationResult(analysisFile: Path, classes: PathRef)
+
+object ZincWorker extends Worker[ZincWorker]{
+ def make() = new ZincWorker
+}
+class ZincWorker{
+ @volatile var scalaClassloaderCache = Option.empty[(Long, ClassLoader)]
+ @volatile var scalaInstanceCache = Option.empty[(Long, ScalaInstance)]
+}
+object Lib{
+ case class MockedLookup(am: File => Optional[CompileAnalysis]) extends PerClasspathEntryLookup {
+ override def analysis(classpathEntry: File): Optional[CompileAnalysis] =
+ am(classpathEntry)
+
+ override def definesClass(classpathEntry: File): DefinesClass =
+ Locate.definesClass(classpathEntry)
+ }
+
+ def grepJar(classPath: Seq[Path], s: String) = {
+ classPath
+ .find(_.toString.endsWith(s))
+ .getOrElse(throw new Exception("Cannot find " + s))
+ .toIO
+ }
+
+ def compileScala(zincWorker: ZincWorker,
+ scalaVersion: String,
+ sources: Seq[Path],
+ compileClasspath: Seq[Path],
+ compilerClasspath: Seq[Path],
+ pluginClasspath: Seq[Path],
+ compilerBridge: Path,
+ scalacOptions: Seq[String],
+ scalacPluginClasspath: Seq[Path],
+ javacOptions: Seq[String],
+ upstreamCompileOutput: Seq[CompilationResult])
+ (implicit ctx: Ctx): CompilationResult = {
+ val compileClasspathFiles = compileClasspath.map(_.toIO).toArray
+
+ val compilerJars = compilerClasspath.toArray.map(_.toIO)
+ val pluginJars = pluginClasspath.toArray.map(_.toIO)
+
+ val compilerClassloaderSig = compilerClasspath.map(p => p.toString().hashCode + p.mtime.toMillis).sum
+ val scalaInstanceSig =
+ compilerClassloaderSig + pluginClasspath.map(p => p.toString().hashCode + p.mtime.toMillis).sum
+
+ val compilerClassLoader = zincWorker.scalaClassloaderCache match{
+ case Some((k, v)) if k == compilerClassloaderSig => v
+ case _ =>
+ val classloader = new URLClassLoader(compilerJars.map(_.toURI.toURL), null)
+ zincWorker.scalaClassloaderCache = Some((compilerClassloaderSig, classloader))
+ classloader
+ }
+
+ val scalaInstance = zincWorker.scalaInstanceCache match{
+ case Some((k, v)) if k == scalaInstanceSig => v
+ case _ =>
+ val scalaInstance = new ScalaInstance(
+ version = scalaVersion,
+ loader = new URLClassLoader(pluginJars.map(_.toURI.toURL), compilerClassLoader),
+ libraryJar = grepJar(compilerClasspath, s"scala-library-$scalaVersion.jar"),
+ compilerJar = grepJar(compilerClasspath, s"scala-compiler-$scalaVersion.jar"),
+ allJars = compilerJars ++ pluginJars,
+ explicitActual = None
+ )
+ zincWorker.scalaInstanceCache = Some((scalaInstanceSig, scalaInstance))
+ scalaInstance
+ }
+
+ mkdir(ctx.dest)
+
+ val ic = new sbt.internal.inc.IncrementalCompilerImpl()
+
+ val logger = {
+ val consoleAppender = MainAppender.defaultScreen(ConsoleOut.printStreamOut(
+ ctx.log.outputStream
+ ))
+ val l = LogExchange.logger("Hello")
+ LogExchange.unbindLoggerAppenders("Hello")
+ LogExchange.bindLoggerAppenders("Hello", (consoleAppender -> sbt.util.Level.Info) :: Nil)
+ l
+ }
+
+ def analysisMap(f: File): Optional[CompileAnalysis] = {
+ if (f.isFile) {
+ Optional.empty[CompileAnalysis]
+ } else {
+ upstreamCompileOutput.collectFirst {
+ case CompilationResult(zincPath, classFiles) if classFiles.path.toNIO == f.toPath =>
+ FileAnalysisStore.binary(zincPath.toIO).get().map[CompileAnalysis](_.getAnalysis)
+ }.getOrElse(Optional.empty[CompileAnalysis])
+ }
+ }
+
+ val lookup = MockedLookup(analysisMap)
+
+ val zincFile = ctx.dest / 'zinc
+ val classesDir = ctx.dest / 'classes
+
+ val zincIOFile = zincFile.toIO
+ val classesIODir = classesDir.toIO
+
+ val store = FileAnalysisStore.binary(zincIOFile)
+
+ val newResult = ic.compile(
+ ic.inputs(
+ classpath = classesIODir +: compileClasspathFiles,
+ sources = sources.filter(_.toIO.exists()).flatMap(ls.rec).filter(x => x.isFile && x.ext == "scala").map(_.toIO).toArray,
+ classesDirectory = classesIODir,
+ scalacOptions = (scalacPluginClasspath.map(jar => s"-Xplugin:${jar}") ++ scalacOptions).toArray,
+ javacOptions = javacOptions.toArray,
+ maxErrors = 10,
+ sourcePositionMappers = Array(),
+ order = CompileOrder.Mixed,
+ compilers = ic.compilers(
+ scalaInstance,
+ ClasspathOptionsUtil.boot,
+ None,
+ ZincUtil.scalaCompiler(scalaInstance, compilerBridge.toIO)
+ ),
+ setup = ic.setup(
+ lookup,
+ skip = false,
+ zincIOFile,
+ new FreshCompilerCache,
+ IncOptions.of(),
+ new ManagedLoggedReporter(10, logger),
+ None,
+ Array()
+ ),
+ pr = {
+ val prev = store.get()
+ PreviousResult.of(prev.map(_.getAnalysis), prev.map(_.getMiniSetup))
+ }
+ ),
+ logger = logger
+ )
+
+ store.set(
+ AnalysisContents.create(
+ newResult.analysis(),
+ newResult.setup()
+ )
+ )
+
+ CompilationResult(zincFile, PathRef(classesDir))
+ }
+
+ def resolveDependencies(repositories: Seq[Repository],
+ scalaVersion: String,
+ scalaBinaryVersion: String,
+ deps: Seq[Dep],
+ sources: Boolean = false): Result[Seq[PathRef]] = {
+ val flattened = deps.map{
+ case Dep.Java(dep) => dep
+ case Dep.Scala(dep) =>
+ dep.copy(module = dep.module.copy(name = dep.module.name + "_" + scalaBinaryVersion))
+ case Dep.Point(dep) =>
+ dep.copy(module = dep.module.copy(name = dep.module.name + "_" + scalaVersion))
+ }.toSet
+ val start = Resolution(flattened)
+
+ val fetch = Fetch.from(repositories, Cache.fetch())
+ val resolution = start.process.run(fetch).unsafePerformSync
+ val errs = resolution.metadataErrors
+ if(errs.nonEmpty) {
+ val header =
+ s"""|
+ |Resolution failed for ${errs.length} modules:
+ |--------------------------------------------
+ |""".stripMargin
+
+ val errLines = errs.map {
+ case ((module, vsn), errMsgs) => s" ${module.trim}:$vsn \n\t" + errMsgs.mkString("\n\t")
+ }.mkString("\n")
+ val msg = header + errLines + "\n"
+ Result.Failure(msg)
+ } else {
+ val sourceOrJar =
+ if (sources) resolution.classifiersArtifacts(Seq("sources"))
+ else resolution.artifacts
+ val localArtifacts: Seq[File] = scalaz.concurrent.Task
+ .gatherUnordered(sourceOrJar.map(Cache.file(_).run))
+ .unsafePerformSync
+ .flatMap(_.toOption)
+
+ localArtifacts.map(p => PathRef(Path(p), quick = true))
+ }
+ }
+ def scalaCompilerIvyDeps(scalaVersion: String) = Seq(
+ Dep.Java("org.scala-lang", "scala-compiler", scalaVersion),
+ Dep.Java("org.scala-lang", "scala-reflect", scalaVersion)
+ )
+ def scalaRuntimeIvyDeps(scalaVersion: String) = Seq[Dep](
+ Dep.Java("org.scala-lang", "scala-library", scalaVersion)
+ )
+ def compilerBridgeIvyDep(scalaVersion: String) =
+ Dep.Point(coursier.Dependency(coursier.Module("com.lihaoyi", "mill-bridge"), "0.1", transitive = false))
+
+ val DefaultShellScript: Seq[String] = Seq(
+ "#!/usr/bin/env sh",
+ "exec java -jar \"$0\" \"$@\""
+ )
+}
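As a rough sketch of how `resolveDependencies` is meant to be driven (the repositories mirror the defaults in `Module` below; coordinates are illustrative and network access is required):

  import coursier.{Cache, MavenRepository}
  import mill.scalalib.Lib

  val result = Lib.resolveDependencies(
    repositories       = Seq(Cache.ivy2Local, MavenRepository("https://repo1.maven.org/maven2")),
    scalaVersion       = "2.12.4",
    scalaBinaryVersion = "2.12",
    deps               = Lib.scalaCompilerIvyDeps("2.12.4")
  )
  // result is a Result[Seq[PathRef]]: Success with the resolved jars, or a Failure
  // listing the modules that could not be resolved.
  println(result)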
diff --git a/scalalib/src/main/scala/mill/scalalib/Main.scala b/scalalib/src/main/scala/mill/scalalib/Main.scala
new file mode 100644
index 00000000..584fe9d1
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/Main.scala
@@ -0,0 +1,7 @@
+package mill.scalalib
+
+object Main {
+ def main(args: Array[String]): Unit = {
+ mill.Main.main(args)
+ }
+}
diff --git a/scalalib/src/main/scala/mill/scalalib/Module.scala b/scalalib/src/main/scala/mill/scalalib/Module.scala
new file mode 100644
index 00000000..1ff4c240
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/Module.scala
@@ -0,0 +1,356 @@
+package mill
+package scalalib
+
+import ammonite.ops._
+import coursier.{Cache, MavenRepository, Repository}
+import mill.define.Task
+import mill.define.Task.{Module, TaskModule}
+import mill.eval.{PathRef, Result}
+import mill.modules.Jvm
+import mill.modules.Jvm.{createAssembly, createJar, interactiveSubprocess, subprocess}
+import Lib._
+import sbt.testing.Status
+object TestModule{
+ def handleResults(doneMsg: String, results: Seq[TestRunner.Result]) = {
+ if (results.count(Set(Status.Error, Status.Failure)) == 0) Result.Success((doneMsg, results))
+ else {
+ val grouped = results.map(_.status).groupBy(x => x).mapValues(_.length).filter(_._2 != 0).toList.sorted
+
+ Result.Failure(grouped.map{case (k, v) => k + ": " + v}.mkString(","))
+ }
+ }
+}
+trait TestModule extends Module with TaskModule {
+ override def defaultCommandName() = "test"
+ def testFramework: T[String]
+
+ def forkWorkingDir = ammonite.ops.pwd
+ def forkArgs = T{ Seq.empty[String] }
+ def forkTest(args: String*) = T.command{
+ mkdir(T.ctx().dest)
+ val outputPath = T.ctx().dest/"out.json"
+
+ Jvm.subprocess(
+ mainClass = "mill.scalalib.TestRunner",
+ classPath = Jvm.gatherClassloaderJars(),
+ jvmOptions = forkArgs(),
+ options = Seq(
+ testFramework(),
+ (runDepClasspath().map(_.path) :+ compile().classes.path).mkString(" "),
+ Seq(compile().classes.path).mkString(" "),
+ args.mkString(" "),
+ outputPath.toString
+ ),
+ workingDir = forkWorkingDir
+ )
+
+ val jsonOutput = upickle.json.read(outputPath.toIO)
+ val (doneMsg, results) = upickle.default.readJs[(String, Seq[TestRunner.Result])](jsonOutput)
+ TestModule.handleResults(doneMsg, results)
+
+ }
+ def test(args: String*) = T.command{
+ val (doneMsg, results) = TestRunner(
+ testFramework(),
+ runDepClasspath().map(_.path) :+ compile().classes.path,
+ Seq(compile().classes.path),
+ args
+ )
+ TestModule.handleResults(doneMsg, results)
+ }
+}
+
+trait Module extends mill.Module with TaskModule { outer =>
+ def defaultCommandName() = "run"
+ trait Tests extends TestModule{
+ def scalaVersion = outer.scalaVersion()
+ override def projectDeps = Seq(outer)
+ }
+ def scalaVersion: T[String]
+ def mainClass: T[Option[String]] = None
+
+ def scalaBinaryVersion = T{ scalaVersion().split('.').dropRight(1).mkString(".") }
+ def ivyDeps = T{ Seq[Dep]() }
+ def compileIvyDeps = T{ Seq[Dep]() }
+ def scalacPluginIvyDeps = T{ Seq[Dep]() }
+ def runIvyDeps = T{ Seq[Dep]() }
+ def basePath: Path
+
+ def scalacOptions = T{ Seq.empty[String] }
+ def javacOptions = T{ Seq.empty[String] }
+
+ def repositories: Seq[Repository] = Seq(
+ Cache.ivy2Local,
+ MavenRepository("https://repo1.maven.org/maven2")
+ )
+
+ def projectDeps = Seq.empty[Module]
+ def depClasspath = T{ Seq.empty[PathRef] }
+
+
+ def upstreamRunClasspath = T{
+ Task.traverse(projectDeps)(p =>
+ T.task(p.runDepClasspath() ++ Seq(p.compile().classes, p.resources()))
+ )
+ }
+
+ def upstreamCompileOutput = T{
+ Task.traverse(projectDeps)(_.compile)
+ }
+ def upstreamCompileClasspath = T{
+ externalCompileDepClasspath() ++
+ upstreamCompileOutput().map(_.classes) ++
+ Task.traverse(projectDeps)(_.compileDepClasspath)().flatten
+ }
+
+ def resolveDeps(deps: Task[Seq[Dep]], sources: Boolean = false) = T.task{
+ resolveDependencies(
+ repositories,
+ scalaVersion(),
+ scalaBinaryVersion(),
+ deps(),
+ sources
+ )
+ }
+
+ def externalCompileDepClasspath: T[Seq[PathRef]] = T{
+ Task.traverse(projectDeps)(_.externalCompileDepClasspath)().flatten ++
+ resolveDeps(
+ T.task{ivyDeps() ++ compileIvyDeps() ++ scalaCompilerIvyDeps(scalaVersion())}
+ )()
+ }
+
+ def externalCompileDepSources: T[Seq[PathRef]] = T{
+ Task.traverse(projectDeps)(_.externalCompileDepSources)().flatten ++
+ resolveDeps(
+ T.task{ivyDeps() ++ compileIvyDeps() ++ scalaCompilerIvyDeps(scalaVersion())},
+ sources = true
+ )()
+ }
+
+ /**
+ * Things that need to be on the classpath in order for this code to compile;
+ * might be less than the runtime classpath
+ */
+ def compileDepClasspath: T[Seq[PathRef]] = T{
+ upstreamCompileClasspath() ++
+ depClasspath()
+ }
+
+ /**
+ * Strange compiler-bridge jar that the Zinc incremental compile needs
+ */
+ def compilerBridge: T[PathRef] = T{
+ val compilerBridgeKey = "MILL_COMPILER_BRIDGE_" + scalaVersion().replace('.', '_')
+ val compilerBridgePath = sys.props(compilerBridgeKey)
+ if (compilerBridgePath != null) PathRef(Path(compilerBridgePath), quick = true)
+ else {
+ val dep = compilerBridgeIvyDep(scalaVersion())
+ val classpath = resolveDependencies(
+ repositories,
+ scalaVersion(),
+ scalaBinaryVersion(),
+ Seq(dep)
+ )
+ classpath match {
+ case Result.Success(resolved) =>
+ resolved.filterNot(_.path.ext == "pom") match {
+ case Seq(single) => PathRef(single.path, quick = true)
+ case Seq() => throw new Exception(dep + " resolution failed") // TODO: find out, is it possible?
+ case _ => throw new Exception(dep + " resolution resulted in more than one file")
+ }
+ case f: Result.Failure => throw new Exception(dep + s" resolution failed.\n + ${f.msg}") // TODO: remove, resolveDependencies will take care of this.
+ }
+ }
+ }
+
+ def scalacPluginClasspath: T[Seq[PathRef]] =
+ resolveDeps(
+ T.task{scalacPluginIvyDeps()}
+ )()
+
+ /**
+ * Classpath of the Scala Compiler & any compiler plugins
+ */
+ def scalaCompilerClasspath: T[Seq[PathRef]] = T{
+ resolveDeps(
+ T.task{scalaCompilerIvyDeps(scalaVersion()) ++ scalaRuntimeIvyDeps(scalaVersion())}
+ )()
+ }
+
+ /**
+ * Things that need to be on the classpath in order for this code to run
+ */
+ def runDepClasspath: T[Seq[PathRef]] = T{
+ upstreamRunClasspath().flatten ++
+ depClasspath() ++
+ resolveDeps(
+ T.task{ivyDeps() ++ runIvyDeps() ++ scalaRuntimeIvyDeps(scalaVersion())},
+ )()
+ }
+
+ def prependShellScript: T[String] = T{ "" }
+
+ def sources = T.source{ basePath / 'src }
+ def resources = T.source{ basePath / 'resources }
+ def allSources = T{ Seq(sources()) }
+ def compile: T[CompilationResult] = T.persistent{
+ compileScala(
+ ZincWorker(),
+ scalaVersion(),
+ allSources().map(_.path),
+ compileDepClasspath().map(_.path),
+ scalaCompilerClasspath().map(_.path),
+ scalacPluginClasspath().map(_.path),
+ compilerBridge().path,
+ scalacOptions(),
+ scalacPluginClasspath().map(_.path),
+ javacOptions(),
+ upstreamCompileOutput()
+ )
+ }
+ def assemblyClasspath = T{
+ (runDepClasspath().filter(_.path.ext != "pom") ++
+ Seq(resources(), compile().classes)).map(_.path).filter(exists)
+ }
+
+ def assembly = T{
+ createAssembly(assemblyClasspath(), prependShellScript = prependShellScript())
+ }
+
+ def classpath = T{ Seq(resources(), compile().classes) }
+
+ def jar = T{
+ createJar(
+ Seq(resources(), compile().classes).map(_.path).filter(exists),
+ mainClass()
+ )
+ }
+
+ def docsJar = T {
+ val outDir = T.ctx().dest
+
+ val javadocDir = outDir / 'javadoc
+ mkdir(javadocDir)
+
+ val options = {
+ val files = ls.rec(sources().path).filter(_.isFile).map(_.toNIO.toString)
+ files ++ Seq("-d", javadocDir.toNIO.toString, "-usejavacp")
+ }
+
+ subprocess(
+ "scala.tools.nsc.ScalaDoc",
+ compileDepClasspath().filterNot(_.path.ext == "pom").map(_.path),
+ options = options
+ )
+
+ createJar(Seq(javadocDir))(outDir / "javadoc.jar")
+ }
+
+ def sourcesJar = T {
+ createJar(Seq(sources(), resources()).map(_.path).filter(exists))(T.ctx().dest / "sources.jar")
+ }
+
+ def run() = T.command{
+ val main = mainClass().getOrElse(throw new RuntimeException("No mainClass provided!"))
+ subprocess(main, runDepClasspath().map(_.path) :+ compile().classes.path)
+ }
+
+ def runMain(mainClass: String) = T.command{
+ subprocess(mainClass, runDepClasspath().map(_.path) :+ compile().classes.path)
+ }
+
+ def console() = T.command{
+ interactiveSubprocess(
+ mainClass = "scala.tools.nsc.MainGenericRunner",
+ classPath = externalCompileDepClasspath().map(_.path) :+ compile().classes.path,
+ options = Seq("-usejavacp")
+ )
+ }
+
+ // publish artifact with name "mill_2.12.4" instead of "mill_2.12"
+ def crossFullScalaVersion: T[Boolean] = false
+
+ def artifactName: T[String] = basePath.last.toString
+ def artifactScalaVersion: T[String] = T {
+ if (crossFullScalaVersion()) scalaVersion()
+ else scalaBinaryVersion()
+ }
+
+ def artifactId: T[String] = T { s"${artifactName()}_${artifactScalaVersion()}" }
+
+}
+
+trait PublishModule extends Module { outer =>
+ import mill.scalalib.publish._
+
+ def pomSettings: T[PomSettings]
+ def publishVersion: T[String] = "0.0.1-SNAPSHOT"
+
+ def pom = T {
+ val dependencies =
+ ivyDeps().map(Artifact.fromDep(_, scalaVersion(), scalaBinaryVersion()))
+ val pom = Pom(artifact(), dependencies, artifactName(), pomSettings())
+
+ val pomPath = T.ctx().dest / s"${artifactId()}-${publishVersion()}.pom"
+ write.over(pomPath, pom)
+ PathRef(pomPath)
+ }
+
+ def ivy = T {
+ val dependencies =
+ ivyDeps().map(Artifact.fromDep(_, scalaVersion(), scalaBinaryVersion()))
+ val ivy = Ivy(artifact(), dependencies)
+ val ivyPath = T.ctx().dest / "ivy.xml"
+ write.over(ivyPath, ivy)
+ PathRef(ivyPath)
+ }
+
+ def artifact: T[Artifact] = T {
+ Artifact(pomSettings().organization, artifactId(), publishVersion())
+ }
+
+ def publishLocal(): define.Command[Unit] = T.command {
+ LocalPublisher.publish(
+ jar = jar().path,
+ sourcesJar = sourcesJar().path,
+ docsJar = docsJar().path,
+ pom = pom().path,
+ ivy = ivy().path,
+ artifact = artifact()
+ )
+ }
+
+ def sonatypeUri: String = "https://oss.sonatype.org/service/local"
+
+ def sonatypeSnapshotUri: String = "https://oss.sonatype.org/content/repositories/snapshots"
+
+ def publish(credentials: String, gpgPassphrase: String): define.Command[Unit] = T.command {
+ val baseName = s"${artifactId()}-${publishVersion()}"
+ val artifacts = Seq(
+ jar().path -> s"${baseName}.jar",
+ sourcesJar().path -> s"${baseName}-sources.jar",
+ docsJar().path -> s"${baseName}-javadoc.jar",
+ pom().path -> s"${baseName}.pom"
+ )
+ new SonatypePublisher(
+ sonatypeUri,
+ sonatypeSnapshotUri,
+ credentials,
+ gpgPassphrase,
+ T.ctx().log
+ ).publish(artifacts, artifact())
+ }
+
+}
+
+trait SbtModule extends Module { outer =>
+ def basePath: Path
+ override def sources = T.source{ basePath / 'src / 'main / 'scala }
+ override def resources = T.source{ basePath / 'src / 'main / 'resources }
+ trait Tests extends super.Tests{
+ def basePath = outer.basePath
+ override def sources = T.source{ basePath / 'src / 'test / 'scala }
+ override def resources = T.source{ basePath / 'src / 'test / 'resources }
+ }
+}
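For orientation, a hypothetical module definition against the traits above (illustrative only: module and dependency names are made up, and the build-discovery/evaluation wiring at this commit is not shown here):

  import ammonite.ops._
  import mill._
  import mill.scalalib._

  object MyModule extends SbtModule {
    def scalaVersion = "2.12.4"
    def basePath = pwd / 'mymodule
    override def ivyDeps = T{ Seq(Dep("com.lihaoyi", "upickle", "0.5.1")) }

    object test extends Tests {
      def testFramework = "utest.runner.Framework"
    }
  }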
diff --git a/scalalib/src/main/scala/mill/scalalib/TestRunner.scala b/scalalib/src/main/scala/mill/scalalib/TestRunner.scala
new file mode 100644
index 00000000..7d42bdea
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/TestRunner.scala
@@ -0,0 +1,170 @@
+package mill.scalalib
+
+import java.io.FileInputStream
+import java.lang.annotation.Annotation
+import java.net.URLClassLoader
+import java.util.zip.ZipInputStream
+
+import ammonite.ops.{Path, ls, pwd}
+import mill.util.Ctx.LogCtx
+import mill.util.PrintLogger
+import sbt.testing._
+import upickle.Js
+import mill.util.JsonFormatters._
+import scala.collection.mutable
+
+object TestRunner {
+ def listClassFiles(base: Path): Iterator[String] = {
+ if (base.isDir) ls.rec(base).toIterator.filter(_.ext == "class").map(_.relativeTo(base).toString)
+ else {
+ val zip = new ZipInputStream(new FileInputStream(base.toIO))
+ Iterator.continually(zip.getNextEntry).takeWhile(_ != null).map(_.getName).filter(_.endsWith(".class"))
+ }
+ }
+ def runTests(cl: ClassLoader, framework: Framework, classpath: Seq[Path]) = {
+
+
+ val fingerprints = framework.fingerprints()
+ val testClasses = classpath.flatMap { base =>
+ listClassFiles(base).flatMap { path =>
+ val cls = cl.loadClass(path.stripSuffix(".class").replace('/', '.'))
+ fingerprints.find {
+ case f: SubclassFingerprint =>
+ cl.loadClass(f.superclassName()).isAssignableFrom(cls)
+ case f: AnnotatedFingerprint =>
+ cls.isAnnotationPresent(
+ cl.loadClass(f.annotationName()).asInstanceOf[Class[Annotation]]
+ )
+ }.map { f => (cls, f) }
+ }
+ }
+ testClasses
+ }
+ def main(args: Array[String]): Unit = {
+ val result = apply(
+ frameworkName = args(0),
+ entireClasspath = args(1).split(" ").map(Path(_)),
+ testClassfilePath = args(2).split(" ").map(Path(_)),
+ args = args(3) match{ case "" => Nil case x => x.split(" ").toList }
+ )(new LogCtx {
+ def log = new PrintLogger(true)
+ })
+ val outputPath = args(4)
+
+ ammonite.ops.write(Path(outputPath), upickle.default.write(result))
+
+ // Tests are over, kill the JVM whether or not anyone's threads are still running
+ // Always return 0, even if tests fail. The caller can pick up the detailed test
+ // results from the outputPath
+ System.exit(0)
+ }
+ def apply(frameworkName: String,
+ entireClasspath: Seq[Path],
+ testClassfilePath: Seq[Path],
+ args: Seq[String])
+ (implicit ctx: LogCtx): (String, Seq[Result]) = {
+ val outerClassLoader = getClass.getClassLoader
+ val cl = new URLClassLoader(
+ entireClasspath.map(_.toIO.toURI.toURL).toArray,
+ ClassLoader.getSystemClassLoader().getParent()){
+ override def findClass(name: String) = {
+ if (name.startsWith("sbt.testing.")){
+ outerClassLoader.loadClass(name)
+ }else{
+ super.findClass(name)
+ }
+ }
+ }
+
+ val framework = cl.loadClass(frameworkName)
+ .newInstance()
+ .asInstanceOf[sbt.testing.Framework]
+
+ val testClasses = runTests(cl, framework, testClassfilePath)
+
+ val runner = framework.runner(args.toArray, args.toArray, cl)
+
+ val tasks = runner.tasks(
+ for((cls, fingerprint) <- testClasses.toArray)
+ yield {
+ new TaskDef(cls.getName.stripSuffix("$"), fingerprint, true, Array())
+ }
+ )
+ val events = mutable.Buffer.empty[Event]
+ for(t <- tasks){
+ t.execute(
+ new EventHandler {
+ def handle(event: Event) = events.append(event)
+ },
+ Array(
+ new Logger {
+ def debug(msg: String) = ctx.log.info(msg)
+
+ def error(msg: String) = ctx.log.error(msg)
+
+ def ansiCodesSupported() = true
+
+ def warn(msg: String) = ctx.log.info(msg)
+
+ def trace(t: Throwable) = t.printStackTrace(ctx.log.outputStream)
+
+ def info(msg: String) = ctx.log.info(msg)
+ })
+ )
+ }
+ val doneMsg = runner.done()
+
+ val results = for(e <- events) yield {
+ val ex = if (e.throwable().isDefined) Some(e.throwable().get) else None
+ Result(
+ e.fullyQualifiedName(),
+ e.selector() match{
+ case s: NestedSuiteSelector => s.suiteId()
+ case s: NestedTestSelector => s.suiteId() + "." + s.testName()
+ case s: SuiteSelector => s.toString
+ case s: TestSelector => s.testName()
+ case s: TestWildcardSelector => s.testWildcard()
+ },
+ e.duration(),
+ e.status(),
+ ex.map(_.getClass.getName),
+ ex.map(_.getMessage),
+ ex.map(_.getStackTrace)
+ )
+ }
+ (doneMsg, results)
+ }
+
+ case class Result(fullyQualifiedName: String,
+ selector: String,
+ duration: Long,
+ status: Status,
+ exceptionName: Option[String],
+ exceptionMsg: Option[String],
+ exceptionTrace: Option[Seq[StackTraceElement]])
+
+ object Result{
+ implicit def resultRW: upickle.default.ReadWriter[Result] = upickle.default.macroRW[Result]
+ implicit def statusRW: upickle.default.ReadWriter[Status] = upickle.default.ReadWriter[Status](
+ {
+ case Status.Success => Js.Str("Success")
+ case Status.Error => Js.Str("Error")
+ case Status.Failure => Js.Str("Failure")
+ case Status.Skipped => Js.Str("Skipped")
+ case Status.Ignored => Js.Str("Ignored")
+ case Status.Canceled => Js.Str("Canceled")
+ case Status.Pending => Js.Str("Pending")
+ },
+ {
+ case Js.Str("Success") => Status.Success
+ case Js.Str("Error") => Status.Error
+ case Js.Str("Failure") => Status.Failure
+ case Js.Str("Skipped") => Status.Skipped
+ case Js.Str("Ignored") => Status.Ignored
+ case Js.Str("Canceled") => Status.Canceled
+ case Js.Str("Pending") => Status.Pending
+ }
+ )
+ }
+
+}
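The discovery step boils down to listing the ".class" entries of each classpath element and turning the relative paths back into class names before fingerprint matching; a REPL-style sketch (the output directory is illustrative):

  import ammonite.ops._
  import mill.scalalib.TestRunner

  val classNames = TestRunner
    .listClassFiles(pwd / 'out / 'mymodule / 'test / 'compile / 'classes)
    .map(_.stripSuffix(".class").replace('/', '.'))
    .toList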
diff --git a/scalalib/src/main/scala/mill/scalalib/package.scala b/scalalib/src/main/scala/mill/scalalib/package.scala
new file mode 100644
index 00000000..1bad9226
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/package.scala
@@ -0,0 +1,5 @@
+package mill
+
+package object scalalib {
+
+}
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/Ivy.scala b/scalalib/src/main/scala/mill/scalalib/publish/Ivy.scala
new file mode 100644
index 00000000..ff21c9ac
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/Ivy.scala
@@ -0,0 +1,53 @@
+package mill.scalalib.publish
+
+import scala.xml.PrettyPrinter
+
+object Ivy {
+
+ val head = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+
+ def apply(
+ artifact: Artifact,
+ dependencies: Seq[Dependency]
+ ): String = {
+ val xml =
+ <ivy-module version="2.0" xmlns:e="http://ant.apache.org/ivy/extra">
+ <info
+ organisation={artifact.group} module={artifact.id} revision={artifact.version} status="release">
+ <description/>
+ </info>
+ <configurations>
+ <conf name="pom" visibility="public" description=""/>
+ <conf extends="runtime" name="test" visibility="public" description=""/>
+ <conf name="provided" visibility="public" description=""/>
+ <conf name="optional" visibility="public" description=""/>
+ <conf name="compile" visibility="public" description=""/>
+ <conf extends="compile" name="runtime" visibility="public" description=""/>
+ </configurations>
+
+ <publications>
+ <artifact name={artifact.id} type="pom" ext="pom" conf="pom"/>
+ <artifact name={artifact.id} type="jar" ext="jar" conf="compile"/>
+ <artifact name={artifact.id} type="src" ext="jar" conf="compile" e:classifier="sources"/>
+ <artifact name={artifact.id} type="doc" ext="jar" conf="compile" e:classifier="javadoc"/>
+ </publications>
+ <dependencies>{dependencies.map(renderDependency)}</dependencies>
+ </ivy-module>
+
+ val pp = new PrettyPrinter(120, 4)
+ head + pp.format(xml).replaceAll("&gt;", ">")
+ }
+
+ private def renderDependency(dep: Dependency) = {
+ val scope = scopeToConf(dep.scope)
+ <dependency org={dep.artifact.group} name={dep.artifact.id} rev={dep.artifact.version} conf={s"$scope->default(compile)"}></dependency>
+ }
+
+ private def scopeToConf(s: Scope): String = s match {
+ case Scope.Compile => "compile"
+ case Scope.Provided => "provided"
+ case Scope.Test => "test"
+ case Scope.Runtime => "runtime"
+ }
+
+}
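A small sketch of driving the descriptor generation, using the `Artifact`/`Dependency` types defined in `settings.scala` below (coordinates are illustrative):

  import mill.scalalib.publish._

  val ivyXml: String = Ivy(
    Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1-SNAPSHOT"),
    Seq(Dependency(Artifact("com.lihaoyi", "upickle_2.12", "0.5.1"), Scope.Compile))
  )
  println(ivyXml)  // prints the <ivy-module> document with one compile-scoped dependency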
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/JsonFormatters.scala b/scalalib/src/main/scala/mill/scalalib/publish/JsonFormatters.scala
new file mode 100644
index 00000000..cf1af557
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/JsonFormatters.scala
@@ -0,0 +1,11 @@
+package mill.scalalib.publish
+
+import upickle.default.{ReadWriter => RW}
+
+trait JsonFormatters {
+ implicit lazy val artifactFormat: RW[Artifact] = upickle.default.macroRW
+ implicit lazy val developerFormat: RW[Developer] = upickle.default.macroRW
+ implicit lazy val licenseFormat: RW[License] = upickle.default.macroRW
+ implicit lazy val scmFormat: RW[SCM] = upickle.default.macroRW
+ implicit lazy val pomSettingsFormat: RW[PomSettings] = upickle.default.macroRW
+}
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/LocalPublisher.scala b/scalalib/src/main/scala/mill/scalalib/publish/LocalPublisher.scala
new file mode 100644
index 00000000..a9957e5c
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/LocalPublisher.scala
@@ -0,0 +1,33 @@
+package mill.scalalib.publish
+
+import ammonite.ops._
+
+object LocalPublisher {
+
+ private val root: Path = home / ".ivy2" / "local"
+
+ def publish(jar: Path,
+ sourcesJar: Path,
+ docsJar: Path,
+ pom: Path,
+ ivy: Path,
+ artifact: Artifact): Unit = {
+ val releaseDir = root / artifact.group / artifact.id / artifact.version
+ writeFiles(
+ jar -> releaseDir / "jars" / s"${artifact.id}.jar",
+ sourcesJar -> releaseDir / "srcs" / s"${artifact.id}-sources.jar",
+ docsJar -> releaseDir / "docs" / s"${artifact.id}-javadoc.jar",
+ pom -> releaseDir / "poms" / s"${artifact.id}.pom",
+ ivy -> releaseDir / "ivys" / "ivy.xml"
+ )
+ }
+
+ private def writeFiles(fromTo: (Path, Path)*): Unit = {
+ fromTo.foreach {
+ case (from, to) =>
+ mkdir(to / up)
+ cp.over(from, to)
+ }
+ }
+
+}
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/Pom.scala b/scalalib/src/main/scala/mill/scalalib/publish/Pom.scala
new file mode 100644
index 00000000..32ad036a
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/Pom.scala
@@ -0,0 +1,88 @@
+package mill.scalalib.publish
+
+import scala.xml.{Elem, NodeSeq, PrettyPrinter}
+
+object Pom {
+
+ val head = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+
+ //TODO - not only jar packaging support?
+ def apply(artifact: Artifact,
+ dependencies: Seq[Dependency],
+ name: String,
+ pomSettings: PomSettings): String = {
+ val xml =
+ <project
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+ xmlns:xsi ="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns ="http://maven.apache.org/POM/4.0.0">
+
+ <modelVersion>4.0.0</modelVersion>
+ <name>{name}</name>
+ <groupId>{artifact.group}</groupId>
+ <artifactId>{artifact.id}</artifactId>
+ <packaging>jar</packaging>
+ <description>{pomSettings.description}</description>
+
+ <version>{artifact.version}</version>
+ <url>{pomSettings.url}</url>
+ <licenses>
+ {pomSettings.licenses.map(renderLicense)}
+ </licenses>
+ <scm>
+ <url>{pomSettings.scm.url}</url>
+ <connection>{pomSettings.scm.connection}</connection>
+ </scm>
+ <developers>
+ {pomSettings.developers.map(renderDeveloper)}
+ </developers>
+ <dependencies>
+ {dependencies.map(renderDependency)}
+ </dependencies>
+ </project>
+
+ val pp = new PrettyPrinter(120, 4)
+ head + pp.format(xml)
+ }
+
+ private def renderLicense(l: License): Elem = {
+ <license>
+ <name>{l.name}</name>
+ <url>{l.url}</url>
+ <distribution>{l.distribution}</distribution>
+ </license>
+ }
+
+ private def renderDeveloper(d: Developer): Elem = {
+ <developer>
+ <id>{d.id}</id>
+ <name>{d.name}</name>
+ {
+ d.organization.map { org =>
+ <organization>{org}</organization>
+ }.getOrElse(NodeSeq.Empty)
+ }
+ {
+ d.organizationUrl.map { orgUrl =>
+ <organizationUrl>{orgUrl}</organizationUrl>
+ }.getOrElse(NodeSeq.Empty)
+ }
+ </developer>
+ }
+
+ private def renderDependency(d: Dependency): Elem = {
+ val scope = d.scope match {
+ case Scope.Compile => NodeSeq.Empty
+ case Scope.Provided => <scope>provided</scope>
+ case Scope.Test => <scope>test</scope>
+ case Scope.Runtime => <scope>runtime</scope>
+ }
+ <dependency>
+ <groupId>{d.artifact.group}</groupId>
+ <artifactId>{d.artifact.id}</artifactId>
+ <version>{d.artifact.version}</version>
+ {scope}
+ </dependency>
+ }
+
+}
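And the POM counterpart; again a sketch with illustrative values, exercising the `PomSettings`, `License`, `SCM` and `Developer` case classes from `settings.scala`:

  import mill.scalalib.publish._

  val pomXml: String = Pom(
    artifact = Artifact("com.lihaoyi", "mill-scalalib_2.12", "0.0.1-SNAPSHOT"),
    dependencies = Seq(Dependency(Artifact("com.lihaoyi", "upickle_2.12", "0.5.1"), Scope.Compile)),
    name = "mill-scalalib",
    pomSettings = PomSettings(
      description = "Scala toolchain for mill",
      organization = "com.lihaoyi",
      url = "https://github.com/lihaoyi/mill",
      licenses = Seq(License("MIT", "https://opensource.org/licenses/MIT")),
      scm = SCM("https://github.com/lihaoyi/mill", "scm:git:git://github.com/lihaoyi/mill.git"),
      developers = Seq(Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi"))
    )
  )
  println(pomXml)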
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/SonatypeHttpApi.scala b/scalalib/src/main/scala/mill/scalalib/publish/SonatypeHttpApi.scala
new file mode 100644
index 00000000..8ccdf3ea
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/SonatypeHttpApi.scala
@@ -0,0 +1,130 @@
+package mill.scalalib.publish
+
+import java.util.Base64
+
+import upickle.json
+
+import scala.concurrent.duration._
+import scalaj.http.{BaseHttp, HttpOptions, HttpRequest, HttpResponse}
+
+object PatientHttp
+ extends BaseHttp(
+ options = Seq(
+ HttpOptions.connTimeout(5.seconds.toMillis.toInt),
+ HttpOptions.readTimeout(1.minute.toMillis.toInt),
+ HttpOptions.followRedirects(false)
+ )
+ )
+
+class SonatypeHttpApi(uri: String, credentials: String) {
+
+ private val base64Creds = base64(credentials)
+
+ private val commonHeaders = Seq(
+ "Authorization" -> s"Basic ${base64Creds}",
+ "Accept" -> "application/json",
+ "Content-Type" -> "application/json"
+ )
+
+ // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles.html
+ def getStagingProfileUri(groupId: String): String = {
+ val response = withRetry(
+ PatientHttp(s"${uri}/staging/profiles").headers(commonHeaders))
+
+ val resourceUri =
+ json
+ .read(response.body)("data")
+ .arr
+ .find(profile => profile("name").str == groupId)
+ .map(_("resourceURI").str.toString)
+
+ resourceUri.getOrElse(
+ throw new RuntimeException(
+ s"Could not find staging profile for groupId: ${groupId}")
+ )
+ }
+
+ def getStagingRepoState(stagingRepoId: String): String = {
+ val response = PatientHttp(s"${uri}/staging/repository/${stagingRepoId}")
+ .option(HttpOptions.readTimeout(60000))
+ .headers(commonHeaders)
+ .asString
+
+ json.read(response.body)("type").str.toString
+ }
+
+ // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_start.html
+ def createStagingRepo(profileUri: String, groupId: String): String = {
+ val response = withRetry(PatientHttp(s"${profileUri}/start")
+ .headers(commonHeaders)
+ .postData(
+ s"""{"data": {"description": "fresh staging profile for ${groupId}"}}"""))
+
+ json.read(response.body)("data")("stagedRepositoryId").str.toString
+ }
+
+ // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_finish.html
+ def closeStagingRepo(profileUri: String, repositoryId: String): Boolean = {
+ val response = withRetry(
+ PatientHttp(s"${profileUri}/finish")
+ .headers(commonHeaders)
+ .postData(
+ s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "closing staging repository"}}"""
+ ))
+
+ response.code == 201
+ }
+
+ // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_promote.html
+ def promoteStagingRepo(profileUri: String, repositoryId: String): Boolean = {
+ val response = withRetry(
+ PatientHttp(s"${profileUri}/promote")
+ .headers(commonHeaders)
+ .postData(
+ s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "promote staging repository"}}"""
+ ))
+
+ response.code == 201
+ }
+
+ // https://oss.sonatype.org/nexus-staging-plugin/default/docs/path__staging_profiles_-profileIdKey-_drop.html
+ def dropStagingRepo(profileUri: String, repositoryId: String): Boolean = {
+ val response = withRetry(
+ PatientHttp(s"${profileUri}/drop")
+ .headers(commonHeaders)
+ .postData(
+ s"""{"data": {"stagedRepositoryId": "${repositoryId}", "description": "drop staging repository"}}"""
+ ))
+
+ response.code == 201
+ }
+
+ private val uploadTimeout = 5.minutes.toMillis.toInt
+
+ def upload(uri: String, data: Array[Byte]): HttpResponse[String] = {
+ PatientHttp(uri)
+ .option(HttpOptions.readTimeout(uploadTimeout))
+ .method("PUT")
+ .headers(
+ "Content-Type" -> "application/binary",
+ "Authorization" -> s"Basic ${base64Creds}"
+ )
+ .put(data)
+ .asString
+ }
+
+ private def withRetry(request: HttpRequest,
+ retries: Int = 10): HttpResponse[String] = {
+ val resp = request.asString
+ if (resp.is5xx && retries > 0) {
+ Thread.sleep(500)
+ withRetry(request, retries - 1)
+ } else {
+ resp
+ }
+ }
+
+ private def base64(s: String) =
+ new String(Base64.getEncoder.encode(s.getBytes))
+
+}
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/SonatypePublisher.scala b/scalalib/src/main/scala/mill/scalalib/publish/SonatypePublisher.scala
new file mode 100644
index 00000000..0749b0c5
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/SonatypePublisher.scala
@@ -0,0 +1,148 @@
+package mill.scalalib.publish
+
+import java.math.BigInteger
+import java.security.MessageDigest
+
+import ammonite.ops._
+import mill.util.Logger
+
+import scalaj.http.HttpResponse
+
+class SonatypePublisher(uri: String,
+ snapshotUri: String,
+ credentials: String,
+ gpgPassphrase: String,
+ log: Logger) {
+
+ private val api = new SonatypeHttpApi(uri, credentials)
+
+ def publish(artifacts: Seq[(Path, String)], artifact: Artifact): Unit = {
+ val signedArtifacts = artifacts ++ artifacts.map {
+ case (file, name) =>
+ poorMansSign(file, gpgPassphrase) -> s"${name}.asc"
+ }
+
+ val signedArtifactsWithDigest = signedArtifacts.flatMap {
+ case (file, name) =>
+ val content = read.bytes(file)
+
+ Seq(
+ name -> content,
+ (name + ".md5") -> md5hex(content),
+ (name + ".sha1") -> sha1hex(content)
+ )
+ }
+
+ val publishPath = Seq(
+ artifact.group.replace(".", "/"),
+ artifact.id,
+ artifact.version
+ ).mkString("/")
+
+ if (artifact.isSnapshot)
+ publishSnapshot(publishPath, signedArtifactsWithDigest, artifact)
+ else
+ publishRelease(publishPath, signedArtifactsWithDigest, artifact)
+ }
+
+ private def publishSnapshot(publishPath: String,
+ payloads: Seq[(String, Array[Byte])],
+ artifact: Artifact): Unit = {
+ val baseUri: String = snapshotUri + "/" + publishPath
+
+ val publishResults = payloads.map {
+ case (fileName, data) =>
+ log.info(s"Uploading ${fileName}")
+ val resp = api.upload(s"${baseUri}/${fileName}", data)
+ resp
+ }
+ reportPublishResults(publishResults, artifact)
+ }
+
+ private def publishRelease(publishPath: String,
+ payloads: Seq[(String, Array[Byte])],
+ artifact: Artifact): Unit = {
+ val profileUri = api.getStagingProfileUri(artifact.group)
+ val stagingRepoId =
+ api.createStagingRepo(profileUri, artifact.group)
+ val baseUri =
+ s"${uri}/staging/deployByRepositoryId/${stagingRepoId}/${publishPath}"
+
+ val publishResults = payloads.map {
+ case (fileName, data) =>
+ log.info(s"Uploading ${fileName}")
+ api.upload(s"${baseUri}/${fileName}", data)
+ }
+ reportPublishResults(publishResults, artifact)
+
+ log.info("Closing staging repository")
+ api.closeStagingRepo(profileUri, stagingRepoId)
+
+ log.info("Waiting for staging repository to close")
+ awaitRepoStatus("closed", stagingRepoId)
+
+ log.info("Promoting staging repository")
+ api.promoteStagingRepo(profileUri, stagingRepoId)
+
+ log.info("Waiting for staging repository to release")
+ awaitRepoStatus("released", stagingRepoId)
+
+ log.info("Dropping staging repository")
+ api.dropStagingRepo(profileUri, stagingRepoId)
+
+ log.info(s"Published ${artifact.id} successfully")
+ }
+
+ private def reportPublishResults(publishResults: Seq[HttpResponse[String]],
+ artifact: Artifact) = {
+ if (publishResults.forall(_.is2xx)) {
+ log.info(s"Published ${artifact.id} to Sonatype")
+ } else {
+ val errors = publishResults.filterNot(_.is2xx).map { response =>
+ s"Code: ${response.code}, message: ${response.body}"
+ }
+ throw new RuntimeException(
+ s"Failed to publish ${artifact.id} to Sonatype. Errors: \n${errors.mkString("\n")}"
+ )
+ }
+ }
+
+ private def awaitRepoStatus(status: String,
+ stagingRepoId: String,
+ attempts: Int = 20): Unit = {
+ def isRightStatus =
+ api.getStagingRepoState(stagingRepoId).equalsIgnoreCase(status)
+ var attemptsLeft = attempts
+
+ while (attemptsLeft > 0 && !isRightStatus) {
+ Thread.sleep(3000)
+ attemptsLeft -= 1
+ if (attemptsLeft == 0) {
+ throw new RuntimeException(
+ s"Couldn't wait for staging repository to be ${status}. Failing")
+ }
+ }
+ }
+
+ // http://central.sonatype.org/pages/working-with-pgp-signatures.html#signing-a-file
+ private def poorMansSign(file: Path, passphrase: String): Path = {
+ val fileName = file.toString
+ import ammonite.ops.ImplicitWd._
+ %("gpg", "--yes", "-a", "-b", "--passphrase", passphrase, fileName)
+ Path(fileName + ".asc")
+ }
+
+ private def md5hex(bytes: Array[Byte]): Array[Byte] =
+ hexArray(md5.digest(bytes)).getBytes
+
+ private def sha1hex(bytes: Array[Byte]): Array[Byte] =
+ hexArray(sha1.digest(bytes)).getBytes
+
+ private def md5 = MessageDigest.getInstance("md5")
+
+ private def sha1 = MessageDigest.getInstance("sha1")
+
+ private def hexArray(arr: Array[Byte]) =
+ String.format("%0" + (arr.length << 1) + "x", new BigInteger(1, arr))
+
+}
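The checksum helpers reduce to digesting the bytes and zero-padding the hex rendering to two characters per byte; a self-contained REPL-style sketch of the same idea:

  import java.math.BigInteger
  import java.security.MessageDigest

  def sha1hex(bytes: Array[Byte]): String = {
    val digest = MessageDigest.getInstance("sha1").digest(bytes)
    // pad to 2 hex characters per digest byte
    String.format("%0" + (digest.length << 1) + "x", new BigInteger(1, digest))
  }

  println(sha1hex("hello".getBytes))  // aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d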
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/package.scala b/scalalib/src/main/scala/mill/scalalib/publish/package.scala
new file mode 100644
index 00000000..99eeec14
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/package.scala
@@ -0,0 +1,3 @@
+package mill.scalalib
+
+package object publish extends JsonFormatters
diff --git a/scalalib/src/main/scala/mill/scalalib/publish/settings.scala b/scalalib/src/main/scala/mill/scalalib/publish/settings.scala
new file mode 100644
index 00000000..eb0a44b6
--- /dev/null
+++ b/scalalib/src/main/scala/mill/scalalib/publish/settings.scala
@@ -0,0 +1,70 @@
+package mill.scalalib.publish
+
+import mill.scalalib.Dep
+
+case class Artifact(group: String, id: String, version: String) {
+ def isSnapshot: Boolean = version.endsWith("-SNAPSHOT")
+}
+
+object Artifact {
+
+ def fromDep(dep: Dep, scalaFull: String, scalaBin: String): Dependency = {
+ dep match {
+ case Dep.Java(dep) =>
+ Dependency(
+ Artifact(dep.module.organization, dep.module.name, dep.version),
+ Scope.Compile)
+ case Dep.Scala(dep) =>
+ Dependency(Artifact(dep.module.organization,
+ s"${dep.module.name}_${scalaBin}",
+ dep.version),
+ Scope.Compile)
+ case Dep.Point(dep) =>
+ Dependency(Artifact(dep.module.organization,
+ s"${dep.module.name}_${scalaFull}",
+ dep.version),
+ Scope.Compile)
+ }
+ }
+}
+
+sealed trait Scope
+object Scope {
+ case object Compile extends Scope
+ case object Provided extends Scope
+ case object Runtime extends Scope
+ case object Test extends Scope
+}
+
+case class Dependency(
+ artifact: Artifact,
+ scope: Scope
+)
+
+case class License(
+ name: String,
+ url: String,
+ distribution: String = "repo"
+)
+
+case class SCM(
+ url: String,
+ connection: String
+)
+
+case class Developer(
+ id: String,
+ name: String,
+ url: String,
+ organization: Option[String] = None,
+ organizationUrl: Option[String] = None
+)
+
+case class PomSettings(
+ description: String,
+ organization: String,
+ url: String,
+ licenses: Seq[License],
+ scm: SCM,
+ developers: Seq[Developer]
+)
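Illustrative mapping of the three `Dep` flavours through `Artifact.fromDep` (REPL-style; the coordinates are made up):

  import mill.scalalib.Dep
  import mill.scalalib.publish.Artifact

  val plain  = Artifact.fromDep(Dep.Java("org.scala-lang", "scala-library", "2.12.4"), "2.12.4", "2.12")
  val binary = Artifact.fromDep(Dep.Scala("com.lihaoyi", "upickle", "0.5.1"), "2.12.4", "2.12")
  val full   = Artifact.fromDep(Dep.Point("com.lihaoyi", "mill-bridge", "0.1"), "2.12.4", "2.12")

  assert(plain.artifact.id  == "scala-library")      // Java deps pass through unchanged
  assert(binary.artifact.id == "upickle_2.12")       // Scala deps get the binary version suffix
  assert(full.artifact.id   == "mill-bridge_2.12.4") // Point deps get the full version suffix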