author     Li Haoyi <haoyi.sg@gmail.com>  2018-08-22 20:38:13 +0800
committer  Li Haoyi <haoyi.sg@gmail.com>  2018-08-22 20:38:13 +0800
commit     7a15fea2f542d90fe6e4fc5cddf9b94f45b049c0 (patch)
tree       73ccf3b5819e7e9bf981569c537861023365622e /main
parent     807e470fe3cb359993c8e37f79da2530138748d4 (diff)
Tidying up:

- Combine `main/` and `core/`
- Rename `jsbridges/`/`scalanativebridges/` -> `worker/` for consistency with other terminology
Diffstat (limited to 'main')
-rw-r--r--  main/core/src/mill/define/Applicative.scala  | 108
-rw-r--r--  main/core/src/mill/define/BaseModule.scala  | 53
-rw-r--r--  main/core/src/mill/define/Cross.scala  | 90
-rw-r--r--  main/core/src/mill/define/Ctx.scala  | 99
-rw-r--r--  main/core/src/mill/define/Discover.scala  | 89
-rw-r--r--  main/core/src/mill/define/Graph.scala  | 61
-rw-r--r--  main/core/src/mill/define/Module.scala  | 97
-rw-r--r--  main/core/src/mill/define/Task.scala  | 353
-rw-r--r--  main/core/src/mill/eval/Evaluator.scala  | 445
-rw-r--r--  main/core/src/mill/eval/PathRef.scala  | 82
-rw-r--r--  main/core/src/mill/eval/Result.scala  | 36
-rw-r--r--  main/core/src/mill/eval/Tarjans.scala  | 51
-rw-r--r--  main/core/src/mill/util/AggWrapper.scala  | 119
-rw-r--r--  main/core/src/mill/util/ClassLoader.scala  | 66
-rw-r--r--  main/core/src/mill/util/Ctx.scala  | 56
-rw-r--r--  main/core/src/mill/util/EitherOps.scala  | 18
-rw-r--r--  main/core/src/mill/util/EnclosingClass.scala  | 15
-rw-r--r--  main/core/src/mill/util/IO.scala  | 32
-rw-r--r--  main/core/src/mill/util/JsonFormatters.scala  | 44
-rw-r--r--  main/core/src/mill/util/Logger.scala  | 205
-rw-r--r--  main/core/src/mill/util/MultiBiMap.scala  | 55
-rw-r--r--  main/core/src/mill/util/ParseArgs.scala  | 134
-rw-r--r--  main/core/src/mill/util/Router.scala  | 451
-rw-r--r--  main/core/src/mill/util/Scripts.scala  | 330
-rw-r--r--  main/core/src/mill/util/Watched.scala  | 8
-rw-r--r--  main/moduledefs/resources/scalac-plugin.xml  | 4
-rw-r--r--  main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala  | 58
-rw-r--r--  main/moduledefs/src/mill/moduledefs/Cacher.scala  | 35
28 files changed, 3194 insertions, 0 deletions
diff --git a/main/core/src/mill/define/Applicative.scala b/main/core/src/mill/define/Applicative.scala
new file mode 100644
index 00000000..69c506f7
--- /dev/null
+++ b/main/core/src/mill/define/Applicative.scala
@@ -0,0 +1,108 @@
+package mill.define
+
+import scala.annotation.{StaticAnnotation, compileTimeOnly}
+import scala.language.higherKinds
+import scala.reflect.macros.blackbox.Context
+
+/**
+ * A generic Applicative-functor macro: translates calls to
+ *
+ * Applier.apply{ ... applyable1.apply() ... applyable2.apply() ... }
+ *
+ * into
+ *
+ * Applier.zipMap(applyable1, applyable2){ (a1, a2, ctx) => ... a1 ... a2 ... }
+ */
+object Applicative {
+ trait ApplyHandler[M[+_]]{
+ def apply[T](t: M[T]): T
+ }
+ object ApplyHandler{
+ @compileTimeOnly("Target#apply() can only be used with a T{...} block")
+ implicit def defaultApplyHandler[M[+_]]: ApplyHandler[M] = ???
+ }
+ trait Applyable[M[+_], +T]{
+ def self: M[T]
+ def apply()(implicit handler: ApplyHandler[M]): T = handler(self)
+ }
+ class ImplicitStub extends StaticAnnotation
+ type Id[+T] = T
+
+ trait Applyer[W[_], T[_], Z[_], Ctx] extends ApplyerGenerated[T, Z, Ctx] {
+ def ctx()(implicit c: Ctx) = c
+ def underlying[A](v: W[A]): T[_]
+
+ def zipMap[R]()(cb: Ctx => Z[R]) = mapCtx(zip()){ (_, ctx) => cb(ctx)}
+ def zipMap[A, R](a: T[A])(f: (A, Ctx) => Z[R]) = mapCtx(a)(f)
+ def zip(): T[Unit]
+ def zip[A](a: T[A]): T[Tuple1[A]]
+ }
+
+ def impl[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T]): c.Expr[M[T]] = {
+ impl0(c)(t.tree)(implicitly[c.WeakTypeTag[T]], implicitly[c.WeakTypeTag[Ctx]])
+ }
+ def impl0[M[_], T: c.WeakTypeTag, Ctx: c.WeakTypeTag](c: Context)
+ (t: c.Tree): c.Expr[M[T]] = {
+ import c.universe._
+ def rec(t: Tree): Iterator[c.Tree] = Iterator(t) ++ t.children.flatMap(rec(_))
+
+ val bound = collection.mutable.Buffer.empty[(c.Tree, ValDef)]
+ val targetApplySym = typeOf[Applyable[Nothing, _]].member(TermName("apply"))
+
+ // Derived from @olafurpg's
+ // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608
+ val defs = rec(t).filter(_.isDef).map(_.symbol).toSet
+
+ val ctxName = TermName(c.freshName("ctx"))
+ val ctxSym = c.internal.newTermSymbol(c.internal.enclosingOwner, ctxName)
+ c.internal.setInfo(ctxSym, weakTypeOf[Ctx])
+
+ val transformed = c.internal.typingTransform(t) {
+ case (t @ q"$fun.apply()($handler)", api) if t.symbol == targetApplySym =>
+
+ val localDefs = rec(fun).filter(_.isDef).map(_.symbol).toSet
+ val banned = rec(t).filter(x => defs(x.symbol) && !localDefs(x.symbol))
+
+ if (banned.hasNext){
+ val banned0 = banned.next()
+ c.abort(
+ banned0.pos,
+ "Target#apply() call cannot use `" + banned0.symbol + "` defined within the T{...} block"
+ )
+ }
+ val tempName = c.freshName(TermName("tmp"))
+ val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName)
+ c.internal.setInfo(tempSym, t.tpe)
+ val tempIdent = Ident(tempSym)
+ c.internal.setType(tempIdent, t.tpe)
+ c.internal.setFlag(tempSym, (1L << 44).asInstanceOf[c.universe.FlagSet])
+ bound.append((q"${c.prefix}.underlying($fun)", c.internal.valDef(tempSym)))
+ tempIdent
+ case (t, api)
+ if t.symbol != null
+ && t.symbol.annotations.exists(_.tree.tpe =:= typeOf[ImplicitStub]) =>
+
+ val tempIdent = Ident(ctxSym)
+ c.internal.setType(tempIdent, t.tpe)
+ c.internal.setFlag(ctxSym, (1L << 44).asInstanceOf[c.universe.FlagSet])
+ tempIdent
+
+ case (t, api) => api.default(t)
+ }
+
+ val (exprs, bindings) = bound.unzip
+
+
+ val ctxBinding = c.internal.valDef(ctxSym)
+
+ val callback = c.typecheck(q"(..$bindings, $ctxBinding) => $transformed ")
+
+ val res = q"${c.prefix}.zipMap(..$exprs){ $callback }"
+
+ c.internal.changeOwner(transformed, c.internal.enclosingOwner, callback.symbol)
+
+ c.Expr[M[T]](res)
+ }
+
+}
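
The translation described in the doc comment above is easiest to see written out by hand. The sketch below is not part of this commit: it mimics the rewrite for Option in place of Task (and drops the ctx parameter), purely to illustrate what the macro does.

// Hand-written illustration of the rewrite performed by Applicative.impl0,
// using Option instead of Task; all names here are illustrative only.
object ApplicativeSketch {
  // What a user conceptually writes:   T { a() + b() }
  // What the macro conceptually emits: zipMap(a, b){ (a1, b1, ctx) => a1 + b1 }
  def zipMap[A, B, R](a: Option[A], b: Option[B])(f: (A, B) => R): Option[R] =
    for (a1 <- a; b1 <- b) yield f(a1, b1)

  def main(args: Array[String]): Unit = {
    val a = Some(1)
    val b = Some(2)
    println(zipMap(a, b)(_ + _)) // Some(3)
  }
}
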
diff --git a/main/core/src/mill/define/BaseModule.scala b/main/core/src/mill/define/BaseModule.scala
new file mode 100644
index 00000000..70826be7
--- /dev/null
+++ b/main/core/src/mill/define/BaseModule.scala
@@ -0,0 +1,53 @@
+package mill.define
+
+import ammonite.ops.Path
+
+object BaseModule{
+ case class Implicit(value: BaseModule)
+}
+
+abstract class BaseModule(millSourcePath0: Path,
+ external0: Boolean = false,
+ foreign0 : Boolean = false)
+ (implicit millModuleEnclosing0: sourcecode.Enclosing,
+ millModuleLine0: sourcecode.Line,
+ millName0: sourcecode.Name,
+ millFile0: sourcecode.File)
+ extends Module()(
+ mill.define.Ctx.make(
+ implicitly,
+ implicitly,
+ implicitly,
+ BasePath(millSourcePath0),
+ Segments(),
+ mill.util.Router.Overrides(0),
+ Ctx.External(external0),
+ Ctx.Foreign(foreign0),
+ millFile0
+ )
+ ){
+ // A BaseModule should provide an empty Segments list to its children, since
+ // it is the root of the module tree, and thus must not include its own
+ // sourcecode.Name as part of the list.
+ override implicit def millModuleSegments: Segments = Segments()
+ override def millSourcePath = millOuterCtx.millSourcePath
+ override implicit def millModuleBasePath: BasePath = BasePath(millSourcePath)
+ implicit def millImplicitBaseModule: BaseModule.Implicit = BaseModule.Implicit(this)
+ def millDiscover: Discover[this.type]
+}
+
+
+abstract class ExternalModule(implicit millModuleEnclosing0: sourcecode.Enclosing,
+ millModuleLine0: sourcecode.Line,
+ millName0: sourcecode.Name)
+ extends BaseModule(ammonite.ops.pwd, external0 = true, foreign0 = false){
+
+ implicit def millDiscoverImplicit: Discover[_] = millDiscover
+ assert(
+ !" #".exists(millModuleEnclosing0.value.contains(_)),
+ "External modules must be at a top-level static path, not " + millModuleEnclosing0.value
+ )
+ override implicit def millModuleSegments = {
+ Segments(millModuleEnclosing0.value.split('.').map(Segment.Label):_*)
+ }
+}
diff --git a/main/core/src/mill/define/Cross.scala b/main/core/src/mill/define/Cross.scala
new file mode 100644
index 00000000..aa730e0d
--- /dev/null
+++ b/main/core/src/mill/define/Cross.scala
@@ -0,0 +1,90 @@
+package mill.define
+import language.experimental.macros
+import scala.reflect.macros.blackbox
+
+
+object Cross{
+ case class Factory[T](make: (Product, mill.define.Ctx) => T)
+
+ object Factory{
+ implicit def make[T]: Factory[T] = macro makeImpl[T]
+ def makeImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Factory[T]] = {
+ import c.universe._
+ val tpe = weakTypeOf[T]
+
+ val primaryConstructorArgs =
+ tpe.typeSymbol.asClass.primaryConstructor.typeSignature.paramLists.head
+
+ val argTupleValues =
+ for((a, n) <- primaryConstructorArgs.zipWithIndex)
+ yield q"v.productElement($n).asInstanceOf[${a.info}]"
+
+ val instance = c.Expr[(Product, mill.define.Ctx) => T](
+ q"{ (v, ctx0) => new $tpe(..$argTupleValues){ override def millOuterCtx = ctx0 } }"
+ )
+
+ reify { mill.define.Cross.Factory[T](instance.splice) }
+ }
+ }
+
+ trait Resolver[-T]{
+ def resolve[V <: T](c: Cross[V]): V
+ }
+}
+
+/**
+ * Models "cross-builds": sets of duplicate builds which differ only in the
+ * value of one or more "case" variables whose values are determined at runtime.
+ * Used via:
+ *
+ * object foo extends Cross[FooModule]("bar", "baz", "qux")
+ * class FooModule(v: String) extends Module{
+ * ...
+ * }
+ */
+class Cross[T](cases: Any*)
+ (implicit ci: Cross.Factory[T],
+ ctx: mill.define.Ctx) extends mill.define.Module()(ctx) {
+
+ override lazy val millModuleDirectChildren =
+ this.millInternal.reflectNestedObjects[Module] ++
+ items.collect{case (k, v: mill.define.Module) => v}
+
+ val items = for(c0 <- cases.toList) yield{
+ val c = c0 match{
+ case p: Product => p
+ case v => Tuple1(v)
+ }
+ val crossValues = c.productIterator.toList
+ val relPath = ctx.segment.pathSegments
+ val sub = ci.make(
+ c,
+ ctx.copy(
+ segments = ctx.segments ++ Seq(ctx.segment),
+ millSourcePath = ctx.millSourcePath / relPath,
+ segment = Segment.Cross(crossValues)
+ )
+ )
+ (crossValues, sub)
+ }
+ val itemMap = items.toMap
+
+ /**
+ * Fetch the cross module corresponding to the given cross values
+ */
+ def get(args: Seq[Any]) = itemMap(args.toList)
+
+ /**
+ * Fetch the cross module corresponding to the given cross values
+ */
+ def apply(arg0: Any, args: Any*) = itemMap(arg0 :: args.toList)
+
+ /**
+ * Fetch the relevant cross module given the implicit resolver you have in
+ * scope. This is often the first cross module whose cross-version is
+ * compatible with the current module.
+ */
+ def apply[V >: T]()(implicit resolver: Cross.Resolver[V]): T = {
+ resolver.resolve(this.asInstanceOf[Cross[V]]).asInstanceOf[T]
+ }
+} \ No newline at end of file
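
For context, a build.sc-style sketch of how such a cross module is consumed. This is not part of the commit; the module and target names are illustrative, and `T{...}`/`Module`/`Cross` are assumed to come from `import mill._` in a Mill build file.

// build.sc sketch (illustrative names; assumes Mill's build-file environment)
import mill._

object foo extends Cross[FooModule]("2.11.12", "2.12.6")
class FooModule(crossVersion: String) extends Module {
  def suffix = T { "_" + crossVersion }
}

// From the command line:  mill foo[2.12.6].suffix
// From other build code:  foo("2.12.6") fetches the 2.12.6 instance via `apply`,
// and foo.items lists every (crossValues, module) pair.
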
diff --git a/main/core/src/mill/define/Ctx.scala b/main/core/src/mill/define/Ctx.scala
new file mode 100644
index 00000000..fb15dc19
--- /dev/null
+++ b/main/core/src/mill/define/Ctx.scala
@@ -0,0 +1,99 @@
+package mill.define
+
+
+import ammonite.ops.Path
+
+import scala.annotation.implicitNotFound
+
+sealed trait Segment{
+ def pathSegments: Seq[String] = this match{
+ case Segment.Label(s) => List(s)
+ case Segment.Cross(vs) => vs.map(_.toString)
+ }
+}
+object Segment{
+ case class Label(value: String) extends Segment{
+ assert(!value.contains('.'))
+ }
+ case class Cross(value: Seq[Any]) extends Segment
+}
+
+case class BasePath(value: Path)
+
+
+/**
+ * Models a path within the Mill build hierarchy, e.g.
+ *
+ * amm.util[2.11].test.compile
+ *
+ * .-separated segments are [[Segment.Label]]s, while []-delimited
+ * segments are [[Segment.Cross]]s
+ */
+case class Segments(value: Segment*){
+ def ++(other: Seq[Segment]): Segments = Segments(value ++ other:_*)
+ def ++(other: Segments): Segments = Segments(value ++ other.value:_*)
+ def parts = value.toList match {
+ case Nil => Nil
+ case Segment.Label(head) :: rest =>
+ val stringSegments = rest.flatMap{
+ case Segment.Label(s) => Seq(s)
+ case Segment.Cross(vs) => vs.map(_.toString)
+ }
+ head +: stringSegments
+ }
+ def last : Segments = Segments(value.last)
+ def render = value.toList match {
+ case Nil => ""
+ case Segment.Label(head) :: rest =>
+ val stringSegments = rest.map{
+ case Segment.Label(s) => "." + s
+ case Segment.Cross(vs) => "[" + vs.mkString(",") + "]"
+ }
+ head + stringSegments.mkString
+ }
+}
+
+object Segments {
+
+ def labels(values : String*) : Segments =
+ Segments(values.map(Segment.Label):_*)
+
+}
+
+@implicitNotFound("Modules, Targets and Commands can only be defined within a mill Module")
+case class Ctx(enclosing: String,
+ lineNum: Int,
+ segment: Segment,
+ millSourcePath: Path,
+ segments: Segments,
+ overrides: Int,
+ external: Boolean,
+ foreign: Boolean,
+ fileName: String){
+}
+
+object Ctx{
+ case class External(value: Boolean)
+ case class Foreign(value : Boolean)
+ implicit def make(implicit millModuleEnclosing0: sourcecode.Enclosing,
+ millModuleLine0: sourcecode.Line,
+ millName0: sourcecode.Name,
+ millModuleBasePath0: BasePath,
+ segments0: Segments,
+ overrides0: mill.util.Router.Overrides,
+ external0: External,
+ foreign0: Foreign,
+ fileName: sourcecode.File): Ctx = {
+ Ctx(
+ millModuleEnclosing0.value,
+ millModuleLine0.value,
+ Segment.Label(millName0.value),
+ millModuleBasePath0.value,
+ segments0,
+ overrides0.value,
+ external0.value,
+ foreign0.value,
+ fileName.value
+ )
+ }
+}
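
For concreteness, the rendering rules described above behave like this. A small self-contained sketch, not part of the commit, assuming mill-core on the classpath:

import mill.define.{Segment, Segments}

object SegmentsDemo {
  def main(args: Array[String]): Unit = {
    val segs = Segments(
      Segment.Label("amm"),
      Segment.Label("util"),
      Segment.Cross(Seq("2.11")),
      Segment.Label("test"),
      Segment.Label("compile")
    )
    println(segs.render) // amm.util[2.11].test.compile
    println(segs.parts)  // List(amm, util, 2.11, test, compile)
  }
}
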
diff --git a/main/core/src/mill/define/Discover.scala b/main/core/src/mill/define/Discover.scala
new file mode 100644
index 00000000..f0c668e6
--- /dev/null
+++ b/main/core/src/mill/define/Discover.scala
@@ -0,0 +1,89 @@
+package mill.define
+import mill.util.Router.EntryPoint
+
+import language.experimental.macros
+import sourcecode.Compat.Context
+
+import scala.collection.mutable
+import scala.reflect.macros.blackbox
+
+
+
+case class Discover[T](value: Map[Class[_], Seq[(Int, EntryPoint[_])]])
+object Discover {
+ def apply[T]: Discover[T] = macro applyImpl[T]
+
+ def applyImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Discover[T]] = {
+ import c.universe._
+ import compat._
+ val seen = mutable.Set.empty[Type]
+ def rec(tpe: Type): Unit = {
+ if (!seen(tpe)){
+ seen.add(tpe)
+ for{
+ m <- tpe.members
+ memberTpe = m.typeSignature
+ if memberTpe.resultType <:< typeOf[mill.define.Module] && memberTpe.paramLists.isEmpty
+ } rec(memberTpe.resultType)
+
+ if (tpe <:< typeOf[mill.define.Cross[_]]){
+ val inner = typeOf[Cross[_]]
+ .typeSymbol
+ .asClass
+ .typeParams
+ .head
+ .asType
+ .toType
+ .asSeenFrom(tpe, typeOf[Cross[_]].typeSymbol)
+
+ rec(inner)
+ }
+ }
+ }
+ rec(weakTypeOf[T])
+
+ def assertParamListCounts(methods: Iterable[router.c.universe.MethodSymbol],
+ cases: (c.Type, Int, String)*) = {
+ for (m <- methods.toList){
+ for ((tt, n, label) <- cases){
+ if (m.returnType <:< tt.asInstanceOf[router.c.Type] &&
+ m.paramLists.length != n){
+ c.abort(
+ m.pos.asInstanceOf[c.Position],
+ s"$label definitions must have $n parameter list" + (if (n == 1) "" else "s")
+ )
+ }
+ }
+ }
+ }
+ val router = new mill.util.Router(c)
+ val mapping = for{
+ discoveredModuleType <- seen
+ val curCls = discoveredModuleType.asInstanceOf[router.c.Type]
+ val methods = router.getValsOrMeths(curCls)
+ val overridesRoutes = {
+ assertParamListCounts(
+ methods,
+ (weakTypeOf[mill.define.Sources], 0, "`T.sources`"),
+ (weakTypeOf[mill.define.Input[_]], 0, "`T.input`"),
+ (weakTypeOf[mill.define.Persistent[_]], 0, "`T.persistent`"),
+ (weakTypeOf[mill.define.Target[_]], 0, "`T{...}`"),
+ (weakTypeOf[mill.define.Command[_]], 1, "`T.command`")
+ )
+
+ for{
+ m <- methods.toList
+ if m.returnType <:< weakTypeOf[mill.define.Command[_]].asInstanceOf[router.c.Type]
+ } yield (m.overrides.length, router.extractMethod(m, curCls).asInstanceOf[c.Tree])
+
+ }
+ if overridesRoutes.nonEmpty
+ } yield {
+ val lhs = q"classOf[${discoveredModuleType.typeSymbol.asClass}]"
+ val rhs = q"scala.Seq[(Int, mill.util.Router.EntryPoint[_])](..$overridesRoutes)"
+ q"$lhs -> $rhs"
+ }
+
+ c.Expr[Discover[T]](q"mill.define.Discover(scala.collection.immutable.Map(..$mapping))")
+ }
+}
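
In practice every BaseModule supplies its own Discover. The sketch below (not part of this commit) shows the typical pattern for an external module; the names are illustrative, and `Target.command` stands in for the usual `T.command` alias.

import mill.define.{Discover, ExternalModule, Target}

// Illustrative external module: `hello` becomes an entry point that the
// generated Discover mapping can route command-line invocations to.
object MyPlugin extends ExternalModule {
  def hello() = Target.command { println("hello") }
  lazy val millDiscover = Discover[this.type]
}
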
diff --git a/main/core/src/mill/define/Graph.scala b/main/core/src/mill/define/Graph.scala
new file mode 100644
index 00000000..f06dca11
--- /dev/null
+++ b/main/core/src/mill/define/Graph.scala
@@ -0,0 +1,61 @@
+package mill.define
+
+import mill.eval.Tarjans
+import mill.util.MultiBiMap
+import mill.util.Strict.Agg
+
+object Graph {
+ class TopoSorted private[Graph](val values: Agg[Task[_]])
+ def groupAroundImportantTargets[T](topoSortedTargets: TopoSorted)
+ (important: PartialFunction[Task[_], T]): MultiBiMap[T, Task[_]] = {
+
+ val output = new MultiBiMap.Mutable[T, Task[_]]()
+ for ((target, t) <- topoSortedTargets.values.flatMap(t => important.lift(t).map((t, _)))) {
+
+ val transitiveTargets = new Agg.Mutable[Task[_]]
+ def rec(t: Task[_]): Unit = {
+ if (transitiveTargets.contains(t)) () // do nothing
+ else if (important.isDefinedAt(t) && t != target) () // do nothing
+ else {
+ transitiveTargets.append(t)
+ t.inputs.foreach(rec)
+ }
+ }
+ rec(target)
+ output.addAll(t, topoSorted(transitiveTargets).values)
+ }
+ output
+ }
+
+ def transitiveTargets(sourceTargets: Agg[Task[_]]): Agg[Task[_]] = {
+ val transitiveTargets = new Agg.Mutable[Task[_]]
+ def rec(t: Task[_]): Unit = {
+ if (transitiveTargets.contains(t)) () // do nothing
+ else {
+ transitiveTargets.append(t)
+ t.inputs.foreach(rec)
+ }
+ }
+
+ sourceTargets.items.foreach(rec)
+ transitiveTargets
+ }
+ /**
+ * Takes the given targets, finds all the targets they transitively depend
+ * on, and sort them topologically. Fails if there are dependency cycles
+ */
+ def topoSorted(transitiveTargets: Agg[Task[_]]): TopoSorted = {
+
+ val indexed = transitiveTargets.indexed
+ val targetIndices = indexed.zipWithIndex.toMap
+
+ val numberedEdges =
+ for(t <- transitiveTargets.items)
+ yield t.inputs.collect(targetIndices)
+
+ val sortedClusters = Tarjans(numberedEdges)
+ val nonTrivialClusters = sortedClusters.filter(_.length > 1)
+ assert(nonTrivialClusters.isEmpty, nonTrivialClusters)
+ new TopoSorted(Agg.from(sortedClusters.flatten.map(indexed)))
+ }
+}
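
A tiny end-to-end sketch of the two helpers above on hand-built tasks (not part of the commit; assumes mill-core on the classpath):

import mill.define.{Graph, Task}
import mill.util.Strict.Agg

object GraphDemo {
  def main(args: Array[String]): Unit = {
    // b is derived from a; c depends on both a and b, so a is the only leaf
    val a = new Task.Task0(1)
    val b = a.map(_ + 1)
    val c = a.zip(b)

    val sorted = Graph.topoSorted(Graph.transitiveTargets(Agg[Task[_]](c))).values
    println(sorted.indexed.head == a) // true: the leaf comes first
    println(sorted.indexed.last == c) // true: the node depending on everything comes last
  }
}
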
diff --git a/main/core/src/mill/define/Module.scala b/main/core/src/mill/define/Module.scala
new file mode 100644
index 00000000..f72ec8ca
--- /dev/null
+++ b/main/core/src/mill/define/Module.scala
@@ -0,0 +1,97 @@
+package mill.define
+
+import java.lang.reflect.Modifier
+
+import ammonite.ops.Path
+import mill.util.ParseArgs
+
+import scala.language.experimental.macros
+import scala.reflect.ClassTag
+import scala.reflect.NameTransformer.decode
+
+
+/**
+ * `Module` is a class meant to be extended by `trait`s *only*, in order to
+ * propagate the implicit parameters forward to the final concrete
+ * instantiation site so they can capture the enclosing/line information of
+ * the concrete instance.
+ */
+class Module(implicit outerCtx0: mill.define.Ctx)
+ extends mill.moduledefs.Cacher{ outer =>
+
+ /**
+ * Miscellaneous machinery around traversing & querying the build hierarchy,
+ * that should not be needed by normal users of Mill
+ */
+ object millInternal extends Module.Internal(this)
+
+ lazy val millModuleDirectChildren = millInternal.reflectNestedObjects[Module].toSeq
+ def millOuterCtx = outerCtx0
+ def millSourcePath: Path = millOuterCtx.millSourcePath / millOuterCtx.segment.pathSegments
+ implicit def millModuleExternal: Ctx.External = Ctx.External(millOuterCtx.external)
+ implicit def millModuleShared: Ctx.Foreign = Ctx.Foreign(millOuterCtx.foreign)
+ implicit def millModuleBasePath: BasePath = BasePath(millSourcePath)
+ implicit def millModuleSegments: Segments = {
+ millOuterCtx.segments ++ Seq(millOuterCtx.segment)
+ }
+ override def toString = millModuleSegments.render
+}
+
+object Module{
+ class Internal(outer: Module){
+ def traverse[T](f: Module => Seq[T]): Seq[T] = {
+ def rec(m: Module): Seq[T] = f(m) ++ m.millModuleDirectChildren.flatMap(rec)
+ rec(outer)
+ }
+
+ lazy val modules = traverse(Seq(_))
+ lazy val segmentsToModules = modules.map(m => (m.millModuleSegments, m)).toMap
+
+ lazy val targets = traverse{_.millInternal.reflectAll[Target[_]]}.toSet
+
+ lazy val segmentsToTargets = targets
+ .map(t => (t.ctx.segments, t))
+ .toMap
+
+ // Ensure we do not propagate the implicit parameters as implicits within
+ // the body of any inheriting class/trait/objects, as it would screw up
+ // anyone else trying to use sourcecode.{Enclosing,Line} to capture debug info
+ lazy val millModuleEnclosing = outer.millOuterCtx.enclosing
+ lazy val millModuleLine = outer.millOuterCtx.lineNum
+
+ private def reflect[T: ClassTag](filter: (String) => Boolean): Array[T] = {
+ val runtimeCls = implicitly[ClassTag[T]].runtimeClass
+ for{
+ m <- outer.getClass.getMethods.sortBy(_.getName)
+ n = decode(m.getName)
+ if
+ filter(n) &&
+ ParseArgs.isLegalIdentifier(n) &&
+ m.getParameterCount == 0 &&
+ (m.getModifiers & Modifier.STATIC) == 0 &&
+ (m.getModifiers & Modifier.ABSTRACT) == 0 &&
+ runtimeCls.isAssignableFrom(m.getReturnType)
+ } yield m.invoke(outer).asInstanceOf[T]
+ }
+
+ def reflectAll[T: ClassTag]: Array[T] = reflect(Function.const(true))
+
+ def reflectSingle[T: ClassTag](label: String): Option[T] = reflect(_ == label).headOption
+
+ // For some reason, this fails to pick up concrete `object`s nested directly within
+ // another top-level concrete `object`. This is fine for now, since Mill's Ammonite
+ // script/REPL runner always wraps user code in a wrapper object/trait
+ def reflectNestedObjects[T: ClassTag] = {
+ (reflectAll[T] ++
+ outer
+ .getClass
+ .getClasses
+ .filter(implicitly[ClassTag[T]].runtimeClass isAssignableFrom _)
+ .flatMap(c => c.getFields.find(_.getName == "MODULE$").map(_.get(c).asInstanceOf[T]))
+ ).distinct
+ }
+ }
+}
+trait TaskModule extends Module {
+ def defaultCommandName(): String
+}
diff --git a/main/core/src/mill/define/Task.scala b/main/core/src/mill/define/Task.scala
new file mode 100644
index 00000000..d5f8680e
--- /dev/null
+++ b/main/core/src/mill/define/Task.scala
@@ -0,0 +1,353 @@
+package mill.define
+
+import ammonite.main.Router.Overrides
+import mill.define.Applicative.Applyable
+import mill.eval.{PathRef, Result}
+import mill.util.EnclosingClass
+import sourcecode.Compat.Context
+import upickle.default.{ReadWriter => RW, Reader => R, Writer => W}
+
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+
+/**
+ * Models a single node in the Mill build graph, with a list of inputs and a
+ * single output of type [[T]].
+ *
+ * Generally not instantiated manually, but instead constructed via the
+ * [[Target.apply]] & similar macros.
+ */
+abstract class Task[+T] extends Task.Ops[T] with Applyable[Task, T]{
+ /**
+ * What other Targets does this Target depend on?
+ */
+ val inputs: Seq[Task[_]]
+
+ /**
+ * Evaluate this target
+ */
+ def evaluate(args: mill.util.Ctx): Result[T]
+
+ /**
+ * Even if this target's inputs did not change, does it need to re-evaluate
+ * anyway?
+ */
+ def sideHash: Int = 0
+
+ def flushDest: Boolean = true
+
+ def asTarget: Option[Target[T]] = None
+ def asCommand: Option[Command[T]] = None
+ def asWorker: Option[Worker[T]] = None
+ def self = this
+}
+
+trait NamedTask[+T] extends Task[T]{
+ def ctx: mill.define.Ctx
+ def label = ctx.segment match{case Segment.Label(v) => v}
+ override def toString = ctx.segments.render
+}
+trait Target[+T] extends NamedTask[T]{
+ override def asTarget = Some(this)
+ def readWrite: RW[_]
+}
+
+object Target extends TargetGenerated with Applicative.Applyer[Task, Task, Result, mill.util.Ctx] {
+
+ implicit def apply[T](t: T)
+ (implicit rw: RW[T],
+ ctx: mill.define.Ctx): Target[T] = macro targetImpl[T]
+
+ def targetImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T])
+ (rw: c.Expr[RW[T]],
+ ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = {
+ import c.universe._
+ val lhs = Applicative.impl0[Task, T, mill.util.Ctx](c)(reify(Result.Success(t.splice)).tree)
+
+ mill.moduledefs.Cacher.impl0[TargetImpl[T]](c)(
+ reify(
+ new TargetImpl[T](lhs.splice, ctx.splice, rw.splice)
+ )
+ )
+ }
+
+ implicit def apply[T](t: Result[T])
+ (implicit rw: RW[T],
+ ctx: mill.define.Ctx): Target[T] = macro targetResultImpl[T]
+
+ def targetResultImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[Result[T]])
+ (rw: c.Expr[RW[T]],
+ ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = {
+ import c.universe._
+ mill.moduledefs.Cacher.impl0[Target[T]](c)(
+ reify(
+ new TargetImpl[T](
+ Applicative.impl0[Task, T, mill.util.Ctx](c)(t.tree).splice,
+ ctx.splice,
+ rw.splice
+ )
+ )
+ )
+ }
+
+ def apply[T](t: Task[T])
+ (implicit rw: RW[T],
+ ctx: mill.define.Ctx): Target[T] = macro targetTaskImpl[T]
+
+ def targetTaskImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[Task[T]])
+ (rw: c.Expr[RW[T]],
+ ctx: c.Expr[mill.define.Ctx]): c.Expr[Target[T]] = {
+ import c.universe._
+ mill.moduledefs.Cacher.impl0[Target[T]](c)(
+ reify(
+ new TargetImpl[T](t.splice, ctx.splice, rw.splice)
+ )
+ )
+ }
+
+ def sources(values: Result[ammonite.ops.Path]*)
+ (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl1
+
+ def sourcesImpl1(c: Context)
+ (values: c.Expr[Result[ammonite.ops.Path]]*)
+ (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = {
+ import c.universe._
+ val wrapped =
+ for (value <- values.toList)
+ yield Applicative.impl0[Task, PathRef, mill.util.Ctx](c)(
+ reify(value.splice.map(PathRef(_))).tree
+ ).tree
+
+ mill.moduledefs.Cacher.impl0[Sources](c)(
+ reify(
+ new Sources(
+ Task.sequence(c.Expr[List[Task[PathRef]]](q"scala.List(..$wrapped)").splice),
+ ctx.splice
+ )
+ )
+ )
+ }
+
+ def sources(values: Result[Seq[PathRef]])
+ (implicit ctx: mill.define.Ctx): Sources = macro sourcesImpl2
+
+ def sourcesImpl2(c: Context)
+ (values: c.Expr[Result[Seq[PathRef]]])
+ (ctx: c.Expr[mill.define.Ctx]): c.Expr[Sources] = {
+ import c.universe._
+
+
+ mill.moduledefs.Cacher.impl0[Sources](c)(
+ reify(
+ new Sources(
+ Applicative.impl0[Task, Seq[PathRef], mill.util.Ctx](c)(values.tree).splice,
+ ctx.splice
+ )
+ )
+ )
+ }
+ def input[T](value: Result[T])
+ (implicit rw: RW[T],
+ ctx: mill.define.Ctx): Input[T] = macro inputImpl[T]
+
+ def inputImpl[T: c.WeakTypeTag](c: Context)
+ (value: c.Expr[T])
+ (rw: c.Expr[RW[T]],
+ ctx: c.Expr[mill.define.Ctx]): c.Expr[Input[T]] = {
+ import c.universe._
+
+ mill.moduledefs.Cacher.impl0[Input[T]](c)(
+ reify(
+ new Input[T](
+ Applicative.impl[Task, T, mill.util.Ctx](c)(value).splice,
+ ctx.splice,
+ rw.splice
+ )
+ )
+ )
+ }
+
+ def command[T](t: Task[T])
+ (implicit ctx: mill.define.Ctx,
+ w: W[T],
+ cls: EnclosingClass,
+ overrides: Overrides): Command[T] = {
+ new Command(t, ctx, w, cls.value, overrides.value)
+ }
+
+ def command[T](t: Result[T])
+ (implicit w: W[T],
+ ctx: mill.define.Ctx,
+ cls: EnclosingClass,
+ overrides: Overrides): Command[T] = macro commandImpl[T]
+
+ def commandImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T])
+ (w: c.Expr[W[T]],
+ ctx: c.Expr[mill.define.Ctx],
+ cls: c.Expr[EnclosingClass],
+ overrides: c.Expr[Overrides]): c.Expr[Command[T]] = {
+ import c.universe._
+ reify(
+ new Command[T](
+ Applicative.impl[Task, T, mill.util.Ctx](c)(t).splice,
+ ctx.splice,
+ w.splice,
+ cls.splice.value,
+ overrides.splice.value
+ )
+ )
+ }
+
+ def worker[T](t: Task[T])
+ (implicit ctx: mill.define.Ctx): Worker[T] = new Worker(t, ctx)
+
+ def worker[T](t: Result[T])
+ (implicit ctx: mill.define.Ctx): Worker[T] = macro workerImpl[T]
+
+ def workerImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T])
+ (ctx: c.Expr[mill.define.Ctx]): c.Expr[Worker[T]] = {
+ import c.universe._
+ reify(
+ new Worker[T](Applicative.impl[Task, T, mill.util.Ctx](c)(t).splice, ctx.splice)
+ )
+ }
+
+ def task[T](t: Result[T]): Task[T] = macro Applicative.impl[Task, T, mill.util.Ctx]
+
+ def persistent[T](t: Result[T])(implicit rw: RW[T],
+ ctx: mill.define.Ctx): Persistent[T] = macro persistentImpl[T]
+
+ def persistentImpl[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T])
+ (rw: c.Expr[RW[T]],
+ ctx: c.Expr[mill.define.Ctx]): c.Expr[Persistent[T]] = {
+ import c.universe._
+
+
+ mill.moduledefs.Cacher.impl0[Persistent[T]](c)(
+ reify(
+ new Persistent[T](
+ Applicative.impl[Task, T, mill.util.Ctx](c)(t).splice,
+ ctx.splice,
+ rw.splice
+ )
+ )
+ )
+ }
+
+ type TT[+X] = Task[X]
+ def makeT[X](inputs0: Seq[TT[_]], evaluate0: mill.util.Ctx => Result[X]) = new Task[X] {
+ val inputs = inputs0
+ def evaluate(x: mill.util.Ctx) = evaluate0(x)
+ }
+
+ def underlying[A](v: Task[A]) = v
+ def mapCtx[A, B](t: Task[A])(f: (A, mill.util.Ctx) => Result[B]) = t.mapDest(f)
+ def zip() = new Task.Task0(())
+ def zip[A](a: Task[A]) = a.map(Tuple1(_))
+ def zip[A, B](a: Task[A], b: Task[B]) = a.zip(b)
+}
+
+case class Caller[A](value: A)
+object Caller {
+ def apply[T]()(implicit c: Caller[T]) = c.value
+ implicit def generate[T]: Caller[T] = macro impl[T]
+ def impl[T: c.WeakTypeTag](c: Context): c.Tree = {
+ import c.universe._
+ q"new _root_.mill.define.Caller[${weakTypeOf[T]}](this)"
+ }
+}
+abstract class NamedTaskImpl[+T](ctx0: mill.define.Ctx, t: Task[T]) extends NamedTask[T]{
+ def evaluate(args: mill.util.Ctx) = args[T](0)
+ val ctx = ctx0.copy(segments = ctx0.segments ++ Seq(ctx0.segment))
+ val inputs = Seq(t)
+}
+
+class TargetImpl[+T](t: Task[T],
+ ctx0: mill.define.Ctx,
+ val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T] {
+}
+
+class Command[+T](t: Task[T],
+ ctx0: mill.define.Ctx,
+ val writer: W[_],
+ val cls: Class[_],
+ val overrides: Int) extends NamedTaskImpl[T](ctx0, t) {
+ override def asCommand = Some(this)
+}
+
+class Worker[+T](t: Task[T], ctx0: mill.define.Ctx) extends NamedTaskImpl[T](ctx0, t) {
+ override def flushDest = false
+ override def asWorker = Some(this)
+}
+class Persistent[+T](t: Task[T],
+ ctx0: mill.define.Ctx,
+ readWrite: RW[_])
+ extends TargetImpl[T](t, ctx0, readWrite) {
+
+ override def flushDest = false
+}
+class Input[T](t: Task[T],
+ ctx0: mill.define.Ctx,
+ val readWrite: RW[_]) extends NamedTaskImpl[T](ctx0, t) with Target[T]{
+ override def sideHash = util.Random.nextInt()
+}
+class Sources(t: Task[Seq[PathRef]],
+ ctx0: mill.define.Ctx) extends Input[Seq[PathRef]](
+ t,
+ ctx0,
+ RW.join(
+ upickle.default.SeqLikeReader[Seq, PathRef],
+ upickle.default.SeqLikeWriter[Seq, PathRef]
+ )
+)
+object Task {
+
+ class Task0[T](t: T) extends Task[T]{
+ lazy val t0 = t
+ val inputs = Nil
+ def evaluate(args: mill.util.Ctx) = t0
+ }
+
+ abstract class Ops[+T]{ this: Task[T] =>
+ def map[V](f: T => V) = new Task.Mapped(this, f)
+ def mapDest[V](f: (T, mill.util.Ctx) => Result[V]) = new Task.MappedDest(this, f)
+
+ def filter(f: T => Boolean) = this
+ def withFilter(f: T => Boolean) = this
+ def zip[V](other: Task[V]) = new Task.Zipped(this, other)
+
+ }
+
+ def traverse[T, V](source: Seq[T])(f: T => Task[V]) = {
+ new Sequence[V](source.map(f))
+ }
+ def sequence[T](source: Seq[Task[T]]) = new Sequence[T](source)
+
+ class Sequence[+T](inputs0: Seq[Task[T]]) extends Task[Seq[T]]{
+ val inputs = inputs0
+ def evaluate(args: mill.util.Ctx) = {
+ for (i <- 0 until args.length)
+ yield args(i).asInstanceOf[T]
+ }
+
+ }
+ class Mapped[+T, +V](source: Task[T], f: T => V) extends Task[V]{
+ def evaluate(args: mill.util.Ctx) = f(args(0))
+ val inputs = List(source)
+ }
+ class MappedDest[+T, +V](source: Task[T], f: (T, mill.util.Ctx) => Result[V]) extends Task[V]{
+ def evaluate(args: mill.util.Ctx) = f(args(0), args)
+ val inputs = List(source)
+ }
+ class Zipped[+T, +V](source1: Task[T], source2: Task[V]) extends Task[(T, V)]{
+ def evaluate(args: mill.util.Ctx) = (args(0), args(1))
+ val inputs = List(source1, source2)
+ }
+}
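
A minimal sketch of how these nodes compose into a graph. Evaluation itself needs a mill.util.Ctx, so this only inspects structure; not part of the commit, assumes mill-core on the classpath:

import mill.define.Task

object TaskGraphDemo {
  def main(args: Array[String]): Unit = {
    val a   = new Task.Task0(1)
    val b   = new Task.Task0(2)
    val sum = a.zip(b).map { case (x, y) => x + y }

    println(sum.inputs.length)             // 1: the intermediate Zipped node
    println(sum.inputs.head.inputs.length) // 2: the two Task0 leaves
  }
}
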
diff --git a/main/core/src/mill/eval/Evaluator.scala b/main/core/src/mill/eval/Evaluator.scala
new file mode 100644
index 00000000..34e1cf6f
--- /dev/null
+++ b/main/core/src/mill/eval/Evaluator.scala
@@ -0,0 +1,445 @@
+package mill.eval
+
+import java.net.URLClassLoader
+
+import scala.collection.JavaConverters._
+
+import mill.util.Router.EntryPoint
+import ammonite.ops._
+import ammonite.runtime.SpecialClassLoader
+import mill.define.{Ctx => _, _}
+import mill.eval.Result.OuterStack
+import mill.util
+import mill.util._
+import mill.util.Strict.Agg
+import upickle.Js
+
+import scala.collection.mutable
+import scala.util.control.NonFatal
+case class Labelled[T](task: NamedTask[T],
+ segments: Segments){
+ def format = task match{
+ case t: Target[T] => Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]])
+ case _ => None
+ }
+ def writer = task match{
+ case t: mill.define.Command[T] => Some(t.writer.asInstanceOf[upickle.default.Writer[T]])
+ case t: Target[T] => Some(t.readWrite.asInstanceOf[upickle.default.ReadWriter[T]])
+ case _ => None
+ }
+}
+case class Evaluator[T](home: Path,
+ outPath: Path,
+ externalOutPath: Path,
+ rootModule: mill.define.BaseModule,
+ log: Logger,
+ classLoaderSig: Seq[(Either[String, Path], Long)] = Evaluator.classLoaderSig,
+ workerCache: mutable.Map[Segments, (Int, Any)] = mutable.Map.empty,
+ env : Map[String, String] = Evaluator.defaultEnv){
+ val classLoaderSignHash = classLoaderSig.hashCode()
+ def evaluate(goals: Agg[Task[_]]): Evaluator.Results = {
+ mkdir(outPath)
+
+ val (sortedGroups, transitive) = Evaluator.plan(rootModule, goals)
+
+ val evaluated = new Agg.Mutable[Task[_]]
+ val results = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]]
+
+ val timings = mutable.ArrayBuffer.empty[(Either[Task[_], Labelled[_]], Int, Boolean)]
+ for (((terminal, group), i) <- sortedGroups.items().zipWithIndex){
+ val startTime = System.currentTimeMillis()
+ // Increment the counter message by 1 to go from 1/10 to 10/10 instead of 0/10 to 9/10
+ val counterMsg = (i+1) + "/" + sortedGroups.keyCount
+ val (newResults, newEvaluated, cached) = evaluateGroupCached(
+ terminal,
+ group,
+ results,
+ counterMsg
+ )
+
+ for(ev <- newEvaluated){
+ evaluated.append(ev)
+ }
+ for((k, v) <- newResults) {
+ results.put(k, v)
+ }
+ val endTime = System.currentTimeMillis()
+
+ timings.append((terminal, (endTime - startTime).toInt, cached))
+ }
+
+ val failing = new util.MultiBiMap.Mutable[Either[Task[_], Labelled[_]], Result.Failing[_]]
+ for((k, vs) <- sortedGroups.items()){
+ failing.addAll(
+ k,
+ vs.items.flatMap(results.get).collect{case f: Result.Failing[_] => f.map(_._1)}
+ )
+ }
+ write.over(
+ outPath / "mill-profile.json",
+ upickle.default.write(
+ timings .map{case (k, v, b) =>
+ Evaluator.Timing(k.fold(_ => null, s => s.segments.render), v, b)
+ },
+ indent = 4
+ )
+ )
+ Evaluator.Results(
+ goals.indexed.map(results(_).map(_._1)),
+ evaluated,
+ transitive,
+ failing,
+ timings,
+ results.map{case (k, v) => (k, v.map(_._1))}
+ )
+ }
+
+
+ def evaluateGroupCached(terminal: Either[Task[_], Labelled[_]],
+ group: Agg[Task[_]],
+ results: collection.Map[Task[_], Result[(Any, Int)]],
+ counterMsg: String): (collection.Map[Task[_], Result[(Any, Int)]], Seq[Task[_]], Boolean) = {
+
+ val externalInputsHash = scala.util.hashing.MurmurHash3.orderedHash(
+ group.items.flatMap(_.inputs).filter(!group.contains(_))
+ .flatMap(results(_).asSuccess.map(_.value._2))
+ )
+
+ val sideHashes = scala.util.hashing.MurmurHash3.orderedHash(
+ group.toIterator.map(_.sideHash)
+ )
+
+ val inputsHash = externalInputsHash + sideHashes + classLoaderSignHash
+
+ terminal match{
+ case Left(task) =>
+ val (newResults, newEvaluated) = evaluateGroup(
+ group,
+ results,
+ inputsHash,
+ paths = None,
+ maybeTargetLabel = None,
+ counterMsg = counterMsg
+ )
+ (newResults, newEvaluated, false)
+ case Right(labelledNamedTask) =>
+
+ val out = if (!labelledNamedTask.task.ctx.external) outPath
+ else externalOutPath
+
+ val paths = Evaluator.resolveDestPaths(
+ out,
+ destSegments(labelledNamedTask)
+ )
+
+ if (!exists(paths.out)) mkdir(paths.out)
+ val cached = for{
+ cached <-
+ try Some(upickle.default.read[Evaluator.Cached](paths.meta.toIO))
+ catch {case e: Throwable => None}
+
+ if cached.inputsHash == inputsHash
+ reader <- labelledNamedTask.format
+ parsed <-
+ try Some(upickle.default.read(cached.value)(reader))
+ catch {case e: Throwable => None}
+ } yield (parsed, cached.valueHash)
+
+ val workerCached = labelledNamedTask.task.asWorker
+ .flatMap{w => workerCache.get(w.ctx.segments)}
+ .collect{case (`inputsHash`, v) => v}
+
+ workerCached.map((_, inputsHash)) orElse cached match{
+ case Some((v, hashCode)) =>
+ val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]]
+ newResults(labelledNamedTask.task) = Result.Success((v, hashCode))
+
+ (newResults, Nil, true)
+
+ case _ =>
+
+ val Seq(first, rest @_*) = labelledNamedTask.segments.value
+ val msgParts = Seq(first.asInstanceOf[Segment.Label].value) ++ rest.map{
+ case Segment.Label(s) => "." + s
+ case Segment.Cross(s) => "[" + s.mkString(",") + "]"
+ }
+
+ if (labelledNamedTask.task.flushDest) rm(paths.dest)
+
+ val (newResults, newEvaluated) = evaluateGroup(
+ group,
+ results,
+ inputsHash,
+ paths = Some(paths),
+ maybeTargetLabel = Some(msgParts.mkString),
+ counterMsg = counterMsg
+ )
+
+ newResults(labelledNamedTask.task) match{
+ case Result.Failure(_, Some((v, hashCode))) =>
+ handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask)
+
+ case Result.Success((v, hashCode)) =>
+ handleTaskResult(v, v.##, paths.meta, inputsHash, labelledNamedTask)
+
+ case _ =>
+ // Wipe out any cached meta.json file that exists, so
+ // a following run won't look at the cached metadata file and
+ // assume it's associated with the possibly-borked state of the
+ // destPath after an evaluation failure.
+ rm(paths.meta)
+ }
+
+ (newResults, newEvaluated, false)
+ }
+ }
+ }
+
+ def destSegments(labelledTask : Labelled[_]) : Segments = {
+ import labelledTask.task.ctx
+ if (ctx.foreign) {
+ val prefix = "foreign-modules"
+ // Computing a path in "out" that uniquely reflects the location
+ // of the foreign module relative to the current build.
+ val relative = labelledTask.task
+ .ctx.millSourcePath
+ .relativeTo(rootModule.millSourcePath)
+ // Encoding the number of `/..`
+ val ups = if (relative.ups > 0) Segments.labels(s"up-${relative.ups}")
+ else Segments()
+ Segments.labels(prefix)
+ .++(ups)
+ .++(Segments.labels(relative.segments: _*))
+ .++(labelledTask.segments.last)
+ } else labelledTask.segments
+ }
+
+
+ def handleTaskResult(v: Any,
+ hashCode: Int,
+ metaPath: Path,
+ inputsHash: Int,
+ labelledNamedTask: Labelled[_]) = {
+ labelledNamedTask.task.asWorker match{
+ case Some(w) => workerCache(w.ctx.segments) = (inputsHash, v)
+ case None =>
+ val terminalResult = labelledNamedTask
+ .writer
+ .asInstanceOf[Option[upickle.default.Writer[Any]]]
+ .map(w => upickle.default.writeJs(v)(w) -> v)
+
+ for((json, v) <- terminalResult){
+ write.over(
+ metaPath,
+ upickle.default.write(
+ Evaluator.Cached(json, hashCode, inputsHash),
+ indent = 4
+ )
+ )
+ }
+ }
+ }
+
+ def evaluateGroup(group: Agg[Task[_]],
+ results: collection.Map[Task[_], Result[(Any, Int)]],
+ inputsHash: Int,
+ paths: Option[Evaluator.Paths],
+ maybeTargetLabel: Option[String],
+ counterMsg: String) = {
+
+
+ val newEvaluated = mutable.Buffer.empty[Task[_]]
+ val newResults = mutable.LinkedHashMap.empty[Task[_], Result[(Any, Int)]]
+
+ val nonEvaluatedTargets = group.indexed.filterNot(results.contains)
+
+ maybeTargetLabel.foreach { targetLabel =>
+ val inputResults = for {
+ target <- nonEvaluatedTargets
+ item <- target.inputs.filterNot(group.contains)
+ } yield results(item).map(_._1)
+
+ val logRun = inputResults.forall(_.isInstanceOf[Result.Success[_]])
+
+ if(logRun) { log.ticker(s"[$counterMsg] $targetLabel ") }
+ }
+
+ val multiLogger = resolveLogger(paths.map(_.log))
+ var usedDest = Option.empty[(Task[_], Array[StackTraceElement])]
+ for (task <- nonEvaluatedTargets) {
+ newEvaluated.append(task)
+ val targetInputValues = task.inputs
+ .map(x => newResults.getOrElse(x, results(x)))
+ .collect{ case Result.Success((v, hashCode)) => v }
+
+ val res =
+ if (targetInputValues.length != task.inputs.length) Result.Skipped
+ else {
+ val args = new Ctx(
+ targetInputValues.toArray[Any],
+ () => usedDest match{
+ case Some((earlierTask, earlierStack)) if earlierTask != task =>
+ val inner = new Exception("Earlier usage of `dest`")
+ inner.setStackTrace(earlierStack)
+ throw new Exception(
+ "`dest` can only be used in one place within each Target[T]",
+ inner
+ )
+ case _ =>
+
+
+ paths match{
+ case Some(dest) =>
+ if (usedDest.isEmpty) mkdir(dest.dest)
+ usedDest = Some((task, new Exception().getStackTrace))
+ dest.dest
+ case None =>
+ throw new Exception("No `dest` folder available here")
+ }
+ },
+ multiLogger,
+ home,
+ env
+ )
+
+ val out = System.out
+ val in = System.in
+ val err = System.err
+ try{
+ System.setIn(multiLogger.inStream)
+ System.setErr(multiLogger.errorStream)
+ System.setOut(multiLogger.outputStream)
+ Console.withIn(multiLogger.inStream){
+ Console.withOut(multiLogger.outputStream){
+ Console.withErr(multiLogger.errorStream){
+ try task.evaluate(args)
+ catch { case NonFatal(e) =>
+ Result.Exception(e, new OuterStack(new Exception().getStackTrace))
+ }
+ }
+ }
+ }
+ }finally{
+ System.setErr(err)
+ System.setOut(out)
+ System.setIn(in)
+ }
+ }
+
+ newResults(task) = for(v <- res) yield {
+ (v,
+ if (task.isInstanceOf[Worker[_]]) inputsHash
+ else v.##
+ )
+ }
+ }
+
+ multiLogger.close()
+
+ (newResults, newEvaluated)
+ }
+
+ def resolveLogger(logPath: Option[Path]): Logger = logPath match{
+ case None => log
+ case Some(path) => MultiLogger(log.colored, log, FileLogger(log.colored, path))
+ }
+}
+
+
+object Evaluator{
+ case class Cached(value: Js.Value,
+ valueHash: Int,
+ inputsHash: Int)
+ object Cached{
+ implicit val rw: upickle.default.ReadWriter[Cached] = upickle.default.macroRW
+ }
+ case class State(rootModule: mill.define.BaseModule,
+ classLoaderSig: Seq[(Either[String, Path], Long)],
+ workerCache: mutable.Map[Segments, (Int, Any)],
+ watched: Seq[(Path, Long)])
+ // This needs to be a ThreadLocal because we need to pass it into the body of
+ // the TargetScopt#read call, which does not accept additional parameters.
+ // Until we migrate our CLI parsing off of Scopt (so we can pass the BaseModule
+ // in directly) we are forced to pass it in via a ThreadLocal
+ val currentEvaluator = new ThreadLocal[mill.eval.Evaluator[_]]
+
+ val defaultEnv: Map[String, String] = System.getenv().asScala.toMap
+
+ case class Paths(out: Path,
+ dest: Path,
+ meta: Path,
+ log: Path)
+ def makeSegmentStrings(segments: Segments) = segments.value.flatMap{
+ case Segment.Label(s) => Seq(s)
+ case Segment.Cross(values) => values.map(_.toString)
+ }
+ def resolveDestPaths(workspacePath: Path, segments: Segments): Paths = {
+ val segmentStrings = makeSegmentStrings(segments)
+ val targetPath = workspacePath / segmentStrings
+ Paths(targetPath, targetPath / 'dest, targetPath / "meta.json", targetPath / 'log)
+ }
+
+ // check if the build itself has changed
+ def classLoaderSig = Thread.currentThread().getContextClassLoader match {
+ case scl: SpecialClassLoader => scl.classpathSignature
+ case ucl: URLClassLoader =>
+ SpecialClassLoader.initialClasspathSignature(ucl)
+ case _ => Nil
+ }
+ case class Timing(label: String,
+ millis: Int,
+ cached: Boolean)
+ object Timing{
+ implicit val readWrite: upickle.default.ReadWriter[Timing] = upickle.default.macroRW
+ }
+ case class Results(rawValues: Seq[Result[Any]],
+ evaluated: Agg[Task[_]],
+ transitive: Agg[Task[_]],
+ failing: MultiBiMap[Either[Task[_], Labelled[_]], Result.Failing[_]],
+ timings: IndexedSeq[(Either[Task[_], Labelled[_]], Int, Boolean)],
+ results: collection.Map[Task[_], Result[Any]]){
+ def values = rawValues.collect{case Result.Success(v) => v}
+ }
+ def plan(rootModule: BaseModule, goals: Agg[Task[_]]) = {
+ val transitive = Graph.transitiveTargets(goals)
+ val topoSorted = Graph.topoSorted(transitive)
+ val sortedGroups = Graph.groupAroundImportantTargets(topoSorted){
+ case t: NamedTask[Any] =>
+ val segments = t.ctx.segments
+ val finalTaskOverrides = t match{
+ case t: Target[_] =>
+ rootModule.millInternal.segmentsToTargets.get(segments).fold(0)(_.ctx.overrides)
+
+ case c: mill.define.Command[_] =>
+ def findMatching(cls: Class[_]): Option[Seq[(Int, EntryPoint[_])]] = {
+ rootModule.millDiscover.value.get(cls) match{
+ case Some(v) => Some(v)
+ case None =>
+ cls.getSuperclass match{
+ case null => None
+ case superCls => findMatching(superCls)
+ }
+ }
+ }
+
+ findMatching(c.cls) match{
+ case Some(v) =>
+ v.find(_._2.name == c.ctx.segment.pathSegments.head).get._1
+ // For now we don't properly support overrides for external modules
+ // that do not appear in the Evaluator's main Discovered listing
+ case None => 0
+ }
+
+ case c: mill.define.Worker[_] => 0
+ }
+
+ val additional =
+ if (finalTaskOverrides == t.ctx.overrides) Nil
+ else Seq(Segment.Label("overriden")) ++ t.ctx.enclosing.split("\\.|#| ").map(Segment.Label)
+
+ Right(Labelled(t, segments ++ additional))
+ case t if goals.contains(t) => Left(t)
+ }
+ (sortedGroups, transitive)
+ }
+}
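
The layout of the out/ directory follows directly from resolveDestPaths. A small sketch, not part of the commit, assuming ammonite-ops and mill-core on the classpath:

import ammonite.ops._
import mill.define.{Segment, Segments}
import mill.eval.Evaluator

object DestPathsDemo {
  def main(args: Array[String]): Unit = {
    val paths = Evaluator.resolveDestPaths(
      pwd / 'out,
      Segments(Segment.Label("foo"), Segment.Cross(Seq("2.12.6")), Segment.Label("compile"))
    )
    println(paths.dest) // something like <pwd>/out/foo/2.12.6/compile/dest
    println(paths.meta) // something like <pwd>/out/foo/2.12.6/compile/meta.json
  }
}
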
diff --git a/main/core/src/mill/eval/PathRef.scala b/main/core/src/mill/eval/PathRef.scala
new file mode 100644
index 00000000..118d98fe
--- /dev/null
+++ b/main/core/src/mill/eval/PathRef.scala
@@ -0,0 +1,82 @@
+package mill.eval
+
+import java.io.IOException
+import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.{FileVisitResult, FileVisitor}
+import java.nio.{file => jnio}
+import java.security.{DigestOutputStream, MessageDigest}
+
+import upickle.default.{ReadWriter => RW}
+import ammonite.ops.Path
+import mill.util.{DummyOutputStream, IO, JsonFormatters}
+
+
+/**
+ * A wrapper around `ammonite.ops.Path` that calculates its hashcode based
+ * on the contents of the filesystem underneath it. Used to ensure filesystem
+ * changes can bust caches which are keyed off hashcodes.
+ */
+case class PathRef(path: ammonite.ops.Path, quick: Boolean, sig: Int){
+ override def hashCode() = sig
+}
+
+object PathRef{
+ def apply(path: ammonite.ops.Path, quick: Boolean = false) = {
+ val sig = {
+ val digest = MessageDigest.getInstance("MD5")
+ val digestOut = new DigestOutputStream(DummyOutputStream, digest)
+ jnio.Files.walkFileTree(
+ path.toNIO,
+ java.util.EnumSet.of(jnio.FileVisitOption.FOLLOW_LINKS),
+ Integer.MAX_VALUE,
+ new FileVisitor[jnio.Path] {
+ def preVisitDirectory(dir: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(dir.toAbsolutePath.toString.getBytes)
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFile(file: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(file.toAbsolutePath.toString.getBytes)
+ if (quick){
+ val value = (path.mtime.toMillis, path.size).hashCode()
+ digest.update((value >>> 24).toByte)
+ digest.update((value >>> 16).toByte)
+ digest.update((value >>> 8).toByte)
+ digest.update(value.toByte)
+ }else {
+ val is = jnio.Files.newInputStream(file)
+ IO.stream(is, digestOut)
+ is.close()
+ }
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFileFailed(file: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ def postVisitDirectory(dir: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ }
+ )
+
+ java.util.Arrays.hashCode(digest.digest())
+
+ }
+ new PathRef(path, quick, sig)
+ }
+
+ implicit def jsonFormatter: RW[PathRef] = upickle.default.readwriter[String].bimap[PathRef](
+ p => {
+ (if (p.quick) "qref" else "ref") + ":" +
+ String.format("%08x", p.sig: Integer) + ":" +
+ p.path.toString()
+ },
+ s => {
+ val Array(prefix, hex, path) = s.split(":", 3)
+ PathRef(
+ Path(path),
+ prefix match{ case "qref" => true case "ref" => false},
+ // Parsing to a long and casting to an int is the only way to make
+ // round-trip handling of negative numbers work =(
+ java.lang.Long.parseLong(hex, 16).toInt
+ )
+ }
+ )
+}
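
A quick sketch of the cache-busting behaviour described above; not part of the commit, assumes ammonite-ops on the classpath:

import ammonite.ops._
import mill.eval.PathRef

object PathRefDemo {
  def main(args: Array[String]): Unit = {
    val dir = Path(java.nio.file.Files.createTempDirectory("pathref-demo").toString)
    write(dir / "a.txt", "hello")
    val before = PathRef(dir)

    write.over(dir / "a.txt", "goodbye")
    val after = PathRef(dir)

    println(before.sig == after.sig) // false: changed contents change the signature
  }
}
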
diff --git a/main/core/src/mill/eval/Result.scala b/main/core/src/mill/eval/Result.scala
new file mode 100644
index 00000000..d0400599
--- /dev/null
+++ b/main/core/src/mill/eval/Result.scala
@@ -0,0 +1,36 @@
+package mill.eval
+
+sealed trait Result[+T]{
+ def map[V](f: T => V): Result[V]
+ def asSuccess: Option[Result.Success[T]] = None
+}
+object Result{
+ implicit def create[T](t: => T): Result[T] = {
+ try Success(t)
+ catch { case e: Throwable => Exception(e, new OuterStack(new java.lang.Exception().getStackTrace)) }
+ }
+ case class Success[+T](value: T) extends Result[T]{
+ def map[V](f: T => V) = Result.Success(f(value))
+ override def asSuccess = Some(this)
+ }
+ case object Skipped extends Result[Nothing]{
+ def map[V](f: Nothing => V) = this
+ }
+ sealed trait Failing[+T] extends Result[T]{
+ def map[V](f: T => V): Failing[V]
+ }
+ case class Failure[T](msg: String, value: Option[T] = None) extends Failing[T]{
+ def map[V](f: T => V) = Result.Failure(msg, value.map(f(_)))
+ }
+ case class Exception(throwable: Throwable, outerStack: OuterStack) extends Failing[Nothing]{
+ def map[V](f: Nothing => V) = this
+ }
+ class OuterStack(val value: Seq[StackTraceElement]){
+ override def hashCode() = value.hashCode()
+
+ override def equals(obj: scala.Any) = obj match{
+ case o: OuterStack => value.equals(o.value)
+ case _ => false
+ }
+ }
+} \ No newline at end of file
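
The implicit `create` above is what lets target bodies simply throw: the exception is captured as a Result.Exception rather than escaping. A small sketch, not part of the commit:

import mill.eval.Result

object ResultDemo {
  def main(args: Array[String]): Unit = {
    val ok: Result[Int]   = Result.create(1 + 1)
    val boom: Result[Int] = Result.create(throw new RuntimeException("nope"))

    println(ok.map(_ * 10))                      // Success(20)
    println(boom.isInstanceOf[Result.Exception]) // true: the throwable was captured
  }
}
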
diff --git a/main/core/src/mill/eval/Tarjans.scala b/main/core/src/mill/eval/Tarjans.scala
new file mode 100644
index 00000000..ade335a9
--- /dev/null
+++ b/main/core/src/mill/eval/Tarjans.scala
@@ -0,0 +1,51 @@
+package mill.eval
+
+import scala.collection.mutable
+
+// Adapted from
+// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java
+object Tarjans {
+ def apply(graph0: TraversableOnce[TraversableOnce[Int]]): Seq[Seq[Int]] = {
+ val graph = graph0.map(_.toArray).toArray
+ val n = graph.length
+ val visited = new Array[Boolean](n)
+ val stack = mutable.ArrayBuffer.empty[Integer]
+ var time = 0
+ val lowlink = new Array[Int](n)
+ val components = mutable.ArrayBuffer.empty[Seq[Int]]
+
+
+ for (u <- 0 until n) {
+ if (!visited(u)) dfs(u)
+ }
+
+ def dfs(u: Int): Unit = {
+ lowlink(u) = time
+ time += 1
+ visited(u) = true
+ stack.append(u)
+ var isComponentRoot = true
+ for (v <- graph(u)) {
+ if (!visited(v)) dfs(v)
+ if (lowlink(u) > lowlink(v)) {
+ lowlink(u) = lowlink(v)
+ isComponentRoot = false
+ }
+ }
+ if (isComponentRoot) {
+ val component = mutable.Buffer.empty[Int]
+
+ var done = false
+ while (!done) {
+ val x = stack.last
+ stack.remove(stack.length - 1)
+ component.append(x)
+ lowlink(x) = Integer.MAX_VALUE
+ if (x == u) done = true
+ }
+ components.append(component)
+ }
+ }
+ components
+ }
+}
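
A worked example of the algorithm on a four-node graph (not part of the commit; assumes mill-core on the classpath):

import mill.eval.Tarjans

object TarjansDemo {
  def main(args: Array[String]): Unit = {
    // Adjacency lists: 0 -> 1, 1 -> 2, 2 -> 1 and 2 -> 3, 3 -> nothing
    val graph = Seq(Seq(1), Seq(2), Seq(1, 3), Seq())
    println(Tarjans(graph))
    // The 1-2 cycle collapses into a single component; components come out
    // sinks-first: [3], [2, 1], [0]
  }
}
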
diff --git a/main/core/src/mill/util/AggWrapper.scala b/main/core/src/mill/util/AggWrapper.scala
new file mode 100644
index 00000000..6c107875
--- /dev/null
+++ b/main/core/src/mill/util/AggWrapper.scala
@@ -0,0 +1,119 @@
+package mill.util
+
+
+
+import scala.collection.mutable
+object Strict extends AggWrapper(true)
+object Loose extends AggWrapper(false)
+sealed class AggWrapper(strictUniqueness: Boolean){
+ /**
+ * A collection with enforced uniqueness, fast contains and deterministic
+ * ordering. Raises an exception if a duplicate is found; call
+ * `toSeq.distinct` if you explicitly want to make it swallow duplicates
+ */
+ trait Agg[V] extends TraversableOnce[V]{
+ def contains(v: V): Boolean
+ def items: Iterator[V]
+ def indexed: IndexedSeq[V]
+ def flatMap[T](f: V => TraversableOnce[T]): Agg[T]
+ def map[T](f: V => T): Agg[T]
+ def filter(f: V => Boolean): Agg[V]
+ def withFilter(f: V => Boolean): Agg[V]
+ def collect[T](f: PartialFunction[V, T]): Agg[T]
+ def zipWithIndex: Agg[(V, Int)]
+ def reverse: Agg[V]
+ def zip[T](other: Agg[T]): Agg[(V, T)]
+ def ++[T >: V](other: TraversableOnce[T]): Agg[T]
+ def length: Int
+ }
+
+ object Agg{
+ def empty[V]: Agg[V] = new Agg.Mutable[V]
+ implicit def jsonFormat[T: upickle.default.ReadWriter]: upickle.default.ReadWriter[Agg[T]] =
+ upickle.default.readwriter[Seq[T]].bimap[Agg[T]](
+ _.toList,
+ Agg.from(_)
+ )
+
+ def apply[V](items: V*) = from(items)
+
+ implicit def from[V](items: TraversableOnce[V]): Agg[V] = {
+ val set = new Agg.Mutable[V]()
+ items.foreach(set.append)
+ set
+ }
+
+
+ class Mutable[V]() extends Agg[V]{
+
+ private[this] val set0 = mutable.LinkedHashSet.empty[V]
+ def contains(v: V) = set0.contains(v)
+ def append(v: V) = if (!contains(v)){
+ set0.add(v)
+
+ }else if (strictUniqueness){
+ throw new Exception("Duplicated item inserted into OrderedSet: " + v)
+ }
+ def appendAll(vs: Seq[V]) = vs.foreach(append)
+ def items = set0.iterator
+ def indexed: IndexedSeq[V] = items.toIndexedSeq
+ def set: collection.Set[V] = set0
+
+ def map[T](f: V => T): Agg[T] = {
+ val output = new Agg.Mutable[T]
+ for(i <- items) output.append(f(i))
+ output
+ }
+ def flatMap[T](f: V => TraversableOnce[T]): Agg[T] = {
+ val output = new Agg.Mutable[T]
+ for(i <- items) for(i0 <- f(i)) output.append(i0)
+ output
+ }
+ def filter(f: V => Boolean): Agg[V] = {
+ val output = new Agg.Mutable[V]
+ for(i <- items) if (f(i)) output.append(i)
+ output
+ }
+ def withFilter(f: V => Boolean): Agg[V] = filter(f)
+
+ def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x))
+
+ def zipWithIndex = {
+ var i = 0
+ this.map{ x =>
+ i += 1
+ (x, i-1)
+ }
+ }
+
+ def reverse = Agg.from(indexed.reverseIterator)
+
+ def zip[T](other: Agg[T]) = Agg.from(items.zip(other.items))
+ def ++[T >: V](other: TraversableOnce[T]) = Agg.from(items ++ other)
+ def length: Int = set0.size
+
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = items.isTraversableAgain
+ def toIterator: Iterator[V] = items.toIterator
+ def toStream: Stream[V] = items.toStream
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: V](xs: Array[B], start: Int,len: Int): Unit = items.copyToArray(xs, start, len)
+ def exists(p: V => Boolean): Boolean = items.exists(p)
+ def find(p: V => Boolean): Option[V] = items.find(p)
+ def forall(p: V => Boolean): Boolean = items.forall(p)
+ def foreach[U](f: V => U): Unit = items.foreach(f)
+ def hasDefiniteSize: Boolean = items.hasDefiniteSize
+ def isEmpty: Boolean = items.isEmpty
+ def seq: scala.collection.TraversableOnce[V] = items
+ def toTraversable: Traversable[V] = items.toTraversable
+
+ override def hashCode() = items.map(_.hashCode()).sum
+ override def equals(other: Any) = other match{
+ case s: Agg[_] => items.sameElements(s.items)
+ case _ => super.equals(other)
+ }
+ override def toString = items.mkString("Agg(", ", ", ")")
+ }
+ }
+}
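
The difference between the two wrappers in one sketch (not part of the commit; assumes mill-core on the classpath):

import mill.util.{Loose, Strict}

object AggDemo {
  def main(args: Array[String]): Unit = {
    println(Loose.Agg(1, 2, 2, 3).toList) // List(1, 2, 3): duplicate silently dropped

    try Strict.Agg(1, 2, 2, 3)
    catch {
      case e: Exception => println(e.getMessage) // Duplicated item inserted into OrderedSet: 2
    }
  }
}
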
diff --git a/main/core/src/mill/util/ClassLoader.scala b/main/core/src/mill/util/ClassLoader.scala
new file mode 100644
index 00000000..c0421a7b
--- /dev/null
+++ b/main/core/src/mill/util/ClassLoader.scala
@@ -0,0 +1,66 @@
+package mill.util
+
+import java.net.{URL, URLClassLoader}
+
+import ammonite.ops._
+import io.github.retronym.java9rtexport.Export
+
+object ClassLoader {
+
+ def create(urls: Seq[URL], parent: java.lang.ClassLoader)(
+ implicit ctx: Ctx.Home): URLClassLoader = {
+ new URLClassLoader(
+ makeUrls(urls).toArray,
+ refinePlatformParent(parent)
+ ) {
+ override def findClass(name: String): Class[_] = {
+ if (name.startsWith("com.sun.jna")) getClass.getClassLoader.loadClass(name)
+ else super.findClass(name)
+ }
+ }
+ }
+
+ def create(urls: Seq[URL],
+ parent: java.lang.ClassLoader,
+ customFindClass: String => Option[Class[_]])(
+ implicit ctx: Ctx.Home): URLClassLoader = {
+ new URLClassLoader(
+ makeUrls(urls).toArray,
+ refinePlatformParent(parent)
+ ) {
+ override def findClass(name: String): Class[_] = {
+ if (name.startsWith("com.sun.jna")) getClass.getClassLoader.loadClass(name)
+ else customFindClass(name).getOrElse(super.findClass(name))
+ }
+ }
+ }
+
+ /**
+ * Return `ClassLoader.getPlatformClassLoader` for Java 9 and above if the parent
+ * class loader is null; otherwise return the given parent class loader unchanged.
+ * More details: https://docs.oracle.com/javase/9/migrate/toc.htm#JSMIG-GUID-A868D0B9-026F-4D46-B979-901834343F9E
+ *
+ * The `ClassLoader.getPlatformClassLoader` call is made via runtime reflection because
+ * otherwise Mill itself could only be compiled with JDK 9 or above; we don't want to
+ * introduce that restriction yet.
+ */
+ private def refinePlatformParent(parent: java.lang.ClassLoader): ClassLoader = {
+ if (ammonite.util.Util.java9OrAbove) {
+ if (parent == null)
+ classOf[ClassLoader]
+ .getMethod("getPlatformClassLoader")
+ .invoke(null)
+ .asInstanceOf[ClassLoader]
+ else parent
+ } else {
+ parent
+ }
+ }
+
+ private def makeUrls(urls: Seq[URL])(implicit ctx: Ctx.Home): Seq[URL] = {
+ if (ammonite.util.Util.java9OrAbove) {
+ urls :+ Export.rtAt(ctx.home.toIO).toURI.toURL
+ } else {
+ urls
+ }
+ }
+}
diff --git a/main/core/src/mill/util/Ctx.scala b/main/core/src/mill/util/Ctx.scala
new file mode 100644
index 00000000..6c8b2afb
--- /dev/null
+++ b/main/core/src/mill/util/Ctx.scala
@@ -0,0 +1,56 @@
+package mill.util
+
+import ammonite.ops.Path
+import mill.define.Applicative.ImplicitStub
+
+import scala.annotation.compileTimeOnly
+import scala.language.implicitConversions
+
+object Ctx{
+ @compileTimeOnly("Target.ctx() can only be used with a T{...} block")
+ @ImplicitStub
+ implicit def taskCtx: Ctx = ???
+
+ object Dest {
+ implicit def pathToCtx(path: Path): Dest = new Dest { def dest = path }
+ }
+ trait Dest{
+ def dest: Path
+ }
+ trait Log{
+ def log: Logger
+ }
+ trait Home{
+ def home: Path
+ }
+ trait Env{
+ def env: Map[String, String]
+ }
+ object Log{
+ implicit def logToCtx(l: Logger): Log = new Log { def log = l }
+ }
+ trait Args{
+ def args: IndexedSeq[_]
+ }
+
+ def defaultHome = ammonite.ops.home / ".mill" / "ammonite"
+
+}
+class Ctx(val args: IndexedSeq[_],
+ dest0: () => Path,
+ val log: Logger,
+ val home: Path,
+          val env: Map[String, String])
+ extends Ctx.Dest
+ with Ctx.Log
+ with Ctx.Args
+ with Ctx.Home
+ with Ctx.Env {
+
+ def dest = dest0()
+ def length = args.length
+ def apply[T](index: Int): T = {
+ if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
+ else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
+ }
+}
diff --git a/main/core/src/mill/util/EitherOps.scala b/main/core/src/mill/util/EitherOps.scala
new file mode 100644
index 00000000..da2552c8
--- /dev/null
+++ b/main/core/src/mill/util/EitherOps.scala
@@ -0,0 +1,18 @@
+package mill.util
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.mutable
+import scala.language.higherKinds
+
+object EitherOps {
+
+ // implementation similar to scala.concurrent.Future#sequence
+ def sequence[A, B, M[X] <: TraversableOnce[X]](in: M[Either[A, B]])(
+ implicit cbf: CanBuildFrom[M[Either[A, B]], B, M[B]]): Either[A, M[B]] = {
+ in.foldLeft[Either[A, mutable.Builder[B, M[B]]]](Right(cbf(in))) {
+ case (acc, el) =>
+ for (a <- acc; e <- el) yield a += e
+ }
+ .map(_.result())
+ }
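+
+  // Illustrative sketch (editor's note): turns a collection of Eithers into an Either
+  // of a collection, keeping the first Left encountered, e.g.
+  //
+  //   EitherOps.sequence(List(Right(1), Right(2)))       // Right(List(1, 2))
+  //   EitherOps.sequence(List(Right(1), Left("boom")))   // Left("boom")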
+}
diff --git a/main/core/src/mill/util/EnclosingClass.scala b/main/core/src/mill/util/EnclosingClass.scala
new file mode 100644
index 00000000..a69cc525
--- /dev/null
+++ b/main/core/src/mill/util/EnclosingClass.scala
@@ -0,0 +1,15 @@
+package mill.util
+
+import sourcecode.Compat.Context
+import language.experimental.macros
+case class EnclosingClass(value: Class[_])
+object EnclosingClass{
+ def apply()(implicit c: EnclosingClass) = c.value
+ implicit def generate: EnclosingClass = macro impl
+ def impl(c: Context): c.Tree = {
+ import c.universe._
+ val cls = c.internal.enclosingOwner.owner.asType.asClass
+ // q"new _root_.mill.define.EnclosingClass(classOf[$cls])"
+ q"new _root_.mill.util.EnclosingClass(this.getClass)"
+ }
+}
diff --git a/main/core/src/mill/util/IO.scala b/main/core/src/mill/util/IO.scala
new file mode 100644
index 00000000..833e52c7
--- /dev/null
+++ b/main/core/src/mill/util/IO.scala
@@ -0,0 +1,32 @@
+package mill.util
+
+import java.io.{InputStream, OutputStream}
+
+import scala.tools.nsc.interpreter.OutputStream
+
+/**
+ * Misc IO utilities; these should probably eventually be pushed upstream into
+ * ammonite-ops
+ */
+object IO {
+ def stream(src: InputStream, dest: OutputStream) = {
+ val buffer = new Array[Byte](4096)
+ while ( {
+ src.read(buffer) match {
+ case -1 => false
+ case n =>
+ dest.write(buffer, 0, n)
+ true
+ }
+ }) ()
+ }
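+
+  // Illustrative sketch (editor's note): copies every byte from `src` into `dest`, e.g.
+  //
+  //   val in  = new java.io.ByteArrayInputStream("hello".getBytes)
+  //   val out = new java.io.ByteArrayOutputStream()
+  //   IO.stream(in, out)   // `out` now contains the bytes "hello"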
+}
+
+import java.io.{ByteArrayInputStream, OutputStream}
+
+object DummyInputStream extends ByteArrayInputStream(Array())
+object DummyOutputStream extends OutputStream{
+ override def write(b: Int) = ()
+ override def write(b: Array[Byte]) = ()
+ override def write(b: Array[Byte], off: Int, len: Int) = ()
+}
diff --git a/main/core/src/mill/util/JsonFormatters.scala b/main/core/src/mill/util/JsonFormatters.scala
new file mode 100644
index 00000000..f92941f7
--- /dev/null
+++ b/main/core/src/mill/util/JsonFormatters.scala
@@ -0,0 +1,44 @@
+package mill.util
+
+import ammonite.ops.{Bytes, Path}
+import upickle.Js
+import upickle.default.{ReadWriter => RW}
+object JsonFormatters extends JsonFormatters
+trait JsonFormatters {
+ implicit val pathReadWrite: RW[ammonite.ops.Path] = upickle.default.readwriter[String]
+ .bimap[ammonite.ops.Path](
+ _.toString,
+ Path(_)
+ )
+
+ implicit val bytesReadWrite: RW[Bytes] = upickle.default.readwriter[String]
+ .bimap(
+ o => javax.xml.bind.DatatypeConverter.printBase64Binary(o.array),
+ str => new Bytes(javax.xml.bind.DatatypeConverter.parseBase64Binary(str))
+ )
+
+
+ implicit lazy val crFormat: RW[ammonite.ops.CommandResult] = upickle.default.macroRW
+
+ implicit lazy val modFormat: RW[coursier.Module] = upickle.default.macroRW
+  implicit lazy val depFormat: RW[coursier.Dependency] = upickle.default.macroRW
+ implicit lazy val attrFormat: RW[coursier.Attributes] = upickle.default.macroRW
+ implicit val stackTraceRW = upickle.default.readwriter[Js.Obj].bimap[StackTraceElement](
+ ste => Js.Obj(
+ "declaringClass" -> Js.Str(ste.getClassName),
+ "methodName" -> Js.Str(ste.getMethodName),
+ "fileName" -> Js.Str(ste.getFileName),
+ "lineNumber" -> Js.Num(ste.getLineNumber)
+ ),
+ {case json: Js.Obj =>
+ new StackTraceElement(
+ json("declaringClass").str.toString,
+ json("methodName").str.toString,
+ json("fileName").str.toString,
+ json("lineNumber").num.toInt
+ )
+ }
+ )
+
+
+}
diff --git a/main/core/src/mill/util/Logger.scala b/main/core/src/mill/util/Logger.scala
new file mode 100644
index 00000000..37ae8577
--- /dev/null
+++ b/main/core/src/mill/util/Logger.scala
@@ -0,0 +1,205 @@
+package mill.util
+
+import java.io._
+
+import ammonite.ops.{Path, rm}
+import ammonite.util.Colors
+
+
+/**
+ * The standard logging interface of the Mill build tool.
+ *
+ * Contains three primary logging methods, in order of increasing importance:
+ *
+ * - `ticker`: short-lived logging output where consecutive lines overwrite
+ * each other; useful for information which is transient and disposable
+ *
+ * - `info`: miscellaneous logging output which isn't part of the main output
+ * a user is looking for, but useful to provide context on what Mill is doing
+ *
+ * - `error`: logging output which represents problems the user should care
+ * about
+ *
+ * Also contains the two forwarded stdout and stderr streams, for code executed
+ * by Mill to use directly. Typically these correspond to the process's stdout and stderr,
+ * but when `show` is used both are forwarded to stderr and stdout is only
+ * used to display the final `show` output for easy piping.
+ */
+trait Logger {
+ def colored: Boolean
+ val errorStream: PrintStream
+ val outputStream: PrintStream
+ val inStream: InputStream
+ def info(s: String): Unit
+ def error(s: String): Unit
+ def ticker(s: String): Unit
+ def close(): Unit = ()
+}
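+
+// Illustrative usage sketch (editor's note, not from this commit): code running
+// under Mill calls the logger rather than printing directly, e.g.
+//
+//   def compileStep(log: Logger): Unit = {
+//     log.ticker("compiling 3 sources...")        // transient, overwritten in place
+//     log.info("using flags: -deprecation")
+//     log.error("found 2 warnings")
+//     log.outputStream.println("stdout of a forked process goes here")
+//   }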
+
+object DummyLogger extends Logger {
+ def colored = false
+ object errorStream extends PrintStream(_ => ())
+ object outputStream extends PrintStream(_ => ())
+ val inStream = new ByteArrayInputStream(Array())
+ def info(s: String) = ()
+ def error(s: String) = ()
+ def ticker(s: String) = ()
+}
+
+class CallbackStream(wrapped: OutputStream,
+ setPrintState0: PrintState => Unit) extends OutputStream{
+ def setPrintState(c: Char) = {
+ setPrintState0(
+ c match{
+ case '\n' => PrintState.Newline
+ case '\r' => PrintState.Newline
+ case _ => PrintState.Middle
+ }
+ )
+ }
+ override def write(b: Array[Byte]): Unit = {
+ if (b.nonEmpty) setPrintState(b(b.length-1).toChar)
+ wrapped.write(b)
+ }
+
+ override def write(b: Array[Byte], off: Int, len: Int): Unit = {
+ if (len != 0) setPrintState(b(off+len-1).toChar)
+ wrapped.write(b, off, len)
+ }
+
+ def write(b: Int) = {
+ setPrintState(b.toChar)
+ wrapped.write(b)
+ }
+}
+sealed trait PrintState
+object PrintState{
+ case object Ticker extends PrintState
+ case object Newline extends PrintState
+ case object Middle extends PrintState
+}
+case class PrintLogger(colored: Boolean,
+ disableTicker: Boolean,
+ colors: ammonite.util.Colors,
+ outStream: PrintStream,
+ infoStream: PrintStream,
+ errStream: PrintStream,
+ inStream: InputStream) extends Logger {
+
+ var printState: PrintState = PrintState.Newline
+
+ override val errorStream = new PrintStream(new CallbackStream(errStream, printState = _))
+ override val outputStream = new PrintStream(new CallbackStream(outStream, printState = _))
+
+
+ def info(s: String) = {
+ printState = PrintState.Newline
+ infoStream.println(colors.info()(s))
+ }
+ def error(s: String) = {
+ printState = PrintState.Newline
+ errStream.println(colors.error()(s))
+ }
+ def ticker(s: String) = {
+ if(!disableTicker) {
+ printState match{
+ case PrintState.Newline =>
+ infoStream.println(colors.info()(s))
+ case PrintState.Middle =>
+ infoStream.println()
+ infoStream.println(colors.info()(s))
+ case PrintState.Ticker =>
+ val p = new PrintWriter(infoStream)
+ val nav = new ammonite.terminal.AnsiNav(p)
+ nav.up(1)
+ nav.clearLine(2)
+ nav.left(9999)
+ p.flush()
+
+ infoStream.println(colors.info()(s))
+ }
+ printState = PrintState.Ticker
+ }
+ }
+}
+
+case class FileLogger(colored: Boolean, file: Path) extends Logger {
+ private[this] var outputStreamUsed: Boolean = false
+
+ lazy val outputStream = {
+ if (!outputStreamUsed) rm(file)
+ outputStreamUsed = true
+ new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath))
+ }
+
+ lazy val errorStream = {
+ if (!outputStreamUsed) rm(file)
+ outputStreamUsed = true
+ new PrintStream(new FileOutputStream(file.toIO.getAbsolutePath))
+ }
+
+ def info(s: String) = outputStream.println(s)
+ def error(s: String) = outputStream.println(s)
+ def ticker(s: String) = outputStream.println(s)
+ val inStream: InputStream = DummyInputStream
+ override def close() = {
+ if (outputStreamUsed)
+ outputStream.close()
+ }
+}
+
+
+
+class MultiStream(stream1: OutputStream, stream2: OutputStream) extends PrintStream(new OutputStream {
+ def write(b: Int): Unit = {
+ stream1.write(b)
+ stream2.write(b)
+ }
+ override def write(b: Array[Byte]): Unit = {
+ stream1.write(b)
+ stream2.write(b)
+ }
+ override def write(b: Array[Byte], off: Int, len: Int) = {
+ stream1.write(b, off, len)
+ stream2.write(b, off, len)
+ }
+ override def flush() = {
+ stream1.flush()
+ stream2.flush()
+ }
+ override def close() = {
+ stream1.close()
+ stream2.close()
+ }
+})
+
+case class MultiLogger(colored: Boolean, logger1: Logger, logger2: Logger) extends Logger {
+
+
+ lazy val outputStream: PrintStream = new MultiStream(logger1.outputStream, logger2.outputStream)
+
+ lazy val errorStream: PrintStream = new MultiStream(logger1.errorStream, logger2.errorStream)
+
+ lazy val inStream = Seq(logger1, logger2).collectFirst{case t: PrintLogger => t} match{
+ case Some(x) => x.inStream
+ case None => new ByteArrayInputStream(Array())
+ }
+
+ def info(s: String) = {
+ logger1.info(s)
+ logger2.info(s)
+ }
+ def error(s: String) = {
+ logger1.error(s)
+ logger2.error(s)
+ }
+ def ticker(s: String) = {
+ logger1.ticker(s)
+ logger2.ticker(s)
+ }
+
+ override def close() = {
+ logger1.close()
+ logger2.close()
+ }
+}
diff --git a/main/core/src/mill/util/MultiBiMap.scala b/main/core/src/mill/util/MultiBiMap.scala
new file mode 100644
index 00000000..2cb81944
--- /dev/null
+++ b/main/core/src/mill/util/MultiBiMap.scala
@@ -0,0 +1,55 @@
+package mill.util
+
+import scala.collection.mutable
+import Strict.Agg
+/**
+ * A map from keys to collections of values: you can assign multiple values
+ * to any particular key. Also allows lookups in both directions: what values
+ * are assigned to a key or what key a value is assigned to.
+ */
+trait MultiBiMap[K, V]{
+ def containsValue(v: V): Boolean
+ def lookupKey(k: K): Agg[V]
+ def lookupValue(v: V): K
+ def lookupValueOpt(v: V): Option[K]
+ def add(k: K, v: V): Unit
+ def removeAll(k: K): Agg[V]
+ def addAll(k: K, vs: TraversableOnce[V]): Unit
+ def keys(): Iterator[K]
+ def items(): Iterator[(K, Agg[V])]
+ def values(): Iterator[Agg[V]]
+ def keyCount: Int
+}
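+
+// Illustrative sketch (editor's note): the mutable implementation below keeps the
+// key->values and value->key indices in sync, e.g.
+//
+//   val m = new MultiBiMap.Mutable[String, Int]()
+//   m.add("evens", 2); m.add("evens", 4); m.add("odds", 3)
+//   m.lookupKey("evens")   // Agg(2, 4)
+//   m.lookupValue(3)       // "odds"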
+
+object MultiBiMap{
+ class Mutable[K, V]() extends MultiBiMap[K, V]{
+ private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K]
+ private[this] val keyToValues = mutable.LinkedHashMap.empty[K, Agg.Mutable[V]]
+ def containsValue(v: V) = valueToKey.contains(v)
+ def lookupKey(k: K) = keyToValues(k)
+ def lookupKeyOpt(k: K) = keyToValues.get(k)
+ def lookupValue(v: V) = valueToKey(v)
+ def lookupValueOpt(v: V) = valueToKey.get(v)
+ def add(k: K, v: V): Unit = {
+ valueToKey(v) = k
+ keyToValues.getOrElseUpdate(k, new Agg.Mutable[V]()).append(v)
+ }
+ def removeAll(k: K): Agg[V] = keyToValues.get(k) match {
+ case None => Agg()
+ case Some(vs) =>
+ vs.foreach(valueToKey.remove)
+
+ keyToValues.remove(k)
+ vs
+ }
+ def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _))
+
+ def keys() = keyToValues.keysIterator
+
+ def values() = keyToValues.valuesIterator
+
+ def items() = keyToValues.iterator
+
+ def keyCount = keyToValues.size
+ }
+}
diff --git a/main/core/src/mill/util/ParseArgs.scala b/main/core/src/mill/util/ParseArgs.scala
new file mode 100644
index 00000000..ae3b1685
--- /dev/null
+++ b/main/core/src/mill/util/ParseArgs.scala
@@ -0,0 +1,134 @@
+package mill.util
+
+import fastparse.all._
+import mill.define.{Segment, Segments}
+
+object ParseArgs {
+
+ def apply(scriptArgs: Seq[String],
+ multiSelect: Boolean): Either[String, (List[(Option[Segments], Segments)], Seq[String])] = {
+ val (selectors, args) = extractSelsAndArgs(scriptArgs, multiSelect)
+ for {
+ _ <- validateSelectors(selectors)
+ expandedSelectors <- EitherOps
+ .sequence(selectors.map(expandBraces))
+ .map(_.flatten)
+ selectors <- EitherOps.sequence(expandedSelectors.map(extractSegments))
+ } yield (selectors.toList, args)
+ }
+
+ def extractSelsAndArgs(scriptArgs: Seq[String],
+ multiSelect: Boolean): (Seq[String], Seq[String]) = {
+
+ if (multiSelect) {
+ val dd = scriptArgs.indexOf("--")
+ val selectors = if (dd == -1) scriptArgs else scriptArgs.take(dd)
+ val args = if (dd == -1) Seq.empty else scriptArgs.drop(dd + 1)
+
+ (selectors, args)
+ } else {
+ (scriptArgs.take(1), scriptArgs.drop(1))
+ }
+ }
+
+ private def validateSelectors(selectors: Seq[String]): Either[String, Unit] = {
+ if (selectors.isEmpty || selectors.exists(_.isEmpty))
+ Left("Selector cannot be empty")
+ else Right(())
+ }
+
+ def expandBraces(selectorString: String): Either[String, List[String]] = {
+ parseBraceExpansion(selectorString) match {
+ case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}")
+ case Parsed.Success(expanded, _) => Right(expanded.toList)
+ }
+ }
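+
+  // Illustrative sketch (editor's note): brace expansion behaves like the shell's, e.g.
+  //
+  //   expandBraces("foo.{bar,baz}.test")   // Right(List("foo.bar.test", "foo.baz.test"))
+  //   expandBraces("foo.bar")              // Right(List("foo.bar"))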
+
+ private sealed trait Fragment
+ private object Fragment {
+ case class Keep(value: String) extends Fragment
+ case class Expand(values: List[List[Fragment]]) extends Fragment
+
+ def unfold(fragments: List[Fragment]): Seq[String] = {
+ fragments match {
+ case head :: rest =>
+ val prefixes = head match {
+ case Keep(v) => Seq(v)
+ case Expand(Nil) => Seq("{}")
+ case Expand(List(vs)) => unfold(vs).map("{" + _ + "}")
+ case Expand(vss) => vss.flatMap(unfold)
+ }
+ for {
+ prefix <- prefixes
+ suffix <- unfold(rest)
+ } yield prefix + suffix
+
+ case Nil => Seq("")
+ }
+ }
+ }
+
+ private object BraceExpansionParser {
+ val plainChars =
+ P(CharsWhile(c => c != ',' && c != '{' && c != '}')).!.map(Fragment.Keep)
+
+ val toExpand: P[Fragment] =
+ P("{" ~ braceParser.rep(1).rep(sep = ",") ~ "}").map(
+ x => Fragment.Expand(x.toList.map(_.toList))
+ )
+
+ val braceParser = P(toExpand | plainChars)
+
+ val parser = P(braceParser.rep(1).rep(sep = ",") ~ End)
+ }
+
+ private def parseBraceExpansion(input: String) = {
+ def unfold(vss: List[Seq[String]]): Seq[String] = {
+ vss match {
+ case Nil => Seq("")
+ case head :: rest =>
+ for {
+ str <- head
+ r <- unfold(rest)
+ } yield
+ r match {
+ case "" => str
+ case _ => str + "," + r
+ }
+ }
+ }
+
+ BraceExpansionParser.parser
+ .map { vss =>
+ val stringss = vss.map(x => Fragment.unfold(x.toList)).toList
+ unfold(stringss)
+ }
+ .parse(input)
+ }
+
+ def extractSegments(selectorString: String): Either[String, (Option[Segments], Segments)] =
+ parseSelector(selectorString) match {
+ case f: Parsed.Failure => Left(s"Parsing exception ${f.msg}")
+ case Parsed.Success(selector, _) => Right(selector)
+ }
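+
+  // Illustrative sketch (editor's note): a selector string parses into an optional
+  // cross-build prefix plus a path of segments, e.g.
+  //
+  //   extractSegments("core.compile")
+  //   // Right((None, Segments(Segment.Label("core"), Segment.Label("compile"))))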
+
+ private val identChars = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9') ++ Seq('_', '-')
+ private val ident = P( CharsWhileIn(identChars) ).!
+
+ def isLegalIdentifier(identifier: String): Boolean =
+ (Start ~ ident ~ End).parse(identifier).isInstanceOf[Parsed.Success[_]]
+
+ private def parseSelector(input: String) = {
+ val ident2 = P( CharsWhileIn(identChars ++ ".") ).!
+ val segment = P( ident ).map( Segment.Label)
+ val crossSegment = P("[" ~ ident2.rep(1, sep = ",") ~ "]").map(Segment.Cross)
+ val simpleQuery = P(segment ~ ("." ~ segment | crossSegment).rep).map {
+ case (h, rest) => Segments(h :: rest.toList:_*)
+ }
+ val query = P( simpleQuery ~ ("/" ~/ simpleQuery).?).map{
+ case (q, None) => (None, q)
+ case (q, Some(q2)) => (Some(q), q2)
+ }
+ query.parse(input)
+ }
+}
diff --git a/main/core/src/mill/util/Router.scala b/main/core/src/mill/util/Router.scala
new file mode 100644
index 00000000..5dd3c947
--- /dev/null
+++ b/main/core/src/mill/util/Router.scala
@@ -0,0 +1,451 @@
+package mill.util
+
+import ammonite.main.Compat
+import language.experimental.macros
+
+import scala.annotation.StaticAnnotation
+import scala.collection.mutable
+import scala.reflect.macros.blackbox.Context
+
+/**
+ * More or less a minimal version of Autowire's Server that lets you generate
+ * a set of "routes" from the methods defined in an object, and call them
+ * by passing in name/args/kwargs via Java reflection, without having to
+ * generate/compile code or use Scala reflection. This saves us spinning up
+ * the Scala compiler and greatly reduces the startup time of cached scripts.
+ */
+object Router{
+ /**
+   * Allows you to query how many things are overridden by the enclosing owner.
+ */
+ case class Overrides(value: Int)
+ object Overrides{
+ def apply()(implicit c: Overrides) = c.value
+ implicit def generate: Overrides = macro impl
+ def impl(c: Context): c.Tree = {
+ import c.universe._
+ q"new _root_.mill.util.Router.Overrides(${c.internal.enclosingOwner.overrides.length})"
+ }
+ }
+
+ class doc(s: String) extends StaticAnnotation
+ class main extends StaticAnnotation
+ def generateRoutes[T]: Seq[Router.EntryPoint[T]] = macro generateRoutesImpl[T]
+ def generateRoutesImpl[T: c.WeakTypeTag](c: Context): c.Expr[Seq[EntryPoint[T]]] = {
+ import c.universe._
+ val r = new Router(c)
+ val allRoutes = r.getAllRoutesForClass(
+ weakTypeOf[T].asInstanceOf[r.c.Type]
+ ).asInstanceOf[Iterable[c.Tree]]
+
+ c.Expr[Seq[EntryPoint[T]]](q"_root_.scala.Seq(..$allRoutes)")
+ }
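+
+  // Illustrative usage sketch (editor's note, not from this commit): `generateRoutes`
+  // is pointed at an object whose @main-annotated methods become entry points, roughly:
+  //
+  //   object Cli {
+  //     @Router.main
+  //     def hello(name: String) = "hello " + name
+  //   }
+  //   val routes = Router.generateRoutes[Cli.type]
+  //   routes.find(_.name == "hello").get
+  //     .invoke(Cli, Seq("--name" -> Some("world")))   // Result.Success("hello world")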
+
+ /**
+ * Models what is known by the router about a single argument: that it has
+ * a [[name]], a human-readable [[typeString]] describing what the type is
+ * (just for logging and reading, not a replacement for a `TypeTag`) and
+   * possibly a function that can compute its default value.
+ */
+ case class ArgSig[T, V](name: String,
+ typeString: String,
+ doc: Option[String],
+ default: Option[T => V])
+ (implicit val reads: scopt.Read[V])
+
+ def stripDashes(s: String) = {
+ if (s.startsWith("--")) s.drop(2)
+ else if (s.startsWith("-")) s.drop(1)
+ else s
+ }
+ /**
+ * What is known about a single endpoint for our routes. It has a [[name]],
+ * [[argSignatures]] for each argument, and a macro-generated [[invoke0]]
+ * that performs all the necessary argument parsing and de-serialization.
+ *
+ * Realistically, you will probably spend most of your time calling [[invoke]]
+   * instead, which provides a nicer API to call it that mimics the API of
+ * calling a Scala method.
+ */
+ case class EntryPoint[T](name: String,
+ argSignatures: Seq[ArgSig[T, _]],
+ doc: Option[String],
+ varargs: Boolean,
+ invoke0: (T, Map[String, String], Seq[String], Seq[ArgSig[T, _]]) => Result[Any],
+ overrides: Int){
+ def invoke(target: T, groupedArgs: Seq[(String, Option[String])]): Result[Any] = {
+ var remainingArgSignatures = argSignatures.toList.filter(_.reads.arity > 0)
+
+ val accumulatedKeywords = mutable.Map.empty[ArgSig[T, _], mutable.Buffer[String]]
+ val keywordableArgs = if (varargs) argSignatures.dropRight(1) else argSignatures
+
+ for(arg <- keywordableArgs) accumulatedKeywords(arg) = mutable.Buffer.empty
+
+ val leftoverArgs = mutable.Buffer.empty[String]
+
+ val lookupArgSig = Map(argSignatures.map(x => (x.name, x)):_*)
+
+ var incomplete: Option[ArgSig[T, _]] = None
+
+ for(group <- groupedArgs){
+
+ group match{
+ case (value, None) =>
+ if (value(0) == '-' && !varargs){
+ lookupArgSig.get(stripDashes(value)) match{
+ case None => leftoverArgs.append(value)
+ case Some(sig) => incomplete = Some(sig)
+ }
+
+ } else remainingArgSignatures match {
+ case Nil => leftoverArgs.append(value)
+ case last :: Nil if varargs => leftoverArgs.append(value)
+ case next :: rest =>
+ accumulatedKeywords(next).append(value)
+ remainingArgSignatures = rest
+ }
+ case (rawKey, Some(value)) =>
+ val key = stripDashes(rawKey)
+ lookupArgSig.get(key) match{
+ case Some(x) if accumulatedKeywords.contains(x) =>
+ if (accumulatedKeywords(x).nonEmpty && varargs){
+ leftoverArgs.append(rawKey, value)
+ }else{
+ accumulatedKeywords(x).append(value)
+ remainingArgSignatures = remainingArgSignatures.filter(_.name != key)
+ }
+ case _ =>
+ leftoverArgs.append(rawKey, value)
+ }
+ }
+ }
+
+ val missing0 = remainingArgSignatures
+ .filter(_.default.isEmpty)
+
+ val missing = if(varargs) {
+ missing0.filter(_ != argSignatures.last)
+ } else {
+ missing0.filter(x => incomplete != Some(x))
+ }
+ val duplicates = accumulatedKeywords.toSeq.filter(_._2.length > 1)
+
+ if (
+ incomplete.nonEmpty ||
+ missing.nonEmpty ||
+ duplicates.nonEmpty ||
+ (leftoverArgs.nonEmpty && !varargs)
+ ){
+ Result.Error.MismatchedArguments(
+ missing = missing,
+ unknown = leftoverArgs,
+ duplicate = duplicates,
+ incomplete = incomplete
+
+ )
+ } else {
+ val mapping = accumulatedKeywords
+ .iterator
+ .collect{case (k, Seq(single)) => (k.name, single)}
+ .toMap
+
+ try invoke0(target, mapping, leftoverArgs, argSignatures)
+ catch{case e: Throwable =>
+ Result.Error.Exception(e)
+ }
+ }
+ }
+ }
+
+ def tryEither[T](t: => T, error: Throwable => Result.ParamError) = {
+ try Right(t)
+ catch{ case e: Throwable => Left(error(e))}
+ }
+ def readVarargs(arg: ArgSig[_, _],
+ values: Seq[String],
+ thunk: String => Any) = {
+ val attempts =
+ for(item <- values)
+ yield tryEither(thunk(item), Result.ParamError.Invalid(arg, item, _))
+
+
+ val bad = attempts.collect{ case Left(x) => x}
+ if (bad.nonEmpty) Left(bad)
+ else Right(attempts.collect{case Right(x) => x})
+ }
+ def read(dict: Map[String, String],
+ default: => Option[Any],
+ arg: ArgSig[_, _],
+ thunk: String => Any): FailMaybe = {
+ arg.reads.arity match{
+ case 0 =>
+ tryEither(thunk(null), Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_))
+ case 1 =>
+ dict.get(arg.name) match{
+ case None =>
+ tryEither(default.get, Result.ParamError.DefaultFailed(arg, _)).left.map(Seq(_))
+
+ case Some(x) =>
+ tryEither(thunk(x), Result.ParamError.Invalid(arg, x, _)).left.map(Seq(_))
+ }
+ }
+
+ }
+
+ /**
+ * Represents what comes out of an attempt to invoke an [[EntryPoint]].
+ * Could succeed with a value, but could fail in many different ways.
+ */
+ sealed trait Result[+T]
+ object Result{
+
+ /**
+ * Invoking the [[EntryPoint]] was totally successful, and returned a
+ * result
+ */
+ case class Success[T](value: T) extends Result[T]
+
+ /**
+ * Invoking the [[EntryPoint]] was not successful
+ */
+ sealed trait Error extends Result[Nothing]
+ object Error{
+
+ /**
+ * Invoking the [[EntryPoint]] failed with an exception while executing
+ * code within it.
+ */
+ case class Exception(t: Throwable) extends Error
+
+ /**
+ * Invoking the [[EntryPoint]] failed because the arguments provided
+ * did not line up with the arguments expected
+ */
+ case class MismatchedArguments(missing: Seq[ArgSig[_, _]],
+ unknown: Seq[String],
+ duplicate: Seq[(ArgSig[_, _], Seq[String])],
+ incomplete: Option[ArgSig[_, _]]) extends Error
+ /**
+ * Invoking the [[EntryPoint]] failed because there were problems
+ * deserializing/parsing individual arguments
+ */
+ case class InvalidArguments(values: Seq[ParamError]) extends Error
+ }
+
+ sealed trait ParamError
+ object ParamError{
+ /**
+ * Something went wrong trying to de-serialize the input parameter;
+ * the thrown exception is stored in [[ex]]
+ */
+ case class Invalid(arg: ArgSig[_, _], value: String, ex: Throwable) extends ParamError
+ /**
+ * Something went wrong trying to evaluate the default value
+ * for this input parameter
+ */
+ case class DefaultFailed(arg: ArgSig[_, _], ex: Throwable) extends ParamError
+ }
+ }
+
+
+ type FailMaybe = Either[Seq[Result.ParamError], Any]
+ type FailAll = Either[Seq[Result.ParamError], Seq[Any]]
+
+ def validate(args: Seq[FailMaybe]): Result[Seq[Any]] = {
+ val lefts = args.collect{case Left(x) => x}.flatten
+
+ if (lefts.nonEmpty) Result.Error.InvalidArguments(lefts)
+ else {
+ val rights = args.collect{case Right(x) => x}
+ Result.Success(rights)
+ }
+ }
+
+ def makeReadCall(dict: Map[String, String],
+ default: => Option[Any],
+ arg: ArgSig[_, _]) = {
+ read(dict, default, arg, arg.reads.reads(_))
+ }
+ def makeReadVarargsCall(arg: ArgSig[_, _], values: Seq[String]) = {
+ readVarargs(arg, values, arg.reads.reads(_))
+ }
+}
+
+
+class Router [C <: Context](val c: C) {
+ import c.universe._
+ def getValsOrMeths(curCls: Type): Iterable[MethodSymbol] = {
+ def isAMemberOfAnyRef(member: Symbol) = {
+ // AnyRef is an alias symbol, we go to the real "owner" of these methods
+ val anyRefSym = c.mirror.universe.definitions.ObjectClass
+ member.owner == anyRefSym
+ }
+ val extractableMembers = for {
+ member <- curCls.members.toList.reverse
+ if !isAMemberOfAnyRef(member)
+ if !member.isSynthetic
+ if member.isPublic
+ if member.isTerm
+ memTerm = member.asTerm
+ if memTerm.isMethod
+ if !memTerm.isModule
+ } yield memTerm.asMethod
+
+ extractableMembers flatMap { case memTerm =>
+ if (memTerm.isSetter || memTerm.isConstructor || memTerm.isGetter) Nil
+ else Seq(memTerm)
+
+ }
+ }
+
+
+
+ def extractMethod(meth: MethodSymbol, curCls: c.universe.Type): c.universe.Tree = {
+ val baseArgSym = TermName(c.freshName())
+ val flattenedArgLists = meth.paramss.flatten
+ def hasDefault(i: Int) = {
+ val defaultName = s"${meth.name}$$default$$${i + 1}"
+ if (curCls.members.exists(_.name.toString == defaultName)) Some(defaultName)
+ else None
+ }
+ val argListSymbol = q"${c.fresh[TermName]("argsList")}"
+ val extrasSymbol = q"${c.fresh[TermName]("extras")}"
+ val defaults = for ((arg, i) <- flattenedArgLists.zipWithIndex) yield {
+ val arg = TermName(c.freshName())
+ hasDefault(i).map(defaultName => q"($arg: $curCls) => $arg.${newTermName(defaultName)}")
+ }
+
+ def getDocAnnotation(annotations: List[Annotation]) = {
+ val (docTrees, remaining) = annotations.partition(_.tpe =:= typeOf[Router.doc])
+ val docValues = for {
+ doc <- docTrees
+ if doc.scalaArgs.head.isInstanceOf[Literal]
+ l = doc.scalaArgs.head.asInstanceOf[Literal]
+ if l.value.value.isInstanceOf[String]
+ } yield l.value.value.asInstanceOf[String]
+ (remaining, docValues.headOption)
+ }
+
+ def unwrapVarargType(arg: Symbol) = {
+ val vararg = arg.typeSignature.typeSymbol == definitions.RepeatedParamClass
+ val unwrappedType =
+ if (!vararg) arg.typeSignature
+ else arg.typeSignature.asInstanceOf[TypeRef].args(0)
+
+ (vararg, unwrappedType)
+ }
+
+ val argSigSymbol = q"${c.fresh[TermName]("argSigs")}"
+
+ val (_, methodDoc) = getDocAnnotation(meth.annotations)
+ val readArgSigs = for(
+ ((arg, defaultOpt), i) <- flattenedArgLists.zip(defaults).zipWithIndex
+ ) yield {
+
+ val (vararg, varargUnwrappedType) = unwrapVarargType(arg)
+
+ val default =
+ if (vararg) q"scala.Some(scala.Nil)"
+ else defaultOpt match {
+ case Some(defaultExpr) => q"scala.Some($defaultExpr($baseArgSym))"
+ case None => q"scala.None"
+ }
+
+ val (docUnwrappedType, docOpt) = varargUnwrappedType match{
+ case t: AnnotatedType =>
+
+ val (remaining, docValue) = getDocAnnotation(t.annotations)
+ if (remaining.isEmpty) (t.underlying, docValue)
+ else (Compat.copyAnnotatedType(c)(t, remaining), docValue)
+
+ case t => (t, None)
+ }
+
+ val docTree = docOpt match{
+ case None => q"scala.None"
+ case Some(s) => q"scala.Some($s)"
+ }
+
+
+ val argSig = q"""
+ mill.util.Router.ArgSig[$curCls, $docUnwrappedType](
+ ${arg.name.toString},
+ ${docUnwrappedType.toString + (if(vararg) "*" else "")},
+ $docTree,
+ $defaultOpt
+ )
+ """
+
+ val reader =
+ if(vararg) q"""
+ mill.util.Router.makeReadVarargsCall(
+ $argSigSymbol($i),
+ $extrasSymbol
+ )
+ """ else q"""
+ mill.util.Router.makeReadCall(
+ $argListSymbol,
+ $default,
+ $argSigSymbol($i)
+ )
+ """
+ c.internal.setPos(reader, meth.pos)
+ (reader, argSig, vararg)
+ }
+
+ val readArgs = readArgSigs.map(_._1)
+ val argSigs = readArgSigs.map(_._2)
+ val varargs = readArgSigs.map(_._3)
+ val (argNames, argNameCasts) = flattenedArgLists.map { arg =>
+ val (vararg, unwrappedType) = unwrapVarargType(arg)
+ (
+ pq"${arg.name.toTermName}",
+ if (!vararg) q"${arg.name.toTermName}.asInstanceOf[$unwrappedType]"
+ else q"${arg.name.toTermName}.asInstanceOf[Seq[$unwrappedType]]: _*"
+
+ )
+ }.unzip
+
+
+ val res = q"""
+ mill.util.Router.EntryPoint[$curCls](
+ ${meth.name.toString},
+ scala.Seq(..$argSigs),
+ ${methodDoc match{
+ case None => q"scala.None"
+ case Some(s) => q"scala.Some($s)"
+ }},
+ ${varargs.contains(true)},
+ (
+ $baseArgSym: $curCls,
+ $argListSymbol: Map[String, String],
+ $extrasSymbol: Seq[String],
+ $argSigSymbol: Seq[mill.util.Router.ArgSig[$curCls, _]]
+ ) =>
+ mill.util.Router.validate(Seq(..$readArgs)) match{
+ case mill.util.Router.Result.Success(List(..$argNames)) =>
+ mill.util.Router.Result.Success(
+ $baseArgSym.${meth.name.toTermName}(..$argNameCasts)
+ )
+ case x: mill.util.Router.Result.Error => x
+ },
+ ammonite.main.Router.Overrides()
+ )
+ """
+ res
+ }
+
+ def hasMainAnnotation(t: MethodSymbol) = {
+ t.annotations.exists(_.tpe =:= typeOf[Router.main])
+ }
+ def getAllRoutesForClass(curCls: Type,
+ pred: MethodSymbol => Boolean = hasMainAnnotation)
+ : Iterable[c.universe.Tree] = {
+ for{
+ t <- getValsOrMeths(curCls)
+ if pred(t)
+ } yield {
+ extractMethod(t, curCls)
+ }
+ }
+}
diff --git a/main/core/src/mill/util/Scripts.scala b/main/core/src/mill/util/Scripts.scala
new file mode 100644
index 00000000..7dde8252
--- /dev/null
+++ b/main/core/src/mill/util/Scripts.scala
@@ -0,0 +1,330 @@
+package mill.util
+
+import java.nio.file.NoSuchFileException
+
+import ammonite.ops._
+import ammonite.runtime.Evaluator.AmmoniteExit
+import ammonite.util.Name.backtickWrap
+import ammonite.util.Util.CodeSource
+import ammonite.util.{Name, Res, Util}
+import fastparse.utils.Utils._
+import mill.util.Router.{ArgSig, EntryPoint}
+
+/**
+ * Logic around using Ammonite as a script-runner; invoking scripts via the
+ * macro-generated [[Router]], and pretty-printing any output or error messages
+ */
+object Scripts {
+ def groupArgs(flatArgs: List[String]): Seq[(String, Option[String])] = {
+ var keywordTokens = flatArgs
+ var scriptArgs = Vector.empty[(String, Option[String])]
+
+ while(keywordTokens.nonEmpty) keywordTokens match{
+ case List(head, next, rest@_*) if head.startsWith("-") =>
+ scriptArgs = scriptArgs :+ (head, Some(next))
+ keywordTokens = rest.toList
+ case List(head, rest@_*) =>
+ scriptArgs = scriptArgs :+ (head, None)
+ keywordTokens = rest.toList
+
+ }
+ scriptArgs
+ }
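+
+  // Illustrative sketch (editor's note): pairs each `--flag` token with the token that
+  // follows it and leaves the rest as positional arguments, e.g.
+  //
+  //   groupArgs(List("--foo", "1", "bar"))   // Seq(("--foo", Some("1")), ("bar", None))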
+
+ def runScript(wd: Path,
+ path: Path,
+ interp: ammonite.interp.Interpreter,
+ scriptArgs: Seq[(String, Option[String])] = Nil) = {
+ interp.watch(path)
+ val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd)
+
+ for{
+ scriptTxt <- try Res.Success(Util.normalizeNewlines(read(path))) catch{
+ case e: NoSuchFileException => Res.Failure("Script file not found: " + path)
+ }
+
+ processed <- interp.processModule(
+ scriptTxt,
+ CodeSource(wrapper, pkg, Seq(Name("ammonite"), Name("$file")), Some(path)),
+ autoImport = true,
+ // Not sure why we need to wrap this in a separate `$routes` object,
+      // but if we don't, for some reason the `generateRoutes` macro
+      // does not see the annotations on the methods of the outer wrapper.
+      // It can inspect the type and its methods fine; it's just that
+      // `MethodSymbol.annotations` ends up being empty.
+ extraCode = Util.normalizeNewlines(
+ s"""
+ |val $$routesOuter = this
+ |object $$routes
+ |extends scala.Function0[scala.Seq[ammonite.main.Router.EntryPoint[$$routesOuter.type]]]{
+ | def apply() = ammonite.main.Router.generateRoutes[$$routesOuter.type]
+ |}
+ """.stripMargin
+ ),
+ hardcoded = true
+ )
+
+ routeClsName <- processed.blockInfo.lastOption match{
+ case Some(meta) => Res.Success(meta.id.wrapperPath)
+ case None => Res.Skip
+ }
+
+ mainCls =
+ interp
+ .evalClassloader
+ .loadClass(processed.blockInfo.last.id.wrapperPath + "$")
+
+ routesCls =
+ interp
+ .evalClassloader
+ .loadClass(routeClsName + "$$routes$")
+
+ scriptMains =
+ routesCls
+ .getField("MODULE$")
+ .get(null)
+ .asInstanceOf[() => Seq[Router.EntryPoint[Any]]]
+ .apply()
+
+
+ mainObj = mainCls.getField("MODULE$").get(null)
+
+ res <- Util.withContextClassloader(interp.evalClassloader){
+ scriptMains match {
+ // If there are no @main methods, there's nothing to do
+ case Seq() =>
+ if (scriptArgs.isEmpty) Res.Success(())
+ else {
+ val scriptArgString =
+ scriptArgs.flatMap{case (a, b) => Seq(a) ++ b}.map(literalize(_))
+ .mkString(" ")
+
+ Res.Failure("Script " + path.last + " does not take arguments: " + scriptArgString)
+ }
+
+ // If there's one @main method, we run it with all args
+ case Seq(main) => runMainMethod(mainObj, main, scriptArgs)
+
+ // If there are multiple @main methods, we use the first arg to decide
+ // which method to run, and pass the rest to that main method
+ case mainMethods =>
+ val suffix = formatMainMethods(mainObj, mainMethods)
+ scriptArgs match{
+ case Seq() =>
+ Res.Failure(
+ s"Need to specify a subcommand to call when running " + path.last + suffix
+ )
+ case Seq((head, Some(_)), tail @ _*) =>
+ Res.Failure(
+ "To select a subcommand to run, you don't need --s." + Util.newLine +
+ s"Did you mean `${head.drop(2)}` instead of `$head`?"
+ )
+ case Seq((head, None), tail @ _*) =>
+ mainMethods.find(_.name == head) match{
+ case None =>
+ Res.Failure(
+ s"Unable to find subcommand: " + backtickWrap(head) + suffix
+ )
+ case Some(main) =>
+ runMainMethod(mainObj, main, tail)
+ }
+ }
+ }
+ }
+ } yield res
+ }
+ def formatMainMethods[T](base: T, mainMethods: Seq[Router.EntryPoint[T]]) = {
+ if (mainMethods.isEmpty) ""
+ else{
+ val leftColWidth = getLeftColWidth(mainMethods.flatMap(_.argSignatures))
+
+ val methods =
+ for(main <- mainMethods)
+ yield formatMainMethodSignature(base, main, 2, leftColWidth)
+
+ Util.normalizeNewlines(
+ s"""
+ |
+ |Available subcommands:
+ |
+ |${methods.mkString(Util.newLine)}""".stripMargin
+ )
+ }
+ }
+ def getLeftColWidth[T](items: Seq[ArgSig[T, _]]) = {
+ items.map(_.name.length + 2) match{
+ case Nil => 0
+ case x => x.max
+ }
+ }
+ def formatMainMethodSignature[T](base: T,
+ main: Router.EntryPoint[T],
+ leftIndent: Int,
+ leftColWidth: Int) = {
+ // +2 for space on right of left col
+ val args = main.argSignatures.map(renderArg(base, _, leftColWidth + leftIndent + 2 + 2, 80))
+
+ val leftIndentStr = " " * leftIndent
+ val argStrings =
+ for((lhs, rhs) <- args)
+ yield {
+ val lhsPadded = lhs.padTo(leftColWidth, ' ')
+ val rhsPadded = rhs.lines.mkString(Util.newLine)
+ s"$leftIndentStr $lhsPadded $rhsPadded"
+ }
+ val mainDocSuffix = main.doc match{
+ case Some(d) => Util.newLine + leftIndentStr + softWrap(d, leftIndent, 80)
+ case None => ""
+ }
+
+ s"""$leftIndentStr${main.name}$mainDocSuffix
+ |${argStrings.map(_ + Util.newLine).mkString}""".stripMargin
+ }
+ def runMainMethod[T](base: T,
+ mainMethod: Router.EntryPoint[T],
+ scriptArgs: Seq[(String, Option[String])]): Res[Any] = {
+ val leftColWidth = getLeftColWidth(mainMethod.argSignatures)
+
+ def expectedMsg = formatMainMethodSignature(base: T, mainMethod, 0, leftColWidth)
+
+ def pluralize(s: String, n: Int) = {
+ if (n == 1) s else s + "s"
+ }
+
+ mainMethod.invoke(base, scriptArgs) match{
+ case Router.Result.Success(x) => Res.Success(x)
+ case Router.Result.Error.Exception(x: AmmoniteExit) => Res.Success(x.value)
+ case Router.Result.Error.Exception(x) => Res.Exception(x, "")
+ case Router.Result.Error.MismatchedArguments(missing, unknown, duplicate, incomplete) =>
+ val missingStr =
+ if (missing.isEmpty) ""
+ else {
+ val chunks =
+ for (x <- missing)
+ yield "--" + x.name + ": " + x.typeString
+
+ val argumentsStr = pluralize("argument", chunks.length)
+ s"Missing $argumentsStr: (${chunks.mkString(", ")})" + Util.newLine
+ }
+
+
+ val unknownStr =
+ if (unknown.isEmpty) ""
+ else {
+ val argumentsStr = pluralize("argument", unknown.length)
+ s"Unknown $argumentsStr: " + unknown.map(literalize(_)).mkString(" ") + Util.newLine
+ }
+
+ val duplicateStr =
+ if (duplicate.isEmpty) ""
+ else {
+ val lines =
+ for ((sig, options) <- duplicate)
+ yield {
+ s"Duplicate arguments for (--${sig.name}: ${sig.typeString}): " +
+ options.map(literalize(_)).mkString(" ") + Util.newLine
+ }
+
+ lines.mkString
+
+ }
+ val incompleteStr = incomplete match{
+ case None => ""
+ case Some(sig) =>
+ s"Option (--${sig.name}: ${sig.typeString}) is missing a corresponding value" +
+ Util.newLine
+
+ }
+
+ Res.Failure(
+ Util.normalizeNewlines(
+ s"""$missingStr$unknownStr$duplicateStr$incompleteStr
+ |Arguments provided did not match expected signature:
+ |
+ |$expectedMsg
+ |""".stripMargin
+ )
+ )
+
+ case Router.Result.Error.InvalidArguments(x) =>
+ val argumentsStr = pluralize("argument", x.length)
+ val thingies = x.map{
+ case Router.Result.ParamError.Invalid(p, v, ex) =>
+ val literalV = literalize(v)
+ val rendered = {renderArgShort(p)}
+ s"$rendered: ${p.typeString} = $literalV failed to parse with $ex"
+ case Router.Result.ParamError.DefaultFailed(p, ex) =>
+ s"${renderArgShort(p)}'s default value failed to evaluate with $ex"
+ }
+
+ Res.Failure(
+ Util.normalizeNewlines(
+ s"""The following $argumentsStr failed to parse:
+ |
+ |${thingies.mkString(Util.newLine)}
+ |
+ |expected signature:
+ |
+ |$expectedMsg
+ """.stripMargin
+ )
+ )
+ }
+ }
+
+ def softWrap(s: String, leftOffset: Int, maxWidth: Int) = {
+ val oneLine = s.lines.mkString(" ").split(' ')
+
+ lazy val indent = " " * leftOffset
+
+ val output = new StringBuilder(oneLine.head)
+ var currentLineWidth = oneLine.head.length
+ for(chunk <- oneLine.tail){
+ val addedWidth = currentLineWidth + chunk.length + 1
+ if (addedWidth > maxWidth){
+ output.append(Util.newLine + indent)
+ output.append(chunk)
+ currentLineWidth = chunk.length
+ } else{
+ currentLineWidth = addedWidth
+ output.append(' ')
+ output.append(chunk)
+ }
+ }
+ output.mkString
+ }
+ def renderArgShort[T](arg: ArgSig[T, _]) = "--" + backtickWrap(arg.name)
+ def renderArg[T](base: T,
+ arg: ArgSig[T, _],
+ leftOffset: Int,
+ wrappedWidth: Int): (String, String) = {
+ val suffix = arg.default match{
+ case Some(f) => " (default " + f(base) + ")"
+ case None => ""
+ }
+ val docSuffix = arg.doc match{
+ case Some(d) => ": " + d
+ case None => ""
+ }
+ val wrapped = softWrap(
+ arg.typeString + suffix + docSuffix,
+ leftOffset,
+ wrappedWidth - leftOffset
+ )
+ (renderArgShort(arg), wrapped)
+ }
+
+
+ def mainMethodDetails[T](ep: EntryPoint[T]) = {
+ ep.argSignatures.collect{
+ case ArgSig(name, tpe, Some(doc), default) =>
+ Util.newLine + name + " // " + doc
+ }.mkString
+ }
+
+ /**
+ * Additional [[scopt.Read]] instance to teach it how to read Ammonite paths
+ */
+ implicit def pathScoptRead: scopt.Read[Path] = scopt.Read.stringRead.map(Path(_, pwd))
+
+}
diff --git a/main/core/src/mill/util/Watched.scala b/main/core/src/mill/util/Watched.scala
new file mode 100644
index 00000000..f1ef4fee
--- /dev/null
+++ b/main/core/src/mill/util/Watched.scala
@@ -0,0 +1,8 @@
+package mill.util
+
+import mill.eval.PathRef
+
+case class Watched[T](value: T, watched: Seq[PathRef])
+object Watched{
+ implicit def readWrite[T: upickle.default.ReadWriter] = upickle.default.macroRW[Watched[T]]
+}
diff --git a/main/moduledefs/resources/scalac-plugin.xml b/main/moduledefs/resources/scalac-plugin.xml
new file mode 100644
index 00000000..48753a22
--- /dev/null
+++ b/main/moduledefs/resources/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>auto-override-plugin</name>
+ <classname>mill.moduledefs.AutoOverridePlugin</classname>
+</plugin> \ No newline at end of file
diff --git a/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala b/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala
new file mode 100644
index 00000000..5b33abbe
--- /dev/null
+++ b/main/moduledefs/src/mill/moduledefs/AutoOverridePlugin.scala
@@ -0,0 +1,58 @@
+package mill.moduledefs
+
+import scala.reflect.internal.Flags
+import scala.tools.nsc.io.VirtualFile
+import scala.tools.nsc.util.BatchSourceFile
+import scala.tools.nsc.{Global, Phase}
+import scala.tools.nsc.plugins.{Plugin, PluginComponent}
+
+class AutoOverridePlugin(val global: Global) extends Plugin {
+ import global._
+ override def init(options: List[String], error: String => Unit): Boolean = true
+
+ val name = "auto-override-plugin"
+ val description = "automatically inserts `override` keywords for you"
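+
+  // Illustrative sketch (editor's note): for defs declared inside something extending
+  // mill.moduledefs.Cacher, re-definitions of inherited members compile without an
+  // explicit `override`, roughly:
+  //
+  //   trait Base  extends mill.moduledefs.Cacher { def count = cachedTarget(1) }
+  //   trait Child extends Base { def count = cachedTarget(2) }   // plugin adds `override`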
+ val components = List[PluginComponent](
+ new PluginComponent {
+
+ val global = AutoOverridePlugin.this.global
+ import global._
+
+ override val runsAfter = List("typer")
+ override val runsBefore = List("patmat")
+
+ val phaseName = "auto-override"
+
+ override def newPhase(prev: Phase) = new GlobalPhase(prev) {
+
+ def name: String = phaseName
+
+ def isCacher(owner: Symbol) = {
+ val baseClasses =
+ if (owner.isClass) Some(owner.asClass.baseClasses)
+ else if (owner.isModule) Some(owner.asModule.baseClasses)
+ else None
+ baseClasses.exists(_.exists(_.fullName == "mill.moduledefs.Cacher"))
+ }
+
+ def apply(unit: global.CompilationUnit): Unit = {
+ object AutoOverrider extends global.Transformer {
+ override def transform(tree: global.Tree) = tree match{
+ case d: DefDef
+ if d.symbol.overrideChain.count(!_.isAbstract) > 1
+ && !d.mods.isOverride
+ && isCacher(d.symbol.owner) =>
+
+ d.symbol.flags = d.symbol.flags | Flags.OVERRIDE
+ copyDefDef(d)(mods = d.mods | Flags.OVERRIDE)
+ case _ => super.transform(tree)
+
+ }
+ }
+
+ unit.body = AutoOverrider.transform(unit.body)
+ }
+ }
+ }
+ )
+} \ No newline at end of file
diff --git a/main/moduledefs/src/mill/moduledefs/Cacher.scala b/main/moduledefs/src/mill/moduledefs/Cacher.scala
new file mode 100644
index 00000000..023f03be
--- /dev/null
+++ b/main/moduledefs/src/mill/moduledefs/Cacher.scala
@@ -0,0 +1,35 @@
+package mill.moduledefs
+
+import scala.collection.mutable
+import scala.reflect.macros.blackbox.Context
+
+
+trait Cacher{
+ private[this] lazy val cacherLazyMap = mutable.Map.empty[sourcecode.Enclosing, Any]
+
+ protected[this] def cachedTarget[T](t: => T)
+ (implicit c: sourcecode.Enclosing): T = synchronized{
+ cacherLazyMap.getOrElseUpdate(c, t).asInstanceOf[T]
+ }
+}
+
+object Cacher{
+ def impl0[T: c.WeakTypeTag](c: Context)
+ (t: c.Expr[T]): c.Expr[T] = {
+ c.Expr[T](wrapCached[T](c)(t.tree))
+ }
+ def wrapCached[R: c.WeakTypeTag](c: Context)(t: c.Tree) = {
+
+ import c.universe._
+ val owner = c.internal.enclosingOwner
+ val ownerIsCacherClass =
+ owner.owner.isClass &&
+ owner.owner.asClass.baseClasses.exists(_.fullName == "mill.moduledefs.Cacher")
+
+ if (ownerIsCacherClass && owner.isMethod) q"this.cachedTarget[${weakTypeTag[R]}]($t)"
+ else c.abort(
+ c.enclosingPosition,
+ "T{} members must be defs defined in a Cacher class/trait/object body"
+ )
+ }
+} \ No newline at end of file