author    Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
committer Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
commit    13270145903b457c906a9fa77bd152afb6448ef5 (patch)
tree      e85b7ed530e0c8e3c3041cbf17641857c448b602 /core
parent    66f1c5c2438aeb8f2496575f52c25b09cf5793a6 (diff)
Split up forge into `scalaplugin` and `core` subprojects, to allow us to use the `T#apply` macro in the implementation of `scalaplugin.Subproject`
Also needed to implement inter-`Subproject` dependencies so the `MetacircularTests` can continue to support the new layout
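
The split implies a two-subproject build in which `scalaplugin` depends on `core` so it can see the `T#apply` macro. A minimal sketch of what such a layout might look like in an sbt build definition (the project names come from the commit message; everything else is an assumption):

    // build.sbt -- hypothetical sketch of the subproject split
    lazy val core = project
      .in(file("core"))
      .settings(name := "forge-core")

    lazy val scalaplugin = project
      .in(file("scalaplugin"))
      .dependsOn(core)   // lets scalaplugin.Subproject use the T#apply macro from core
      .settings(name := "forge-scalaplugin")
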
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/forge/Discovered.scala       |  57
-rw-r--r--  core/src/main/scala/forge/Evaluator.scala        | 191
-rw-r--r--  core/src/main/scala/forge/Main.scala             |  25
-rw-r--r--  core/src/main/scala/forge/Target.scala           | 132
-rw-r--r--  core/src/main/scala/forge/Tarjans.scala          |  50
-rw-r--r--  core/src/main/scala/forge/package.scala          |  99
-rw-r--r--  core/src/main/scala/forge/util/Args.scala        |   9
-rw-r--r--  core/src/main/scala/forge/util/Labelled.scala    |   8
-rw-r--r--  core/src/main/scala/forge/util/MultiBiMap.scala  |  47
-rw-r--r--  core/src/main/scala/forge/util/OSet.scala        | 107
-rw-r--r--  core/src/main/scala/forge/util/PathRef.scala     |  57
-rw-r--r--  core/src/test/examples/javac/build.sc            |  66
-rw-r--r--  core/src/test/examples/javac/resources/hello.txt |   1
-rw-r--r--  core/src/test/examples/javac/src/Bar.java        |   4
-rw-r--r--  core/src/test/examples/javac/src/Foo.java        |   7
-rw-r--r--  core/src/test/scala/forge/EvaluationTests.scala  | 144
-rw-r--r--  core/src/test/scala/forge/GraphTests.scala       | 187
-rw-r--r--  core/src/test/scala/forge/IntegrationTests.scala | 138
-rw-r--r--  core/src/test/scala/forge/TarjanTests.scala      |  89
-rw-r--r--  core/src/test/scala/forge/TestGraphs.scala       |  73
-rw-r--r--  core/src/test/scala/forge/TestMain.scala         | 100
-rw-r--r--  core/src/test/scala/forge/TestUtil.scala         |  36
-rw-r--r--  core/src/test/scala/forge/UTestFramework.scala   |  11
23 files changed, 1638 insertions(+), 0 deletions(-)
diff --git a/core/src/main/scala/forge/Discovered.scala b/core/src/main/scala/forge/Discovered.scala
new file mode 100644
index 00000000..03577b1f
--- /dev/null
+++ b/core/src/main/scala/forge/Discovered.scala
@@ -0,0 +1,57 @@
+package forge
+
+import forge.util.Labelled
+import play.api.libs.json.Format
+
+import language.experimental.macros
+import reflect.macros.blackbox.Context
+
+class Discovered[T](val value: Seq[(Seq[String], Format[_], T => Target[_])]){
+ def apply(t: T) = value.map{case (a, f, b) => (a, f, b(t)) }
+
+}
+object Discovered {
+ def makeTuple[T, V](path: Seq[String], func: T => Target[V])(implicit f: Format[V]) = {
+ (path, f, func)
+ }
+
+
+ def mapping[T: Discovered](t: T): Map[Target[_], Labelled[_]] = {
+ implicitly[Discovered[T]].apply(t)
+ .map(x => x._3 -> Labelled(x._3.asInstanceOf[Target[Any]], x._2.asInstanceOf[Format[Any]], x._1))
+ .toMap
+ }
+
+ implicit def apply[T]: Discovered[T] = macro applyImpl[T]
+
+ def applyImpl[T: c.WeakTypeTag](c: Context): c.Expr[Discovered[T]] = {
+ import c.universe._
+ val tpe = c.weakTypeTag[T].tpe
+ def rec(segments: List[String], t: c.Type): Seq[Seq[String]] = for {
+ m <- t.members.toSeq
+ if m.isTerm && (m.asTerm.isGetter || m.asTerm.isLazy) || m.isModule
+ res <- {
+ val extendedSegments = m.name.toString :: segments
+ val self =
+ if (m.typeSignature.resultType <:< c.weakTypeOf[Target[_]]) Seq(extendedSegments)
+ else Nil
+ val children = rec(extendedSegments, m.typeSignature)
+ self ++ children
+ }
+ } yield res
+
+ val reversedPaths = rec(Nil, tpe)
+
+ val result = for(reversedPath <- reversedPaths.toList) yield {
+ val base = q"${TermName(c.freshName())}"
+ val segments = reversedPath.reverse.toList
+ val ident = segments.foldLeft[Tree](base)((prefix, name) =>
+ q"$prefix.${TermName(name)}"
+ )
+
+ q"forge.Discovered.makeTuple($segments, ($base: $tpe) => $ident)"
+ }
+
+ c.Expr[Discovered[T]](q"new _root_.forge.Discovered($result)")
+ }
+}
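
For orientation, a minimal sketch of how `Discovered` is used further down (in `Main.scala`, `GraphTests`, and the javac example): the macro enumerates every `Target[_]` member reachable from an object, and `mapping` pairs each target with its path segments and JSON `Format`. The `Build` object here is made up for illustration:

    import forge._
    import ammonite.ops.pwd

    object Build {
      val sourceRoot   = Target.path(pwd / 'src)
      val resourceRoot = Target.path(pwd / 'resources)
    }

    // Map[Target[_], Labelled[_]]: each discovered target keyed to its
    // segments (e.g. Seq("sourceRoot")) and the Format used to cache it
    val mapping = Discovered.mapping(Build)
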
diff --git a/core/src/main/scala/forge/Evaluator.scala b/core/src/main/scala/forge/Evaluator.scala
new file mode 100644
index 00000000..50dc46d4
--- /dev/null
+++ b/core/src/main/scala/forge/Evaluator.scala
@@ -0,0 +1,191 @@
+package forge
+
+
+import play.api.libs.json.{Format, JsValue, Json}
+
+import scala.collection.mutable
+import ammonite.ops._
+import forge.util.{Args, Labelled, MultiBiMap, OSet}
+class Evaluator(workspacePath: Path,
+ labeling: Map[Target[_], Labelled[_]]){
+
+ def evaluate(targets: OSet[Target[_]]): Evaluator.Results = {
+ mkdir(workspacePath)
+
+ val sortedGroups = Evaluator.groupAroundNamedTargets(
+ Evaluator.topoSortedTransitiveTargets(targets),
+ labeling
+ )
+
+ val evaluated = new OSet.Mutable[Target[_]]
+ val results = mutable.LinkedHashMap.empty[Target[_], Any]
+
+ for (groupIndex <- sortedGroups.keys()){
+ val group = sortedGroups.lookupKey(groupIndex)
+
+ val (newResults, newEvaluated) = evaluateGroupCached(
+ group,
+ results,
+ sortedGroups
+ )
+ evaluated.appendAll(newEvaluated)
+ for((k, v) <- newResults) results.put(k, v)
+
+ }
+
+ Evaluator.Results(targets.items.map(results), evaluated)
+ }
+
+ def evaluateGroupCached(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any],
+ sortedGroups: MultiBiMap[Int, Target[_]]): (collection.Map[Target[_], Any], Seq[Target[_]]) = {
+
+
+ val (externalInputs, terminals) = partitionGroupInputOutput(group, results)
+
+ val inputsHash =
+ externalInputs.toIterator.map(results).toVector.hashCode +
+ group.toIterator.map(_.sideHash).toVector.hashCode()
+
+ val primeLabel = labeling(terminals.items(0)).segments
+
+
+ val targetDestPath = workspacePath / primeLabel
+ val metadataPath = targetDestPath / up / (targetDestPath.last + ".forge.json")
+
+ val cached = for{
+ json <- scala.util.Try(Json.parse(read.getInputStream(metadataPath))).toOption
+ (cachedHash, terminalResults) <- Json.fromJson[(Int, Seq[JsValue])](json).asOpt
+ if cachedHash == inputsHash
+ } yield terminalResults
+
+ cached match{
+ case Some(terminalResults) =>
+ val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
+ for((terminal, res) <- terminals.items.zip(terminalResults)){
+ newResults(terminal) = labeling(terminal).format.reads(res).get
+ }
+ (newResults, Nil)
+
+ case _ =>
+ val (newResults, newEvaluated, terminalResults) = evaluateGroup(group, results, targetDestPath)
+
+ write.over(
+ metadataPath,
+ Json.prettyPrint(
+ Json.toJson(inputsHash -> terminals.toList.map(terminalResults))
+ ),
+ )
+
+ (newResults, newEvaluated)
+ }
+ }
+
+ def partitionGroupInputOutput(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any]) = {
+ val allInputs = group.items.flatMap(_.inputs)
+ val (internalInputs, externalInputs) = allInputs.partition(group.contains)
+ val internalInputSet = internalInputs.toSet
+ val terminals = group.filter(!internalInputSet(_))
+ (OSet.from(externalInputs.distinct), terminals)
+ }
+
+ def evaluateGroup(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any],
+ targetDestPath: Path) = {
+
+ rm(targetDestPath)
+ val terminalResults = mutable.LinkedHashMap.empty[Target[_], JsValue]
+ val newEvaluated = mutable.Buffer.empty[Target[_]]
+ val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
+ for (target <- group.items) {
+ newEvaluated.append(target)
+ val targetInputValues = target.inputs.toVector.map(x =>
+ newResults.getOrElse(x, results(x))
+ )
+
+ val args = new Args(targetInputValues, targetDestPath)
+ val res = target.evaluate(args)
+ for(targetLabel <- labeling.get(target)){
+ terminalResults(target) = targetLabel
+ .format
+ .asInstanceOf[Format[Any]]
+ .writes(res.asInstanceOf[Any])
+ }
+ newResults(target) = res
+ }
+
+ (newResults, newEvaluated, terminalResults)
+ }
+
+}
+
+
+object Evaluator{
+ class TopoSorted private[Evaluator] (val values: OSet[Target[_]])
+ case class Results(values: Seq[Any], evaluated: OSet[Target[_]])
+ def groupAroundNamedTargets(topoSortedTargets: TopoSorted,
+ labeling: Map[Target[_], Labelled[_]]): MultiBiMap[Int, Target[_]] = {
+
+ val grouping = new MultiBiMap.Mutable[Int, Target[_]]()
+
+ var groupCount = 0
+
+ for(target <- topoSortedTargets.values.items.reverseIterator){
+ if (!grouping.containsValue(target)){
+ grouping.add(groupCount, target)
+ groupCount += 1
+ }
+
+ val targetGroup = grouping.lookupValue(target)
+ for(upstream <- target.inputs){
+ grouping.lookupValueOpt(upstream) match{
+ case None if !labeling.contains(upstream) =>
+ grouping.add(targetGroup, upstream)
+ case Some(upstreamGroup) if upstreamGroup == targetGroup =>
+ val upstreamTargets = grouping.removeAll(upstreamGroup)
+
+ grouping.addAll(targetGroup, upstreamTargets)
+ case _ => // do nothing
+ }
+ }
+ }
+
+ val targetOrdering = topoSortedTargets.values.items.zipWithIndex.toMap
+ val output = new MultiBiMap.Mutable[Int, Target[_]]
+
+ // Sort groups amongst themselves, and sort the contents of each group
+ // before aggregating it into the final output
+ for(g <- grouping.values().toArray.sortBy(g => targetOrdering(g.items(0)))){
+ output.addAll(output.keys.length, g.toArray.sortBy(targetOrdering))
+ }
+ output
+ }
+
+ /**
+ * Takes the given targets, finds all the targets they transitively depend
+ * on, and sort them topologically. Fails if there are dependency cycles
+ */
+ def topoSortedTransitiveTargets(sourceTargets: OSet[Target[_]]): TopoSorted = {
+ val transitiveTargets = new OSet.Mutable[Target[_]]
+ def rec(t: Target[_]): Unit = {
+ if (transitiveTargets.contains(t)) () // do nothing
+ else {
+ transitiveTargets.append(t)
+ t.inputs.foreach(rec)
+ }
+ }
+
+ sourceTargets.items.foreach(rec)
+ val targetIndices = transitiveTargets.items.zipWithIndex.toMap
+
+ val numberedEdges =
+ for(t <- transitiveTargets.items)
+ yield t.inputs.map(targetIndices)
+
+ val sortedClusters = Tarjans(numberedEdges)
+ val nonTrivialClusters = sortedClusters.filter(_.length > 1)
+ assert(nonTrivialClusters.isEmpty, nonTrivialClusters)
+ new TopoSorted(OSet.from(sortedClusters.flatten.map(transitiveTargets.items)))
+ }
+}
\ No newline at end of file
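
A rough usage sketch, following the same pattern as `Main.scala` and the integration tests below; the workspace path and the `Build` object (from the sketch after `Discovered.scala`) are placeholders:

    import forge._
    import forge.util.OSet
    import ammonite.ops.pwd

    val mapping   = Discovered.mapping(Build)
    val evaluator = new Evaluator(pwd / 'target / 'workspace, mapping)

    // Evaluates the requested targets plus everything they transitively depend on,
    // re-using each group's *.forge.json metadata cache on subsequent runs
    val Evaluator.Results(values, evaluated) = evaluator.evaluate(OSet.from(mapping.keys))
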
diff --git a/core/src/main/scala/forge/Main.scala b/core/src/main/scala/forge/Main.scala
new file mode 100644
index 00000000..d919d0e2
--- /dev/null
+++ b/core/src/main/scala/forge/Main.scala
@@ -0,0 +1,25 @@
+package forge
+
+import ammonite.ops._
+import ammonite.util.{Name, Res}
+import forge.util.OSet
+
+
+object Main {
+ def main(args: Array[String]): Unit = {
+
+ ammonite.Main().instantiateInterpreter() match{
+ case Right(interp) =>
+ val result = ammonite.main.Scripts.runScript(pwd, Path(args(0), pwd), interp, Nil)
+
+ val (obj, discovered) = result.asInstanceOf[Res.Success[(Any, forge.Discovered[Any])]].s
+ val mapping = Discovered.mapping(obj)(discovered)
+ val workspacePath = pwd / 'target / 'javac
+ val evaluator = new Evaluator(workspacePath, mapping)
+ val evaluated = evaluator.evaluate(OSet.from(mapping.keys)).evaluated.filter(mapping.contains)
+ (result, interp.watchedFiles)
+ case Left(problems) => problems
+ }
+ }
+
+}
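
A hypothetical invocation, assuming the example javac build script added later in this commit; `Main` resolves the script path against `pwd`, discovers the targets the script returns, and evaluates them into the hard-coded `pwd / 'target / 'javac` workspace:

    forge.Main.main(Array("core/src/test/examples/javac/build.sc"))
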
diff --git a/core/src/main/scala/forge/Target.scala b/core/src/main/scala/forge/Target.scala
new file mode 100644
index 00000000..0e84a9b4
--- /dev/null
+++ b/core/src/main/scala/forge/Target.scala
@@ -0,0 +1,132 @@
+package forge
+
+
+import ammonite.ops.{ls, mkdir}
+import forge.util.{Args, PathRef}
+import play.api.libs.json.{Format, JsValue, Json}
+
+import scala.annotation.compileTimeOnly
+import language.experimental.macros
+import reflect.macros.blackbox.Context
+import scala.collection.mutable
+
+abstract class Target[T] extends Target.Ops[T]{
+ /**
+ * What other Targets does this Target depend on?
+ */
+ val inputs: Seq[Target[_]]
+
+ /**
+ * Evaluate this target
+ */
+ def evaluate(args: Args): T
+
+ /**
+ * Even if this target's inputs did not change, does it need to re-evaluate
+ * anyway?
+ */
+ def sideHash: Int = 0
+
+ @compileTimeOnly("Target#apply() can only be used with a T{...} block")
+ def apply(): T = ???
+}
+
+object Target{
+ class Target0[T](t: T) extends Target[T]{
+ lazy val t0 = t
+ val inputs = Nil
+ def evaluate(args: Args) = t0
+ }
+ class Target1[T](t: => Target[T]) extends Target[T]{
+ lazy val t0 = t
+ lazy val inputs = t0.inputs
+ def evaluate(args: Args) = t0.evaluate(args)
+ }
+ implicit def apply[T](t: => Target[T]): Target[T] = new Target1(t)
+ def apply[T](t: T): Target[T] = macro impl[T]
+ def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Target[T]] = {
+ import c.universe._
+ val bound = collection.mutable.Buffer.empty[(c.Tree, Symbol)]
+ val OptionGet = c.universe.typeOf[Target[_]].member(TermName("apply"))
+ object transformer extends c.universe.Transformer {
+ // Derived from @olafurpg's
+ // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608
+ override def transform(tree: c.Tree): c.Tree = tree match {
+ case t @ q"$fun.apply()" if t.symbol == OptionGet =>
+ val tempName = c.freshName(TermName("tmp"))
+ val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName)
+ c.internal.setInfo(tempSym, t.tpe)
+ val tempIdent = Ident(tempSym)
+ c.internal.setType(tempIdent, t.tpe)
+ bound.append((fun, tempSym))
+ tempIdent
+ case _ => super.transform(tree)
+ }
+ }
+ val transformed = transformer.transform(t.tree)
+ val (exprs, symbols) = bound.unzip
+
+ val bindings = symbols.map(c.internal.valDef(_))
+
+ val embedded = q"new forge.Target.Target1(forge.zipMap(..$exprs){ (..$bindings) => $transformed })"
+
+ c.Expr[Target[T]](embedded)
+ }
+
+ abstract class Ops[T]{ this: Target[T] =>
+ def map[V](f: T => V) = new Target.Mapped(this, f)
+
+ def filter(f: T => Boolean) = this
+ def withFilter(f: T => Boolean) = this
+ def zip[V](other: Target[V]) = new Target.Zipped(this, other)
+
+ }
+
+ def traverse[T](source: Seq[Target[T]]) = {
+ new Traverse[T](source)
+ }
+ class Traverse[T](val inputs: Seq[Target[T]]) extends Target[Seq[T]]{
+ def evaluate(args: Args) = {
+ for (i <- 0 until args.length)
+ yield args(i).asInstanceOf[T]
+ }
+
+ }
+ class Mapped[T, V](source: Target[T], f: T => V) extends Target[V]{
+ def evaluate(args: Args) = f(args(0))
+ val inputs = List(source)
+ }
+ class Zipped[T, V](source1: Target[T],
+ source2: Target[V]) extends Target[(T, V)]{
+ def evaluate(args: Args) = (args(0), args(1))
+ val inputs = List(source1, source2)
+ }
+
+ def path(path: ammonite.ops.Path) = new Path(path)
+ class Path(path: ammonite.ops.Path) extends Target[PathRef]{
+ def handle = PathRef(path)
+ def evaluate(args: Args) = handle
+ override def sideHash = handle.hashCode()
+ val inputs = Nil
+ }
+
+ class Subprocess(val inputs: Seq[Target[_]],
+ command: Args => Seq[String]) extends Target[Subprocess.Result] {
+
+ def evaluate(args: Args) = {
+ mkdir(args.dest)
+ import ammonite.ops._
+ implicit val path = ammonite.ops.Path(args.dest, pwd)
+ val toTarget = () // Shadow the implicit conversion :/
+ val output = %%(command(args))
+ assert(output.exitCode == 0)
+ Subprocess.Result(output, PathRef(args.dest))
+ }
+ }
+ object Subprocess{
+ case class Result(result: ammonite.ops.CommandResult, dest: PathRef)
+ object Result{
+ implicit val tsFormat: Format[Target.Subprocess.Result] = Json.format
+ }
+ }
+}
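
To make the combinators concrete, here is a sketch of how targets compose (mirroring `compileAll`/`list` in the javac example below): `map` and `zip` produce new targets whose `inputs` record the dependency edges the `Evaluator` walks, and `Target.Subprocess` shells out to an external command. The paths are placeholders:

    import forge._
    import forge.util.PathRef
    import ammonite.ops._

    val sources: Target[PathRef]       = Target.path(pwd / 'src)
    val fileList: Target[Seq[PathRef]] = sources.map(s => ls.rec(s.path).map(PathRef(_)))

    val classFiles = new Target.Subprocess(
      Seq(fileList),
      args =>
        Seq("javac") ++
        args[Seq[PathRef]](0).map(_.path.toString) ++
        Seq("-d", args.dest.toString)
    ).map(_.dest)
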
diff --git a/core/src/main/scala/forge/Tarjans.scala b/core/src/main/scala/forge/Tarjans.scala
new file mode 100644
index 00000000..9831fe7f
--- /dev/null
+++ b/core/src/main/scala/forge/Tarjans.scala
@@ -0,0 +1,50 @@
+package forge
+
+import collection.mutable
+// Adapted from
+// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java
+object Tarjans {
+ def apply(graph0: Seq[Seq[Int]]): Seq[Seq[Int]] = {
+ val graph = graph0.map(_.toArray).toArray
+ val n = graph.length
+ val visited = new Array[Boolean](n)
+ val stack = mutable.ArrayBuffer.empty[Integer]
+ var time = 0
+ val lowlink = new Array[Int](n)
+ val components = mutable.ArrayBuffer.empty[Seq[Int]]
+
+
+ for (u <- 0 until n) {
+ if (!visited(u)) dfs(u)
+ }
+
+ def dfs(u: Int): Unit = {
+ lowlink(u) = time
+ time += 1
+ visited(u) = true
+ stack.append(u)
+ var isComponentRoot = true
+ for (v <- graph(u)) {
+ if (!visited(v)) dfs(v)
+ if (lowlink(u) > lowlink(v)) {
+ lowlink(u) = lowlink(v)
+ isComponentRoot = false
+ }
+ }
+ if (isComponentRoot) {
+ val component = mutable.Buffer.empty[Int]
+
+ var done = false
+ while (!done) {
+ val x = stack.last
+ stack.remove(stack.length - 1)
+ component.append(x)
+ lowlink(x) = Integer.MAX_VALUE
+ if (x == u) done = true
+ }
+ components.append(component)
+ }
+ }
+ components
+ }
+}
\ No newline at end of file
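
`Tarjans` takes a graph as adjacency lists (index `i` lists the node indices that `i` points at) and returns the strongly connected components in topologically sorted order. A small example, taken from the `independentSimpleCycles` case in `TarjanTests` below:

    // (0) <-> (1)    (2) -> (3) -> (4) -> back to (2)
    val components = Tarjans(Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2)))
    // components == Seq(Seq(1, 0), Seq(4, 3, 2)): each cycle collapses into one component
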
diff --git a/core/src/main/scala/forge/package.scala b/core/src/main/scala/forge/package.scala
new file mode 100644
index 00000000..8c24bde6
--- /dev/null
+++ b/core/src/main/scala/forge/package.scala
@@ -0,0 +1,99 @@
+import play.api.libs.json._
+import ammonite.ops.{Bytes, Path}
+import coursier.Dependency
+import forge.util.Args
+package object forge {
+
+ val T = Target
+ type T[T] = Target[T]
+ def zipMap[R]()(f: () => R) = new Target.Target0(f())
+ def zipMap[A, R](a: T[A])(f: A => R) = a.map(f)
+ def zipMap[A, B, R](a: T[A], b: T[B])(f: (A, B) => R) = zip(a, b).map(f.tupled)
+ def zipMap[A, B, C, R](a: T[A], b: T[B], c: T[C])(f: (A, B, C) => R) = zip(a, b, c).map(f.tupled)
+ def zipMap[A, B, C, D, R](a: T[A], b: T[B], c: T[C], d: T[D])(f: (A, B, C, D) => R) = zip(a, b, c, d).map(f.tupled)
+ def zipMap[A, B, C, D, E, R](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E])(f: (A, B, C, D, E) => R) = zip(a, b, c, d, e).map(f.tupled)
+ def zip() = new Target.Target0(())
+ def zip[A](a: T[A]) = a.map(Tuple1(_))
+ def zip[A, B](a: T[A], b: T[B]) = a.zip(b)
+ def zip[A, B, C](a: T[A], b: T[B], c: T[C]) = new T[(A, B, C)]{
+ val inputs = Seq(a, b, c)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2))
+ }
+ def zip[A, B, C, D](a: T[A], b: T[B], c: T[C], d: T[D]) = new T[(A, B, C, D)]{
+ val inputs = Seq(a, b, c, d)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3))
+ }
+ def zip[A, B, C, D, E](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E]) = new T[(A, B, C, D, E)]{
+ val inputs = Seq(a, b, c, d, e)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3), args[E](4))
+ }
+ implicit object pathFormat extends Format[ammonite.ops.Path]{
+ def reads(json: JsValue) = json match{
+ case JsString(v) => JsSuccess(Path(v))
+ case _ => JsError("Paths must be a String")
+ }
+ def writes(o: Path) = JsString(o.toString)
+ }
+
+ implicit object bytesFormat extends Format[Bytes]{
+ def reads(json: JsValue) = json match{
+ case JsString(v) => JsSuccess(
+ new Bytes(javax.xml.bind.DatatypeConverter.parseBase64Binary(v))
+ )
+ case _ => JsError("Bytes must be a String")
+ }
+ def writes(o: Bytes) = {
+ JsString(javax.xml.bind.DatatypeConverter.printBase64Binary(o.array))
+ }
+ }
+
+ implicit def EitherFormat[T: Format, V: Format] = new Format[Either[T, V]]{
+ def reads(json: JsValue) = json match{
+ case JsObject(struct) =>
+ (struct.get("type"), struct.get("value")) match{
+ case (Some(JsString("Left")), Some(v)) => implicitly[Reads[T]].reads(v).map(Left(_))
+ case (Some(JsString("Right")), Some(v)) => implicitly[Reads[V]].reads(v).map(Right(_))
+ case _ => JsError("Either object layout is unknown")
+ }
+ case _ => JsError("Either must be an Object")
+ }
+ def writes(o: Either[T, V]) = o match{
+ case Left(v) => Json.obj("type" -> "Left", "value" -> implicitly[Writes[T]].writes(v))
+ case Right(v) => Json.obj("type" -> "Right", "value" -> implicitly[Writes[V]].writes(v))
+ }
+ }
+
+ implicit val crFormat: Format[ammonite.ops.CommandResult] = Json.format
+ implicit val modFormat: Format[coursier.Module] = Json.format
+ // https://github.com/playframework/play-json/issues/120
+ // implicit val depFormat: Format[coursier.Dependency] = Json.format
+ implicit val depFormat: Format[coursier.Dependency] = new Format[coursier.Dependency] {
+ def writes(o: Dependency) = {
+ Json.obj(
+ "module" -> Json.toJson(o.module),
+ "version" -> Json.toJson(o.version),
+ "configuration" -> Json.toJson(o.configuration),
+ "exclusions" -> Json.toJson(o.exclusions),
+ "attributes" -> Json.toJson(o.attributes),
+ "optional" -> Json.toJson(o.optional),
+ "transitive" -> Json.toJson(o.transitive)
+ )
+ }
+
+ def reads(json: JsValue) = json match{
+ case x: JsObject =>
+ JsSuccess(coursier.Dependency(
+ Json.fromJson[coursier.Module](x.value("module")).get,
+ Json.fromJson[String](x.value("version")).get,
+ Json.fromJson[String](x.value("configuration")).get,
+ Json.fromJson[coursier.Attributes](x.value("attributes")).get,
+ Json.fromJson[Set[(String, String)]](x.value("exclusions")).get,
+ Json.fromJson[Boolean](x.value("optional")).get,
+ Json.fromJson[Boolean](x.value("transitive")).get
+ ))
+
+ case _ => JsError("Dep must be an object")
+ }
+ }
+ implicit val attrFormat: Format[coursier.Attributes] = Json.format
+}
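
The `zipMap` overloads are what the `T{...}` macro in `Target.scala` expands into. A hand-written sketch of roughly what a block like `T{ a() + b() }` desugars to:

    import forge._

    def combined(a: T[Int], b: T[Int]): T[Int] =
      zipMap(a, b)((av, bv) => av + bv)   // zip the inputs, then map over the tuple
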
diff --git a/core/src/main/scala/forge/util/Args.scala b/core/src/main/scala/forge/util/Args.scala
new file mode 100644
index 00000000..23102572
--- /dev/null
+++ b/core/src/main/scala/forge/util/Args.scala
@@ -0,0 +1,9 @@
+package forge.util
+
+class Args(val args: IndexedSeq[_], val dest: ammonite.ops.Path){
+ def length = args.length
+ def apply[T](index: Int): T = {
+ if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
+ else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
+ }
+}
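
A tiny sketch of `Args` in isolation; the values and destination path are placeholders:

    import forge.util.Args
    import ammonite.ops.pwd

    val args   = new Args(Vector(1, "two"), pwd / 'target / 'demo)
    val first  = args[Int](0)      // 1
    val second = args[String](1)   // "two"
    // args[Int](5) would throw IndexOutOfBoundsException("Index 5 outside of range 0 - 2")
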
diff --git a/core/src/main/scala/forge/util/Labelled.scala b/core/src/main/scala/forge/util/Labelled.scala
new file mode 100644
index 00000000..a79d2d93
--- /dev/null
+++ b/core/src/main/scala/forge/util/Labelled.scala
@@ -0,0 +1,8 @@
+package forge.util
+
+import forge.Target
+import play.api.libs.json.Format
+
+case class Labelled[T](target: Target[T],
+ format: Format[T],
+ segments: Seq[String])
diff --git a/core/src/main/scala/forge/util/MultiBiMap.scala b/core/src/main/scala/forge/util/MultiBiMap.scala
new file mode 100644
index 00000000..cb6ff280
--- /dev/null
+++ b/core/src/main/scala/forge/util/MultiBiMap.scala
@@ -0,0 +1,47 @@
+package forge.util
+
+import scala.collection.mutable
+
+/**
+ * A map from keys to collections of values: you can assign multiple values
+ * to any particular key. Also allows lookups in both directions: what values
+ * are assigned to a key or what key a value is assigned to.
+ */
+trait MultiBiMap[K, V]{
+ def containsValue(v: V): Boolean
+ def lookupKey(k: K): OSet[V]
+ def lookupValue(v: V): K
+ def lookupValueOpt(v: V): Option[K]
+ def add(k: K, v: V): Unit
+ def removeAll(k: K): OSet[V]
+ def addAll(k: K, vs: TraversableOnce[V]): Unit
+ def keys(): Iterator[K]
+ def values(): Iterator[OSet[V]]
+}
+object MultiBiMap{
+ class Mutable[K, V]() extends MultiBiMap[K, V]{
+ private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K]
+ private[this] val keyToValues = mutable.LinkedHashMap.empty[K, OSet.Mutable[V]]
+ def containsValue(v: V) = valueToKey.contains(v)
+ def lookupKey(k: K) = keyToValues(k)
+ def lookupValue(v: V) = valueToKey(v)
+ def lookupValueOpt(v: V) = valueToKey.get(v)
+ def add(k: K, v: V): Unit = {
+ valueToKey(v) = k
+ keyToValues.getOrElseUpdate(k, new OSet.Mutable[V]()).append(v)
+ }
+ def removeAll(k: K): OSet[V] = keyToValues.get(k) match {
+ case None => OSet()
+ case Some(vs) =>
+ vs.foreach(valueToKey.remove)
+
+ keyToValues.remove(k)
+ vs
+ }
+ def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _))
+
+ def keys() = keyToValues.keysIterator
+
+ def values() = keyToValues.valuesIterator
+ }
+}
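
A short sketch of the mutable implementation; the keys and values are arbitrary placeholders:

    import forge.util.MultiBiMap

    val m = new MultiBiMap.Mutable[Int, String]()
    m.add(0, "a"); m.add(0, "b"); m.add(1, "c")

    m.lookupKey(0)       // OSet(a, b)
    m.lookupValue("c")   // 1
    m.removeAll(0)       // OSet(a, b), and key 0 no longer maps to anything
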
diff --git a/core/src/main/scala/forge/util/OSet.scala b/core/src/main/scala/forge/util/OSet.scala
new file mode 100644
index 00000000..43743cdc
--- /dev/null
+++ b/core/src/main/scala/forge/util/OSet.scala
@@ -0,0 +1,107 @@
+package forge.util
+
+
+import play.api.libs.json._
+
+import scala.collection.mutable
+
+/**
+ * A collection with enforced uniqueness, fast contains and deterministic
+ * ordering. Raises an exception if a duplicate is found; call
+ * `toSeq.distinct` if you explicitly want to make it swallow duplicates
+ */
+trait OSet[V] extends TraversableOnce[V]{
+ def contains(v: V): Boolean
+ def items: IndexedSeq[V]
+ def flatMap[T](f: V => TraversableOnce[T]): OSet[T]
+ def map[T](f: V => T): OSet[T]
+ def filter(f: V => Boolean): OSet[V]
+ def collect[T](f: PartialFunction[V, T]): OSet[T]
+ def zipWithIndex: OSet[(V, Int)]
+ def reverse: OSet[V]
+}
+
+object OSet{
+ implicit def jsonFormat[T: Format]: Format[OSet[T]] = new Format[OSet[T]] {
+ def writes(o: OSet[T]) = JsArray(o.items.map(implicitly[Format[T]].writes))
+
+ def reads(json: JsValue) = json match{
+ case x: JsArray => implicitly[Format[Seq[T]]].reads(x).map(OSet.from)
+ case _ => JsError("OSet needs to be an Array")
+ }
+ }
+ def apply[V](items: V*) = from(items)
+
+ def from[V](items: TraversableOnce[V]): OSet[V] = {
+ val set = new OSet.Mutable[V]()
+ items.foreach(set.append)
+ set
+ }
+
+
+ class Mutable[V]() extends OSet[V]{
+
+ private[this] val set0 = mutable.LinkedHashSet.empty[V]
+ def contains(v: V) = set0.contains(v)
+ def append(v: V) = if (!contains(v)){
+ set0.add(v)
+
+ }else {
+ throw new Exception("Duplicated item inserted into OrderedSet: " + v)
+ }
+ def appendAll(vs: Seq[V]) = vs.foreach(append)
+ def items: IndexedSeq[V] = set0.toIndexedSeq
+ def set: collection.Set[V] = set0
+
+ def map[T](f: V => T): OSet[T] = {
+ val output = new OSet.Mutable[T]
+ for(i <- items) output.append(f(i))
+ output
+ }
+ def flatMap[T](f: V => TraversableOnce[T]): OSet[T] = {
+ val output = new OSet.Mutable[T]
+ for(i <- items) for(i0 <- f(i)) output.append(i0)
+ output
+ }
+ def filter(f: V => Boolean): OSet[V] = {
+ val output = new OSet.Mutable[V]
+ for(i <- items) if (f(i)) output.append(i)
+ output
+ }
+
+ def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x))
+
+ def zipWithIndex = {
+ var i = 0
+ this.map{ x =>
+ i += 1
+ (x, i-1)
+ }
+ }
+
+ def reverse = OSet.from(items.reverseIterator)
+
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = items.isTraversableAgain
+ def toIterator: Iterator[V] = items.toIterator
+ def toStream: Stream[V] = items.toStream
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: V](xs: Array[B],start: Int,len: Int): Unit = items.copyToArray(xs, start, len)
+ def exists(p: V => Boolean): Boolean = items.exists(p)
+ def find(p: V => Boolean): Option[V] = items.find(p)
+ def forall(p: V => Boolean): Boolean = items.forall(p)
+ def foreach[U](f: V => U): Unit = items.foreach(f)
+ def hasDefiniteSize: Boolean = items.hasDefiniteSize
+ def isEmpty: Boolean = items.isEmpty
+ def seq: scala.collection.TraversableOnce[V] = items
+ def toTraversable: Traversable[V] = items
+
+ override def hashCode() = items.hashCode()
+ override def equals(other: Any) = other match{
+ case s: OSet[_] => items.equals(s.items)
+ case _ => super.equals(other)
+ }
+ override def toString = items.mkString("OSet(", ", ", ")")
+ }
+}
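
A quick sketch of the semantics described in the doc comment above:

    import forge.util.OSet

    val s = OSet(1, 2, 3)
    s.contains(2)            // true
    s.map(_ * 10).items      // Vector(10, 20, 30); insertion order is preserved
    // OSet(1, 1) would throw: duplicates are rejected rather than silently dropped
    val deduped = OSet.from(Seq(1, 1).distinct)   // the documented way to swallow them
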
diff --git a/core/src/main/scala/forge/util/PathRef.scala b/core/src/main/scala/forge/util/PathRef.scala
new file mode 100644
index 00000000..dbe1ebbd
--- /dev/null
+++ b/core/src/main/scala/forge/util/PathRef.scala
@@ -0,0 +1,57 @@
+package forge
+package util
+
+import java.io.IOException
+import java.nio.file.{FileVisitResult, FileVisitor}
+import java.nio.file.attribute.BasicFileAttributes
+import java.security.MessageDigest
+import java.nio.{file => jnio}
+import play.api.libs.json.{Format, Json}
+
+
+/**
+ * A wrapper around `ammonite.ops.Path` that calculates its hashcode based
+ * on the contents of the filesystem underneath it. Used to ensure filesystem
+ * changes can bust caches which are keyed off hashcodes.
+ */
+case class PathRef(path: ammonite.ops.Path){
+ val md5Hash = {
+ val digest = MessageDigest.getInstance("MD5")
+
+ val buffer = new Array[Byte](16 * 1024)
+ jnio.Files.walkFileTree(
+ path.toNIO,
+ new FileVisitor[jnio.Path] {
+ def preVisitDirectory(dir: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(dir.toAbsolutePath.toString.getBytes)
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFile(file: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(file.toAbsolutePath.toString.getBytes)
+ val is = jnio.Files.newInputStream(file)
+ def rec(): Unit = {
+ val length = is.read(buffer)
+ if (length != -1){
+ digest.update(buffer, 0, length)
+ rec()
+ }
+ }
+ rec()
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFileFailed(file: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ def postVisitDirectory(dir: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ }
+ )
+
+ java.util.Arrays.hashCode(digest.digest())
+
+ }
+ override def hashCode() = md5Hash
+}
+
+object PathRef{
+ implicit def jsonFormatter: Format[PathRef] = Json.format
+}
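
A sketch of the cache-busting behaviour the doc comment describes; the directory and file names are placeholders:

    import forge.util.PathRef
    import ammonite.ops._

    val dir = pwd / 'target / 'demo
    mkdir(dir)
    write.over(dir / "hello.txt", "Hello")

    val before = PathRef(dir)
    write.append(dir / "hello.txt", "!")
    val after  = PathRef(dir)

    // md5Hash digests every path name and file body under `dir`, so any
    // content change yields a different hashCode and invalidates caches
    assert(before.hashCode != after.hashCode)
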
diff --git a/core/src/test/examples/javac/build.sc b/core/src/test/examples/javac/build.sc
new file mode 100644
index 00000000..dfbe5271
--- /dev/null
+++ b/core/src/test/examples/javac/build.sc
@@ -0,0 +1,66 @@
+object Foo {
+
+ import java.io.FileOutputStream
+ import java.util.jar.JarEntry
+
+ import ammonite.ops.{ls, pwd, read}
+ import forge.{Discovered, Target}
+ import forge.util.{Args, PathRef}
+
+ val workspacePath = pwd / 'target / 'workspace / 'javac
+ val javacSrcPath = pwd / 'src / 'test / 'examples / 'javac
+ val javacDestPath = workspacePath / 'src
+
+ val sourceRootPath = javacDestPath / 'src
+ val resourceRootPath = javacDestPath / 'resources
+
+ // sourceRoot -> allSources -> classFiles
+ // |
+ // v
+ // resourceRoot ----> jar
+ val sourceRoot = Target.path(sourceRootPath)
+ val resourceRoot = Target.path(resourceRootPath)
+ val allSources = list(sourceRoot)
+ val classFiles = compileAll(allSources)
+ val jar = jarUp(resourceRoot, classFiles)
+
+ def compileAll(sources: Target[Seq[PathRef]]) = {
+ new Target.Subprocess(
+ Seq(sources),
+ args =>
+ Seq("javac") ++
+ args[Seq[PathRef]](0).map(_.path.toString) ++
+ Seq("-d", args.dest.toString)
+ ).map(_.dest)
+ }
+
+ def list(root: Target[PathRef]): Target[Seq[PathRef]] = {
+ root.map(x => ls.rec(x.path).map(PathRef(_)))
+ }
+
+ case class jarUp(roots: Target[PathRef]*) extends Target[PathRef] {
+
+ val inputs = roots
+
+ def evaluate(args: Args): PathRef = {
+
+ val output = new java.util.jar.JarOutputStream(new FileOutputStream(args.dest.toIO))
+ for {
+ root0 <- args.args
+ root = root0.asInstanceOf[PathRef]
+
+ path <- ls.rec(root.path)
+ if path.isFile
+ } {
+ val relative = path.relativeTo(root.path)
+ output.putNextEntry(new JarEntry(relative.toString))
+ output.write(read.bytes(path))
+ }
+ output.close()
+ PathRef(args.dest)
+ }
+ }
+
+}
+
+@main def main(): Any = Foo -> forge.Discovered[Foo.type]
diff --git a/core/src/test/examples/javac/resources/hello.txt b/core/src/test/examples/javac/resources/hello.txt
new file mode 100644
index 00000000..5e1c309d
--- /dev/null
+++ b/core/src/test/examples/javac/resources/hello.txt
@@ -0,0 +1 @@
+Hello World
\ No newline at end of file
diff --git a/core/src/test/examples/javac/src/Bar.java b/core/src/test/examples/javac/src/Bar.java
new file mode 100644
index 00000000..4e30c89b
--- /dev/null
+++ b/core/src/test/examples/javac/src/Bar.java
@@ -0,0 +1,4 @@
+package test;
+public class Bar{
+ static int value = 271828;
+}
\ No newline at end of file
diff --git a/core/src/test/examples/javac/src/Foo.java b/core/src/test/examples/javac/src/Foo.java
new file mode 100644
index 00000000..e694f9fa
--- /dev/null
+++ b/core/src/test/examples/javac/src/Foo.java
@@ -0,0 +1,7 @@
+package test;
+public class Foo{
+ static int value = 31337;
+ public static void main(String[] args){
+ System.out.println(value + Bar.value);
+ }
+}
\ No newline at end of file
diff --git a/core/src/test/scala/forge/EvaluationTests.scala b/core/src/test/scala/forge/EvaluationTests.scala
new file mode 100644
index 00000000..b58d5ccc
--- /dev/null
+++ b/core/src/test/scala/forge/EvaluationTests.scala
@@ -0,0 +1,144 @@
+package forge
+
+
+import forge.util.OSet
+import utest._
+import utest.framework.TestPath
+
+object EvaluationTests extends TestSuite{
+
+ val tests = Tests{
+ val graphs = new TestGraphs()
+ import graphs._
+ 'evaluateSingle - {
+
+ class Checker[T: Discovered](base: T)(implicit tp: TestPath) {
+ val workspace = ammonite.ops.pwd / 'target / 'workspace / tp.value
+ ammonite.ops.rm(ammonite.ops.Path(workspace, ammonite.ops.pwd))
+ // Make sure data is persisted even if we re-create the evaluator each time
+ def evaluator = new Evaluator(
+ workspace,
+
+ Discovered.mapping(base)
+ )
+ def apply(target: Target[_], expValue: Any,
+ expEvaled: OSet[Target[_]],
+ extraEvaled: Int = 0) = {
+
+ val Evaluator.Results(returnedValues, returnedEvaluated) = evaluator.evaluate(OSet(target))
+
+ val (matchingReturnedEvaled, extra) = returnedEvaluated.items.partition(expEvaled.contains)
+
+ assert(
+ returnedValues == Seq(expValue),
+ matchingReturnedEvaled.toSet == expEvaled.toSet,
+ extra.length == extraEvaled
+ )
+
+ // Second time the value is already cached, so no evaluation needed
+ val Evaluator.Results(returnedValues2, returnedEvaluated2) = evaluator.evaluate(OSet(target))
+ assert(
+ returnedValues2 == returnedValues,
+ returnedEvaluated2 == OSet()
+ )
+ }
+ }
+
+ 'singleton - {
+ import singleton._
+ val check = new Checker(singleton)
+ // First time the target is evaluated
+ check(single, expValue = 0, expEvaled = OSet(single))
+
+ single.counter += 1
+ // After incrementing the counter, it forces re-evaluation
+ check(single, expValue = 1, expEvaled = OSet(single))
+ }
+ 'pair - {
+ import pair._
+ val check = new Checker(pair)
+ check(down, expValue = 0, expEvaled = OSet(up, down))
+
+ down.counter += 1
+ check(down, expValue = 1, expEvaled = OSet(down))
+
+ up.counter += 1
+ check(down, expValue = 2, expEvaled = OSet(up, down))
+ }
+ 'anonTriple - {
+ import anonTriple._
+ val check = new Checker(anonTriple)
+ val middle = down.inputs(0)
+ check(down, expValue = 0, expEvaled = OSet(up, middle, down))
+
+ down.counter += 1
+ check(down, expValue = 1, expEvaled = OSet(middle, down))
+
+ up.counter += 1
+ check(down, expValue = 2, expEvaled = OSet(up, middle, down))
+
+ middle.asInstanceOf[TestUtil.Test].counter += 1
+
+ check(down, expValue = 3, expEvaled = OSet(middle, down))
+ }
+ 'diamond - {
+ import diamond._
+ val check = new Checker(diamond)
+ check(down, expValue = 0, expEvaled = OSet(up, left, right, down))
+
+ down.counter += 1
+ check(down, expValue = 1, expEvaled = OSet(down))
+
+ up.counter += 1
+ // Increment by 2 because up is referenced twice: once by left, once by right
+ check(down, expValue = 3, expEvaled = OSet(up, left, right, down))
+
+ left.counter += 1
+ check(down, expValue = 4, expEvaled = OSet(left, down))
+
+ right.counter += 1
+ check(down, expValue = 5, expEvaled = OSet(right, down))
+ }
+ 'anonDiamond - {
+ import anonDiamond._
+ val check = new Checker(anonDiamond)
+ val left = down.inputs(0).asInstanceOf[TestUtil.Test]
+ val right = down.inputs(1).asInstanceOf[TestUtil.Test]
+ check(down, expValue = 0, expEvaled = OSet(up, left, right, down))
+
+ down.counter += 1
+ check(down, expValue = 1, expEvaled = OSet(left, right, down))
+
+ up.counter += 1
+ // Increment by 2 because up is referenced twice: once by left, once by right
+ check(down, expValue = 3, expEvaled = OSet(up, left, right, down))
+
+ left.counter += 1
+ check(down, expValue = 4, expEvaled = OSet(left, right, down))
+
+ right.counter += 1
+ check(down, expValue = 5, expEvaled = OSet(left, right, down))
+ }
+
+ 'bigSingleTerminal - {
+ import bigSingleTerminal._
+ val check = new Checker(bigSingleTerminal)
+
+ check(j, expValue = 0, expEvaled = OSet(a, b, e, f, i, j), extraEvaled = 22)
+
+ j.counter += 1
+ check(j, expValue = 1, expEvaled = OSet(j), extraEvaled = 3)
+
+ i.counter += 1
+ // increment value by 2 because `i` is used twice on the way to `j`
+ check(j, expValue = 3, expEvaled = OSet(j, i), extraEvaled = 8)
+
+ b.counter += 1
+ // increment value by 4 because `b` is used four times on the way to `j`
+ check(j, expValue = 7, expEvaled = OSet(b, e, f, i, j), extraEvaled = 20)
+ }
+ }
+
+
+ }
+}
diff --git a/core/src/test/scala/forge/GraphTests.scala b/core/src/test/scala/forge/GraphTests.scala
new file mode 100644
index 00000000..572e459e
--- /dev/null
+++ b/core/src/test/scala/forge/GraphTests.scala
@@ -0,0 +1,187 @@
+package forge
+
+import utest._
+import TestUtil.test
+import forge.util.OSet
+
+object GraphTests extends TestSuite{
+
+ val tests = Tests{
+
+
+ val graphs = new TestGraphs()
+ import graphs._
+
+ 'discovery{
+ class CanNest{
+ val single = test()
+ val invisible: Any = test()
+ }
+ object outer {
+ val single = test()
+ val invisible: Any = test()
+ object nested{
+ val single = test()
+ val invisible: Any = test()
+
+ }
+ val classInstance = new CanNest
+
+ }
+ val discovered = Discovered[outer.type].apply(outer).map(x => (x._1, x._3))
+ val expected = Seq(
+ (List("classInstance", "single"), outer.classInstance.single),
+ (List("nested", "single"), outer.nested.single),
+ (List("single"), outer.single)
+ )
+ assert(discovered == expected)
+ }
+
+
+ 'topoSortedTransitiveTargets - {
+ def check(targets: OSet[Target[_]], expected: OSet[Target[_]]) = {
+ val result = Evaluator.topoSortedTransitiveTargets(targets).values
+ TestUtil.checkTopological(result)
+ assert(result == expected)
+ }
+
+ 'singleton - check(
+ targets = OSet(singleton.single),
+ expected = OSet(singleton.single)
+ )
+ 'pair - check(
+ targets = OSet(pair.down),
+ expected = OSet(pair.up, pair.down)
+ )
+ 'anonTriple - check(
+ targets = OSet(anonTriple.down),
+ expected = OSet(anonTriple.up, anonTriple.down.inputs(0), anonTriple.down)
+ )
+ 'diamond - check(
+ targets = OSet(diamond.down),
+ expected = OSet(diamond.up, diamond.left, diamond.right, diamond.down)
+ )
+ 'anonDiamond - check(
+ targets = OSet(diamond.down),
+ expected = OSet(
+ diamond.up,
+ diamond.down.inputs(0),
+ diamond.down.inputs(1),
+ diamond.down
+ )
+ )
+ 'bigSingleTerminal - {
+ val result = Evaluator.topoSortedTransitiveTargets(OSet(bigSingleTerminal.j)).values
+ TestUtil.checkTopological(result)
+ assert(result.size == 28)
+ }
+ }
+
+ 'groupAroundNamedTargets - {
+ def check[T: Discovered](base: T,
+ target: TestUtil.Test,
+ expected: OSet[(OSet[TestUtil.Test], Int)]) = {
+
+ val mapping = Discovered.mapping(base)
+ val topoSortedTransitive = Evaluator.topoSortedTransitiveTargets(OSet(target))
+
+ val grouped = Evaluator.groupAroundNamedTargets(topoSortedTransitive, mapping)
+ val flattened = OSet.from(grouped.values().flatMap(_.items))
+
+ TestUtil.checkTopological(flattened)
+ for(((expectedPresent, expectedSize), i) <- expected.items.zipWithIndex){
+ val grouping = grouped.lookupKey(i)
+ assert(
+ grouping.size == expectedSize,
+ grouping.filter(mapping.contains) == expectedPresent
+ )
+ }
+ }
+ 'singleton - check(
+ singleton,
+ singleton.single,
+ OSet(OSet(singleton.single) -> 1)
+ )
+ 'pair - check(
+ pair,
+ pair.down,
+ OSet(OSet(pair.up) -> 1, OSet(pair.down) -> 1)
+ )
+ 'anonTriple - check(
+ anonTriple,
+ anonTriple.down,
+ OSet(OSet(anonTriple.up) -> 1, OSet(anonTriple.down) -> 2)
+ )
+ 'diamond - check(
+ diamond,
+ diamond.down,
+ OSet(
+ OSet(diamond.up) -> 1,
+ OSet(diamond.left) -> 1,
+ OSet(diamond.right) -> 1,
+ OSet(diamond.down) -> 1
+ )
+ )
+ 'anonDiamond - check(
+ anonDiamond,
+ anonDiamond.down,
+ OSet(
+ OSet(anonDiamond.up) -> 1,
+ OSet(anonDiamond.down) -> 3
+ )
+ )
+ 'bigSingleTerminal - check(
+ bigSingleTerminal,
+ bigSingleTerminal.j,
+ OSet(
+ OSet(bigSingleTerminal.a) -> 3,
+ OSet(bigSingleTerminal.b) -> 2,
+ OSet(bigSingleTerminal.e) -> 9,
+ OSet(bigSingleTerminal.i) -> 6,
+ OSet(bigSingleTerminal.f) -> 4,
+ OSet(bigSingleTerminal.j) -> 4
+ )
+ )
+ }
+
+ 'labeling - {
+
+ def check[T: Discovered](base: T, t: Target[_], relPath: Option[String]) = {
+
+
+ val names: Seq[(Target[_], Seq[String])] = Discovered.mapping(base).mapValues(_.segments).toSeq
+ val nameMap = names.toMap
+
+ val targetLabel = nameMap.get(t).map(_.mkString("."))
+ assert(targetLabel == relPath)
+ }
+ 'singleton - check(singleton, singleton.single, Some("single"))
+ 'pair - {
+ check(pair, pair.up, Some("up"))
+ check(pair, pair.down, Some("down"))
+ }
+
+ 'anonTriple - {
+ check(anonTriple, anonTriple.up, Some("up"))
+ check(anonTriple, anonTriple.down.inputs(0), None)
+ check(anonTriple, anonTriple.down, Some("down"))
+ }
+
+ 'diamond - {
+ check(diamond, diamond.up, Some("up"))
+ check(diamond, diamond.left, Some("left"))
+ check(diamond, diamond.right, Some("right"))
+ check(diamond, diamond.down, Some("down"))
+ }
+
+ 'anonDiamond - {
+ check(anonDiamond, anonDiamond.up, Some("up"))
+ check(anonDiamond, anonDiamond.down.inputs(0), None)
+ check(anonDiamond, anonDiamond.down.inputs(1), None)
+ check(anonDiamond, anonDiamond.down, Some("down"))
+ }
+
+ }
+
+ }
+}
diff --git a/core/src/test/scala/forge/IntegrationTests.scala b/core/src/test/scala/forge/IntegrationTests.scala
new file mode 100644
index 00000000..03173eac
--- /dev/null
+++ b/core/src/test/scala/forge/IntegrationTests.scala
@@ -0,0 +1,138 @@
+package forge
+
+import java.io.FileOutputStream
+import java.util.jar.JarEntry
+
+import ammonite.ops._
+import forge.util.{Args, OSet, PathRef}
+import utest._
+
+object IntegrationTests extends TestSuite{
+ def compileAll(sources: Target[Seq[PathRef]]) = {
+ new Target.Subprocess(
+ Seq(sources),
+ args =>
+ Seq("javac") ++
+ args[Seq[PathRef]](0).map(_.path.toString) ++
+ Seq("-d", args.dest.toString)
+ ).map(_.dest)
+ }
+
+ def list(root: Target[PathRef]): Target[Seq[PathRef]] = {
+ root.map(x => ls.rec(x.path).map(PathRef(_)))
+ }
+
+ case class jarUp(roots: Target[PathRef]*) extends Target[PathRef]{
+
+ val inputs = roots
+ def evaluate(args: Args): PathRef = {
+
+ val output = new java.util.jar.JarOutputStream(new FileOutputStream(args.dest.toIO))
+ for{
+ root0 <- args.args
+ root = root0.asInstanceOf[PathRef]
+
+ path <- ls.rec(root.path)
+ if path.isFile
+ }{
+ val relative = path.relativeTo(root.path)
+ output.putNextEntry(new JarEntry(relative.toString))
+ output.write(read.bytes(path))
+ }
+ output.close()
+ PathRef(args.dest)
+ }
+ }
+
+ val tests = Tests{
+ 'javac {
+ val workspacePath = pwd / 'target / 'workspace / 'javac
+ val javacSrcPath = pwd / 'core / 'src / 'test / 'examples / 'javac
+ val javacDestPath = workspacePath / 'src
+
+ mkdir(pwd / 'target / 'workspace / 'javac)
+ cp(javacSrcPath, javacDestPath)
+
+ object Build {
+ val sourceRootPath = javacDestPath / 'src
+ val resourceRootPath = javacDestPath / 'resources
+
+ // sourceRoot -> allSources -> classFiles
+ // |
+ // v
+ // resourceRoot ----> jar
+ val sourceRoot = Target.path(sourceRootPath)
+ val resourceRoot = Target.path(resourceRootPath)
+ val allSources = list(sourceRoot)
+ val classFiles = compileAll(allSources)
+ val jar = jarUp(resourceRoot, classFiles)
+ }
+ import Build._
+ val mapping = Discovered.mapping(Build)
+
+ def check(targets: OSet[Target[_]], expected: OSet[Target[_]]) = {
+ val evaluator = new Evaluator(workspacePath, mapping)
+ val evaluated = evaluator.evaluate(targets).evaluated.filter(mapping.contains)
+ assert(evaluated == expected)
+ }
+
+ def append(path: Path, txt: String) = ammonite.ops.write.append(path, txt)
+
+
+ check(
+ targets = OSet(jar),
+ expected = OSet(resourceRoot, sourceRoot, allSources, classFiles, jar)
+ )
+
+ // Re-running with no changes results in nothing being evaluated
+ check(targets = OSet(jar), expected = OSet())
+
+ // Appending an empty string gets ignored due to file-content hashing
+ append(sourceRootPath / "Foo.java", "")
+ check(targets = OSet(jar), expected = OSet())
+
+ // Appending whitespace forces a recompile, but the classfiles end up
+ // exactly the same so no re-jarring.
+ append(sourceRootPath / "Foo.java", " ")
+ check(targets = OSet(jar), expected = OSet(sourceRoot, allSources, classFiles))
+
+ // Appending a new class changes the classfiles, which forces us to
+ // re-create the final jar
+ append(sourceRootPath / "Foo.java", "\nclass FooTwo{}")
+ check(targets = OSet(jar), expected = OSet(sourceRoot, allSources, classFiles, jar))
+
+ // Tweaking the resources forces rebuild of the final jar, without
+ // recompiling classfiles
+ append(resourceRootPath / "hello.txt", " ")
+ check(targets = OSet(jar), expected = OSet(resourceRoot, jar))
+
+ // Asking for an intermediate target forces things to be built up to that
+ // target only; these are re-used for any downstream targets requested
+ append(sourceRootPath / "Bar.java", "\nclass BarTwo{}")
+ append(resourceRootPath / "hello.txt", " ")
+ check(targets = OSet(classFiles), expected = OSet(sourceRoot, allSources, classFiles))
+ check(targets = OSet(jar), expected = OSet(resourceRoot, jar))
+ check(targets = OSet(allSources), expected = OSet())
+
+ append(sourceRootPath / "Bar.java", "\nclass BarThree{}")
+ append(resourceRootPath / "hello.txt", " ")
+ check(targets = OSet(resourceRoot), expected = OSet(resourceRoot))
+ check(targets = OSet(allSources), expected = OSet(sourceRoot, allSources))
+ check(targets = OSet(jar), expected = OSet(classFiles, jar))
+
+ val jarContents = %%('jar, "-tf", workspacePath/'jar)(workspacePath).out.string
+ val expectedJarContents =
+ """hello.txt
+ |test/Bar.class
+ |test/BarThree.class
+ |test/BarTwo.class
+ |test/Foo.class
+ |test/FooTwo.class
+ |""".stripMargin
+ assert(jarContents == expectedJarContents)
+
+ val executed = %%('java, "-cp", workspacePath/'jar, "test.Foo")(workspacePath).out.string
+ assert(executed == (31337 + 271828) + "\n")
+ }
+ }
+}
diff --git a/core/src/test/scala/forge/TarjanTests.scala b/core/src/test/scala/forge/TarjanTests.scala
new file mode 100644
index 00000000..5b118368
--- /dev/null
+++ b/core/src/test/scala/forge/TarjanTests.scala
@@ -0,0 +1,89 @@
+package forge
+import utest._
+object TarjanTests extends TestSuite{
+ def check(input: Seq[Seq[Int]], expected: Seq[Seq[Int]]) = {
+ val result = Tarjans(input).map(_.sorted)
+ val sortedExpected = expected.map(_.sorted)
+ assert(result == sortedExpected)
+ }
+ val tests = Tests{
+ //
+ 'empty - check(Seq(), Seq())
+
+ // (0)
+ 'singleton - check(Seq(Seq()), Seq(Seq(0)))
+
+
+ // (0)-.
+ // ^._/
+ 'selfCycle - check(Seq(Seq(0)), Seq(Seq(0)))
+
+ // (0) <-> (1)
+ 'simpleCycle- check(Seq(Seq(1), Seq(0)), Seq(Seq(1, 0)))
+
+ // (0) (1) (2)
+ 'multipleSingletons - check(
+ Seq(Seq(), Seq(), Seq()),
+ Seq(Seq(0), Seq(1), Seq(2))
+ )
+
+ // (0) -> (1) -> (2)
+ 'straightLineNoCycles- check(
+ Seq(Seq(1), Seq(2), Seq()),
+ Seq(Seq(2), Seq(1), Seq(0))
+ )
+
+ // (0) <- (1) <- (2)
+ 'straightLineNoCyclesReversed- check(
+ Seq(Seq(), Seq(0), Seq(1)),
+ Seq(Seq(0), Seq(1), Seq(2))
+ )
+
+ // (0) <-> (1) (2) -> (3) -> (4)
+ // ^.____________/
+ 'independentSimpleCycles - check(
+ Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2)),
+ Seq(Seq(1, 0), Seq(4, 3, 2))
+ )
+
+ // ___________________
+ // v \
+ // (0) <-> (1) (2) -> (3) -> (4)
+ // ^.____________/
+ 'independentLinkedCycles - check(
+ Seq(Seq(1), Seq(0), Seq(3), Seq(4), Seq(2, 1)),
+ Seq(Seq(1, 0), Seq(4, 3, 2))
+ )
+ // _____________
+ // / v
+ // (0) <-> (1) (2) -> (3) -> (4)
+ // ^.____________/
+ 'independentLinkedCycles2 - check(
+ Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2)),
+ Seq(Seq(4, 3, 2), Seq(1, 0))
+ )
+
+ // _____________
+ // / v
+ // (0) <-> (1) (2) -> (3) -> (4)
+ // ^. ^.____________/
+ // \________________/
+ 'combinedCycles - check(
+ Seq(Seq(1, 2), Seq(0), Seq(3), Seq(4), Seq(2, 1)),
+ Seq(Seq(4, 3, 2, 1, 0))
+ )
+ //
+ // (0) <-> (1) <- (2) <- (3) <-> (4) <- (5)
+ // ^.____________/ / /
+ // / /
+ // (6) <- (7) <-/ (8) <-'
+ // / /
+ // v /
+ // (9) <--------'
+ 'combinedCycles - check(
+ Seq(Seq(1), Seq(0), Seq(0, 1), Seq(2, 4, 7, 9), Seq(3), Seq(4, 8), Seq(9), Seq(6), Seq(), Seq()),
+ Seq(Seq(0, 1), Seq(2), Seq(9), Seq(6), Seq(7), Seq(3, 4), Seq(8), Seq(5))
+ )
+
+ }
+}
\ No newline at end of file
diff --git a/core/src/test/scala/forge/TestGraphs.scala b/core/src/test/scala/forge/TestGraphs.scala
new file mode 100644
index 00000000..0ee48a18
--- /dev/null
+++ b/core/src/test/scala/forge/TestGraphs.scala
@@ -0,0 +1,73 @@
+package forge
+
+import forge.TestUtil.test
+
+class TestGraphs(){
+ // single
+ object singleton {
+ val single = test()
+ }
+
+ // up---down
+ object pair {
+ val up = test()
+ val down = test(up)
+ }
+
+ // up---o---down
+ object anonTriple{
+ val up = test()
+ val down = test(test(up))
+ }
+
+ // left
+ // / \
+ // up down
+ // \ /
+ // right
+ object diamond{
+ val up = test()
+ val left = test(up)
+ val right = test(up)
+ val down = test(left, right)
+ }
+
+ // o
+ // / \
+ // up down
+ // \ /
+ // o
+ object anonDiamond{
+ val up = test()
+ val down = test(test(up), test(up))
+ }
+
+ // o g-----o
+ // \ \ \
+ // o o h-----I---o
+ // \ / \ / \ / \ \
+ // A---c--o E o-o \ \
+ // / \ / \ / \ o---J
+ // o d o--o o / /
+ // \ / \ / /
+ // o o---F---o
+ // / /
+ // o--B o
+ object bigSingleTerminal{
+ val a = test(test(), test())
+ val b = test(test())
+ val e = {
+ val c = test(a)
+ val d = test(a)
+ test(test(test(), test(c)), test(test(c, test(d, b))))
+ }
+ val f = test(test(test(), test(e)))
+
+ val i = {
+ val g = test()
+ val h = test(g, e)
+ test(test(g), test(test(h)))
+ }
+ val j = test(test(i), test(i, f), test(f))
+ }
+}
diff --git a/core/src/test/scala/forge/TestMain.scala b/core/src/test/scala/forge/TestMain.scala
new file mode 100644
index 00000000..c94e13f0
--- /dev/null
+++ b/core/src/test/scala/forge/TestMain.scala
@@ -0,0 +1,100 @@
+package forge
+import ammonite.ops._
+import java.io.File
+
+import coursier._
+import sbt.internal.inc.{FreshCompilerCache, ScalaInstance, ZincUtil}
+import sbt.internal.util.{ConsoleOut, MainAppender}
+import sbt.util.LogExchange
+import xsbti.api.{ClassLike, DependencyContext}
+import xsbti.compile._
+
+import scalaz.concurrent.Task
+
+object TestMain {
+ def main(args: Array[String]): Unit = {
+ val scalaVersion = "2.12.4"
+ val start = Resolution(
+ Set(
+ Dependency(Module("org.scala-lang", "scala-reflect"), scalaVersion),
+ Dependency(Module("org.scala-lang", "scala-compiler"), scalaVersion),
+ Dependency(Module("org.scala-lang", "scala-reflect"), scalaVersion),
+ Dependency(Module("org.scala-sbt", "compiler-bridge_2.12"), "1.0.3"),
+ Dependency(Module("com.lihaoyi", "sourcecode_2.12"), "0.1.4"),
+ Dependency(Module("com.lihaoyi", "pprint_2.12"), "0.5.3"),
+ Dependency(Module("com.lihaoyi", "ammonite_2.12.4"), "1.0.3"),
+ Dependency(Module("com.typesafe.play", "play-json_2.12"), "2.6.6"),
+ Dependency(Module("org.scala-sbt", "zinc_2.12"), "1.0.3")
+ )
+ )
+ val repositories = Seq(
+ Cache.ivy2Local,
+ MavenRepository("https://repo1.maven.org/maven2")
+ )
+
+ val fetch = Fetch.from(repositories, Cache.fetch())
+ val resolution = start.process.run(fetch).unsafePerformSync
+
+
+ val localArtifacts: Seq[File] = Task.gatherUnordered(
+ resolution.artifacts.map(Cache.file(_).run)
+ ).unsafePerformSync.flatMap(_.toOption)
+
+ pprint.log(localArtifacts)
+ def grepJar(s: String) = localArtifacts.find(_.toString.endsWith(s)).get
+
+ val scalac = ZincUtil.scalaCompiler(
+ new ScalaInstance(
+ version = scalaVersion,
+ loader = getClass.getClassLoader,
+ libraryJar = grepJar(s"scala-library-$scalaVersion.jar"),
+ compilerJar = grepJar(s"scala-compiler-$scalaVersion.jar"),
+ allJars = localArtifacts.toArray,
+ explicitActual = None
+ ),
+ grepJar("compiler-bridge_2.12-1.0.3.jar")
+ )
+
+ val outputDir = pwd/'target/'zinc
+ mkdir(outputDir)
+ val scalaFiles = ls.rec(pwd/'src/'main/'scala/'forge).filter(_.ext == "scala").map(_.toIO).toArray
+
+ pprint.log(scalaFiles)
+ scalac.apply(
+ sources = scalaFiles,
+ changes = new DependencyChanges {
+ def isEmpty = true
+ def modifiedBinaries() = Array[File]()
+ def modifiedClasses() = Array[String]()
+ },
+ classpath = localArtifacts.toArray,
+ singleOutput = outputDir.toIO,
+ options = Array(),
+ callback = new xsbti.AnalysisCallback {
+ def startSource(source: File) = ()
+ def apiPhaseCompleted() = ()
+ def enabled() = true
+ def binaryDependency(onBinaryEntry: File, onBinaryClassName: String, fromClassName: String, fromSourceFile: File, context: DependencyContext) = ()
+ def generatedNonLocalClass(source: File, classFile: File, binaryClassName: String, srcClassName: String) = ()
+ def problem(what: String, pos: xsbti.Position, msg: String, severity: xsbti.Severity, reported: Boolean) = ()
+ def dependencyPhaseCompleted() = ()
+ def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext) = ()
+ def generatedLocalClass(source: File, classFile: File) = ()
+ def api(sourceFile: File, classApi: ClassLike) = ()
+
+ def mainClass(sourceFile: File, className: String) = ()
+ def usedName(className: String, name: String, useScopes: java.util.EnumSet[xsbti.UseScope]) = ()
+ },
+ maximumErrors = 10,
+ cache = new FreshCompilerCache(),
+ log = {
+ val console = ConsoleOut.systemOut
+ val consoleAppender = MainAppender.defaultScreen(console)
+ val l = LogExchange.logger("Hello")
+ LogExchange.unbindLoggerAppenders("Hello")
+ LogExchange.bindLoggerAppenders("Hello", (consoleAppender -> sbt.util.Level.Warn) :: Nil)
+ l
+ }
+ )
+ }
+}
diff --git a/core/src/test/scala/forge/TestUtil.scala b/core/src/test/scala/forge/TestUtil.scala
new file mode 100644
index 00000000..9337fbe0
--- /dev/null
+++ b/core/src/test/scala/forge/TestUtil.scala
@@ -0,0 +1,36 @@
+package forge
+
+import forge.util.{Args, OSet}
+import utest.assert
+import scala.collection.mutable
+
+object TestUtil {
+ def test(inputs: Target[Int]*) = {
+ new Test(inputs, pure = inputs.nonEmpty)
+ }
+
+ /**
+ * A dummy target that takes any number of inputs, and whose output can be
+ * controlled externally, so you can construct arbitrary dataflow graphs and
+ * test how changes propagate.
+ */
+ class Test(val inputs: Seq[Target[Int]],
+ val pure: Boolean) extends Target[Int]{
+ var counter = 0
+ def evaluate(args: Args) = {
+ counter + args.args.map(_.asInstanceOf[Int]).sum
+ }
+
+ override def sideHash = counter
+ }
+ def checkTopological(targets: OSet[Target[_]]) = {
+ val seen = mutable.Set.empty[Target[_]]
+ for(t <- targets.items.reverseIterator){
+ seen.add(t)
+ for(upstream <- t.inputs){
+ assert(!seen(upstream))
+ }
+ }
+ }
+
+}
diff --git a/core/src/test/scala/forge/UTestFramework.scala b/core/src/test/scala/forge/UTestFramework.scala
new file mode 100644
index 00000000..3435e9c0
--- /dev/null
+++ b/core/src/test/scala/forge/UTestFramework.scala
@@ -0,0 +1,11 @@
+package forge
+
+class UTestFramework extends utest.runner.Framework {
+ override def exceptionStackFrameHighlighter(s: StackTraceElement) = {
+ s.getClassName.startsWith("forge.")
+ }
+ override def setup() = {
+ import ammonite.ops._
+ rm(pwd / 'target / 'workspace)
+ }
+}