author    Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
committer Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
commit    13270145903b457c906a9fa77bd152afb6448ef5 (patch)
tree      e85b7ed530e0c8e3c3041cbf17641857c448b602 /core/src/main/scala
parent    66f1c5c2438aeb8f2496575f52c25b09cf5793a6 (diff)
Split up forge into `scalaplugin` and `core` subprojects, to allow us to use the `T#apply` macro in the implementation of `scalaplugin.Subproject`
Also needed to implement inter-`Subproject` dependencies so that the `MetacircularTests` keep working with the new layout
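
For context, a hedged sketch (not part of this commit; `Example`, `sources` and `lineCount` are hypothetical names) of the `T#apply` usage the split enables: inside a `T{...}` block, calling `someTarget()` is rewritten by the macro into a dependency on that target.

    import forge._

    object Example {
      // hypothetical upstream target
      val sources: T[Seq[String]] = T{ Seq("a.scala", "b.scala") }
      // `sources()` inside the T{...} block becomes an input edge of `lineCount`
      val lineCount: T[Int] = T{ sources().length }
    }
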
Diffstat (limited to 'core/src/main/scala')
 core/src/main/scala/forge/Discovered.scala      |  57
 core/src/main/scala/forge/Evaluator.scala       | 191
 core/src/main/scala/forge/Main.scala            |  25
 core/src/main/scala/forge/Target.scala          | 132
 core/src/main/scala/forge/Tarjans.scala         |  50
 core/src/main/scala/forge/package.scala         |  99
 core/src/main/scala/forge/util/Args.scala       |   9
 core/src/main/scala/forge/util/Labelled.scala   |   8
 core/src/main/scala/forge/util/MultiBiMap.scala |  47
 core/src/main/scala/forge/util/OSet.scala       | 107
 core/src/main/scala/forge/util/PathRef.scala    |  57
 11 files changed, 782 insertions(+), 0 deletions(-)
diff --git a/core/src/main/scala/forge/Discovered.scala b/core/src/main/scala/forge/Discovered.scala
new file mode 100644
index 00000000..03577b1f
--- /dev/null
+++ b/core/src/main/scala/forge/Discovered.scala
@@ -0,0 +1,57 @@
+package forge
+
+import forge.util.Labelled
+import play.api.libs.json.Format
+
+import language.experimental.macros
+import reflect.macros.blackbox.Context
+
+class Discovered[T](val value: Seq[(Seq[String], Format[_], T => Target[_])]){
+ def apply(t: T) = value.map{case (a, f, b) => (a, f, b(t)) }
+
+}
+object Discovered {
+ def makeTuple[T, V](path: Seq[String], func: T => Target[V])(implicit f: Format[V]) = {
+ (path, f, func)
+ }
+
+
+ def mapping[T: Discovered](t: T): Map[Target[_], Labelled[_]] = {
+ implicitly[Discovered[T]].apply(t)
+ .map(x => x._3 -> Labelled(x._3.asInstanceOf[Target[Any]], x._2.asInstanceOf[Format[Any]], x._1))
+ .toMap
+ }
+
+ implicit def apply[T]: Discovered[T] = macro applyImpl[T]
+
+ def applyImpl[T: c.WeakTypeTag](c: Context): c.Expr[Discovered[T]] = {
+ import c.universe._
+ val tpe = c.weakTypeTag[T].tpe
+ def rec(segments: List[String], t: c.Type): Seq[Seq[String]] = for {
+ m <- t.members.toSeq
+ if m.isTerm && (m.asTerm.isGetter || m.asTerm.isLazy) || m.isModule
+ res <- {
+ val extendedSegments = m.name.toString :: segments
+ val self =
+ if (m.typeSignature.resultType <:< c.weakTypeOf[Target[_]]) Seq(extendedSegments)
+ else Nil
+ val children = rec(extendedSegments, m.typeSignature)
+ self ++ children
+ }
+ } yield res
+
+ val reversedPaths = rec(Nil, tpe)
+
+ val result = for(reversedPath <- reversedPaths.toList) yield {
+ val base = q"${TermName(c.freshName())}"
+ val segments = reversedPath.reverse.toList
+ val ident = segments.foldLeft[Tree](base)((prefix, name) =>
+ q"$prefix.${TermName(name)}"
+ )
+
+ q"forge.Discovered.makeTuple($segments, ($base: $tpe) => $ident)"
+ }
+
+ c.Expr[Discovered[T]](q"new _root_.forge.Discovered($result)")
+ }
+}
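
A hedged usage sketch of the macro above (not part of this commit; the `Build` object and its targets are hypothetical): the implicitly materialized `Discovered[Build.type]` enumerates every `Target`-typed member together with its access path and JSON `Format`, and `Discovered.mapping` turns that into the labeling map the `Evaluator` consumes.

    import forge._

    object Build {
      val hello: Target[Int] = T{ 1 }
      val world: Target[Int] = T{ hello() + 1 }
    }

    object DiscoveredExample {
      def main(args: Array[String]): Unit = {
        // Map[Target[_], Labelled[_]]: each target paired with its path and Format
        val labeling = Discovered.mapping(Build)
        labeling.values.foreach(l => println(l.segments)) // e.g. List(hello), List(world)
      }
    }
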
diff --git a/core/src/main/scala/forge/Evaluator.scala b/core/src/main/scala/forge/Evaluator.scala
new file mode 100644
index 00000000..50dc46d4
--- /dev/null
+++ b/core/src/main/scala/forge/Evaluator.scala
@@ -0,0 +1,191 @@
+package forge
+
+
+import play.api.libs.json.{Format, JsValue, Json}
+
+import scala.collection.mutable
+import ammonite.ops._
+import forge.util.{Args, Labelled, MultiBiMap, OSet}
+class Evaluator(workspacePath: Path,
+ labeling: Map[Target[_], Labelled[_]]){
+
+ def evaluate(targets: OSet[Target[_]]): Evaluator.Results = {
+ mkdir(workspacePath)
+
+ val sortedGroups = Evaluator.groupAroundNamedTargets(
+ Evaluator.topoSortedTransitiveTargets(targets),
+ labeling
+ )
+
+ val evaluated = new OSet.Mutable[Target[_]]
+ val results = mutable.LinkedHashMap.empty[Target[_], Any]
+
+ for (groupIndex <- sortedGroups.keys()){
+ val group = sortedGroups.lookupKey(groupIndex)
+
+ val (newResults, newEvaluated) = evaluateGroupCached(
+ group,
+ results,
+ sortedGroups
+ )
+ evaluated.appendAll(newEvaluated)
+ for((k, v) <- newResults) results.put(k, v)
+
+ }
+
+ Evaluator.Results(targets.items.map(results), evaluated)
+ }
+
+ def evaluateGroupCached(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any],
+ sortedGroups: MultiBiMap[Int, Target[_]]): (collection.Map[Target[_], Any], Seq[Target[_]]) = {
+
+
+ val (externalInputs, terminals) = partitionGroupInputOutput(group, results)
+
+ val inputsHash =
+ externalInputs.toIterator.map(results).toVector.hashCode +
+ group.toIterator.map(_.sideHash).toVector.hashCode()
+
+ val primeLabel = labeling(terminals.items(0)).segments
+
+
+ val targetDestPath = workspacePath / primeLabel
+ val metadataPath = targetDestPath / up / (targetDestPath.last + ".forge.json")
+
+ val cached = for{
+ json <- scala.util.Try(Json.parse(read.getInputStream(metadataPath))).toOption
+ (cachedHash, terminalResults) <- Json.fromJson[(Int, Seq[JsValue])](json).asOpt
+ if cachedHash == inputsHash
+ } yield terminalResults
+
+ cached match{
+ case Some(terminalResults) =>
+ val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
+ for((terminal, res) <- terminals.items.zip(terminalResults)){
+ newResults(terminal) = labeling(terminal).format.reads(res).get
+ }
+ (newResults, Nil)
+
+ case _ =>
+ val (newResults, newEvaluated, terminalResults) = evaluateGroup(group, results, targetDestPath)
+
+ write.over(
+ metadataPath,
+ Json.prettyPrint(
+ Json.toJson(inputsHash -> terminals.toList.map(terminalResults))
+ ),
+ )
+
+ (newResults, newEvaluated)
+ }
+ }
+
+ def partitionGroupInputOutput(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any]) = {
+ val allInputs = group.items.flatMap(_.inputs)
+ val (internalInputs, externalInputs) = allInputs.partition(group.contains)
+ val internalInputSet = internalInputs.toSet
+ val terminals = group.filter(!internalInputSet(_))
+ (OSet.from(externalInputs.distinct), terminals)
+ }
+
+ def evaluateGroup(group: OSet[Target[_]],
+ results: collection.Map[Target[_], Any],
+ targetDestPath: Path) = {
+
+ rm(targetDestPath)
+ val terminalResults = mutable.LinkedHashMap.empty[Target[_], JsValue]
+ val newEvaluated = mutable.Buffer.empty[Target[_]]
+ val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
+ for (target <- group.items) {
+ newEvaluated.append(target)
+ val targetInputValues = target.inputs.toVector.map(x =>
+ newResults.getOrElse(x, results(x))
+ )
+
+ val args = new Args(targetInputValues, targetDestPath)
+ val res = target.evaluate(args)
+ for(targetLabel <- labeling.get(target)){
+ terminalResults(target) = targetLabel
+ .format
+ .asInstanceOf[Format[Any]]
+ .writes(res.asInstanceOf[Any])
+ }
+ newResults(target) = res
+ }
+
+ (newResults, newEvaluated, terminalResults)
+ }
+
+}
+
+
+object Evaluator{
+ class TopoSorted private[Evaluator] (val values: OSet[Target[_]])
+ case class Results(values: Seq[Any], evaluated: OSet[Target[_]])
+ def groupAroundNamedTargets(topoSortedTargets: TopoSorted,
+ labeling: Map[Target[_], Labelled[_]]): MultiBiMap[Int, Target[_]] = {
+
+ val grouping = new MultiBiMap.Mutable[Int, Target[_]]()
+
+ var groupCount = 0
+
+ for(target <- topoSortedTargets.values.items.reverseIterator){
+ if (!grouping.containsValue(target)){
+ grouping.add(groupCount, target)
+ groupCount += 1
+ }
+
+ val targetGroup = grouping.lookupValue(target)
+ for(upstream <- target.inputs){
+ grouping.lookupValueOpt(upstream) match{
+ case None if !labeling.contains(upstream) =>
+ grouping.add(targetGroup, upstream)
+ case Some(upstreamGroup) if upstreamGroup == targetGroup =>
+ val upstreamTargets = grouping.removeAll(upstreamGroup)
+
+ grouping.addAll(targetGroup, upstreamTargets)
+          case _ => // do nothing
+ }
+ }
+ }
+
+ val targetOrdering = topoSortedTargets.values.items.zipWithIndex.toMap
+ val output = new MultiBiMap.Mutable[Int, Target[_]]
+
+ // Sort groups amongst themselves, and sort the contents of each group
+ // before aggregating it into the final output
+ for(g <- grouping.values().toArray.sortBy(g => targetOrdering(g.items(0)))){
+ output.addAll(output.keys.length, g.toArray.sortBy(targetOrdering))
+ }
+ output
+ }
+
+ /**
+ * Takes the given targets, finds all the targets they transitively depend
+   * on, and sorts them topologically. Fails if there are dependency cycles
+ */
+ def topoSortedTransitiveTargets(sourceTargets: OSet[Target[_]]): TopoSorted = {
+ val transitiveTargets = new OSet.Mutable[Target[_]]
+ def rec(t: Target[_]): Unit = {
+ if (transitiveTargets.contains(t)) () // do nothing
+ else {
+ transitiveTargets.append(t)
+ t.inputs.foreach(rec)
+ }
+ }
+
+ sourceTargets.items.foreach(rec)
+ val targetIndices = transitiveTargets.items.zipWithIndex.toMap
+
+ val numberedEdges =
+ for(t <- transitiveTargets.items)
+ yield t.inputs.map(targetIndices)
+
+ val sortedClusters = Tarjans(numberedEdges)
+ val nonTrivialClusters = sortedClusters.filter(_.length > 1)
+ assert(nonTrivialClusters.isEmpty, nonTrivialClusters)
+ new TopoSorted(OSet.from(sortedClusters.flatten.map(transitiveTargets.items)))
+ }
+}
\ No newline at end of file
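
A hedged sketch of driving the evaluator by hand, mirroring what `Main.scala` below does for scripts (assumes the hypothetical `Build` object from the `Discovered` sketch above):

    import ammonite.ops._
    import forge._
    import forge.util.OSet

    object EvaluatorExample {
      def main(args: Array[String]): Unit = {
        val mapping = Discovered.mapping(Build)
        val evaluator = new Evaluator(pwd / 'target / 'example, mapping)
        // first run evaluates everything; a second run should hit the on-disk cache
        val evaluated = evaluator.evaluate(OSet.from(mapping.keys)).evaluated.filter(mapping.contains)
        println(evaluated.items.map(mapping(_).segments))
      }
    }
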
diff --git a/core/src/main/scala/forge/Main.scala b/core/src/main/scala/forge/Main.scala
new file mode 100644
index 00000000..d919d0e2
--- /dev/null
+++ b/core/src/main/scala/forge/Main.scala
@@ -0,0 +1,25 @@
+package forge
+
+import ammonite.ops._
+import ammonite.util.{Name, Res}
+import forge.util.OSet
+
+
+object Main {
+ def main(args: Array[String]): Unit = {
+
+ ammonite.Main().instantiateInterpreter() match{
+ case Right(interp) =>
+ val result = ammonite.main.Scripts.runScript(pwd, Path(args(0), pwd), interp, Nil)
+
+ val (obj, discovered) = result.asInstanceOf[Res.Success[(Any, forge.Discovered[Any])]].s
+ val mapping = Discovered.mapping(obj)(discovered)
+ val workspacePath = pwd / 'target / 'javac
+ val evaluator = new Evaluator(workspacePath, mapping)
+ val evaluated = evaluator.evaluate(OSet.from(mapping.keys)).evaluated.filter(mapping.contains)
+ (result, interp.watchedFiles)
+ case Left(problems) => problems
+ }
+ }
+
+}
diff --git a/core/src/main/scala/forge/Target.scala b/core/src/main/scala/forge/Target.scala
new file mode 100644
index 00000000..0e84a9b4
--- /dev/null
+++ b/core/src/main/scala/forge/Target.scala
@@ -0,0 +1,132 @@
+package forge
+
+
+import ammonite.ops.{ls, mkdir}
+import forge.util.{Args, PathRef}
+import play.api.libs.json.{Format, JsValue, Json}
+
+import scala.annotation.compileTimeOnly
+import language.experimental.macros
+import reflect.macros.blackbox.Context
+import scala.collection.mutable
+
+abstract class Target[T] extends Target.Ops[T]{
+ /**
+ * What other Targets does this Target depend on?
+ */
+ val inputs: Seq[Target[_]]
+
+ /**
+ * Evaluate this target
+ */
+ def evaluate(args: Args): T
+
+ /**
+ * Even if this target's inputs did not change, does it need to re-evaluate
+ * anyway?
+ */
+ def sideHash: Int = 0
+
+ @compileTimeOnly("Target#apply() can only be used with a T{...} block")
+ def apply(): T = ???
+}
+
+object Target{
+ class Target0[T](t: T) extends Target[T]{
+ lazy val t0 = t
+ val inputs = Nil
+ def evaluate(args: Args) = t0
+ }
+ class Target1[T](t: => Target[T]) extends Target[T]{
+ lazy val t0 = t
+ lazy val inputs = t0.inputs
+ def evaluate(args: Args) = t0.evaluate(args)
+ }
+ implicit def apply[T](t: => Target[T]): Target[T] = new Target1(t)
+ def apply[T](t: T): Target[T] = macro impl[T]
+ def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Target[T]] = {
+ import c.universe._
+ val bound = collection.mutable.Buffer.empty[(c.Tree, Symbol)]
+ val OptionGet = c.universe.typeOf[Target[_]].member(TermName("apply"))
+ object transformer extends c.universe.Transformer {
+ // Derived from @olafurpg's
+ // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608
+ override def transform(tree: c.Tree): c.Tree = tree match {
+ case t @ q"$fun.apply()" if t.symbol == OptionGet =>
+ val tempName = c.freshName(TermName("tmp"))
+ val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName)
+ c.internal.setInfo(tempSym, t.tpe)
+ val tempIdent = Ident(tempSym)
+ c.internal.setType(tempIdent, t.tpe)
+ bound.append((fun, tempSym))
+ tempIdent
+ case _ => super.transform(tree)
+ }
+ }
+ val transformed = transformer.transform(t.tree)
+ val (exprs, symbols) = bound.unzip
+
+ val bindings = symbols.map(c.internal.valDef(_))
+
+ val embedded = q"new forge.Target.Target1(forge.zipMap(..$exprs){ (..$bindings) => $transformed })"
+
+ c.Expr[Target[T]](embedded)
+ }
+
+ abstract class Ops[T]{ this: Target[T] =>
+ def map[V](f: T => V) = new Target.Mapped(this, f)
+
+ def filter(f: T => Boolean) = this
+ def withFilter(f: T => Boolean) = this
+ def zip[V](other: Target[V]) = new Target.Zipped(this, other)
+
+ }
+
+ def traverse[T](source: Seq[Target[T]]) = {
+ new Traverse[T](source)
+ }
+ class Traverse[T](val inputs: Seq[Target[T]]) extends Target[Seq[T]]{
+ def evaluate(args: Args) = {
+ for (i <- 0 until args.length)
+ yield args(i).asInstanceOf[T]
+ }
+
+ }
+ class Mapped[T, V](source: Target[T], f: T => V) extends Target[V]{
+ def evaluate(args: Args) = f(args(0))
+ val inputs = List(source)
+ }
+ class Zipped[T, V](source1: Target[T],
+ source2: Target[V]) extends Target[(T, V)]{
+ def evaluate(args: Args) = (args(0), args(1))
+ val inputs = List(source1, source2)
+ }
+
+ def path(path: ammonite.ops.Path) = new Path(path)
+ class Path(path: ammonite.ops.Path) extends Target[PathRef]{
+ def handle = PathRef(path)
+ def evaluate(args: Args) = handle
+ override def sideHash = handle.hashCode()
+ val inputs = Nil
+ }
+
+ class Subprocess(val inputs: Seq[Target[_]],
+ command: Args => Seq[String]) extends Target[Subprocess.Result] {
+
+ def evaluate(args: Args) = {
+ mkdir(args.dest)
+ import ammonite.ops._
+ implicit val path = ammonite.ops.Path(args.dest, pwd)
+ val toTarget = () // Shadow the implicit conversion :/
+ val output = %%(command(args))
+ assert(output.exitCode == 0)
+ Subprocess.Result(output, PathRef(args.dest))
+ }
+ }
+ object Subprocess{
+ case class Result(result: ammonite.ops.CommandResult, dest: PathRef)
+ object Result{
+ implicit val tsFormat: Format[Target.Subprocess.Result] = Json.format
+ }
+ }
+}
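
A hedged sketch of the `Ops` combinators and helpers defined above (all names hypothetical):

    import forge._

    object TargetOpsExample {
      val a: Target[Int] = T{ 1 }
      val b: Target[Int] = T{ 2 }

      // zip/map build anonymous intermediate targets
      val sum: Target[Int] = a.zip(b).map { case (x, y) => x + y }

      // traverse turns a Seq of targets into a target of a Seq
      val all: Target[Seq[Int]] = Target.traverse(Seq(a, b))

      // a Path target re-evaluates when the files underneath change, via sideHash
      val src: Target[forge.util.PathRef] = Target.path(ammonite.ops.pwd / "src")
    }
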
diff --git a/core/src/main/scala/forge/Tarjans.scala b/core/src/main/scala/forge/Tarjans.scala
new file mode 100644
index 00000000..9831fe7f
--- /dev/null
+++ b/core/src/main/scala/forge/Tarjans.scala
@@ -0,0 +1,50 @@
+package forge
+
+import collection.mutable
+// Adapted from
+// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java
+object Tarjans {
+ def apply(graph0: Seq[Seq[Int]]): Seq[Seq[Int]] = {
+ val graph = graph0.map(_.toArray).toArray
+ val n = graph.length
+ val visited = new Array[Boolean](n)
+ val stack = mutable.ArrayBuffer.empty[Integer]
+ var time = 0
+ val lowlink = new Array[Int](n)
+ val components = mutable.ArrayBuffer.empty[Seq[Int]]
+
+
+ for (u <- 0 until n) {
+ if (!visited(u)) dfs(u)
+ }
+
+ def dfs(u: Int): Unit = {
+ lowlink(u) = time
+ time += 1
+ visited(u) = true
+ stack.append(u)
+ var isComponentRoot = true
+ for (v <- graph(u)) {
+ if (!visited(v)) dfs(v)
+ if (lowlink(u) > lowlink(v)) {
+ lowlink(u) = lowlink(v)
+ isComponentRoot = false
+ }
+ }
+ if (isComponentRoot) {
+ val component = mutable.Buffer.empty[Int]
+
+ var done = false
+ while (!done) {
+ val x = stack.last
+ stack.remove(stack.length - 1)
+ component.append(x)
+ lowlink(x) = Integer.MAX_VALUE
+ if (x == u) done = true
+ }
+ components.append(component)
+ }
+ }
+ components
+ }
+}
\ No newline at end of file
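
A hedged sketch of calling it directly: below, node 0 and node 1 point at each other and node 2 points at node 0, so the cycle {0, 1} comes out as one component, ahead of the component {2} that depends on it.

    object TarjansExample {
      def main(args: Array[String]): Unit = {
        // adjacency lists: 0 -> 1, 1 -> 0, 2 -> 0
        val components = forge.Tarjans(Seq(Seq(1), Seq(0), Seq(0)))
        println(components) // something like: List(List(1, 0), List(2))
      }
    }
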
diff --git a/core/src/main/scala/forge/package.scala b/core/src/main/scala/forge/package.scala
new file mode 100644
index 00000000..8c24bde6
--- /dev/null
+++ b/core/src/main/scala/forge/package.scala
@@ -0,0 +1,99 @@
+import play.api.libs.json._
+import ammonite.ops.{Bytes, Path}
+import coursier.Dependency
+import forge.util.Args
+package object forge {
+
+ val T = Target
+ type T[T] = Target[T]
+ def zipMap[R]()(f: () => R) = new Target.Target0(f())
+ def zipMap[A, R](a: T[A])(f: A => R) = a.map(f)
+ def zipMap[A, B, R](a: T[A], b: T[B])(f: (A, B) => R) = zip(a, b).map(f.tupled)
+ def zipMap[A, B, C, R](a: T[A], b: T[B], c: T[C])(f: (A, B, C) => R) = zip(a, b, c).map(f.tupled)
+ def zipMap[A, B, C, D, R](a: T[A], b: T[B], c: T[C], d: T[D])(f: (A, B, C, D) => R) = zip(a, b, c, d).map(f.tupled)
+ def zipMap[A, B, C, D, E, R](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E])(f: (A, B, C, D, E) => R) = zip(a, b, c, d, e).map(f.tupled)
+ def zip() = new Target.Target0(())
+ def zip[A](a: T[A]) = a.map(Tuple1(_))
+ def zip[A, B](a: T[A], b: T[B]) = a.zip(b)
+ def zip[A, B, C](a: T[A], b: T[B], c: T[C]) = new T[(A, B, C)]{
+ val inputs = Seq(a, b, c)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2))
+ }
+ def zip[A, B, C, D](a: T[A], b: T[B], c: T[C], d: T[D]) = new T[(A, B, C, D)]{
+ val inputs = Seq(a, b, c, d)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3))
+ }
+ def zip[A, B, C, D, E](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E]) = new T[(A, B, C, D, E)]{
+ val inputs = Seq(a, b, c, d, e)
+ def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3), args[E](4))
+ }
+ implicit object pathFormat extends Format[ammonite.ops.Path]{
+ def reads(json: JsValue) = json match{
+ case JsString(v) => JsSuccess(Path(v))
+ case _ => JsError("Paths must be a String")
+ }
+ def writes(o: Path) = JsString(o.toString)
+ }
+
+ implicit object bytesFormat extends Format[Bytes]{
+ def reads(json: JsValue) = json match{
+ case JsString(v) => JsSuccess(
+ new Bytes(javax.xml.bind.DatatypeConverter.parseBase64Binary(v))
+ )
+ case _ => JsError("Bytes must be a String")
+ }
+ def writes(o: Bytes) = {
+ JsString(javax.xml.bind.DatatypeConverter.printBase64Binary(o.array))
+ }
+ }
+
+ implicit def EitherFormat[T: Format, V: Format] = new Format[Either[T, V]]{
+ def reads(json: JsValue) = json match{
+ case JsObject(struct) =>
+ (struct.get("type"), struct.get("value")) match{
+ case (Some(JsString("Left")), Some(v)) => implicitly[Reads[T]].reads(v).map(Left(_))
+ case (Some(JsString("Right")), Some(v)) => implicitly[Reads[V]].reads(v).map(Right(_))
+ case _ => JsError("Either object layout is unknown")
+ }
+ case _ => JsError("Either must be an Object")
+ }
+ def writes(o: Either[T, V]) = o match{
+ case Left(v) => Json.obj("type" -> "Left", "value" -> implicitly[Writes[T]].writes(v))
+ case Right(v) => Json.obj("type" -> "Right", "value" -> implicitly[Writes[V]].writes(v))
+ }
+ }
+
+ implicit val crFormat: Format[ammonite.ops.CommandResult] = Json.format
+ implicit val modFormat: Format[coursier.Module] = Json.format
+ // https://github.com/playframework/play-json/issues/120
+ // implicit val depFormat: Format[coursier.Dependency] = Json.format
+ implicit val depFormat: Format[coursier.Dependency] = new Format[coursier.Dependency] {
+ def writes(o: Dependency) = {
+ Json.obj(
+ "module" -> Json.toJson(o.module),
+ "version" -> Json.toJson(o.version),
+ "configuration" -> Json.toJson(o.configuration),
+ "exclusions" -> Json.toJson(o.exclusions),
+ "attributes" -> Json.toJson(o.attributes),
+ "optional" -> Json.toJson(o.optional),
+ "transitive" -> Json.toJson(o.transitive)
+ )
+ }
+
+ def reads(json: JsValue) = json match{
+ case x: JsObject =>
+ JsSuccess(coursier.Dependency(
+ Json.fromJson[coursier.Module](x.value("module")).get,
+ Json.fromJson[String](x.value("version")).get,
+ Json.fromJson[String](x.value("configuration")).get,
+ Json.fromJson[coursier.Attributes](x.value("attributes")).get,
+ Json.fromJson[Set[(String, String)]](x.value("exclusions")).get,
+ Json.fromJson[Boolean](x.value("optional")).get,
+ Json.fromJson[Boolean](x.value("transitive")).get
+ ))
+
+ case _ => JsError("Dep must be an object")
+ }
+ }
+ implicit val attrFormat: Format[coursier.Attributes] = Json.format
+}
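
A hedged sketch of the helpers above (names hypothetical): `zipMap` fuses several targets into one derived target, and the implicit `Format`s let results such as `Either` values round-trip through JSON.

    import play.api.libs.json.Json
    import forge._

    object PackageExample {
      val a: T[Int] = T{ 1 }
      val b: T[Int] = T{ 2 }
      val c: T[Int] = T{ 3 }
      // one anonymous target depending on a, b and c
      val total: T[Int] = zipMap(a, b, c)((x, y, z) => x + y + z)

      def main(args: Array[String]): Unit = {
        // the Either format tags values with a "type" field
        println(Json.toJson(Right(42): Either[String, Int])) // {"type":"Right","value":42}
      }
    }
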
diff --git a/core/src/main/scala/forge/util/Args.scala b/core/src/main/scala/forge/util/Args.scala
new file mode 100644
index 00000000..23102572
--- /dev/null
+++ b/core/src/main/scala/forge/util/Args.scala
@@ -0,0 +1,9 @@
+package forge.util
+
+class Args(val args: IndexedSeq[_], val dest: ammonite.ops.Path){
+ def length = args.length
+ def apply[T](index: Int): T = {
+ if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
+ else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
+ }
+}
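
A hedged, standalone sketch of how `Args` is consumed (hypothetical values):

    import ammonite.ops.pwd
    import forge.util.Args

    object ArgsExample {
      def main(argv: Array[String]): Unit = {
        val args = new Args(IndexedSeq(1, "two"), pwd / "dest")
        println(args[Int](0))     // 1
        println(args[String](1))  // two
        println(args.length)      // 2
      }
    }
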
diff --git a/core/src/main/scala/forge/util/Labelled.scala b/core/src/main/scala/forge/util/Labelled.scala
new file mode 100644
index 00000000..a79d2d93
--- /dev/null
+++ b/core/src/main/scala/forge/util/Labelled.scala
@@ -0,0 +1,8 @@
+package forge.util
+
+import forge.Target
+import play.api.libs.json.Format
+
+case class Labelled[T](target: Target[T],
+ format: Format[T],
+ segments: Seq[String])
diff --git a/core/src/main/scala/forge/util/MultiBiMap.scala b/core/src/main/scala/forge/util/MultiBiMap.scala
new file mode 100644
index 00000000..cb6ff280
--- /dev/null
+++ b/core/src/main/scala/forge/util/MultiBiMap.scala
@@ -0,0 +1,47 @@
+package forge.util
+
+import scala.collection.mutable
+
+/**
+ * A map from keys to collections of values: you can assign multiple values
+ * to any particular key. Also allows lookups in both directions: what values
+ * are assigned to a key or what key a value is assigned to.
+ */
+trait MultiBiMap[K, V]{
+ def containsValue(v: V): Boolean
+ def lookupKey(k: K): OSet[V]
+ def lookupValue(v: V): K
+ def lookupValueOpt(v: V): Option[K]
+ def add(k: K, v: V): Unit
+ def removeAll(k: K): OSet[V]
+ def addAll(k: K, vs: TraversableOnce[V]): Unit
+ def keys(): Iterator[K]
+ def values(): Iterator[OSet[V]]
+}
+object MultiBiMap{
+ class Mutable[K, V]() extends MultiBiMap[K, V]{
+ private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K]
+ private[this] val keyToValues = mutable.LinkedHashMap.empty[K, OSet.Mutable[V]]
+ def containsValue(v: V) = valueToKey.contains(v)
+ def lookupKey(k: K) = keyToValues(k)
+ def lookupValue(v: V) = valueToKey(v)
+ def lookupValueOpt(v: V) = valueToKey.get(v)
+ def add(k: K, v: V): Unit = {
+ valueToKey(v) = k
+ keyToValues.getOrElseUpdate(k, new OSet.Mutable[V]()).append(v)
+ }
+ def removeAll(k: K): OSet[V] = keyToValues.get(k) match {
+ case None => OSet()
+ case Some(vs) =>
+ vs.foreach(valueToKey.remove)
+
+ keyToValues.remove(k)
+ vs
+ }
+ def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _))
+
+ def keys() = keyToValues.keysIterator
+
+ def values() = keyToValues.valuesIterator
+ }
+}
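
A hedged usage sketch (hypothetical keys and values):

    import forge.util.MultiBiMap

    object MultiBiMapExample {
      def main(args: Array[String]): Unit = {
        val m = new MultiBiMap.Mutable[Int, String]()
        m.add(0, "a")
        m.addAll(1, Seq("b", "c"))
        println(m.lookupKey(1).items)   // b and c, in insertion order
        println(m.lookupValue("a"))     // 0
        println(m.lookupValueOpt("z"))  // None
        println(m.removeAll(1).items)   // removes and returns b and c
      }
    }
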
diff --git a/core/src/main/scala/forge/util/OSet.scala b/core/src/main/scala/forge/util/OSet.scala
new file mode 100644
index 00000000..43743cdc
--- /dev/null
+++ b/core/src/main/scala/forge/util/OSet.scala
@@ -0,0 +1,107 @@
+package forge.util
+
+
+import play.api.libs.json._
+
+import scala.collection.mutable
+
+/**
+ * A collection with enforced uniqueness, fast contains and deterministic
+ * ordering. Raises an exception if a duplicate is found; call
+ * `toSeq.distinct` if you explicitly want to make it swallow duplicates
+ */
+trait OSet[V] extends TraversableOnce[V]{
+ def contains(v: V): Boolean
+ def items: IndexedSeq[V]
+ def flatMap[T](f: V => TraversableOnce[T]): OSet[T]
+ def map[T](f: V => T): OSet[T]
+ def filter(f: V => Boolean): OSet[V]
+ def collect[T](f: PartialFunction[V, T]): OSet[T]
+ def zipWithIndex: OSet[(V, Int)]
+ def reverse: OSet[V]
+}
+
+object OSet{
+ implicit def jsonFormat[T: Format]: Format[OSet[T]] = new Format[OSet[T]] {
+ def writes(o: OSet[T]) = JsArray(o.items.map(implicitly[Format[T]].writes))
+
+ def reads(json: JsValue) = json match{
+ case x: JsArray => implicitly[Format[Seq[T]]].reads(x).map(OSet.from)
+ case _ => JsError("OSet needs to be an Array")
+ }
+ }
+ def apply[V](items: V*) = from(items)
+
+ def from[V](items: TraversableOnce[V]): OSet[V] = {
+ val set = new OSet.Mutable[V]()
+ items.foreach(set.append)
+ set
+ }
+
+
+ class Mutable[V]() extends OSet[V]{
+
+ private[this] val set0 = mutable.LinkedHashSet.empty[V]
+ def contains(v: V) = set0.contains(v)
+ def append(v: V) = if (!contains(v)){
+ set0.add(v)
+
+ }else {
+ throw new Exception("Duplicated item inserted into OrderedSet: " + v)
+ }
+ def appendAll(vs: Seq[V]) = vs.foreach(append)
+ def items: IndexedSeq[V] = set0.toIndexedSeq
+ def set: collection.Set[V] = set0
+
+ def map[T](f: V => T): OSet[T] = {
+ val output = new OSet.Mutable[T]
+ for(i <- items) output.append(f(i))
+ output
+ }
+ def flatMap[T](f: V => TraversableOnce[T]): OSet[T] = {
+ val output = new OSet.Mutable[T]
+ for(i <- items) for(i0 <- f(i)) output.append(i0)
+ output
+ }
+ def filter(f: V => Boolean): OSet[V] = {
+ val output = new OSet.Mutable[V]
+ for(i <- items) if (f(i)) output.append(i)
+ output
+ }
+
+ def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x))
+
+ def zipWithIndex = {
+ var i = 0
+ this.map{ x =>
+ i += 1
+ (x, i-1)
+ }
+ }
+
+ def reverse = OSet.from(items.reverseIterator)
+
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = items.isTraversableAgain
+ def toIterator: Iterator[V] = items.toIterator
+ def toStream: Stream[V] = items.toStream
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: V](xs: Array[B],start: Int,len: Int): Unit = items.copyToArray(xs, start, len)
+ def exists(p: V => Boolean): Boolean = items.exists(p)
+ def find(p: V => Boolean): Option[V] = items.find(p)
+ def forall(p: V => Boolean): Boolean = items.forall(p)
+ def foreach[U](f: V => U): Unit = items.foreach(f)
+ def hasDefiniteSize: Boolean = items.hasDefiniteSize
+ def isEmpty: Boolean = items.isEmpty
+ def seq: scala.collection.TraversableOnce[V] = items
+ def toTraversable: Traversable[V] = items
+
+ override def hashCode() = items.hashCode()
+ override def equals(other: Any) = other match{
+ case s: OSet[_] => items.equals(s.items)
+ case _ => super.equals(other)
+ }
+ override def toString = items.mkString("OSet(", ", ", ")")
+ }
+}
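
A hedged usage sketch (hypothetical values):

    import forge.util.OSet

    object OSetExample {
      def main(args: Array[String]): Unit = {
        val s = OSet(3, 1, 2)
        println(s.items)             // insertion order preserved: 3, 1, 2
        println(s.contains(2))       // true
        println(s.map(_ * 10).items) // 30, 10, 20
        println(s.reverse.items)     // 2, 1, 3
        // OSet(1, 1) would throw: duplicates are rejected rather than swallowed
      }
    }
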
diff --git a/core/src/main/scala/forge/util/PathRef.scala b/core/src/main/scala/forge/util/PathRef.scala
new file mode 100644
index 00000000..dbe1ebbd
--- /dev/null
+++ b/core/src/main/scala/forge/util/PathRef.scala
@@ -0,0 +1,57 @@
+package forge
+package util
+
+import java.io.IOException
+import java.nio.file.{FileVisitResult, FileVisitor}
+import java.nio.file.attribute.BasicFileAttributes
+import java.security.MessageDigest
+import java.nio.{file => jnio}
+import play.api.libs.json.{Format, Json}
+
+
+/**
+ * A wrapper around `ammonite.ops.Path` that calculates its hashcode based
+ * on the contents of the filesystem underneath it. Used to ensure filesystem
+ * changes can bust caches which are keyed off hashcodes.
+ */
+case class PathRef(path: ammonite.ops.Path){
+ val md5Hash = {
+ val digest = MessageDigest.getInstance("MD5")
+
+ val buffer = new Array[Byte](16 * 1024)
+ jnio.Files.walkFileTree(
+ path.toNIO,
+ new FileVisitor[jnio.Path] {
+ def preVisitDirectory(dir: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(dir.toAbsolutePath.toString.getBytes)
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFile(file: jnio.Path, attrs: BasicFileAttributes) = {
+ digest.update(file.toAbsolutePath.toString.getBytes)
+ val is = jnio.Files.newInputStream(file)
+ def rec(): Unit = {
+ val length = is.read(buffer)
+ if (length != -1){
+ digest.update(buffer, 0, length)
+ rec()
+ }
+ }
+ rec()
+ FileVisitResult.CONTINUE
+ }
+
+ def visitFileFailed(file: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ def postVisitDirectory(dir: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
+ }
+ )
+
+ java.util.Arrays.hashCode(digest.digest())
+
+ }
+ override def hashCode() = md5Hash
+}
+
+object PathRef{
+ implicit def jsonFormatter: Format[PathRef] = Json.format
+}
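
A hedged sketch showing that the hash tracks file contents (hypothetical paths):

    import ammonite.ops._
    import forge.util.PathRef

    object PathRefExample {
      def main(args: Array[String]): Unit = {
        val dir = pwd / 'target / "pathref-example"
        mkdir(dir)
        write.over(dir / "file.txt", "hello")
        val before = PathRef(dir).hashCode
        write.over(dir / "file.txt", "world")
        val after = PathRef(dir).hashCode
        println(before == after) // false: the contents changed, so the hash changed
      }
    }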