author     Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
committer  Li Haoyi <haoyi.sg@gmail.com>  2017-11-03 23:44:39 -0700
commit     13270145903b457c906a9fa77bd152afb6448ef5 (patch)
tree       e85b7ed530e0c8e3c3041cbf17641857c448b602 /src/main/scala
parent     66f1c5c2438aeb8f2496575f52c25b09cf5793a6 (diff)
download   mill-13270145903b457c906a9fa77bd152afb6448ef5.tar.gz
           mill-13270145903b457c906a9fa77bd152afb6448ef5.tar.bz2
           mill-13270145903b457c906a9fa77bd152afb6448ef5.zip
Split up forge into `scalaplugin` and `core` subprojects, to allow us to use the `T#apply` macro in the implementation of `scalaplugin.Subproject`
Also needed to implement inter-`Subproject` dependencies so the `MetacircularTests` keep working under the new layout
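For context, the `T#apply` macro lets a target body read upstream target values directly inside a `T{...}` block, rather than threading them through the `zip`/`zipMap` combinators that the deleted `Subproject.scala` below relies on (e.g. `val sources = T{ basePath.map(p => PathRef(p / 'src)) }`). A minimal hypothetical sketch of the macro-based style this split enables, not the actual code added to the new subprojects:

    // Hypothetical sketch only; assumes the same basePath, scalaVersion,
    // compileDepClasspath and outputPath targets as the deleted
    // Subproject.scala below, with compileScala taking plain values.
    val sources  = T{ PathRef(basePath() / 'src) }
    val compiled = T{
      compileScala(scalaVersion(), sources(), compileDepClasspath(), outputPath())
    }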
Diffstat (limited to 'src/main/scala')
-rw-r--r--  src/main/scala/forge/Discovered.scala              |  57
-rw-r--r--  src/main/scala/forge/Evaluator.scala                | 191
-rw-r--r--  src/main/scala/forge/Main.scala                     |  25
-rw-r--r--  src/main/scala/forge/Target.scala                   | 133
-rw-r--r--  src/main/scala/forge/Tarjans.scala                  |  50
-rw-r--r--  src/main/scala/forge/package.scala                  |  99
-rw-r--r--  src/main/scala/forge/scalaplugin/Subproject.scala   | 146
-rw-r--r--  src/main/scala/forge/util/Args.scala                |   9
-rw-r--r--  src/main/scala/forge/util/Labelled.scala            |   8
-rw-r--r--  src/main/scala/forge/util/MultiBiMap.scala          |  47
-rw-r--r--  src/main/scala/forge/util/OSet.scala                | 107
-rw-r--r--  src/main/scala/forge/util/PathRef.scala             |  57
12 files changed, 0 insertions(+), 929 deletions(-)
diff --git a/src/main/scala/forge/Discovered.scala b/src/main/scala/forge/Discovered.scala
deleted file mode 100644
index a1a9aa65..00000000
--- a/src/main/scala/forge/Discovered.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package forge
-
-import forge.util.Labelled
-import play.api.libs.json.Format
-
-import language.experimental.macros
-import reflect.macros.blackbox.Context
-
-class Discovered[T](val value: Seq[(Seq[String], Format[_], T => Target[_])]){
- def apply(t: T) = value.map{case (a, f, b) => (a, f, b(t)) }
-
-}
-object Discovered {
- def makeTuple[T, V](path: Seq[String], func: T => Target[V])(implicit f: Format[V]) = {
- (path, f, func)
- }
-
-
- def mapping[T: Discovered](t: T): Map[Target[_], Labelled[_]] = {
- implicitly[Discovered[T]].apply(t)
- .map(x => x._3 -> Labelled(x._3, x._2.asInstanceOf[Format[Any]], x._1))
- .toMap
- }
-
- implicit def apply[T]: Discovered[T] = macro applyImpl[T]
-
- def applyImpl[T: c.WeakTypeTag](c: Context): c.Expr[Discovered[T]] = {
- import c.universe._
- val tpe = c.weakTypeTag[T].tpe
- def rec(segments: List[String], t: c.Type): Seq[Seq[String]] = for {
- m <- t.members.toSeq
- if m.isTerm && (m.asTerm.isGetter || m.asTerm.isLazy) || m.isModule
- res <- {
- val extendedSegments = m.name.toString :: segments
- val self =
- if (m.typeSignature.resultType <:< c.weakTypeOf[Target[_]]) Seq(extendedSegments)
- else Nil
- val children = rec(extendedSegments, m.typeSignature)
- self ++ children
- }
- } yield res
-
- val reversedPaths = rec(Nil, tpe)
-
- val result = for(reversedPath <- reversedPaths.toList) yield {
- val base = q"${TermName(c.freshName())}"
- val segments = reversedPath.reverse.toList
- val ident = segments.foldLeft[Tree](base)((prefix, name) =>
- q"$prefix.${TermName(name)}"
- )
-
- q"forge.Discovered.makeTuple($segments, ($base: $tpe) => $ident)"
- }
-
- c.Expr[Discovered[T]](q"new _root_.forge.Discovered($result)")
- }
-}
diff --git a/src/main/scala/forge/Evaluator.scala b/src/main/scala/forge/Evaluator.scala
deleted file mode 100644
index 50dc46d4..00000000
--- a/src/main/scala/forge/Evaluator.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-package forge
-
-
-import play.api.libs.json.{Format, JsValue, Json}
-
-import scala.collection.mutable
-import ammonite.ops._
-import forge.util.{Args, Labelled, MultiBiMap, OSet}
-class Evaluator(workspacePath: Path,
- labeling: Map[Target[_], Labelled[_]]){
-
- def evaluate(targets: OSet[Target[_]]): Evaluator.Results = {
- mkdir(workspacePath)
-
- val sortedGroups = Evaluator.groupAroundNamedTargets(
- Evaluator.topoSortedTransitiveTargets(targets),
- labeling
- )
-
- val evaluated = new OSet.Mutable[Target[_]]
- val results = mutable.LinkedHashMap.empty[Target[_], Any]
-
- for (groupIndex <- sortedGroups.keys()){
- val group = sortedGroups.lookupKey(groupIndex)
-
- val (newResults, newEvaluated) = evaluateGroupCached(
- group,
- results,
- sortedGroups
- )
- evaluated.appendAll(newEvaluated)
- for((k, v) <- newResults) results.put(k, v)
-
- }
-
- Evaluator.Results(targets.items.map(results), evaluated)
- }
-
- def evaluateGroupCached(group: OSet[Target[_]],
- results: collection.Map[Target[_], Any],
- sortedGroups: MultiBiMap[Int, Target[_]]): (collection.Map[Target[_], Any], Seq[Target[_]]) = {
-
-
- val (externalInputs, terminals) = partitionGroupInputOutput(group, results)
-
- val inputsHash =
- externalInputs.toIterator.map(results).toVector.hashCode +
- group.toIterator.map(_.sideHash).toVector.hashCode()
-
- val primeLabel = labeling(terminals.items(0)).segments
-
-
- val targetDestPath = workspacePath / primeLabel
- val metadataPath = targetDestPath / up / (targetDestPath.last + ".forge.json")
-
- val cached = for{
- json <- scala.util.Try(Json.parse(read.getInputStream(metadataPath))).toOption
- (cachedHash, terminalResults) <- Json.fromJson[(Int, Seq[JsValue])](json).asOpt
- if cachedHash == inputsHash
- } yield terminalResults
-
- cached match{
- case Some(terminalResults) =>
- val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
- for((terminal, res) <- terminals.items.zip(terminalResults)){
- newResults(terminal) = labeling(terminal).format.reads(res).get
- }
- (newResults, Nil)
-
- case _ =>
- val (newResults, newEvaluated, terminalResults) = evaluateGroup(group, results, targetDestPath)
-
- write.over(
- metadataPath,
- Json.prettyPrint(
- Json.toJson(inputsHash -> terminals.toList.map(terminalResults))
- ),
- )
-
- (newResults, newEvaluated)
- }
- }
-
- def partitionGroupInputOutput(group: OSet[Target[_]],
- results: collection.Map[Target[_], Any]) = {
- val allInputs = group.items.flatMap(_.inputs)
- val (internalInputs, externalInputs) = allInputs.partition(group.contains)
- val internalInputSet = internalInputs.toSet
- val terminals = group.filter(!internalInputSet(_))
- (OSet.from(externalInputs.distinct), terminals)
- }
-
- def evaluateGroup(group: OSet[Target[_]],
- results: collection.Map[Target[_], Any],
- targetDestPath: Path) = {
-
- rm(targetDestPath)
- val terminalResults = mutable.LinkedHashMap.empty[Target[_], JsValue]
- val newEvaluated = mutable.Buffer.empty[Target[_]]
- val newResults = mutable.LinkedHashMap.empty[Target[_], Any]
- for (target <- group.items) {
- newEvaluated.append(target)
- val targetInputValues = target.inputs.toVector.map(x =>
- newResults.getOrElse(x, results(x))
- )
-
- val args = new Args(targetInputValues, targetDestPath)
- val res = target.evaluate(args)
- for(targetLabel <- labeling.get(target)){
- terminalResults(target) = targetLabel
- .format
- .asInstanceOf[Format[Any]]
- .writes(res.asInstanceOf[Any])
- }
- newResults(target) = res
- }
-
- (newResults, newEvaluated, terminalResults)
- }
-
-}
-
-
-object Evaluator{
- class TopoSorted private[Evaluator] (val values: OSet[Target[_]])
- case class Results(values: Seq[Any], evaluated: OSet[Target[_]])
- def groupAroundNamedTargets(topoSortedTargets: TopoSorted,
- labeling: Map[Target[_], Labelled[_]]): MultiBiMap[Int, Target[_]] = {
-
- val grouping = new MultiBiMap.Mutable[Int, Target[_]]()
-
- var groupCount = 0
-
- for(target <- topoSortedTargets.values.items.reverseIterator){
- if (!grouping.containsValue(target)){
- grouping.add(groupCount, target)
- groupCount += 1
- }
-
- val targetGroup = grouping.lookupValue(target)
- for(upstream <- target.inputs){
- grouping.lookupValueOpt(upstream) match{
- case None if !labeling.contains(upstream) =>
- grouping.add(targetGroup, upstream)
- case Some(upstreamGroup) if upstreamGroup == targetGroup =>
- val upstreamTargets = grouping.removeAll(upstreamGroup)
-
- grouping.addAll(targetGroup, upstreamTargets)
-        case _ => // do nothing
- }
- }
- }
-
- val targetOrdering = topoSortedTargets.values.items.zipWithIndex.toMap
- val output = new MultiBiMap.Mutable[Int, Target[_]]
-
- // Sort groups amongst themselves, and sort the contents of each group
- // before aggregating it into the final output
- for(g <- grouping.values().toArray.sortBy(g => targetOrdering(g.items(0)))){
- output.addAll(output.keys.length, g.toArray.sortBy(targetOrdering))
- }
- output
- }
-
- /**
- * Takes the given targets, finds all the targets they transitively depend
- * on, and sort them topologically. Fails if there are dependency cycles
- */
- def topoSortedTransitiveTargets(sourceTargets: OSet[Target[_]]): TopoSorted = {
- val transitiveTargets = new OSet.Mutable[Target[_]]
- def rec(t: Target[_]): Unit = {
- if (transitiveTargets.contains(t)) () // do nothing
- else {
- transitiveTargets.append(t)
- t.inputs.foreach(rec)
- }
- }
-
- sourceTargets.items.foreach(rec)
- val targetIndices = transitiveTargets.items.zipWithIndex.toMap
-
- val numberedEdges =
- for(t <- transitiveTargets.items)
- yield t.inputs.map(targetIndices)
-
- val sortedClusters = Tarjans(numberedEdges)
- val nonTrivialClusters = sortedClusters.filter(_.length > 1)
- assert(nonTrivialClusters.isEmpty, nonTrivialClusters)
- new TopoSorted(OSet.from(sortedClusters.flatten.map(transitiveTargets.items)))
- }
-}
\ No newline at end of file
diff --git a/src/main/scala/forge/Main.scala b/src/main/scala/forge/Main.scala
deleted file mode 100644
index d919d0e2..00000000
--- a/src/main/scala/forge/Main.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package forge
-
-import ammonite.ops._
-import ammonite.util.{Name, Res}
-import forge.util.OSet
-
-
-object Main {
- def main(args: Array[String]): Unit = {
-
- ammonite.Main().instantiateInterpreter() match{
- case Right(interp) =>
- val result = ammonite.main.Scripts.runScript(pwd, Path(args(0), pwd), interp, Nil)
-
- val (obj, discovered) = result.asInstanceOf[Res.Success[(Any, forge.Discovered[Any])]].s
- val mapping = Discovered.mapping(obj)(discovered)
- val workspacePath = pwd / 'target / 'javac
- val evaluator = new Evaluator(workspacePath, mapping)
- val evaluated = evaluator.evaluate(OSet.from(mapping.keys)).evaluated.filter(mapping.contains)
- (result, interp.watchedFiles)
- case Left(problems) => problems
- }
- }
-
-}
diff --git a/src/main/scala/forge/Target.scala b/src/main/scala/forge/Target.scala
deleted file mode 100644
index 6a465a92..00000000
--- a/src/main/scala/forge/Target.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package forge
-
-
-import ammonite.ops.{ls, mkdir}
-import forge.util.{Args, PathRef}
-import play.api.libs.json.{Format, JsValue, Json}
-
-import scala.annotation.compileTimeOnly
-import language.experimental.macros
-import reflect.macros.blackbox.Context
-import scala.collection.mutable
-
-abstract class Target[T] extends Target.Ops[T]{
- /**
- * What other Targets does this Target depend on?
- */
- val inputs: Seq[Target[_]]
-
- /**
- * Evaluate this target
- */
- def evaluate(args: Args): T
-
- /**
- * Even if this target's inputs did not change, does it need to re-evaluate
- * anyway?
- */
- def sideHash: Int = 0
-
- @compileTimeOnly("Target#apply() can only be used with a T{...} block")
- def apply(): T = ???
-}
-
-object Target{
- class Target0[T](t: T) extends Target[T]{
- lazy val t0 = t
- val inputs = Nil
- def evaluate(args: Args) = t0
- }
- class Target1[T](t: => Target[T]) extends Target[T]{
- lazy val t0 = t
- lazy val inputs = t0.inputs
- def evaluate(args: Args) = t0.evaluate(args)
- }
- implicit def toTarget[T](t: T): Target[T] = new Target0(t)
- implicit def apply[T](t: => Target[T]): Target[T] = new Target1(t)
- def raw[T](t: T): Target[T] = macro impl[T]
- def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Target[T]] = {
- import c.universe._
- val bound = collection.mutable.Buffer.empty[(c.Tree, Symbol)]
- val OptionGet = c.universe.typeOf[Target[_]].member(TermName("apply"))
- object transformer extends c.universe.Transformer {
- // Derived from @olafurpg's
- // https://gist.github.com/olafurpg/596d62f87bf3360a29488b725fbc7608
- override def transform(tree: c.Tree): c.Tree = tree match {
- case t @ q"$fun.apply()" if t.symbol == OptionGet =>
- val tempName = c.freshName(TermName("tmp"))
- val tempSym = c.internal.newTermSymbol(c.internal.enclosingOwner, tempName)
- c.internal.setInfo(tempSym, t.tpe)
- val tempIdent = Ident(tempSym)
- c.internal.setType(tempIdent, t.tpe)
- bound.append((fun, tempSym))
- tempIdent
- case _ => super.transform(tree)
- }
- }
- val transformed = transformer.transform(t.tree)
- val (exprs, symbols) = bound.unzip
-
- val bindings = symbols.map(c.internal.valDef(_))
-
- val embedded = q"forge.zipMap(..$exprs){ (..$bindings) => $transformed }"
-
- c.Expr[Target[T]](embedded)
- }
-
- abstract class Ops[T]{ this: Target[T] =>
- def map[V](f: T => V) = new Target.Mapped(this, f)
-
- def filter(f: T => Boolean) = this
- def withFilter(f: T => Boolean) = this
- def zip[V](other: Target[V]) = new Target.Zipped(this, other)
-
- }
-
- def traverse[T](source: Seq[Target[T]]) = {
- new Traverse[T](source)
- }
- class Traverse[T](val inputs: Seq[Target[T]]) extends Target[Seq[T]]{
- def evaluate(args: Args) = {
- for (i <- 0 until args.length)
- yield args(i).asInstanceOf[T]
- }
-
- }
- class Mapped[T, V](source: Target[T], f: T => V) extends Target[V]{
- def evaluate(args: Args) = f(args(0))
- val inputs = List(source)
- }
- class Zipped[T, V](source1: Target[T],
- source2: Target[V]) extends Target[(T, V)]{
- def evaluate(args: Args) = (args(0), args(1))
- val inputs = List(source1, source2)
- }
-
- def path(path: ammonite.ops.Path) = new Path(path)
- class Path(path: ammonite.ops.Path) extends Target[PathRef]{
- def handle = PathRef(path)
- def evaluate(args: Args) = handle
- override def sideHash = handle.hashCode()
- val inputs = Nil
- }
-
- class Subprocess(val inputs: Seq[Target[_]],
- command: Args => Seq[String]) extends Target[Subprocess.Result] {
-
- def evaluate(args: Args) = {
- mkdir(args.dest)
- import ammonite.ops._
- implicit val path = ammonite.ops.Path(args.dest, pwd)
- val toTarget = () // Shadow the implicit conversion :/
- val output = %%(command(args))
- assert(output.exitCode == 0)
- Subprocess.Result(output, PathRef(args.dest))
- }
- }
- object Subprocess{
- case class Result(result: ammonite.ops.CommandResult, dest: PathRef)
- object Result{
- implicit val tsFormat: Format[Target.Subprocess.Result] = Json.format
- }
- }
-}
diff --git a/src/main/scala/forge/Tarjans.scala b/src/main/scala/forge/Tarjans.scala
deleted file mode 100644
index 9831fe7f..00000000
--- a/src/main/scala/forge/Tarjans.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package forge
-
-import collection.mutable
-// Adapted from
-// https://github.com/indy256/codelibrary/blob/c52247216258e84aac442a23273b7d8306ef757b/java/src/SCCTarjan.java
-object Tarjans {
- def apply(graph0: Seq[Seq[Int]]): Seq[Seq[Int]] = {
- val graph = graph0.map(_.toArray).toArray
- val n = graph.length
- val visited = new Array[Boolean](n)
- val stack = mutable.ArrayBuffer.empty[Integer]
- var time = 0
- val lowlink = new Array[Int](n)
- val components = mutable.ArrayBuffer.empty[Seq[Int]]
-
-
- for (u <- 0 until n) {
- if (!visited(u)) dfs(u)
- }
-
- def dfs(u: Int): Unit = {
- lowlink(u) = time
- time += 1
- visited(u) = true
- stack.append(u)
- var isComponentRoot = true
- for (v <- graph(u)) {
- if (!visited(v)) dfs(v)
- if (lowlink(u) > lowlink(v)) {
- lowlink(u) = lowlink(v)
- isComponentRoot = false
- }
- }
- if (isComponentRoot) {
- val component = mutable.Buffer.empty[Int]
-
- var done = false
- while (!done) {
- val x = stack.last
- stack.remove(stack.length - 1)
- component.append(x)
- lowlink(x) = Integer.MAX_VALUE
- if (x == u) done = true
- }
- components.append(component)
- }
- }
- components
- }
-}
\ No newline at end of file
diff --git a/src/main/scala/forge/package.scala b/src/main/scala/forge/package.scala
deleted file mode 100644
index 6a52d1a1..00000000
--- a/src/main/scala/forge/package.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-import play.api.libs.json._
-import ammonite.ops.{Bytes, Path}
-import coursier.Dependency
-import forge.util.Args
-package object forge {
-
- val T = Target
- type T[T] = Target[T]
- def zipMap[R]()(f: () => R) = T(f())
- def zipMap[A, R](a: T[A])(f: A => R) = a.map(f)
- def zipMap[A, B, R](a: T[A], b: T[B])(f: (A, B) => R) = zip(a, b).map(f.tupled)
- def zipMap[A, B, C, R](a: T[A], b: T[B], c: T[C])(f: (A, B, C) => R) = zip(a, b, c).map(f.tupled)
- def zipMap[A, B, C, D, R](a: T[A], b: T[B], c: T[C], d: T[D])(f: (A, B, C, D) => R) = zip(a, b, c, d).map(f.tupled)
- def zipMap[A, B, C, D, E, R](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E])(f: (A, B, C, D, E) => R) = zip(a, b, c, d, e).map(f.tupled)
- def zip() = T(())
- def zip[A](a: T[A]) = a.map(Tuple1(_))
- def zip[A, B](a: T[A], b: T[B]) = a.zip(b)
- def zip[A, B, C](a: T[A], b: T[B], c: T[C]) = new T[(A, B, C)]{
- val inputs = Seq(a, b, c)
- def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2))
- }
- def zip[A, B, C, D](a: T[A], b: T[B], c: T[C], d: T[D]) = new T[(A, B, C, D)]{
- val inputs = Seq(a, b, c, d)
- def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3))
- }
- def zip[A, B, C, D, E](a: T[A], b: T[B], c: T[C], d: T[D], e: T[E]) = new T[(A, B, C, D, E)]{
- val inputs = Seq(a, b, c, d, e)
- def evaluate(args: Args) = (args[A](0), args[B](1), args[C](2), args[D](3), args[E](4))
- }
- implicit object pathFormat extends Format[ammonite.ops.Path]{
- def reads(json: JsValue) = json match{
- case JsString(v) => JsSuccess(Path(v))
- case _ => JsError("Paths must be a String")
- }
- def writes(o: Path) = JsString(o.toString)
- }
-
- implicit object bytesFormat extends Format[Bytes]{
- def reads(json: JsValue) = json match{
- case JsString(v) => JsSuccess(
- new Bytes(javax.xml.bind.DatatypeConverter.parseBase64Binary(v))
- )
- case _ => JsError("Bytes must be a String")
- }
- def writes(o: Bytes) = {
- JsString(javax.xml.bind.DatatypeConverter.printBase64Binary(o.array))
- }
- }
-
- implicit def EitherFormat[T: Format, V: Format] = new Format[Either[T, V]]{
- def reads(json: JsValue) = json match{
- case JsObject(struct) =>
- (struct.get("type"), struct.get("value")) match{
- case (Some(JsString("Left")), Some(v)) => implicitly[Reads[T]].reads(v).map(Left(_))
- case (Some(JsString("Right")), Some(v)) => implicitly[Reads[V]].reads(v).map(Right(_))
- case _ => JsError("Either object layout is unknown")
- }
- case _ => JsError("Either must be an Object")
- }
- def writes(o: Either[T, V]) = o match{
- case Left(v) => Json.obj("type" -> "Left", "value" -> implicitly[Writes[T]].writes(v))
- case Right(v) => Json.obj("type" -> "Right", "value" -> implicitly[Writes[V]].writes(v))
- }
- }
-
- implicit val crFormat: Format[ammonite.ops.CommandResult] = Json.format
- implicit val modFormat: Format[coursier.Module] = Json.format
- // https://github.com/playframework/play-json/issues/120
- // implicit val depFormat: Format[coursier.Dependency] = Json.format
- implicit val depFormat: Format[coursier.Dependency] = new Format[coursier.Dependency] {
- def writes(o: Dependency) = {
- Json.obj(
- "module" -> Json.toJson(o.module),
- "version" -> Json.toJson(o.version),
- "configuration" -> Json.toJson(o.configuration),
- "exclusions" -> Json.toJson(o.exclusions),
- "attributes" -> Json.toJson(o.attributes),
- "optional" -> Json.toJson(o.optional),
- "transitive" -> Json.toJson(o.transitive)
- )
- }
-
- def reads(json: JsValue) = json match{
- case x: JsObject =>
- JsSuccess(coursier.Dependency(
- Json.fromJson[coursier.Module](x.value("module")).get,
- Json.fromJson[String](x.value("version")).get,
- Json.fromJson[String](x.value("configuration")).get,
- Json.fromJson[coursier.Attributes](x.value("attributes")).get,
- Json.fromJson[Set[(String, String)]](x.value("exclusions")).get,
- Json.fromJson[Boolean](x.value("optional")).get,
- Json.fromJson[Boolean](x.value("transitive")).get
- ))
-
- case _ => JsError("Dep must be an object")
- }
- }
- implicit val attrFormat: Format[coursier.Attributes] = Json.format
-}
diff --git a/src/main/scala/forge/scalaplugin/Subproject.scala b/src/main/scala/forge/scalaplugin/Subproject.scala
deleted file mode 100644
index 93532ec9..00000000
--- a/src/main/scala/forge/scalaplugin/Subproject.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-package forge
-package scalaplugin
-
-import java.io.File
-
-import ammonite.ops.{Path, ls, mkdir, pwd}
-import coursier.{Cache, Dependency, Fetch, MavenRepository, Module, Repository, Resolution}
-import forge.{Target => T}
-import forge.util.PathRef
-import sbt.internal.inc.{FreshCompilerCache, ScalaInstance, ZincUtil}
-import sbt.internal.util.{ConsoleOut, MainAppender}
-import sbt.util.LogExchange
-import xsbti.api.{ClassLike, DependencyContext}
-import xsbti.compile.DependencyChanges
-
-import scalaz.concurrent.Task
-object Subproject{
- def compileScala(scalaVersion: T[String],
- sources: T[PathRef],
- compileClasspath: T[Seq[PathRef]],
- outputPath: T[Path]): T[PathRef] = {
- for((scalaVersion, sources, compileClasspath, outputPath) <- zip(scalaVersion, sources, compileClasspath, outputPath))
- yield {
- val binaryScalaVersion = scalaVersion.split('.').dropRight(1).mkString(".")
- def grepJar(s: String) = {
- compileClasspath
- .find(_.path.toString.endsWith(s))
- .getOrElse(throw new Exception("Cannot find " + s))
- .path
- .toIO
- }
- val scalac = ZincUtil.scalaCompiler(
- new ScalaInstance(
- version = scalaVersion,
- loader = getClass.getClassLoader,
- libraryJar = grepJar(s"scala-library-$scalaVersion.jar"),
- compilerJar = grepJar(s"scala-compiler-$scalaVersion.jar"),
- allJars = compileClasspath.toArray.map(_.path.toIO),
- explicitActual = None
- ),
- grepJar(s"compiler-bridge_$binaryScalaVersion-1.0.3.jar")
- )
-
- mkdir(outputPath)
-
-
- scalac.apply(
- sources = ls.rec(sources.path).filter(_.isFile).map(_.toIO).toArray,
- changes = new DependencyChanges {
- def isEmpty = true
- def modifiedBinaries() = Array[File]()
- def modifiedClasses() = Array[String]()
- },
- classpath = compileClasspath.map(_.path.toIO).toArray,
- singleOutput = outputPath.toIO,
- options = Array(),
- callback = new xsbti.AnalysisCallback {
- def startSource(source: File) = ()
- def apiPhaseCompleted() = ()
- def enabled() = true
- def binaryDependency(onBinaryEntry: File, onBinaryClassName: String, fromClassName: String, fromSourceFile: File, context: DependencyContext) = ()
- def generatedNonLocalClass(source: File, classFile: File, binaryClassName: String, srcClassName: String) = ()
- def problem(what: String, pos: xsbti.Position, msg: String, severity: xsbti.Severity, reported: Boolean) = ()
- def dependencyPhaseCompleted() = ()
- def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext) = ()
- def generatedLocalClass(source: File, classFile: File) = ()
- def api(sourceFile: File, classApi: ClassLike) = ()
-
- def mainClass(sourceFile: File, className: String) = ()
- def usedName(className: String, name: String, useScopes: java.util.EnumSet[xsbti.UseScope]) = ()
- },
- maximumErrors = 10,
- cache = new FreshCompilerCache(),
- log = {
- val console = ConsoleOut.systemOut
- val consoleAppender = MainAppender.defaultScreen(console)
- val l = LogExchange.logger("Hello")
- LogExchange.unbindLoggerAppenders("Hello")
- LogExchange.bindLoggerAppenders("Hello", (consoleAppender -> sbt.util.Level.Warn) :: Nil)
- l
- }
- )
- PathRef(outputPath)
- }
- }
- def createJar(sourceDirs: T[Seq[PathRef]]) = ???
- def resolveDependencies(repositories: T[Seq[Repository]],
- deps: T[Seq[coursier.Dependency]]): T[Seq[PathRef]] = {
- for((repositories, deps) <- zip(repositories, deps)) yield {
- val start = Resolution(deps.toSet)
- val fetch = Fetch.from(repositories, Cache.fetch())
- val resolution = start.process.run(fetch).unsafePerformSync
- val localArtifacts: Seq[File] = Task.gatherUnordered(
- resolution.artifacts.map(Cache.file(_).run)
- ).unsafePerformSync.flatMap(_.toOption)
-
- localArtifacts.map(p => PathRef(Path(p)))
- }
- }
-}
-import Subproject._
-abstract class Subproject {
- val scalaVersion: T[String]
-
- val scalaBinaryVersion = T{ scalaVersion.map(_.split('.').dropRight(1).mkString(".")) }
- val deps = T{ Seq[coursier.Dependency]() }
- val compileDeps = T{ Seq[coursier.Dependency]() }
- val runDeps = T{ Seq[coursier.Dependency]() }
- val basePath: T[Path]
-
- val repositories: Seq[Repository] = Seq(
- Cache.ivy2Local,
- MavenRepository("https://repo1.maven.org/maven2")
- )
-
- val compileDepClasspath: T[Seq[PathRef]] = T(
- resolveDependencies(
- repositories,
- for((scalaVersion, scalaBinaryVersion, compileDeps, deps) <- zip(scalaVersion, scalaBinaryVersion, compileDeps, deps))
- yield deps ++ compileDeps ++ Seq(
- Dependency(Module("org.scala-lang", "scala-compiler"), scalaVersion),
- Dependency(Module("org.scala-sbt", s"compiler-bridge_$scalaBinaryVersion"), "1.0.3")
- )
- )
- )
- val runDepClasspath: T[Seq[PathRef]] = T(
- resolveDependencies(
- repositories,
- for((scalaVersion, runDeps, deps) <- zip(scalaVersion, runDeps, deps))
- yield deps ++ runDeps ++ Seq(
- Dependency(Module("org.scala-lang", "scala-library"), scalaVersion)
- )
- )
- )
-
- val sources = T{ basePath.map(p => PathRef(p / 'src)) }
- val outputPath = T{ basePath.map(p => p / 'out) }
- val resources = T{ basePath.map(p => PathRef(p / 'resources)) }
- val compiledPath = T{ outputPath.map(p => p / 'classpath) }
- val compiled = T{
- compileScala(scalaVersion, sources, compileDepClasspath, outputPath)
- }
-
- val classpath = T{ for((r, c) <- resources.zip(compiled)) yield Seq(r, c) }
-// val jar = T{ createJar(classpath) }
-}
diff --git a/src/main/scala/forge/util/Args.scala b/src/main/scala/forge/util/Args.scala
deleted file mode 100644
index 23102572..00000000
--- a/src/main/scala/forge/util/Args.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package forge.util
-
-class Args(val args: IndexedSeq[_], val dest: ammonite.ops.Path){
- def length = args.length
- def apply[T](index: Int): T = {
- if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
- else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
- }
-}
diff --git a/src/main/scala/forge/util/Labelled.scala b/src/main/scala/forge/util/Labelled.scala
deleted file mode 100644
index a79d2d93..00000000
--- a/src/main/scala/forge/util/Labelled.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package forge.util
-
-import forge.Target
-import play.api.libs.json.Format
-
-case class Labelled[T](target: Target[T],
- format: Format[T],
- segments: Seq[String])
diff --git a/src/main/scala/forge/util/MultiBiMap.scala b/src/main/scala/forge/util/MultiBiMap.scala
deleted file mode 100644
index cb6ff280..00000000
--- a/src/main/scala/forge/util/MultiBiMap.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package forge.util
-
-import scala.collection.mutable
-
-/**
- * A map from keys to collections of values: you can assign multiple values
- * to any particular key. Also allows lookups in both directions: what values
- * are assigned to a key or what key a value is assigned to.
- */
-trait MultiBiMap[K, V]{
- def containsValue(v: V): Boolean
- def lookupKey(k: K): OSet[V]
- def lookupValue(v: V): K
- def lookupValueOpt(v: V): Option[K]
- def add(k: K, v: V): Unit
- def removeAll(k: K): OSet[V]
- def addAll(k: K, vs: TraversableOnce[V]): Unit
- def keys(): Iterator[K]
- def values(): Iterator[OSet[V]]
-}
-object MultiBiMap{
- class Mutable[K, V]() extends MultiBiMap[K, V]{
- private[this] val valueToKey = mutable.LinkedHashMap.empty[V, K]
- private[this] val keyToValues = mutable.LinkedHashMap.empty[K, OSet.Mutable[V]]
- def containsValue(v: V) = valueToKey.contains(v)
- def lookupKey(k: K) = keyToValues(k)
- def lookupValue(v: V) = valueToKey(v)
- def lookupValueOpt(v: V) = valueToKey.get(v)
- def add(k: K, v: V): Unit = {
- valueToKey(v) = k
- keyToValues.getOrElseUpdate(k, new OSet.Mutable[V]()).append(v)
- }
- def removeAll(k: K): OSet[V] = keyToValues.get(k) match {
- case None => OSet()
- case Some(vs) =>
- vs.foreach(valueToKey.remove)
-
- keyToValues.remove(k)
- vs
- }
- def addAll(k: K, vs: TraversableOnce[V]): Unit = vs.foreach(this.add(k, _))
-
- def keys() = keyToValues.keysIterator
-
- def values() = keyToValues.valuesIterator
- }
-}
diff --git a/src/main/scala/forge/util/OSet.scala b/src/main/scala/forge/util/OSet.scala
deleted file mode 100644
index 43743cdc..00000000
--- a/src/main/scala/forge/util/OSet.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-package forge.util
-
-
-import play.api.libs.json._
-
-import scala.collection.mutable
-
-/**
- * A collection with enforced uniqueness, fast contains and deterministic
- * ordering. Raises an exception if a duplicate is found; call
- * `toSeq.distinct` if you explicitly want to make it swallow duplicates
- */
-trait OSet[V] extends TraversableOnce[V]{
- def contains(v: V): Boolean
- def items: IndexedSeq[V]
- def flatMap[T](f: V => TraversableOnce[T]): OSet[T]
- def map[T](f: V => T): OSet[T]
- def filter(f: V => Boolean): OSet[V]
- def collect[T](f: PartialFunction[V, T]): OSet[T]
- def zipWithIndex: OSet[(V, Int)]
- def reverse: OSet[V]
-}
-
-object OSet{
- implicit def jsonFormat[T: Format]: Format[OSet[T]] = new Format[OSet[T]] {
- def writes(o: OSet[T]) = JsArray(o.items.map(implicitly[Format[T]].writes))
-
- def reads(json: JsValue) = json match{
- case x: JsArray => implicitly[Format[Seq[T]]].reads(x).map(OSet.from)
- case _ => JsError("OSet needs to be an Array")
- }
- }
- def apply[V](items: V*) = from(items)
-
- def from[V](items: TraversableOnce[V]): OSet[V] = {
- val set = new OSet.Mutable[V]()
- items.foreach(set.append)
- set
- }
-
-
- class Mutable[V]() extends OSet[V]{
-
- private[this] val set0 = mutable.LinkedHashSet.empty[V]
- def contains(v: V) = set0.contains(v)
- def append(v: V) = if (!contains(v)){
- set0.add(v)
-
- }else {
- throw new Exception("Duplicated item inserted into OrderedSet: " + v)
- }
- def appendAll(vs: Seq[V]) = vs.foreach(append)
- def items: IndexedSeq[V] = set0.toIndexedSeq
- def set: collection.Set[V] = set0
-
- def map[T](f: V => T): OSet[T] = {
- val output = new OSet.Mutable[T]
- for(i <- items) output.append(f(i))
- output
- }
- def flatMap[T](f: V => TraversableOnce[T]): OSet[T] = {
- val output = new OSet.Mutable[T]
- for(i <- items) for(i0 <- f(i)) output.append(i0)
- output
- }
- def filter(f: V => Boolean): OSet[V] = {
- val output = new OSet.Mutable[V]
- for(i <- items) if (f(i)) output.append(i)
- output
- }
-
- def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x))
-
- def zipWithIndex = {
- var i = 0
- this.map{ x =>
- i += 1
- (x, i-1)
- }
- }
-
- def reverse = OSet.from(items.reverseIterator)
-
- // Members declared in scala.collection.GenTraversableOnce
- def isTraversableAgain: Boolean = items.isTraversableAgain
- def toIterator: Iterator[V] = items.toIterator
- def toStream: Stream[V] = items.toStream
-
- // Members declared in scala.collection.TraversableOnce
- def copyToArray[B >: V](xs: Array[B],start: Int,len: Int): Unit = items.copyToArray(xs, start, len)
- def exists(p: V => Boolean): Boolean = items.exists(p)
- def find(p: V => Boolean): Option[V] = items.find(p)
- def forall(p: V => Boolean): Boolean = items.forall(p)
- def foreach[U](f: V => U): Unit = items.foreach(f)
- def hasDefiniteSize: Boolean = items.hasDefiniteSize
- def isEmpty: Boolean = items.isEmpty
- def seq: scala.collection.TraversableOnce[V] = items
- def toTraversable: Traversable[V] = items
-
- override def hashCode() = items.hashCode()
- override def equals(other: Any) = other match{
- case s: OSet[_] => items.equals(s.items)
- case _ => super.equals(other)
- }
- override def toString = items.mkString("OSet(", ", ", ")")
- }
-}
diff --git a/src/main/scala/forge/util/PathRef.scala b/src/main/scala/forge/util/PathRef.scala
deleted file mode 100644
index dbe1ebbd..00000000
--- a/src/main/scala/forge/util/PathRef.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package forge
-package util
-
-import java.io.IOException
-import java.nio.file.{FileVisitResult, FileVisitor}
-import java.nio.file.attribute.BasicFileAttributes
-import java.security.MessageDigest
-import java.nio.{file => jnio}
-import play.api.libs.json.{Format, Json}
-
-
-/**
- * A wrapper around `ammonite.ops.Path` that calculates its hashcode based
- * on the contents of the filesystem underneath it. Used to ensure filesystem
- * changes can bust caches which are keyed off hashcodes.
- */
-case class PathRef(path: ammonite.ops.Path){
- val md5Hash = {
- val digest = MessageDigest.getInstance("MD5")
-
- val buffer = new Array[Byte](16 * 1024)
- jnio.Files.walkFileTree(
- path.toNIO,
- new FileVisitor[jnio.Path] {
- def preVisitDirectory(dir: jnio.Path, attrs: BasicFileAttributes) = {
- digest.update(dir.toAbsolutePath.toString.getBytes)
- FileVisitResult.CONTINUE
- }
-
- def visitFile(file: jnio.Path, attrs: BasicFileAttributes) = {
- digest.update(file.toAbsolutePath.toString.getBytes)
- val is = jnio.Files.newInputStream(file)
- def rec(): Unit = {
- val length = is.read(buffer)
- if (length != -1){
- digest.update(buffer, 0, length)
- rec()
- }
- }
- rec()
- FileVisitResult.CONTINUE
- }
-
- def visitFileFailed(file: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
- def postVisitDirectory(dir: jnio.Path, exc: IOException) = FileVisitResult.CONTINUE
- }
- )
-
- java.util.Arrays.hashCode(digest.digest())
-
- }
- override def hashCode() = md5Hash
-}
-
-object PathRef{
- implicit def jsonFormatter: Format[PathRef] = Json.format
-}