Diffstat (limited to 'main/api')
-rw-r--r--   main/api/src/io/github/retronym/java9rtexport/Copy.java     75
-rw-r--r--   main/api/src/io/github/retronym/java9rtexport/Export.java  103
-rw-r--r--   main/api/src/mill/api/AggWrapper.scala                     120
-rw-r--r--   main/api/src/mill/api/ClassLoader.scala                     63
-rw-r--r--   main/api/src/mill/api/Ctx.scala                             55
-rw-r--r--   main/api/src/mill/api/JsonFormatters.scala                  44
-rw-r--r--   main/api/src/mill/api/Logger.scala                          41
-rw-r--r--   main/api/src/mill/api/PathRef.scala                        122
-rw-r--r--   main/api/src/mill/api/Result.scala                          36
9 files changed, 659 insertions, 0 deletions
diff --git a/main/api/src/io/github/retronym/java9rtexport/Copy.java b/main/api/src/io/github/retronym/java9rtexport/Copy.java
new file mode 100644
index 00000000..ac3615bb
--- /dev/null
+++ b/main/api/src/io/github/retronym/java9rtexport/Copy.java
@@ -0,0 +1,75 @@
+/*
+Copyright (C) 2012-2014 EPFL
+Copyright (C) 2012-2014 Typesafe, Inc.
+All rights reserved.
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package io.github.retronym.java9rtexport;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.file.*;
+import java.nio.file.attribute.*;
+import java.util.EnumSet;
+
+import static java.nio.file.StandardCopyOption.COPY_ATTRIBUTES;
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
+
+public class Copy {
+ public static void copyDirectory(final Path source, final Path target)
+ throws IOException {
+ Files.walkFileTree(source, EnumSet.of(FileVisitOption.FOLLOW_LINKS),
+ Integer.MAX_VALUE, new FileVisitor<Path>() {
+
+ @Override
+ public FileVisitResult preVisitDirectory(Path dir,
+ BasicFileAttributes sourceBasic) throws IOException {
+
+ String relative = source.relativize(dir).toString();
+ if (!Files.exists(target.getFileSystem().getPath(relative)))
+ Files.createDirectory(target.getFileSystem().getPath(relative));
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult visitFile(Path file,
+ BasicFileAttributes attrs) throws IOException {
+ String relative = source.relativize(file).toString();
+ Files.copy(file, target.getFileSystem().getPath(relative), COPY_ATTRIBUTES, REPLACE_EXISTING);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult visitFileFailed(Path file, IOException e) throws IOException {
+ throw e;
+ }
+
+ @Override
+ public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException {
+ if (e != null) throw e;
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ }
+
+}
\ No newline at end of file
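
Copy.copyDirectory walks the source tree (following symlinks) and re-creates each directory and file on the target path's filesystem, which is what allows it to write directly into a zip filesystem. A minimal Scala sketch of that usage, with hypothetical paths, might look like this:

import java.net.URI
import java.nio.file.{FileSystems, Paths}
import java.util.Collections

import io.github.retronym.java9rtexport.Copy

object CopyIntoZip {
  def main(args: Array[String]): Unit = {
    val source = Paths.get("build")                          // hypothetical input directory
    val env    = Collections.singletonMap("create", "true")  // ask the zip provider to create the archive
    val zipfs  = FileSystems.newFileSystem(URI.create("jar:file:///tmp/out.jar"), env)
    try Copy.copyDirectory(source, zipfs.getPath("/"))       // relative entries resolve against the zip root
    finally zipfs.close()
  }
}
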
diff --git a/main/api/src/io/github/retronym/java9rtexport/Export.java b/main/api/src/io/github/retronym/java9rtexport/Export.java
new file mode 100644
index 00000000..b71ca555
--- /dev/null
+++ b/main/api/src/io/github/retronym/java9rtexport/Export.java
@@ -0,0 +1,103 @@
+/*
+Copyright (C) 2012-2014 EPFL
+Copyright (C) 2012-2014 Typesafe, Inc.
+All rights reserved.
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package io.github.retronym.java9rtexport;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.*;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class Export {
+ private final static Object lock = new Object();
+ private static File tempFile = null;
+
+ public static String rtJarName = "rt-" + System.getProperty("java.version") + ".jar";
+
+ public static File rt() {
+ try {
+ synchronized (lock) {
+ if (tempFile == null) {
+ Path tempPath = Files.createTempFile("rt", ".jar");
+ tempFile = tempPath.toFile();
+ tempFile.deleteOnExit();
+ tempFile.delete();
+ FileSystem fileSystem = FileSystems.getFileSystem(URI.create("jrt:/"));
+ Path path = fileSystem.getPath("/modules");
+ URI uri = URI.create("jar:" + tempPath.toUri());
+ Map<String, String> env = new HashMap<>();
+ env.put("create", "true");
+ try (FileSystem zipfs = FileSystems.newFileSystem(uri, env)) {
+ Iterator<Path> iterator = Files.list(path).iterator();
+ while (iterator.hasNext()) {
+ Path next = iterator.next();
+ Copy.copyDirectory(next, zipfs.getPath("/"));
+ }
+ }
+ }
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ System.exit(-1);
+ }
+ return tempFile;
+ }
+
+ public static boolean rtTo(File dest, boolean verbose) {
+ try {
+ if (!dest.exists()) {
+ if (verbose) {
+ System.out.println("Copying Java " +
+ System.getProperty("java.version") +
+ " runtime jar to " +
+ dest.getParentFile() +
+ " ...");
+ System.out.flush();
+ }
+ dest.getParentFile().mkdirs();
+ java.nio.file.Files.copy(rt().toPath(), dest.toPath());
+ return true;
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ System.exit(-1);
+ }
+ return false;
+ }
+
+ public static File rtAt(File dir, boolean verbose) {
+ File f = new File(dir, rtJarName);
+ rtTo(f, verbose);
+ return f;
+ }
+
+ public static File rtAt(File dir) {
+ return rtAt(dir, false);
+ }
+}
\ No newline at end of file
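
Export packs the running JDK's jrt:/ module tree into a temporary rt-<java.version>.jar, caching it for the lifetime of the JVM; rtAt/rtTo then copy that jar next to a directory of the caller's choosing. A hedged sketch of the intended call pattern, with a hypothetical cache directory:

import java.io.File

import io.github.retronym.java9rtexport.Export

object ExportRtJar {
  def main(args: Array[String]): Unit = {
    val cacheDir = new File(sys.props("user.home"), ".mill/ammonite") // hypothetical cache location
    val rtJar    = Export.rtAt(cacheDir, true)                        // copies rt-<java.version>.jar on first use
    println(rtJar.toURI.toURL)                                        // ready to append to a URLClassLoader (see ClassLoader.scala below)
  }
}
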
diff --git a/main/api/src/mill/api/AggWrapper.scala b/main/api/src/mill/api/AggWrapper.scala
new file mode 100644
index 00000000..98d46d68
--- /dev/null
+++ b/main/api/src/mill/api/AggWrapper.scala
@@ -0,0 +1,120 @@
+package mill.api
+
+
+
+import scala.collection.mutable
+object Strict extends AggWrapper(true)
+object Loose extends AggWrapper(false)
+
+sealed class AggWrapper(strictUniqueness: Boolean){
+ /**
+    * A collection with enforced uniqueness, fast `contains` checks and
+    * deterministic ordering. Raises an exception if a duplicate is found;
+    * call `toSeq.distinct` first if you explicitly want duplicates to be swallowed.
+ */
+ trait Agg[V] extends TraversableOnce[V]{
+ def contains(v: V): Boolean
+ def items: Iterator[V]
+ def indexed: IndexedSeq[V]
+ def flatMap[T](f: V => TraversableOnce[T]): Agg[T]
+ def map[T](f: V => T): Agg[T]
+ def filter(f: V => Boolean): Agg[V]
+ def withFilter(f: V => Boolean): Agg[V]
+ def collect[T](f: PartialFunction[V, T]): Agg[T]
+ def zipWithIndex: Agg[(V, Int)]
+ def reverse: Agg[V]
+ def zip[T](other: Agg[T]): Agg[(V, T)]
+ def ++[T >: V](other: TraversableOnce[T]): Agg[T]
+ def length: Int
+ }
+
+ object Agg{
+ def empty[V]: Agg[V] = new Agg.Mutable[V]
+ implicit def jsonFormat[T: upickle.default.ReadWriter]: upickle.default.ReadWriter[Agg[T]] =
+ upickle.default.readwriter[Seq[T]].bimap[Agg[T]](
+ _.toList,
+ Agg.from(_)
+ )
+
+ def apply[V](items: V*) = from(items)
+
+ implicit def from[V](items: TraversableOnce[V]): Agg[V] = {
+ val set = new Agg.Mutable[V]()
+ items.foreach(set.append)
+ set
+ }
+
+
+ class Mutable[V]() extends Agg[V]{
+
+ private[this] val set0 = mutable.LinkedHashSet.empty[V]
+ def contains(v: V) = set0.contains(v)
+ def append(v: V) = if (!contains(v)){
+ set0.add(v)
+
+ }else if (strictUniqueness){
+      throw new Exception("Duplicate item inserted into Agg: " + v)
+ }
+ def appendAll(vs: Seq[V]) = vs.foreach(append)
+ def items = set0.iterator
+ def indexed: IndexedSeq[V] = items.toIndexedSeq
+ def set: collection.Set[V] = set0
+
+ def map[T](f: V => T): Agg[T] = {
+ val output = new Agg.Mutable[T]
+ for(i <- items) output.append(f(i))
+ output
+ }
+ def flatMap[T](f: V => TraversableOnce[T]): Agg[T] = {
+ val output = new Agg.Mutable[T]
+ for(i <- items) for(i0 <- f(i)) output.append(i0)
+ output
+ }
+ def filter(f: V => Boolean): Agg[V] = {
+ val output = new Agg.Mutable[V]
+ for(i <- items) if (f(i)) output.append(i)
+ output
+ }
+ def withFilter(f: V => Boolean): Agg[V] = filter(f)
+
+ def collect[T](f: PartialFunction[V, T]) = this.filter(f.isDefinedAt).map(x => f(x))
+
+ def zipWithIndex = {
+ var i = 0
+ this.map{ x =>
+ i += 1
+ (x, i-1)
+ }
+ }
+
+ def reverse = Agg.from(indexed.reverseIterator)
+
+ def zip[T](other: Agg[T]) = Agg.from(items.zip(other.items))
+ def ++[T >: V](other: TraversableOnce[T]) = Agg.from(items ++ other)
+ def length: Int = set0.size
+
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = items.isTraversableAgain
+ def toIterator: Iterator[V] = items.toIterator
+ def toStream: Stream[V] = items.toStream
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: V](xs: Array[B], start: Int,len: Int): Unit = items.copyToArray(xs, start, len)
+ def exists(p: V => Boolean): Boolean = items.exists(p)
+ def find(p: V => Boolean): Option[V] = items.find(p)
+ def forall(p: V => Boolean): Boolean = items.forall(p)
+ def foreach[U](f: V => U): Unit = items.foreach(f)
+ def hasDefiniteSize: Boolean = items.hasDefiniteSize
+ def isEmpty: Boolean = items.isEmpty
+ def seq: scala.collection.TraversableOnce[V] = items
+ def toTraversable: Traversable[V] = items.toTraversable
+
+ override def hashCode() = items.map(_.hashCode()).sum
+ override def equals(other: Any) = other match{
+ case s: Agg[_] => items.sameElements(s.items)
+ case _ => super.equals(other)
+ }
+ override def toString = items.mkString("Agg(", ", ", ")")
+ }
+ }
+}
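
The two singletons expose the same ordered-set API with different duplicate handling: Loose.Agg silently drops repeats, while Strict.Agg throws on insertion of a duplicate. A small sketch of the difference (assuming a Scala 2.12 setup, matching the TraversableOnce-based API above):

import mill.api.{Loose, Strict}

object AggDemo {
  def main(args: Array[String]): Unit = {
    val loose = Loose.Agg(1, 2, 3) ++ Seq(3, 4)   // the duplicate 3 is silently ignored
    assert(loose.toList == List(1, 2, 3, 4))

    val strict = new Strict.Agg.Mutable[Int]
    strict.appendAll(Seq(1, 2))
    // strict.append(2)                           // would throw on the duplicate insertion
  }
}
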
diff --git a/main/api/src/mill/api/ClassLoader.scala b/main/api/src/mill/api/ClassLoader.scala
new file mode 100644
index 00000000..198cbf6c
--- /dev/null
+++ b/main/api/src/mill/api/ClassLoader.scala
@@ -0,0 +1,63 @@
+package mill.api
+
+import java.net.{URL, URLClassLoader}
+
+
+import io.github.retronym.java9rtexport.Export
+
+import scala.util.Try
+
+object ClassLoader {
+ def java9OrAbove = !System.getProperty("java.specification.version").startsWith("1.")
+ def create(urls: Seq[URL],
+ parent: java.lang.ClassLoader)
+ (implicit ctx: Ctx.Home): URLClassLoader = {
+ create(urls, parent, _ => None)
+ }
+ def create(urls: Seq[URL],
+ parent: java.lang.ClassLoader,
+ customFindClass: String => Option[Class[_]])
+ (implicit ctx: Ctx.Home): URLClassLoader = {
+ new URLClassLoader(
+ makeUrls(urls).toArray,
+ refinePlatformParent(parent)
+ ) {
+ override def findClass(name: String): Class[_] = {
+ if (name.startsWith("com.sun.jna")) getClass.getClassLoader.loadClass(name)
+ else customFindClass(name).getOrElse(super.findClass(name))
+ }
+ }
+ }
+
+
+ /**
+   * Returns `ClassLoader.getPlatformClassLoader` on Java 9 and above when the given parent
+   * class loader is null; otherwise returns the given parent class loader unchanged.
+   * More details: https://docs.oracle.com/javase/9/migrate/toc.htm#JSMIG-GUID-A868D0B9-026F-4D46-B979-901834343F9E
+   *
+   * The `ClassLoader.getPlatformClassLoader` call is made via runtime reflection because otherwise
+   * Mill could only be compiled with JDK 9 or above. We don't want to introduce that restriction now.
+ */
+ private def refinePlatformParent(parent: java.lang.ClassLoader): ClassLoader = {
+ if (!java9OrAbove || parent != null) parent
+ else {
+ // Make sure when `parent == null`, we only delegate java.* classes
+ // to the parent getPlatformClassLoader. This is necessary because
+ // in Java 9+, somehow the getPlatformClassLoader ends up with all
+      // sorts of other non-java stuff on its classpath, which is not what
+ // we want for an "isolated" classloader!
+ classOf[ClassLoader]
+ .getMethod("getPlatformClassLoader")
+ .invoke(null)
+ .asInstanceOf[ClassLoader]
+ }
+ }
+
+ private def makeUrls(urls: Seq[URL])(implicit ctx: Ctx.Home): Seq[URL] = {
+ if (java9OrAbove) {
+ urls :+ Export.rtAt(ctx.home.toIO).toURI.toURL
+ } else {
+ urls
+ }
+ }
+}
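
create builds an isolated URLClassLoader: a null parent is replaced by the platform class loader on Java 9+ (so only JDK classes are visible), and the exported rt.jar is appended so code that scans runtime classes keeps working. A minimal sketch, assuming a hypothetical plugin jar on the classpath:

import java.net.URL

import mill.api.Ctx

object IsolatedLoader {
  def main(args: Array[String]): Unit = {
    implicit val homeCtx: Ctx.Home = new Ctx.Home { def home = Ctx.defaultHome }
    val urls = Seq(new URL("file:///tmp/plugin.jar"))   // hypothetical classpath entry
    val cl   = mill.api.ClassLoader.create(urls, null)  // null parent => platform loader on Java 9+
    println(cl.getURLs.toList)                          // includes rt-<version>.jar on Java 9+
  }
}
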
diff --git a/main/api/src/mill/api/Ctx.scala b/main/api/src/mill/api/Ctx.scala
new file mode 100644
index 00000000..567da003
--- /dev/null
+++ b/main/api/src/mill/api/Ctx.scala
@@ -0,0 +1,55 @@
+package mill.api
+
+
+import scala.annotation.{StaticAnnotation, compileTimeOnly}
+import scala.language.implicitConversions
+
+object Ctx{
+ @compileTimeOnly("Target.ctx() can only be used with a T{...} block")
+ @ImplicitStub
+ implicit def taskCtx: Ctx = ???
+
+ object Dest {
+ implicit def pathToCtx(path: os.Path): Dest = new Dest { def dest = path }
+ }
+ trait Dest{
+ def dest: os.Path
+ }
+ trait Log{
+ def log: Logger
+ }
+ trait Home{
+ def home: os.Path
+ }
+ trait Env{
+ def env: Map[String, String]
+ }
+ object Log{
+ implicit def logToCtx(l: Logger): Log = new Log { def log = l }
+ }
+ trait Args{
+ def args: IndexedSeq[_]
+ }
+
+ def defaultHome = os.home / ".mill" / "ammonite"
+
+ class ImplicitStub extends StaticAnnotation
+}
+class Ctx(val args: IndexedSeq[_],
+ dest0: () => os.Path,
+ val log: Logger,
+ val home: os.Path,
+ val env : Map[String, String])
+ extends Ctx.Dest
+ with Ctx.Log
+ with Ctx.Args
+ with Ctx.Home
+ with Ctx.Env {
+
+ def dest = dest0()
+ def length = args.length
+ def apply[T](index: Int): T = {
+ if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
+ else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
+ }
+}
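
Ctx bundles the per-task context (positional args, a lazily evaluated dest directory, logger, home directory and environment) that is normally injected by a T{...} block; the small traits let APIs request only the pieces they need. A sketch of building one by hand, as a test harness might, with hypothetical values:

import mill.api.{Ctx, Logger}

object CtxByHand {
  def make(log: Logger): Ctx = new Ctx(
    args  = IndexedSeq("hello", 42),        // positional task arguments
    dest0 = () => os.pwd / "out" / "dest",  // evaluated lazily via ctx.dest
    log   = log,
    home  = Ctx.defaultHome,
    env   = sys.env
  )

  def use(ctx: Ctx): Unit = {
    val greeting = ctx.apply[String](0)     // typed positional access
    ctx.log.info(s"$greeting, ${ctx.length} args, dest = ${ctx.dest}")
  }
}
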
diff --git a/main/api/src/mill/api/JsonFormatters.scala b/main/api/src/mill/api/JsonFormatters.scala
new file mode 100644
index 00000000..918fa693
--- /dev/null
+++ b/main/api/src/mill/api/JsonFormatters.scala
@@ -0,0 +1,44 @@
+package mill.api
+
+import upickle.default.{ReadWriter => RW}
+import scala.util.matching.Regex
+object JsonFormatters extends JsonFormatters
+trait JsonFormatters {
+ implicit val pathReadWrite: RW[os.Path] = upickle.default.readwriter[String]
+ .bimap[os.Path](
+ _.toString,
+ os.Path(_)
+ )
+
+ implicit val regexReadWrite: RW[Regex] = upickle.default.readwriter[String]
+ .bimap[Regex](
+ _.pattern.toString,
+ _.r
+ )
+
+ implicit val bytesReadWrite: RW[os.Bytes] = upickle.default.readwriter[String]
+ .bimap(
+ o => java.util.Base64.getEncoder.encodeToString(o.array),
+ str => new os.Bytes(java.util.Base64.getDecoder.decode(str))
+ )
+
+
+ implicit lazy val crFormat: RW[os.CommandResult] = upickle.default.macroRW
+
+ implicit val stackTraceRW = upickle.default.readwriter[ujson.Obj].bimap[StackTraceElement](
+ ste => ujson.Obj(
+ "declaringClass" -> ujson.Str(ste.getClassName),
+ "methodName" -> ujson.Str(ste.getMethodName),
+ "fileName" -> ujson.Str(ste.getFileName),
+ "lineNumber" -> ujson.Num(ste.getLineNumber)
+ ),
+ {case json: ujson.Obj =>
+ new StackTraceElement(
+ json("declaringClass").str.toString,
+ json("methodName").str.toString,
+ json("fileName").str.toString,
+ json("lineNumber").num.toInt
+ )
+ }
+ )
+}
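
These ReadWriters let os.Path, Regex, os.Bytes, os.CommandResult and StackTraceElement values flow through uPickle wherever the trait is mixed in or its members are imported. A quick round-trip sketch:

import mill.api.JsonFormatters._
import upickle.default.{read, write}

object JsonRoundTrip {
  def main(args: Array[String]): Unit = {
    val p    = os.pwd / "build.sc"
    val json = write(p)                  // serialized as its absolute-path string
    assert(read[os.Path](json) == p)

    val r = write("ab?c".r)              // regexes round-trip via their pattern string
    assert(read[scala.util.matching.Regex](r).pattern.pattern == "ab?c")
  }
}
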
diff --git a/main/api/src/mill/api/Logger.scala b/main/api/src/mill/api/Logger.scala
new file mode 100644
index 00000000..4ae6e74d
--- /dev/null
+++ b/main/api/src/mill/api/Logger.scala
@@ -0,0 +1,41 @@
+package mill.api
+
+import java.io._
+
+/**
+ * The standard logging interface of the Mill build tool.
+ *
+ * Contains these primary logging methods, in order of increasing importance:
+ *
+ * - `debug` : internal debug messages normally not shown to the user;
+ * mostly useful when debugging issues
+ *
+ * - `ticker`: short-lived logging output where consecutive lines over-write
+ * each other; useful for information which is transient and disposable
+ *
+ * - `info`: miscellaneous logging output which isn't part of the main output
+ * a user is looking for, but useful to provide context on what Mill is doing
+ *
+ * - `error`: logging output which represents problems the user should care
+ * about
+ *
+ *
+ * Also contains the two forwarded stdout and stderr streams, for code executed
+ * by Mill to use directly. Typically these correspond to the process's own stdout
+ * and stderr, but when `show` is used both are forwarded to stderr, and stdout is
+ * reserved for displaying the final `show` output so it can easily be piped.
+ */
+trait Logger {
+ def colored: Boolean
+
+ val errorStream: PrintStream
+ val outputStream: PrintStream
+ val inStream: InputStream
+
+ def info(s: String): Unit
+ def error(s: String): Unit
+ def ticker(s: String): Unit
+ def debug(s: String): Unit
+
+ def close(): Unit = ()
+}
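
Any object implementing these members can be handed to code that expects a Logger. A minimal console-backed sketch, purely for illustration rather than Mill's real logger:

import java.io.{InputStream, PrintStream}

import mill.api.Logger

class ConsoleLogger extends Logger {
  def colored = false

  val outputStream: PrintStream = System.out
  val errorStream: PrintStream  = System.err
  val inStream: InputStream     = System.in

  def info(s: String): Unit   = outputStream.println(s)
  def error(s: String): Unit  = errorStream.println("error: " + s)
  def ticker(s: String): Unit = errorStream.println(s)  // no in-place overwriting in this sketch
  def debug(s: String): Unit  = ()                      // drop debug output by default
}
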
diff --git a/main/api/src/mill/api/PathRef.scala b/main/api/src/mill/api/PathRef.scala
new file mode 100644
index 00000000..24f3627e
--- /dev/null
+++ b/main/api/src/mill/api/PathRef.scala
@@ -0,0 +1,122 @@
+package mill.api
+
+import java.io.IOException
+import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.{FileVisitResult, FileVisitor}
+import java.nio.{file => jnio}
+import java.security.{DigestOutputStream, MessageDigest}
+
+import upickle.default.{ReadWriter => RW}
+
+
+/**
+  * A wrapper around `os.Path` that calculates its hashcode based
+ * on the contents of the filesystem underneath it. Used to ensure filesystem
+ * changes can bust caches which are keyed off hashcodes.
+ */
+case class PathRef(path: os.Path, quick: Boolean, sig: Int){
+ override def hashCode() = sig
+}
+
+object PathRef{
+ def apply(path: os.Path, quick: Boolean = false) = {
+ val sig = {
+ val digest = MessageDigest.getInstance("MD5")
+ val digestOut = new DigestOutputStream(DummyOutputStream, digest)
+ if (os.exists(path)){
+ for((path, attrs) <- os.walk.attrs(path, includeTarget = true, followLinks = true)){
+ digest.update(path.toString.getBytes)
+ if (!attrs.isDir) {
+ if (quick){
+ val value = (attrs.mtime, attrs.size).hashCode()
+ digest.update((value >>> 24).toByte)
+ digest.update((value >>> 16).toByte)
+ digest.update((value >>> 8).toByte)
+ digest.update(value.toByte)
+ } else if (jnio.Files.isReadable(path.toNIO)) {
+ val is = os.read.inputStream(path)
+ IO.stream(is, digestOut)
+ is.close()
+ }
+ }
+ }
+ }
+
+ java.util.Arrays.hashCode(digest.digest())
+
+ }
+ new PathRef(path, quick, sig)
+ }
+
+ implicit def jsonFormatter: RW[PathRef] = upickle.default.readwriter[String].bimap[PathRef](
+ p => {
+ (if (p.quick) "qref" else "ref") + ":" +
+ String.format("%08x", p.sig: Integer) + ":" +
+ p.path.toString()
+ },
+ s => {
+ val Array(prefix, hex, path) = s.split(":", 3)
+ PathRef(
+ os.Path(path),
+ prefix match{ case "qref" => true case "ref" => false},
+ // Parsing to a long and casting to an int is the only way to make
+ // round-trip handling of negative numbers work =(
+ java.lang.Long.parseLong(hex, 16).toInt
+ )
+ }
+ )
+}
+
+
+import java.io.{InputStream, OutputStream}
+
+/**
+ * Misc IO utilities; these should eventually be pushed upstream into
+ * ammonite-ops.
+ */
+object IO {
+ def stream(src: InputStream, dest: OutputStream) = {
+ val buffer = new Array[Byte](4096)
+ while ( {
+ src.read(buffer) match {
+ case -1 => false
+ case n =>
+ dest.write(buffer, 0, n)
+ true
+ }
+ }) ()
+ }
+
+
+ def unpackZip(src: os.Path, dest: os.RelPath = "unpacked")
+ (implicit ctx: Ctx.Dest) = {
+
+ val byteStream = os.read.inputStream(src)
+ val zipStream = new java.util.zip.ZipInputStream(byteStream)
+ while({
+ zipStream.getNextEntry match{
+ case null => false
+ case entry =>
+ if (!entry.isDirectory) {
+ val entryDest = ctx.dest / dest / os.RelPath(entry.getName)
+ os.makeDir.all(entryDest / os.up)
+ val fileOut = new java.io.FileOutputStream(entryDest.toString)
+ IO.stream(zipStream, fileOut)
+ fileOut.close()
+ }
+ zipStream.closeEntry()
+ true
+ }
+ })()
+ PathRef(ctx.dest / dest)
+ }
+}
+
+import java.io.{ByteArrayInputStream, OutputStream}
+
+object DummyInputStream extends ByteArrayInputStream(Array())
+object DummyOutputStream extends java.io.OutputStream{
+ override def write(b: Int) = ()
+ override def write(b: Array[Byte]) = ()
+ override def write(b: Array[Byte], off: Int, len: Int) = ()
+}
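
PathRef's signature is an MD5 over the walked paths plus either full file contents or, in quick mode, just each file's (mtime, size) pair, so it works as a cache key that changes whenever the tree changes. A small sketch with a hypothetical source directory:

import mill.api.PathRef

object PathRefDemo {
  def main(args: Array[String]): Unit = {
    val src    = os.pwd / "src"                       // hypothetical source tree
    val before = PathRef(src)                         // full content hash
    os.write.append(src / "Example.scala", "// touched\n")
    val after  = PathRef(src)
    assert(before.sig != after.sig)                   // content change busts the cache key

    val quick = PathRef(src, quick = true)            // cheaper (mtime, size) based signature
    println(upickle.default.write(quick))             // e.g. "qref:xxxxxxxx:/abs/path/src"
  }
}
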
diff --git a/main/api/src/mill/api/Result.scala b/main/api/src/mill/api/Result.scala
new file mode 100644
index 00000000..b4071a99
--- /dev/null
+++ b/main/api/src/mill/api/Result.scala
@@ -0,0 +1,36 @@
+package mill.api
+
+sealed trait Result[+T]{
+ def map[V](f: T => V): Result[V]
+ def asSuccess: Option[Result.Success[T]] = None
+}
+object Result{
+ implicit def create[T](t: => T): Result[T] = {
+ try Success(t)
+ catch { case e: Throwable => Exception(e, new OuterStack(new java.lang.Exception().getStackTrace)) }
+ }
+ case class Success[+T](value: T) extends Result[T]{
+ def map[V](f: T => V) = Result.Success(f(value))
+ override def asSuccess = Some(this)
+ }
+ case object Skipped extends Result[Nothing]{
+ def map[V](f: Nothing => V) = this
+ }
+ sealed trait Failing[+T] extends Result[T]{
+ def map[V](f: T => V): Failing[V]
+ }
+ case class Failure[T](msg: String, value: Option[T] = None) extends Failing[T]{
+ def map[V](f: T => V) = Result.Failure(msg, value.map(f(_)))
+ }
+ case class Exception(throwable: Throwable, outerStack: OuterStack) extends Failing[Nothing]{
+ def map[V](f: Nothing => V) = this
+ }
+ class OuterStack(val value: Seq[StackTraceElement]){
+ override def hashCode() = value.hashCode()
+
+ override def equals(obj: scala.Any) = obj match{
+ case o: OuterStack => value.equals(o.value)
+ case _ => false
+ }
+ }
+}
\ No newline at end of file
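
The implicit `create` conversion means a task body can simply produce a value or throw: normal evaluation becomes Success, while a thrown exception becomes Result.Exception carrying the enclosing stack. A brief sketch of the everyday cases:

import mill.api.Result

object ResultDemo {
  def main(args: Array[String]): Unit = {
    val ok: Result[Int]   = Result.create(21 * 2)                      // Success(42)
    val oops: Result[Int] = Result.create(sys.error("boom"))           // becomes Result.Exception instead of throwing
    val bad: Result[Int]  = Result.Failure("expected a positive int")  // explicit, message-only failure

    println(ok.map(_ + 1))   // Success(43)
    println(oops.asSuccess)  // None
    println(bad)             // Failure(expected a positive int,None)
  }
}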