summaryrefslogtreecommitdiff
path: root/main
diff options
context:
space:
mode:
authorTobias Roeser <le.petit.fou@web.de>2019-02-19 15:59:19 +0100
committerTobias Roeser <le.petit.fou@web.de>2019-02-19 15:59:19 +0100
commitcbe92711ffb1b89445f4e2653a6197e494542fae (patch)
tree291b1ca88c4db99c6aec66cf14e5ab854dcd70ea /main
parent258523a8ca8f0eba557ee0118da2e6a2050185a1 (diff)
downloadmill-cbe92711ffb1b89445f4e2653a6197e494542fae.tar.gz
mill-cbe92711ffb1b89445f4e2653a6197e494542fae.tar.bz2
mill-cbe92711ffb1b89445f4e2653a6197e494542fae.zip
Documented some mill.api classes
Diffstat (limited to 'main')
-rw-r--r--main/api/src/mill/api/Ctx.scala59
-rw-r--r--main/api/src/mill/api/IO.scala70
-rw-r--r--main/api/src/mill/api/JsonFormatters.scala21
-rw-r--r--main/api/src/mill/api/PathRef.scala96
4 files changed, 146 insertions, 100 deletions
diff --git a/main/api/src/mill/api/Ctx.scala b/main/api/src/mill/api/Ctx.scala
index 567da003..4ccf5a7d 100644
--- a/main/api/src/mill/api/Ctx.scala
+++ b/main/api/src/mill/api/Ctx.scala
@@ -1,53 +1,72 @@
package mill.api
-
import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.implicitConversions
-object Ctx{
- @compileTimeOnly("Target.ctx() can only be used with a T{...} block")
+import os.Path
+
+/**
+ * Provides access to various resources in the context of a currently executing Target.
+ */
+object Ctx {
+ @compileTimeOnly("Target.ctx() / T.ctx() can only be used with a T{...} block")
@ImplicitStub
implicit def taskCtx: Ctx = ???
+ /** Access to the target's destination path. */
+ trait Dest {
+ def dest: os.Path
+ }
object Dest {
implicit def pathToCtx(path: os.Path): Dest = new Dest { def dest = path }
}
- trait Dest{
- def dest: os.Path
- }
- trait Log{
+
+ /** Access to the target's [[Logger]] instance. */
+ trait Log {
def log: Logger
}
- trait Home{
+ object Log {
+ implicit def logToCtx(l: Logger): Log = new Log { def log = l }
+ }
+
+ /** Access to the project's home path. */
+ trait Home {
def home: os.Path
}
- trait Env{
+
+ /** Access to the current system environment settings. */
+ trait Env {
def env: Map[String, String]
}
- object Log{
- implicit def logToCtx(l: Logger): Log = new Log { def log = l }
- }
- trait Args{
+
+ trait Args {
def args: IndexedSeq[_]
}
def defaultHome = os.home / ".mill" / "ammonite"
+ /**
+ * Marker annotation.
+ */
class ImplicitStub extends StaticAnnotation
}
-class Ctx(val args: IndexedSeq[_],
- dest0: () => os.Path,
- val log: Logger,
- val home: os.Path,
- val env : Map[String, String])
+
+
+class Ctx(
+ val args: IndexedSeq[_],
+ dest0: () => os.Path,
+ val log: Logger,
+ val home: os.Path,
+ val env: Map[String, String]
+)
extends Ctx.Dest
with Ctx.Log
with Ctx.Args
with Ctx.Home
with Ctx.Env {
- def dest = dest0()
- def length = args.length
+ def dest: Path = dest0()
+ def length: Int = args.length
def apply[T](index: Int): T = {
if (index >= 0 && index < args.length) args(index).asInstanceOf[T]
else throw new IndexOutOfBoundsException(s"Index $index outside of range 0 - ${args.length}")
diff --git a/main/api/src/mill/api/IO.scala b/main/api/src/mill/api/IO.scala
new file mode 100644
index 00000000..8fa5003d
--- /dev/null
+++ b/main/api/src/mill/api/IO.scala
@@ -0,0 +1,70 @@
+package mill.api
+
+import java.io.{InputStream, OutputStream}
+
+/**
+ * Misc IO utilities, eventually probably should be pushed upstream into
+ * ammonite-ops
+ */
+object IO {
+
+ /**
+ * Pump the data from the `src` stream into the `dest` stream.
+ */
+ def stream(src: InputStream, dest: OutputStream): Unit = {
+ val buffer = new Array[Byte](4096)
+ while ({
+ src.read(buffer) match {
+ case -1 => false
+ case n =>
+ dest.write(buffer, 0, n)
+ true
+ }
+ }) ()
+ }
+
+ /**
+ * Unpacks the given `src` path into the context specific destination directory.
+ * @param src The ZIP file
+ * @param dest The relative output folder under the context-specific destination directory.
+ * @param ctx The target context
+ * @return The [[PathRef]] to the unpacked folder.
+ */
+ def unpackZip(src: os.Path, dest: os.RelPath = "unpacked")(implicit ctx: Ctx.Dest): PathRef = {
+
+ val byteStream = os.read.inputStream(src)
+ val zipStream = new java.util.zip.ZipInputStream(byteStream)
+ while ({
+ zipStream.getNextEntry match {
+ case null => false
+ case entry =>
+ if (!entry.isDirectory) {
+ val entryDest = ctx.dest / dest / os.RelPath(entry.getName)
+ os.makeDir.all(entryDest / os.up)
+ val fileOut = new java.io.FileOutputStream(entryDest.toString)
+ IO.stream(zipStream, fileOut)
+ fileOut.close()
+ }
+ zipStream.closeEntry()
+ true
+ }
+ }) ()
+ PathRef(ctx.dest / dest)
+ }
+}
+
+import java.io.ByteArrayInputStream
+
+/**
+ * A dummy input stream containing an empty byte array.
+ */
+object DummyInputStream extends ByteArrayInputStream(Array())
+
+/**
+ * A dummy output stream that does nothing with what it consumes (think of it as `/dev/null`).
+ */
+object DummyOutputStream extends java.io.OutputStream {
+ override def write(b: Int): Unit = ()
+ override def write(b: Array[Byte]): Unit = ()
+ override def write(b: Array[Byte], off: Int, len: Int): Unit = ()
+}
diff --git a/main/api/src/mill/api/JsonFormatters.scala b/main/api/src/mill/api/JsonFormatters.scala
index 918fa693..23d7fad0 100644
--- a/main/api/src/mill/api/JsonFormatters.scala
+++ b/main/api/src/mill/api/JsonFormatters.scala
@@ -2,7 +2,12 @@ package mill.api
import upickle.default.{ReadWriter => RW}
import scala.util.matching.Regex
+
object JsonFormatters extends JsonFormatters
+
+/**
+ * Defines various default JSON formatters used in mill.
+ */
trait JsonFormatters {
implicit val pathReadWrite: RW[os.Path] = upickle.default.readwriter[String]
.bimap[os.Path](
@@ -22,7 +27,6 @@ trait JsonFormatters {
str => new os.Bytes(java.util.Base64.getDecoder.decode(str))
)
-
implicit lazy val crFormat: RW[os.CommandResult] = upickle.default.macroRW
implicit val stackTraceRW = upickle.default.readwriter[ujson.Obj].bimap[StackTraceElement](
@@ -32,13 +36,14 @@ trait JsonFormatters {
"fileName" -> ujson.Str(ste.getFileName),
"lineNumber" -> ujson.Num(ste.getLineNumber)
),
- {case json: ujson.Obj =>
- new StackTraceElement(
- json("declaringClass").str.toString,
- json("methodName").str.toString,
- json("fileName").str.toString,
- json("lineNumber").num.toInt
- )
+ {
+ case json: ujson.Obj =>
+ new StackTraceElement(
+ json("declaringClass").str.toString,
+ json("methodName").str.toString,
+ json("fileName").str.toString,
+ json("lineNumber").num.toInt
+ )
}
)
}
diff --git a/main/api/src/mill/api/PathRef.scala b/main/api/src/mill/api/PathRef.scala
index 24f3627e..29b2ee3c 100644
--- a/main/api/src/mill/api/PathRef.scala
+++ b/main/api/src/mill/api/PathRef.scala
@@ -1,33 +1,36 @@
package mill.api
-import java.io.IOException
-import java.nio.file.attribute.BasicFileAttributes
-import java.nio.file.{FileVisitResult, FileVisitor}
import java.nio.{file => jnio}
import java.security.{DigestOutputStream, MessageDigest}
import upickle.default.{ReadWriter => RW}
-
/**
- * A wrapper around `os.Path` that calculates it's hashcode based
- * on the contents of the filesystem underneath it. Used to ensure filesystem
- * changes can bust caches which are keyed off hashcodes.
- */
-case class PathRef(path: os.Path, quick: Boolean, sig: Int){
- override def hashCode() = sig
+ * A wrapper around `os.Path` that calculates its hashcode based
+ * on the contents of the filesystem underneath it. Used to ensure filesystem
+ * changes can bust caches which are keyed off hashcodes.
+ */
+case class PathRef(path: os.Path, quick: Boolean, sig: Int) {
+ override def hashCode(): Int = sig
}
-object PathRef{
- def apply(path: os.Path, quick: Boolean = false) = {
+object PathRef {
+ /**
+ * Create a [[PathRef]] by recursively digesting the content of a given `path`.
+ * @param path The digested path.
+ * @param quick If `true` the digest is only based on some file attributes (like mtime and size).
+ * If `false` the digest is created of the files content.
+ * @return A [[PathRef]] carrying the computed content signature.
+ */
+ def apply(path: os.Path, quick: Boolean = false): PathRef = {
val sig = {
val digest = MessageDigest.getInstance("MD5")
val digestOut = new DigestOutputStream(DummyOutputStream, digest)
- if (os.exists(path)){
- for((path, attrs) <- os.walk.attrs(path, includeTarget = true, followLinks = true)){
+ if (os.exists(path)) {
+ for ((path, attrs) <- os.walk.attrs(path, includeTarget = true, followLinks = true)) {
digest.update(path.toString.getBytes)
if (!attrs.isDir) {
- if (quick){
+ if (quick) {
val value = (attrs.mtime, attrs.size).hashCode()
digest.update((value >>> 24).toByte)
digest.update((value >>> 16).toByte)
@@ -48,17 +51,20 @@ object PathRef{
new PathRef(path, quick, sig)
}
+ /**
+ * Default JSON formatter for [[PathRef]].
+ */
implicit def jsonFormatter: RW[PathRef] = upickle.default.readwriter[String].bimap[PathRef](
p => {
(if (p.quick) "qref" else "ref") + ":" +
- String.format("%08x", p.sig: Integer) + ":" +
- p.path.toString()
+ String.format("%08x", p.sig: Integer) + ":" +
+ p.path.toString()
},
s => {
val Array(prefix, hex, path) = s.split(":", 3)
PathRef(
os.Path(path),
- prefix match{ case "qref" => true case "ref" => false},
+ prefix match { case "qref" => true case "ref" => false },
// Parsing to a long and casting to an int is the only way to make
// round-trip handling of negative numbers work =(
java.lang.Long.parseLong(hex, 16).toInt
@@ -66,57 +72,3 @@ object PathRef{
}
)
}
-
-
-import java.io.{InputStream, OutputStream}
-
-/**
- * Misc IO utilities, eventually probably should be pushed upstream into
- * ammonite-ops
- */
-object IO {
- def stream(src: InputStream, dest: OutputStream) = {
- val buffer = new Array[Byte](4096)
- while ( {
- src.read(buffer) match {
- case -1 => false
- case n =>
- dest.write(buffer, 0, n)
- true
- }
- }) ()
- }
-
-
- def unpackZip(src: os.Path, dest: os.RelPath = "unpacked")
- (implicit ctx: Ctx.Dest) = {
-
- val byteStream = os.read.inputStream(src)
- val zipStream = new java.util.zip.ZipInputStream(byteStream)
- while({
- zipStream.getNextEntry match{
- case null => false
- case entry =>
- if (!entry.isDirectory) {
- val entryDest = ctx.dest / dest / os.RelPath(entry.getName)
- os.makeDir.all(entryDest / os.up)
- val fileOut = new java.io.FileOutputStream(entryDest.toString)
- IO.stream(zipStream, fileOut)
- fileOut.close()
- }
- zipStream.closeEntry()
- true
- }
- })()
- PathRef(ctx.dest / dest)
- }
-}
-
-import java.io.{ByteArrayInputStream, OutputStream}
-
-object DummyInputStream extends ByteArrayInputStream(Array())
-object DummyOutputStream extends java.io.OutputStream{
- override def write(b: Int) = ()
- override def write(b: Array[Byte]) = ()
- override def write(b: Array[Byte], off: Int, len: Int) = ()
-}