summaryrefslogtreecommitdiff
path: root/scalalib/src/test/resource/better-files/core
diff options
context:
space:
mode:
authorLi Haoyi <haoyi.sg@gmail.com>2017-12-31 22:56:45 -0800
committerLi Haoyi <haoyi.sg@gmail.com>2018-01-01 10:10:19 -0800
commitcf5fb5fdfd477e0bb4ffa0e4fec3a8ec01bf5cf1 (patch)
treef3897463b6f55019f2bbd59ba5cff73cf0fb571f /scalalib/src/test/resource/better-files/core
parent6996c01a391cb9aaa27268dd1f0cf0a1749ade21 (diff)
downloadmill-cf5fb5fdfd477e0bb4ffa0e4fec3a8ec01bf5cf1.tar.gz
mill-cf5fb5fdfd477e0bb4ffa0e4fec3a8ec01bf5cf1.tar.bz2
mill-cf5fb5fdfd477e0bb4ffa0e4fec3a8ec01bf5cf1.zip
Split Acyclic/Jawn/BetterFiles tests into their own `integration/` test suite.
Those tests now download a snapshot of the relevant git repo rather than vendoring the files, and use a bare `build.sc` instead of having the build object be included in the test classpath. Tests pass using `sbt integration/test`, but `mill integration.test` still doesn't work
Diffstat (limited to 'scalalib/src/test/resource/better-files/core')
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala155
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala1257
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala72
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala324
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala91
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala83
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala183
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala23
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala100
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala74
-rw-r--r--scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala66
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala15
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala61
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala549
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala360
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala250
-rw-r--r--scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala79
17 files changed, 0 insertions, 3742 deletions
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala
deleted file mode 100644
index 3bacd91d..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala
+++ /dev/null
@@ -1,155 +0,0 @@
-package better.files
-
-import java.nio.charset.Charset
-import java.nio.file.attribute.{PosixFileAttributes, PosixFilePermission, PosixFilePermissions}
-import java.util.zip.Deflater
-
-import scala.collection.JavaConverters._
-
-/**
- * Do file ops using a UNIX command line DSL
- */
-object Dsl {
- def ~ : File =
- File.home
-
- def pwd: File =
- File.currentWorkingDirectory
-
- def cwd: File =
- pwd
-
- val `..`: File => File =
- _.parent
-
- val `.`: File => File =
- identity
-
- /**
- * Adds some symbolic operations to file
- * @param file
- */
- implicit class SymbolicOperations(val file: File) {
- /**
- * Allows navigation up e.g. file / .. / ..
- *
- * @param f
- * @return
- */
- def /(f: File => File): File =
- f(file)
-
- def <<(line: String)(implicit charset: Charset = defaultCharset): file.type =
- file.appendLines(line)(charset)
-
- def >>:(line: String)(implicit charset: Charset = defaultCharset): file.type =
- file.appendLines(line)(charset)
-
- def <(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type =
- file.write(text)(openOptions, charset)
-
- def `>:`(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type =
- file.write(text)(openOptions, charset)
-
- def `!`(implicit charset: Charset = defaultCharset): String =
- file.contentAsString(charset)
-
- def `===`(that: File): Boolean =
- file.isSameContentAs(that)
-
- def !==(that: File): Boolean =
- !(file === that)
- }
-
- def cp(from: File, to: File): File = {
- if (to.isDirectory) {
- from.copyToDirectory(to)
- } else {
- from.copyTo(to, overwrite = true)
- }
- }
-
- def mv(from: File, to: File): File = {
- if (to.isDirectory) {
- from.moveToDirectory(to)
- } else {
- from.moveTo(to, overwrite = true)
- }
- }
-
- def rm(file: File): File =
- file.delete(swallowIOExceptions = true)
-
- def del(file: File): File =
- rm(file)
-
- def ln(file1: File, file2: File): File =
- file1.linkTo(file2)
-
- def ln_s(file1: File, file2: File): File =
- file1.symbolicLinkTo(file2)
-
- def cat(files: File*): Seq[Iterator[Byte]] =
- files.map(_.bytes)
-
- def ls(file: File): Files =
- file.list
-
- def dir(file: File): Files =
- ls(file)
-
- def ls_r(file: File): Files =
- file.listRecursively
-
- def touch(file: File): File =
- file.touch()
-
- def mkdir(file: File): File =
- file.createDirectory()
-
- def md5(file: File): String =
- file.md5
-
- def sha1(file: File): String =
- file.sha1
-
- def sha256(file: File): String =
- file.sha256
-
- def sha512(file: File): String =
- file.sha512
-
- def mkdirs(file: File): File =
- file.createDirectories()
-
- def chown(owner: String, file: File): File =
- file.setOwner(owner)
-
- def chgrp(group: String, file: File): File =
- file.setGroup(group)
-
- /**
- * Update permission of this file
- *
- * @param permissions Must be 9 character POSIX permission representation e.g. "rwxr-x---"
- * @param file
- * @return file
- */
- def chmod(permissions: String, file: File): File =
- file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet)
-
- def chmod_+(permission: PosixFilePermission, file: File): File =
- file.addPermission(permission)
-
- def chmod_-(permission: PosixFilePermission, file: File): File =
- file.removePermission(permission)
-
- def stat(file: File): PosixFileAttributes =
- file.posixAttributes
-
- def unzip(zipFile: File)(destination: File)(implicit charset: Charset = defaultCharset): destination.type =
- zipFile.unzipTo(destination)(charset)
-
- def zip(files: File*)(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type =
- destination.zipIn(files.iterator, compressionLevel)(charset)
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala
deleted file mode 100644
index eb11cd93..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala
+++ /dev/null
@@ -1,1257 +0,0 @@
-package better.files
-
-import java.io.{File => JFile, _}
-import java.net.{URI, URL}
-import java.nio.charset.Charset
-import java.nio.channels._
-import java.nio.file._
-import java.nio.file.attribute._
-import java.security.{DigestInputStream, MessageDigest}
-import java.time.Instant
-import java.util.regex.Pattern
-import java.util.zip._
-import javax.xml.bind.DatatypeConverter
-
-import scala.collection.JavaConverters._
-import scala.concurrent.ExecutionContext
-import scala.util.Properties
-import scala.util.matching.Regex
-
-/**
- * Scala wrapper around java.nio.files.Path
- */
-class File private(val path: Path)(implicit val fileSystem: FileSystem = path.getFileSystem) {
- //TODO: LinkOption?
-
- def pathAsString: String =
- path.toString
-
- def toJava: JFile =
- new JFile(path.toAbsolutePath.toString)
-
- /**
- * Name of file
- * Certain files may not have a name e.g. root directory - returns empty string in that case
- *
- * @return
- */
- def name: String =
- nameOption.getOrElse("")
-
- /**
- * Certain files may not have a name e.g. root directory - returns None in that case
- *
- * @return
- */
- def nameOption: Option[String] =
- Option(path.getFileName).map(_.toString)
-
- def root: File =
- path.getRoot
-
- def nameWithoutExtension: String =
- nameWithoutExtension(includeAll = true)
-
- /**
- * @param includeAll
- * For files with multiple extensions e.g. "bundle.tar.gz"
- * nameWithoutExtension(includeAll = true) returns "bundle"
- * nameWithoutExtension(includeAll = false) returns "bundle.tar"
- * @return
- */
- def nameWithoutExtension(includeAll: Boolean): String =
- if (hasExtension) name.substring(0, indexOfExtension(includeAll)) else name
-
- /**
- * @return extension (including the dot) of this file if it is a regular file and has an extension, else None
- */
- def extension: Option[String] =
- extension()
-
- /**
- * @param includeDot whether the dot should be included in the extension or not
- * @param includeAll whether all extension tokens should be included, or just the last one e.g. for bundle.tar.gz should it be .tar.gz or .gz
- * @param toLowerCase to lowercase the extension or not e.g. foo.HTML should have .html or .HTML
- * @return extension of this file if it is a regular file and has an extension, else None
- */
- def extension(includeDot: Boolean = true, includeAll: Boolean = false, toLowerCase: Boolean = true): Option[String] =
- when(hasExtension) {
- val dot = indexOfExtension(includeAll)
- val index = if (includeDot) dot else dot + 1
- val extension = name.substring(index)
- if (toLowerCase) extension.toLowerCase else extension
- }
-
- private[this] def indexOfExtension(includeAll: Boolean) =
- if (includeAll) name.indexOf(".") else name.lastIndexOf(".")
-
- /**
- * Returns the extension if file is a regular file
- * If file is unreadable or does not exist, it is assumed to be not a regular file
- * See: https://github.com/pathikrit/better-files/issues/89
- *
- * @return
- */
- def hasExtension: Boolean =
- (isRegularFile || notExists) && name.contains(".")
-
- /**
- * Changes the file-extension by renaming this file; if file does not have an extension, it adds the extension
- * Example usage file"foo.java".changeExtensionTo(".scala")
- */
- def changeExtensionTo(extension: String): File =
- if (isRegularFile) renameTo(s"$nameWithoutExtension$extension") else this
-
- def contentType: Option[String] =
- Option(Files.probeContentType(path))
-
- /**
- * Return parent of this file
- * NOTE: This API returns null if this file is the root;
- * please use parentOption if you expect to handle roots
- *
- * @see parentOption
- * @return
- */
- def parent: File =
- parentOption.orNull
-
- /**
- *
- * @return Some(parent) of this file or None if this is the root and thus has no parent
- */
- def parentOption: Option[File] =
- Option(path.getParent).map(File.apply)
-
- def /(child: String): File =
- path.resolve(child)
-
- def /(child: Symbol): File =
- this / child.name
-
- def createChild(child: String, asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): File =
- (this / child).createIfNotExists(asDirectory, createParents)(attributes, linkOptions)
-
- /**
- * Create this file. If it exists, don't do anything
- *
- * @param asDirectory If you want this file to be created as a directory instead, set this to true (false by default)
- * @param createParents If you also want all the parents to be created from root to this file (false by default)
- * @param attributes
- * @param linkOptions
- * @return
- */
- def createIfNotExists(asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
- if (exists(linkOptions)) {
- this
- } else if (asDirectory) {
- createDirectories()(attributes)
- } else {
- if (createParents) parent.createDirectories()(attributes)
- try {
- createFile()(attributes)
- } catch {
- case _: FileAlreadyExistsException if isRegularFile(linkOptions) => // We don't really care if it exists already
- }
- this
- }
- }
-
- /**
- * Create this file
- *
- * @param attributes
- * @return
- */
- def createFile()(implicit attributes: File.Attributes = File.Attributes.default): this.type = {
- Files.createFile(path, attributes: _*)
- this
- }
-
- def exists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- Files.exists(path, linkOptions: _*)
-
- def notExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- Files.notExists(path, linkOptions: _*)
-
- def sibling(name: String): File =
- path.resolveSibling(name)
-
- def isSiblingOf(sibling: File): Boolean =
- sibling.isChildOf(parent)
-
- def siblings: Files =
- parent.list.filterNot(_ == this)
-
- def isChildOf(parent: File): Boolean =
- parent.isParentOf(this)
-
- /**
- * Check if this directory contains this file
- *
- * @param file
- * @return true if this is a directory and it contains this file
- */
- def contains(file: File): Boolean =
- isDirectory && (file.path startsWith path)
-
- def isParentOf(child: File): Boolean =
- contains(child)
-
- def bytes: Iterator[Byte] =
- newInputStream.buffered.bytes //TODO: ManagedResource here?
-
- def loadBytes: Array[Byte] =
- Files.readAllBytes(path)
-
- def byteArray: Array[Byte] =
- loadBytes
-
- /**
- * Create this directory
- *
- * @param attributes
- * @return
- */
- def createDirectory()(implicit attributes: File.Attributes = File.Attributes.default): this.type = {
- Files.createDirectory(path, attributes: _*)
- this
- }
-
- /**
- * Create this directory and all its parents
- * Unlike the JDK, this by default sanely handles the JDK-8130464 bug
- * If you want default Java behaviour, use File.LinkOptions.noFollow
- *
- * @param attributes
- * @return
- */
- def createDirectories()(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
- try {
- Files.createDirectories(path, attributes: _*)
- } catch {
- case _: FileAlreadyExistsException if isDirectory(linkOptions) => // work around for JDK-8130464
- }
- this
- }
-
- def chars(implicit charset: Charset = defaultCharset): Iterator[Char] =
- newBufferedReader(charset).chars //TODO: ManagedResource here?
-
- /**
- * Load all lines from this file
- * Note: Large files may cause an OutOfMemory in which case, use the streaming version @see lineIterator
- *
- * @param charset
- * @return all lines in this file
- */
- def lines(implicit charset: Charset = defaultCharset): Traversable[String] =
- Files.readAllLines(path, charset).asScala
-
- /**
- * Iterate over lines in a file (auto-close stream on complete)
- * NOTE: If the iteration is partial, it may leave a stream open
- * If you want partial iteration use @see lines()
- *
- * @param charset
- * @return
- */
- def lineIterator(implicit charset: Charset = defaultCharset): Iterator[String] =
- Files.lines(path, charset).toAutoClosedIterator
-
- def tokens(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Iterator[String] =
- newBufferedReader(charset).tokens(splitter)
-
- def contentAsString(implicit charset: Charset = defaultCharset): String =
- new String(byteArray, charset)
-
- def printLines(lines: Iterator[Any])(implicit openOptions: File.OpenOptions = File.OpenOptions.append): this.type = {
- for {
- pw <- printWriter()(openOptions)
- line <- lines
- } pw.println(line)
- this
- }
-
- /**
- * For large number of lines that may not fit in memory, use printLines
- *
- * @param lines
- * @param charset
- * @return
- */
- def appendLines(lines: String*)(implicit charset: Charset = defaultCharset): this.type = {
- Files.write(path, lines.asJava, charset, File.OpenOptions.append: _*)
- this
- }
-
- def appendLine(line: String = "")(implicit charset: Charset = defaultCharset): this.type =
- appendLines(line)(charset)
-
- def append(text: String)(implicit charset: Charset = defaultCharset): this.type =
- appendByteArray(text.getBytes(charset))
-
- def appendText(text: String)(implicit charset: Charset = defaultCharset): this.type =
- append(text)(charset)
-
- def appendByteArray(bytes: Array[Byte]): this.type = {
- Files.write(path, bytes, File.OpenOptions.append: _*)
- this
- }
-
- def appendBytes(bytes: Iterator[Byte]): this.type =
- writeBytes(bytes)(openOptions = File.OpenOptions.append)
-
- /**
- * Write byte array to file. For large contents consider using the writeBytes
- *
- * @param bytes
- * @return this
- */
- def writeByteArray(bytes: Array[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
- Files.write(path, bytes, openOptions: _*)
- this
- }
-
- def writeBytes(bytes: Iterator[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
- outputStream(openOptions).foreach(_.buffered write bytes)
- this
- }
-
- def write(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
- writeByteArray(text.getBytes(charset))(openOptions)
-
- def writeText(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
- write(text)(openOptions, charset)
-
- def overwrite(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
- write(text)(openOptions, charset)
-
- def newRandomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): RandomAccessFile =
- new RandomAccessFile(toJava, mode.value)
-
- def randomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): ManagedResource[RandomAccessFile] =
- newRandomAccess(mode).autoClosed //TODO: Mode enum?
-
- def newBufferedReader(implicit charset: Charset = defaultCharset): BufferedReader =
- Files.newBufferedReader(path, charset)
-
- def bufferedReader(implicit charset: Charset = defaultCharset): ManagedResource[BufferedReader] =
- newBufferedReader(charset).autoClosed
-
- def newBufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): BufferedWriter =
- Files.newBufferedWriter(path, charset, openOptions: _*)
-
- def bufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[BufferedWriter] =
- newBufferedWriter(charset, openOptions).autoClosed
-
- def newFileReader: FileReader =
- new FileReader(toJava)
-
- def fileReader: ManagedResource[FileReader] =
- newFileReader.autoClosed
-
- def newFileWriter(append: Boolean = false): FileWriter =
- new FileWriter(toJava, append)
-
- def fileWriter(append: Boolean = false): ManagedResource[FileWriter] =
- newFileWriter(append).autoClosed
-
- def newPrintWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): PrintWriter =
- new PrintWriter(newOutputStream(openOptions), autoFlush)
-
- def printWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[PrintWriter] =
- newPrintWriter(autoFlush)(openOptions).autoClosed
-
- def newInputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): InputStream =
- Files.newInputStream(path, openOptions: _*)
-
- def inputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[InputStream] =
- newInputStream(openOptions).autoClosed
-
- //TODO: Move this to inputstream implicit
- def newDigestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): DigestInputStream =
- new DigestInputStream(newInputStream(openOptions), digest)
-
- def digestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[DigestInputStream] =
- newDigestInputStream(digest)(openOptions).autoClosed
-
- def newScanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Scanner =
- Scanner(newBufferedReader(charset), splitter)
-
- def scanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): ManagedResource[Scanner] =
- newScanner(splitter)(charset).autoClosed
-
- def newOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): OutputStream =
- Files.newOutputStream(path, openOptions: _*)
-
- def outputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[OutputStream] =
- newOutputStream(openOptions).autoClosed
-
- def newZipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ZipOutputStream =
- new ZipOutputStream(newOutputStream(openOptions), charset)
-
- def zipInputStream(implicit charset: Charset = defaultCharset): ManagedResource[ZipInputStream] =
- newZipInputStream(charset).autoClosed
-
- def newZipInputStream(implicit charset: Charset = defaultCharset): ZipInputStream =
- new ZipInputStream(new FileInputStream(toJava).buffered, charset)
-
- def zipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ManagedResource[ZipOutputStream] =
- newZipOutputStream(openOptions, charset).autoClosed
-
- def newFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): FileChannel =
- FileChannel.open(path, openOptions.toSet.asJava, attributes: _*)
-
- def fileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): ManagedResource[FileChannel] =
- newFileChannel(openOptions, attributes).autoClosed
-
- def newAsynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): AsynchronousFileChannel =
- AsynchronousFileChannel.open(path, openOptions: _*)
-
- def asynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[AsynchronousFileChannel] =
- newAsynchronousFileChannel(openOptions).autoClosed
-
- def newWatchService: WatchService =
- fileSystem.newWatchService()
-
- def watchService: ManagedResource[WatchService] =
- newWatchService.autoClosed
-
- /**
- * Serialize an object using Java's serializer into this file
- *
- * @param obj
- * @return
- */
- def writeSerialized(obj: Serializable)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
- createIfNotExists().outputStream(openOptions).foreach(_.asObjectOutputStream().serialize(obj).flush())
- this
- }
-
- /**
- * Deserialize an object using Java's default serialization from this file
- *
- * @return
- */
- def readDeserialized[A](implicit openOptions: File.OpenOptions = File.OpenOptions.default): A =
- inputStream(openOptions).map(_.asObjectInputStream().deserialize[A])
-
- def register(service: WatchService, events: File.Events = File.Events.all): this.type = {
- path.register(service, events.toArray)
- this
- }
-
- def digest(algorithm: MessageDigest): Array[Byte] = {
- listRelativePaths.toSeq.sorted foreach { relativePath =>
- val file: File = path.resolve(relativePath)
- if(file.isDirectory) {
- algorithm.update(relativePath.toString.getBytes)
- } else {
- file.digestInputStream(algorithm).foreach(_.pipeTo(NullOutputStream))
- }
- }
- algorithm.digest()
- }
-
- /**
- * Set a file attribute e.g. file("dos:system") = true
- *
- * @param attribute
- * @param value
- * @param linkOptions
- * @return
- */
- def update(attribute: String, value: Any)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
- Files.setAttribute(path, attribute, value, linkOptions : _*)
- this
- }
-
- /**
- * @return checksum of this file (or directory) in hex format
- */
- def checksum(algorithm: MessageDigest): String =
- DatatypeConverter.printHexBinary(digest(algorithm))
-
- def md5: String =
- checksum("MD5")
-
- def sha1: String =
- checksum("SHA-1")
-
- def sha256: String =
- checksum("SHA-256")
-
- def sha512: String =
- checksum("SHA-512")
-
- /**
- * @return Some(target) if this is a symbolic link (to target) else None
- */
- def symbolicLink: Option[File] =
- when(isSymbolicLink)(new File(Files.readSymbolicLink(path)))
-
- /**
- * @return true if this file (or the file found by following symlink) is a directory
- */
- def isDirectory(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- Files.isDirectory(path, linkOptions: _*)
-
- /**
- * @return true if this file (or the file found by following symlink) is a regular file
- */
- def isRegularFile(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- Files.isRegularFile(path, linkOptions: _*)
-
- def isSymbolicLink: Boolean =
- Files.isSymbolicLink(path)
-
- def isHidden: Boolean =
- Files.isHidden(path)
-
- /**
- * Check if a file is locked.
- *
- * @param mode The random access mode.
- * @param position The position at which the locked region is to start; must be non-negative.
- * @param size The size of the locked region; must be non-negative, and the sum position + size must be non-negative.
- * @param isShared true to request a shared lock, false to request an exclusive lock.
- * @return True if the file is locked, false otherwise.
- */
- def isLocked(mode: File.RandomAccessMode, position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- try {
- usingLock(mode) {channel =>
- channel.tryLock(position, size, isShared).release()
- false
- }
- } catch {
- case _: OverlappingFileLockException | _: NonWritableChannelException | _: NonReadableChannelException => true
-
- // Windows throws a `FileNotFoundException` if the file is locked (see: https://github.com/pathikrit/better-files/pull/194)
- case _: FileNotFoundException if verifiedExists(linkOptions).getOrElse(true) => true
- }
-
- /**
- * @see https://docs.oracle.com/javase/tutorial/essential/io/check.html
- * @see https://stackoverflow.com/questions/30520179/why-does-file-exists-return-true-even-though-files-exists-in-the-nio-files
- *
- * @return
- * Some(true) if file is guaranteed to exist
- * Some(false) if file is guaranteed to not exist
- * None if the status is unknown e.g. if file is unreadable
- */
- def verifiedExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Option[Boolean] = {
- if (exists(linkOptions)) {
- Some(true)
- } else if(notExists(linkOptions)) {
- Some(false)
- } else {
- None
- }
- }
-
- def usingLock[U](mode: File.RandomAccessMode)(f: FileChannel => U): U =
- newRandomAccess(mode).getChannel.autoClosed.map(f)
-
- def isReadLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) =
- isLocked(File.RandomAccessMode.read, position, size, isShared)
-
- def isWriteLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) =
- isLocked(File.RandomAccessMode.readWrite, position, size, isShared)
-
- def list: Files =
- Files.list(path)
-
- def children: Files = list
-
- def entries: Files = list
-
- def listRecursively(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
- walk()(visitOptions).filterNot(isSamePathAs)
-
- /**
- * Walk the directory tree recursively up to maxDepth
- *
- * @param maxDepth
- * @return List of children in BFS maxDepth level deep (includes self since self is at depth = 0)
- */
- def walk(maxDepth: Int = Int.MaxValue)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
- Files.walk(path, maxDepth, visitOptions: _*) //TODO: that ignores I/O errors?
-
- def pathMatcher(syntax: File.PathMatcherSyntax, includePath: Boolean)(pattern: String): PathMatcher =
- syntax(this, pattern, includePath)
-
- /**
- * Util to glob from this file's path
- *
- *
- * @param includePath If true, we don't need to set path glob patterns
- * e.g. instead of **//*.txt we just use *.txt
- * @return Set of files that matched
- */
- //TODO: Consider removing `syntax` as implicit. You often want to control this on a per method call basis
- def glob(pattern: String, includePath: Boolean = true)(implicit syntax: File.PathMatcherSyntax = File.PathMatcherSyntax.default, visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
- pathMatcher(syntax, includePath)(pattern).matches(this)(visitOptions)
-
- /**
- * Util to match from this file's path using Regex
- *
- * @param includePath If true, we don't need to set path glob patterns
- * e.g. instead of **//*.txt we just use *.txt
- * @see glob
- * @return Set of files that matched
- */
- def globRegex(pattern: Regex, includePath: Boolean = true)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
- glob(pattern.regex, includePath)(syntax = File.PathMatcherSyntax.regex, visitOptions = visitOptions)
-
- /**
- * More Scala friendly way of doing Files.walk
- * Note: This is lazy (returns an Iterator) and won't evaluate till we reify the iterator (e.g. using .toList)
- *
- * @param matchFilter
- * @return
- */
- def collectChildren(matchFilter: File => Boolean)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
- walk()(visitOptions).filter(matchFilter)
-
- def uri: URI =
- path.toUri
-
- def url: URL =
- uri.toURL
-
- /**
- * @return file size (for directories, return size of the directory) in bytes
- */
- def size(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Long =
- walk()(visitOptions).map(f => Files.size(f.path)).sum
-
- def permissions(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Set[PosixFilePermission] =
- Files.getPosixFilePermissions(path, linkOptions: _*).asScala.toSet
-
- def permissionsAsString(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
- PosixFilePermissions.toString(permissions(linkOptions).asJava)
-
- def setPermissions(permissions: Set[PosixFilePermission]): this.type = {
- Files.setPosixFilePermissions(path, permissions.asJava)
- this
- }
-
- def addPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type =
- setPermissions(permissions(linkOptions) + permission)
-
- def removePermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type =
- setPermissions(permissions(linkOptions) - permission)
-
- /**
- * test if file has this permission
- */
- def testPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- permissions(linkOptions)(permission)
-
- def isOwnerReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OWNER_READ)(linkOptions)
-
- def isOwnerWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OWNER_WRITE)(linkOptions)
-
- def isOwnerExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OWNER_EXECUTE)(linkOptions)
-
- def isGroupReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.GROUP_READ)(linkOptions)
-
- def isGroupWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.GROUP_WRITE)(linkOptions)
-
- def isGroupExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.GROUP_EXECUTE)(linkOptions)
-
- def isOthersReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OTHERS_READ)(linkOptions)
-
- def isOthersWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OTHERS_WRITE)(linkOptions)
-
- def isOthersExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- testPermission(PosixFilePermission.OTHERS_EXECUTE)(linkOptions)
-
- /**
- * This differs from the above as this checks if the JVM can read this file even though the OS cannot in certain platforms
- *
- * @see isOwnerReadable
- * @return
- */
- def isReadable: Boolean =
- toJava.canRead
-
- def isWriteable: Boolean =
- toJava.canWrite
-
- def isExecutable: Boolean =
- toJava.canExecute
-
- def attributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): BasicFileAttributes =
- Files.readAttributes(path, classOf[BasicFileAttributes], linkOptions: _*)
-
- def posixAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): PosixFileAttributes =
- Files.readAttributes(path, classOf[PosixFileAttributes], linkOptions: _*)
-
- def dosAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): DosFileAttributes =
- Files.readAttributes(path, classOf[DosFileAttributes], linkOptions: _*)
-
- def owner(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): UserPrincipal =
- Files.getOwner(path, linkOptions: _*)
-
- def ownerName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
- owner(linkOptions).getName
-
- def group(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): GroupPrincipal =
- posixAttributes(linkOptions).group()
-
- def groupName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
- group(linkOptions).getName
-
- def setOwner(owner: String): this.type = {
- Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByName(owner))
- this
- }
-
- def setGroup(group: String): this.type = {
- Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByGroupName(group))
- this
- }
-
- /**
- * Similar to the UNIX command touch - create this file if it does not exist and set its last modification time
- */
- def touch(time: Instant = Instant.now())(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
- Files.setLastModifiedTime(createIfNotExists()(attributes, linkOptions).path, FileTime.from(time))
- this
- }
-
- def lastModifiedTime(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Instant =
- Files.getLastModifiedTime(path, linkOptions: _*).toInstant
-
- /**
- * Deletes this file or directory
- *
- * @param swallowIOExceptions If this is set to true, any exception thrown is swallowed
- */
- def delete(swallowIOExceptions: Boolean = false): this.type = {
- try {
- if (isDirectory) list.foreach(_.delete(swallowIOExceptions))
- Files.delete(path)
- } catch {
- case _: IOException if swallowIOExceptions => //e.printStackTrace() //swallow
- }
- this
- }
-
- def renameTo(newName: String): File =
- moveTo(path.resolveSibling(newName))
-
- /**
- *
- * @param destination
- * @param overwrite
- * @return destination
- */
- def moveTo(destination: File, overwrite: Boolean = false): destination.type = {
- Files.move(path, destination.path, File.CopyOptions(overwrite): _*)
- destination
- }
-
- /**
- * Moves this file into the given directory
- * @param directory
- *
- * @return the File referencing the new file created under destination
- */
- def moveToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): File = {
- require(directory.isDirectory(linkOptions), s"$directory must be a directory")
- moveTo(directory / this.name)
- }
-
- /**
- *
- * @param destination
- * @param overwrite
- * @return destination
- */
- def copyTo(destination: File, overwrite: Boolean = false)(implicit copyOptions: File.CopyOptions = File.CopyOptions(overwrite)): destination.type = {
- if (isDirectory) {//TODO: maxDepth?
- Files.walkFileTree(path, new SimpleFileVisitor[Path] {
- def newPath(subPath: Path): Path = destination.path.resolve(path.relativize(subPath))
-
- override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes) = {
- Files.createDirectories(newPath(dir))
- super.preVisitDirectory(dir, attrs)
- }
-
- override def visitFile(file: Path, attrs: BasicFileAttributes) = {
- Files.copy(file, newPath(file), copyOptions: _*)
- super.visitFile(file, attrs)
- }
- })
- } else {
- Files.copy(path, destination.path, copyOptions: _*)
- }
- destination
- }
-
- /**
- * Copies this file into the given directory
- * @param directory
- *
- * @return the File referencing the new file created under destination
- */
- def copyToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default, copyOptions: File.CopyOptions = File.CopyOptions.default): File = {
- require(directory.isDirectory(linkOptions), s"$directory must be a directory")
- copyTo(directory / this.name)(copyOptions)
- }
-
- def symbolicLinkTo(destination: File)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = {
- Files.createSymbolicLink(path, destination.path, attributes: _*)
- destination
- }
-
- def linkTo(destination: File, symbolic: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = {
- if (symbolic) {
- symbolicLinkTo(destination)(attributes)
- } else {
- Files.createLink(destination.path, path)
- destination
- }
- }
-
- def listRelativePaths(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Iterator[Path] =
- walk()(visitOptions).map(relativize)
-
- def relativize(destination: File): Path =
- path.relativize(destination.path)
-
- def isSamePathAs(that: File): Boolean =
- this.path == that.path
-
- def isSameFileAs(that: File): Boolean =
- Files.isSameFile(this.path, that.path)
-
- /**
- * @return true if this file is exactly same as that file
- * For directories, it checks for equivalent directory structure
- */
- def isSameContentAs(that: File): Boolean =
- isSimilarContentAs(that)
-
- /**
- * Almost same as isSameContentAs but uses faster md5 hashing to compare (and thus small chance of false positive)
- * Also works for directories
- *
- * @param that
- * @return
- */
- def isSimilarContentAs(that: File): Boolean =
- this.md5 == that.md5
-
- override def equals(obj: Any) = {
- obj match {
- case file: File => isSamePathAs(file)
- case _ => false
- }
- }
-
- /**
- * @param linkOptions
- * @return true if file is not present or empty directory or 0-bytes file
- */
- def isEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = {
- if (isDirectory(linkOptions)) {
- children.isEmpty
- } else if (isRegularFile(linkOptions)) {
- toJava.length() == 0
- } else {
- notExists(linkOptions)
- }
- }
-
- /**
- *
- * @param linkOptions
- * @return for directories, true if it has no children, false otherwise
- * for files, true if it is a 0-byte file, false otherwise
- * else true if it exists, false otherwise
- */
- def nonEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
- !isEmpty(linkOptions)
-
- /**
- * If this is a directory, remove all its children
- * If its a file, empty the contents
- *
- * @return this
- */
- def clear()(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
- if (isDirectory(linkOptions)) {
- children.foreach(_.delete())
- } else {
- writeByteArray(Array.emptyByteArray)(File.OpenOptions.default)
- }
- this
- }
-
- def deleteOnExit(): this.type = {
- toJava.deleteOnExit()
- this
- }
-
- override def hashCode =
- path.hashCode()
-
- override def toString =
- pathAsString
-
- /**
- * Zips this file (or directory)
- *
- * @param destination The destination file; Creates this if it does not exists
- * @return The destination zip file
- */
- def zipTo(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type = {
- val files = if (isDirectory) children else Iterator(this)
- destination.zipIn(files, compressionLevel)(charset)
- }
-
- /**
- * zip to a temp directory
- *
- * @return the target directory
- */
- def zip(compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): File =
- zipTo(destination = File.newTemporaryFile(name, ".zip"), compressionLevel)(charset)
-
- /**
- * Unzips this zip file
- *
- * @param destination destination folder; Creates this if it does not exist
- * @param zipFilter An optional param to reject or accept unzipping a file
- * @return The destination where contents are unzipped
- */
- def unzipTo(destination: File, zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): destination.type = {
- for {
- zipFile <- new ZipFile(toJava, charset).autoClosed
- entry <- zipFile.entries().asScala if zipFilter(entry)
- } entry.extractTo(destination, zipFile.getInputStream(entry))
- destination
- }
-
- /**
- * Streamed unzipping is slightly slower but supports larger files and more encodings
- * @see https://github.com/pathikrit/better-files/issues/152
- *
- * @param destinationDirectory destination folder; Creates this if it does not exist
- * @return The destination where contents are unzipped
- */
- def streamedUnzip(destinationDirectory: File = File.newTemporaryDirectory(name))(implicit charset: Charset = defaultCharset): destinationDirectory.type = {
- for {
- zipIn <- zipInputStream(charset)
- } zipIn.mapEntries(_.extractTo(destinationDirectory, zipIn)).size
- destinationDirectory
- }
-
- def unGzipTo(destinationDirectory: File = File.newTemporaryDirectory())(implicit openOptions: File.OpenOptions = File.OpenOptions.default): destinationDirectory.type = {
- for {
- in <- inputStream(openOptions)
- out <- destinationDirectory.outputStream(openOptions)
- } in.buffered.pipeTo(out.buffered)
- destinationDirectory
- }
-
- /**
- * Adds these files into this zip file
- * Example usage: File("test.zip").zipIn(Seq(file"hello.txt", file"hello2.txt"))
- *
- * @param files
- * @param compressionLevel
- * @param charset
- * @return this
- */
- def zipIn(files: Files, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(charset: Charset = defaultCharset): this.type = {
- for {
- output <- newZipOutputStream(File.OpenOptions.default, charset).withCompressionLevel(compressionLevel).autoClosed
- input <- files
- file <- input.walk()
- name = input.parent relativize file
- } output.add(file, name.toString)
- this
- }
-
- /**
- * unzip to a temporary zip file
- *
- * @return the zip file
- */
- def unzip(zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): File =
- unzipTo(destination = File.newTemporaryDirectory(name), zipFilter)(charset)
-
- /**
- * Java's temporary files/directories are not cleaned up by default.
- * If we explicitly call `.deleteOnExit()`, it gets added to shutdown handler which is not ideal
- * for long running systems with millions of temporary files as:
- * a) it would slowdown shutdown and
- * b) occupy unnecessary disk-space during app lifetime
- *
- * This util auto-deletes the resource when done using the ManagedResource facility
- *
- * Example usage:
- * File.temporaryDirectory().foreach(tempDir => doSomething(tempDir)
- *
- * @return
- */
- def toTemporary: ManagedResource[File] =
- new ManagedResource(this)(Disposable.fileDisposer)
-
- //TODO: add features from https://github.com/sbt/io
-}
-
-object File {
- /**
- * Get a file from a resource
- * Note: Use resourceToFile instead as this may not actually always load the file
- * See: http://stackoverflow.com/questions/676250/different-ways-of-loading-a-file-as-an-inputstream
- *
- * @param name
- * @return
- */
- def resource(name: String): File =
- File(currentClassLoader().getResource(name))
-
- /**
- * Copies a resource into a file
- *
- * @param name
- * @param destination File where resource is copied into, if not specified a temp file is created
- * @return
- */
- def copyResource(name: String)(destination: File = File.newTemporaryFile(prefix = name)): destination.type = {
- for {
- in <- resourceAsStream(name).autoClosed
- out <- destination.outputStream
- } in.pipeTo(out)
- destination
- }
-
- def newTemporaryDirectory(prefix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = {
- parent match {
- case Some(dir) => Files.createTempDirectory(dir.path, prefix, attributes: _*)
- case _ => Files.createTempDirectory(prefix, attributes: _*)
- }
- }
-
- def temporaryDirectory(prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] =
- newTemporaryDirectory(prefix, parent)(attributes).toTemporary
-
- def usingTemporaryDirectory[U](prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit =
- temporaryDirectory(prefix, parent, attributes).foreach(f)
-
- def newTemporaryFile(prefix: String = "", suffix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = {
- parent match {
- case Some(dir) => Files.createTempFile(dir.path, prefix, suffix, attributes: _*)
- case _ => Files.createTempFile(prefix, suffix, attributes: _*)
- }
- }
-
- def temporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] =
- newTemporaryFile(prefix, suffix, parent)(attributes).toTemporary
-
- def usingTemporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit =
- temporaryFile(prefix, suffix, parent, attributes).foreach(f)
-
- implicit def apply(path: Path): File =
- new File(path.toAbsolutePath.normalize())
-
- def apply(path: String, fragments: String*): File =
- Paths.get(path, fragments: _*)
-
- /**
- * Get File to path with help of reference anchor.
- *
- * Anchor is used as a reference in case that path is not absolute.
- * Anchor could be path to directory or path to file.
- * If anchor is file, then file's parent dir is used as an anchor.
- *
- * If anchor itself is relative, then anchor is used together with current working directory.
- *
- * NOTE: If anchor is non-existing path on filesystem, then it's always treated as file,
- * e.g. it's last component is removed when it is used as an anchor.
- *
- * @param anchor path to be used as anchor
- * @param path as string
- * @param fragments optional path fragments
- * @return absolute, normalize path
- */
- def apply(anchor: File, path: String, fragments: String*): File = {
- val p = Paths.get(path, fragments: _*)
- if (p.isAbsolute) {
- p
- } else if (anchor.isDirectory) {
- anchor / p.toString
- } else {
- anchor.parent / p.toString
- }
- }
-
- def apply(url: URL): File =
- apply(url.toURI)
-
- def apply(uri: URI): File =
- Paths.get(uri)
-
- def roots: Iterable[File] =
- FileSystems.getDefault.getRootDirectories.asScala.map(File.apply)
-
- def root: File =
- roots.head
-
- def home: File =
- Properties.userHome.toFile
-
- def temp: File =
- Properties.tmpDir.toFile
-
- def currentWorkingDirectory: File =
- File("")
-
- type Attributes = Seq[FileAttribute[_]]
- object Attributes {
- val default : Attributes = Seq.empty
- }
-
- type CopyOptions = Seq[CopyOption]
- object CopyOptions {
- def apply(overwrite: Boolean) : CopyOptions = (if (overwrite) Seq(StandardCopyOption.REPLACE_EXISTING) else default) ++ LinkOptions.default
- val default : CopyOptions = Seq.empty //Seq(StandardCopyOption.COPY_ATTRIBUTES)
- }
-
- type Events = Seq[WatchEvent.Kind[_]]
- object Events {
- val all : Events = Seq(StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_DELETE)
- val default : Events = all
- }
-
- type OpenOptions = Seq[OpenOption]
- object OpenOptions {
- val append : OpenOptions = Seq(StandardOpenOption.APPEND, StandardOpenOption.CREATE)
- val default : OpenOptions = Seq.empty
- }
-
- type LinkOptions = Seq[LinkOption]
- object LinkOptions {
- val follow : LinkOptions = Seq.empty
- val noFollow : LinkOptions = Seq(LinkOption.NOFOLLOW_LINKS)
- val default : LinkOptions = follow
- }
-
- type VisitOptions = Seq[FileVisitOption]
- object VisitOptions {
- val follow : VisitOptions = Seq(FileVisitOption.FOLLOW_LINKS)
- val default : VisitOptions = Seq.empty
- }
-
- type Order = Ordering[File]
- object Order {
- val bySize : Order = Ordering.by(_.size)
- val byName : Order = Ordering.by(_.name)
- val byDepth : Order = Ordering.by(_.path.getNameCount)
- val byModificationTime : Order = Ordering.by(_.lastModifiedTime)
- val byDirectoriesLast : Order = Ordering.by(_.isDirectory)
- val byDirectoriesFirst : Order = byDirectoriesLast.reverse
- val default : Order = byDirectoriesFirst.andThenBy(byName)
- }
-
- abstract class PathMatcherSyntax(name: String) {
-
- /**
- * Return PathMatcher from this file
- *
- * @param file
- * @param pattern
- * @param includePath If this is true, no need to include path matchers
- * e.g. instead of "**//*.txt" we can simply use *.txt
- * @return
- */
- def apply(file: File, pattern: String, includePath: Boolean): PathMatcher = {
- val escapedPath = if (includePath) escapePath(file.path.toString + file.fileSystem.getSeparator) else ""
- file.fileSystem.getPathMatcher(s"$name:$escapedPath$pattern")
- }
-
- def escapePath(path: String): String
- }
- object PathMatcherSyntax {
- val glob: PathMatcherSyntax = new PathMatcherSyntax("glob") {
- override def escapePath(path: String) = path
- .replaceAllLiterally("\\", "\\\\")
- .replaceAllLiterally("*", "\\*")
- .replaceAllLiterally("?", "\\?")
- .replaceAllLiterally("{", "\\{")
- .replaceAllLiterally("}", "\\}")
- .replaceAllLiterally("[", "\\[")
- .replaceAllLiterally("]", "\\]")
- }
-
- val regex: PathMatcherSyntax = new PathMatcherSyntax("regex") {
- override def escapePath(path: String) = Pattern.quote(path)
- }
-
- val default: PathMatcherSyntax = glob
- }
-
- class RandomAccessMode private(val value: String)
- object RandomAccessMode {
- val read = new RandomAccessMode("r")
- val readWrite = new RandomAccessMode("rw")
- val readWriteMetadataSynchronous = new RandomAccessMode("rws")
- val readWriteContentSynchronous = new RandomAccessMode("rwd")
- }
-
- def numberOfOpenFileDescriptors(): Long = {
- java.lang.management.ManagementFactory
- .getPlatformMBeanServer
- .getAttribute(new javax.management.ObjectName("java.lang:type=OperatingSystem"), "OpenFileDescriptorCount")
- .asInstanceOf[Long]
- }
-
- /**
- * Implement this interface to monitor the root file
- */
- trait Monitor extends AutoCloseable {
- val root: File
-
- /**
- * Dispatch a StandardWatchEventKind to an appropriate callback
- * Override this if you don't want to manually handle onDelete/onCreate/onModify separately
- *
- * @param eventType
- * @param file
- */
- def onEvent(eventType: WatchEvent.Kind[Path], file: File, count: Int): Unit = eventType match {
- case StandardWatchEventKinds.ENTRY_CREATE => onCreate(file, count)
- case StandardWatchEventKinds.ENTRY_MODIFY => onModify(file, count)
- case StandardWatchEventKinds.ENTRY_DELETE => onDelete(file, count)
- }
-
- def start()(implicit executionContext: ExecutionContext): Unit
-
- def onCreate(file: File, count: Int): Unit
-
- def onModify(file: File, count: Int): Unit
-
- def onDelete(file: File, count: Int): Unit
-
- def onUnknownEvent(event: WatchEvent[_], count: Int): Unit
-
- def onException(exception: Throwable): Unit
-
- def stop(): Unit = close()
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala
deleted file mode 100644
index f6f139f2..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package better.files
-
-import java.nio.file._
-
-import scala.concurrent.ExecutionContext
-import scala.util.Try
-import scala.util.control.NonFatal
-
-/**
- * Implementation of File.Monitor
- *
- * @param root
- * @param maxDepth
- */
-abstract class FileMonitor(val root: File, maxDepth: Int) extends File.Monitor {
- protected[this] val service = root.newWatchService
-
- def this(root: File, recursive: Boolean = true) = this(root, if (recursive) Int.MaxValue else 0)
-
- /**
- * If watching non-directory, don't react to siblings
- * @param target
- * @return
- */
- protected[this] def reactTo(target: File) = root.isDirectory || root.isSamePathAs(target)
-
- protected[this] def process(key: WatchKey) = {
- val path = key.watchable().asInstanceOf[Path]
-
- import scala.collection.JavaConverters._
- key.pollEvents().asScala foreach {
- case event: WatchEvent[Path] @unchecked =>
- val target: File = path.resolve(event.context())
- if (reactTo(target)) {
- if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE) {
- val depth = root.relativize(target).getNameCount
- watch(target, (maxDepth - depth) max 0) // auto-watch new files in a directory
- }
- onEvent(event.kind(), target, event.count())
- }
- case event => if (reactTo(path)) onUnknownEvent(event, event.count())
- }
- key.reset()
- }
-
- protected[this] def watch(file: File, depth: Int): Unit = {
- def toWatch: Files = if (file.isDirectory) {
- file.walk(depth).filter(f => f.isDirectory && f.exists)
- } else {
- when(file.exists)(file.parent).iterator // There is no way to watch a regular file; so watch its parent instead
- }
- try {
- toWatch.foreach(f => Try[Unit](f.register(service)).recover(PartialFunction(onException)).get)
- } catch {
- case NonFatal(e) => onException(e)
- }
- }
-
- override def start()(implicit executionContext: ExecutionContext) = {
- watch(root, maxDepth)
- executionContext.execute(() => Iterator.continually(service.take()).foreach(process))
- }
-
- override def close() = service.close()
-
- // Although this class is abstract, we give provide implementations so user can choose to implement a subset of these
- override def onCreate(file: File, count: Int) = {}
- override def onModify(file: File, count: Int) = {}
- override def onDelete(file: File, count: Int) = {}
- override def onUnknownEvent(event: WatchEvent[_], count: Int) = {}
- override def onException(exception: Throwable) = {}
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala
deleted file mode 100644
index 322b5f40..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala
+++ /dev/null
@@ -1,324 +0,0 @@
-package better.files
-
-import java.io.{File => JFile, _}
-import java.nio.MappedByteBuffer
-import java.nio.channels.FileChannel
-import java.nio.charset.Charset
-import java.nio.file.{Path, PathMatcher}
-import java.security.MessageDigest
-import java.util.StringTokenizer
-import java.util.stream.{Stream => JStream}
-import java.util.zip._
-
-import scala.annotation.tailrec
-import scala.collection.JavaConverters._
-import scala.util.Try
-
-/**
- * Container for various implicits
- */
-trait Implicits {
-
- //TODO: Rename all Ops to Extensions
-
- implicit class StringInterpolations(sc: StringContext) {
- def file(args: Any*): File =
- value(args).toFile
-
- private[this] def value(args: Seq[Any]) =
- sc.s(args: _*)
- }
-
- implicit class StringOps(str: String) {
- def toFile: File =
- File(str)
-
- def /(child: String): File =
- toFile / child
- }
-
- implicit class FileOps(file: JFile) {
- def toScala: File =
- File(file.getPath)
- }
-
- implicit class SymbolExtensions(symbol: Symbol) {
- def /(child: Symbol): File =
- File(symbol.name) / child
- }
-
- implicit class IteratorExtensions[A](it: Iterator[A]) {
- def withHasNext(f: => Boolean): Iterator[A] = new Iterator[A] {
- override def hasNext = f && it.hasNext
- override def next() = it.next()
- }
- }
-
- implicit class InputStreamOps(in: InputStream) {
- def pipeTo(out: OutputStream, bufferSize: Int = defaultBufferSize): out.type =
- pipeTo(out, Array.ofDim[Byte](bufferSize))
-
- /**
- * Pipe an input stream to an output stream using a byte buffer
- */
- @tailrec final def pipeTo(out: OutputStream, buffer: Array[Byte]): out.type = {
- val n = in.read(buffer)
- if (n > 0) {
- out.write(buffer, 0, n)
- pipeTo(out, buffer)
- } else {
- out
- }
- }
-
- def asString(closeStream: Boolean = true, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset): String = {
- try {
- new ByteArrayOutputStream(bufferSize).autoClosed
- .map(pipeTo(_, bufferSize = bufferSize).toString(charset.displayName()))
- } finally {
- if (closeStream) in.close()
- }
- }
-
- def buffered: BufferedInputStream =
- new BufferedInputStream(in)
-
- def buffered(bufferSize: Int): BufferedInputStream =
- new BufferedInputStream(in, bufferSize)
-
- def gzipped: GZIPInputStream =
- new GZIPInputStream(in)
-
- /**
- * If bufferSize is set to less than or equal to 0, we don't buffer
- * @param bufferSize
- * @return
- */
- def asObjectInputStream(bufferSize: Int = defaultBufferSize): ObjectInputStream =
- new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize))
-
- /**
- * @param bufferSize If bufferSize is set to less than or equal to 0, we don't buffer
- * Code adapted from:
- * https://github.com/apache/commons-io/blob/master/src/main/java/org/apache/commons/io/input/ClassLoaderObjectInputStream.java
- *
- * @return A special ObjectInputStream that loads a class based on a specified ClassLoader rather than the default
- * This is useful in dynamic container environments.
- */
- def asObjectInputStreamUsingClassLoader(classLoader: ClassLoader = getClass.getClassLoader, bufferSize: Int = defaultBufferSize): ObjectInputStream =
- new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) {
- override protected def resolveClass(objectStreamClass: ObjectStreamClass): Class[_] =
- try {
- Class.forName(objectStreamClass.getName, false, classLoader)
- } catch {
- case _: ClassNotFoundException ⇒ super.resolveClass(objectStreamClass)
- }
-
- override protected def resolveProxyClass(interfaces: Array[String]): Class[_] = {
- try {
- java.lang.reflect.Proxy.getProxyClass(
- classLoader,
- interfaces.map(interface => Class.forName(interface, false, classLoader)) : _*
- )
- } catch {
- case _: ClassNotFoundException | _: IllegalArgumentException => super.resolveProxyClass(interfaces)
- }
- }
- }
-
- def reader(implicit charset: Charset = defaultCharset): InputStreamReader =
- new InputStreamReader(in, charset)
-
- def lines(implicit charset: Charset = defaultCharset): Iterator[String] =
- reader(charset).buffered.lines().toAutoClosedIterator
-
- def bytes: Iterator[Byte] =
- in.autoClosed.flatMap(res => eofReader(res.read()).map(_.toByte))
- }
-
- implicit class OutputStreamOps(val out: OutputStream) {
- def buffered: BufferedOutputStream =
- new BufferedOutputStream(out)
-
- def buffered(bufferSize: Int): BufferedOutputStream =
- new BufferedOutputStream(out, bufferSize)
-
- def gzipped: GZIPOutputStream =
- new GZIPOutputStream(out)
-
- def writer(implicit charset: Charset = defaultCharset): OutputStreamWriter =
- new OutputStreamWriter(out, charset)
-
- def printWriter(autoFlush: Boolean = false): PrintWriter =
- new PrintWriter(out, autoFlush)
-
- def write(bytes: Iterator[Byte], bufferSize: Int = defaultBufferSize): out.type = {
- bytes.grouped(bufferSize).foreach(buffer => out.write(buffer.toArray))
- out.flush()
- out
- }
-
- def tee(out2: OutputStream): OutputStream =
- new TeeOutputStream(out, out2)
-
- /**
- * If bufferSize is set to less than or equal to 0, we don't buffer
- * @param bufferSize
- * @return
- */
- def asObjectOutputStream(bufferSize: Int = defaultBufferSize): ObjectOutputStream =
- new ObjectOutputStream(if (bufferSize <= 0) out else buffered(bufferSize))
- }
-
- implicit class ReaderOps(reader: Reader) {
- def buffered: BufferedReader =
- new BufferedReader(reader)
-
- def toInputStream(implicit charset: Charset = defaultCharset): InputStream =
- new ReaderInputStream(reader)(charset)
- }
-
- implicit class BufferedReaderOps(reader: BufferedReader) {
- def chars: Iterator[Char] =
- reader.autoClosed.flatMap(res => eofReader(res.read()).map(_.toChar))
-
- def tokens(splitter: StringSplitter = StringSplitter.default): Iterator[String] =
- reader.lines().toAutoClosedIterator.flatMap(splitter.split)
- }
-
- implicit class WriterOps(writer: Writer) {
- def buffered: BufferedWriter =
- new BufferedWriter(writer)
-
- def outputstream(implicit charset: Charset = defaultCharset): OutputStream =
- new WriterOutputStream(writer)(charset)
- }
-
- implicit class FileChannelOps(fc: FileChannel) {
- def toMappedByteBuffer: MappedByteBuffer =
- fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size())
- }
-
- implicit class PathMatcherOps(matcher: PathMatcher) {
- def matches(file: File)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default) =
- file.collectChildren(child => matcher.matches(child.path))(visitOptions)
- }
-
- implicit class ObjectInputStreamOps(ois: ObjectInputStream) {
- def deserialize[A]: A =
- ois.readObject().asInstanceOf[A]
- }
-
- implicit class ObjectOutputStreamOps(val oos: ObjectOutputStream) {
- def serialize(obj: Serializable): oos.type = {
- oos.writeObject(obj)
- oos
- }
- }
-
- implicit class ZipOutputStreamOps(val out: ZipOutputStream) {
-
- /**
- * Correctly set the compression level
- * See: http://stackoverflow.com/questions/1206970/creating-zip-using-zip-utility
- *
- * @param level
- * @return
- */
- def withCompressionLevel(level: Int): out.type = {
- out.setLevel(level)
- if (level == Deflater.NO_COMPRESSION) out.setMethod(ZipOutputStream.DEFLATED)
- out
- }
-
- def add(file: File, name: String): out.type = {
- val relativeName = name.stripSuffix(file.fileSystem.getSeparator)
- val entryName = if (file.isDirectory) s"$relativeName/" else relativeName // make sure to end directories in ZipEntry with "/"
- out.putNextEntry(new ZipEntry(entryName))
- if (file.isRegularFile) file.inputStream.foreach(_.pipeTo(out))
- out.closeEntry()
- out
- }
-
- def +=(file: File): out.type =
- add(file, file.name)
- }
-
- implicit class ZipInputStreamOps(val in: ZipInputStream) {
- def mapEntries[A](f: ZipEntry => A): Iterator[A] = new Iterator[A] {
- private[this] var entry = in.getNextEntry
-
- override def hasNext = entry != null
-
- override def next() = {
- val result = Try(f(entry))
- Try(in.closeEntry())
- entry = in.getNextEntry
- result.get
- }
- }
- }
-
- implicit class ZipEntryOps(val entry: ZipEntry) {
- /**
- * Extract this ZipEntry under this rootDir
- *
- * @param rootDir directory under which this entry is extracted
- * @param inputStream use this inputStream when this entry is a file
- * @return the extracted file
- */
- def extractTo(rootDir: File, inputStream: => InputStream): File = {
- val child = rootDir.createChild(entry.getName, asDirectory = entry.isDirectory, createParents = true)
- if (!entry.isDirectory) child.outputStream.foreach(inputStream.pipeTo(_))
- child
- }
- }
-
- implicit class CloseableOps[A <: AutoCloseable](resource: A) {
- /**
- * Lightweight automatic resource management
- * Closes the resource when done e.g.
- * <pre>
- * for {
- * in <- file.newInputStream.autoClosed
- * } in.write(bytes)
- * // in is closed now
- * </pre>
- *
- * @return
- */
- def autoClosed: ManagedResource[A] =
- new ManagedResource(resource)(Disposable.closableDisposer)
- }
-
- implicit class JStreamOps[A](stream: JStream[A]) {
- /**
- * Closes this stream when iteration is complete
- * It will NOT close the stream if it is not depleted!
- *
- * @return
- */
- def toAutoClosedIterator: Iterator[A] =
- stream.autoClosed.flatMap(_.iterator().asScala)
- }
-
- private[files] implicit class OrderingOps[A](order: Ordering[A]) {
- def andThenBy(order2: Ordering[A]): Ordering[A] =
- Ordering.comparatorToOrdering(order.thenComparing(order2))
- }
-
- implicit def stringToMessageDigest(algorithmName: String): MessageDigest =
- MessageDigest.getInstance(algorithmName)
-
- implicit def stringToCharset(charsetName: String): Charset =
- Charset.forName(charsetName)
-
- implicit def tokenizerToIterator(s: StringTokenizer): Iterator[String] =
- Iterator.continually(s.nextToken()).withHasNext(s.hasMoreTokens)
-
- //implicit def posixPermissionToFileAttribute(perm: PosixFilePermission) =
- // PosixFilePermissions.asFileAttribute(Set(perm))
-
- private[files] implicit def pathStreamToFiles(files: JStream[Path]): Files =
- files.toAutoClosedIterator.map(File.apply)
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala
deleted file mode 100644
index dad5ecb8..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-package better.files
-
-import java.util.concurrent.atomic.AtomicBoolean
-
-import scala.util.Try
-import scala.util.control.NonFatal
-
-/**
- * A typeclass to denote a disposable resource
- * @tparam A
- */
-trait Disposable[-A] {
- def dispose(resource: A): Unit
-
- def disposeSilently(resource: A): Unit = {
- val _ = Try(dispose(resource))
- }
-}
-
-object Disposable {
- def apply[A](disposeMethod: A => Any): Disposable[A] = new Disposable[A] {
- override def dispose(resource: A) = {
- val _ = disposeMethod(resource)
- }
- }
-
- implicit val closableDisposer: Disposable[AutoCloseable] =
- Disposable(_.close())
-
- val fileDisposer: Disposable[File] =
- Disposable(_.delete(swallowIOExceptions = true))
-}
-
-class ManagedResource[A](resource: A)(implicit disposer: Disposable[A]) {
- private[this] val isDisposed = new AtomicBoolean(false)
- private[this] def disposeOnce() = if (!isDisposed.getAndSet(true)) disposer.dispose(resource)
-
- // This is the Scala equivalent of how javac compiles try-with-resources,
- // Except that fatal exceptions while disposing take precedence over exceptions thrown previously
- private[this] def disposeOnceAndThrow(e1: Throwable) = {
- try {
- disposeOnce()
- } catch {
- case NonFatal(e2) => e1.addSuppressed(e2)
- case e2: Throwable =>
- e2.addSuppressed(e1)
- throw e2
- }
- throw e1
- }
-
- def foreach[U](f: A => U): Unit = {
- val _ = map(f)
- }
-
- def map[B](f: A => B): B = {
- try {
- f(resource)
- } catch {
- case e1: Throwable => disposeOnceAndThrow(e1)
- } finally {
- disposeOnce()
- }
- }
-
- def withFilter(f: A => Boolean): this.type = {
- if (!f(resource)) disposeOnce()
- this
- }
-
- /**
- * This handles lazy operations (e.g. Iterators)
- * for which resource needs to be disposed only after iteration is done
- *
- * @param f
- * @tparam B
- * @return
- */
- def flatMap[B](f: A => Iterator[B]): Iterator[B] = {
- val it = f(resource)
- it withHasNext {
- try {
- val result = it.hasNext
- if (!result) disposeOnce()
- result
- } catch {
- case e1: Throwable => disposeOnceAndThrow(e1)
- }
- }
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala
deleted file mode 100644
index f9b792cc..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-package better.files
-
-import java.io.{InputStream, Reader}
-import java.nio.{ByteBuffer, CharBuffer}
-import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction}
-
-import scala.annotation.tailrec
-
-/**
- * Code ported from Java to Scala:
- * https://github.com/apache/commons-io/blob/c0eb48f7e83987c5ed112b82f0d651aff5149ae4/src/main/java/org/apache/commons/io/input/ReaderInputStream.java
- */
-class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int) extends InputStream {
-
- def this(reader: Reader, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset) =
- this(reader = reader, encoder = charset.newEncoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE), bufferSize = bufferSize)
-
- /**
- * CharBuffer used as input for the decoder. It should be reasonably
- * large as we read data from the underlying Reader into this buffer.
- */
- private[this] val encoderIn = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer]
-
- /**
- * ByteBuffer used as output for the decoder. This buffer can be small
- * as it is only used to transfer data from the decoder to the buffer provided by the caller.
- */
- private[this] val encoderOut = ByteBuffer.allocate(bufferSize>>4).flip().asInstanceOf[ByteBuffer]
-
- private[this] var lastCoderResult = CoderResult.UNDERFLOW
- private[this] var endOfInput = false
-
- private[this] def fillBuffer() = {
- assert(!endOfInput)
- if (lastCoderResult.isUnderflow) {
- val position = encoderIn.compact().position
- // We don't use Reader#read(CharBuffer) here because it is more efficient to write directly to the underlying char array
- // since the default implementation copies data to a temporary char array anyway
- reader.read(encoderIn.array, position, encoderIn.remaining) match {
- case EOF => endOfInput = true
- case c => encoderIn.position(position + c)
- }
- encoderIn.flip()
- }
- lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput)
- encoderOut.flip()
- }
-
- override def read(b: Array[Byte], off: Int, len: Int) = {
- if (len < 0 || off < 0 || (off + len) > b.length) throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len)
- if (len == 0) {
- 0 // Always return 0 if len == 0
- } else {
- var read = 0
- @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) {
- if (encoderOut.hasRemaining) {
- val c = encoderOut.remaining min len
- encoderOut.get(b, off, c)
- read += c
- loop(off + c, len - c)
- } else if (!endOfInput) {
- fillBuffer()
- loop(off, len)
- }
- }
- loop(off, len)
- if (read == 0 && endOfInput) EOF else read
- }
- }
-
- @tailrec final override def read() = {
- if (encoderOut.hasRemaining) {
- encoderOut.get & 0xFF
- } else if (endOfInput) {
- EOF
- } else {
- fillBuffer()
- read()
- }
- }
-
- override def close() = reader.close()
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala
deleted file mode 100644
index be6ebb3f..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala
+++ /dev/null
@@ -1,183 +0,0 @@
-package better.files
-
-import java.io.{InputStream, LineNumberReader, Reader, StringReader}
-import java.nio.charset.Charset
-import java.time.format.DateTimeFormatter
-import java.util.StringTokenizer
-
-trait Scanner extends Iterator[String] with AutoCloseable {
- def lineNumber(): Int
-
- def next[A](implicit scan: Scannable[A]): A = scan(this)
-
- def nextLine(): String
-
- def lines: Iterator[String] = Iterator.continually(nextLine()).withHasNext(hasNext)
-}
-
-/**
- * Faster, safer and more idiomatic Scala replacement for java.util.Scanner
- * See: http://codeforces.com/blog/entry/7018
- */
-object Scanner {
-
- def apply(str: String): Scanner =
- Scanner(str, StringSplitter.default)
-
- def apply(str: String, splitter: StringSplitter): Scanner =
- Scanner(new StringReader(str), splitter)
-
- def apply(reader: Reader): Scanner =
- Scanner(reader, StringSplitter.default)
-
- def apply(reader: Reader, splitter: StringSplitter): Scanner =
- Scanner(new LineNumberReader(reader.buffered), splitter)
-
- def apply(inputStream: InputStream)(implicit charset: Charset = defaultCharset): Scanner =
- Scanner(inputStream, StringSplitter.default)(charset)
-
- def apply(inputStream: InputStream, splitter: StringSplitter)(implicit charset: Charset): Scanner =
- Scanner(inputStream.reader(charset), splitter)
-
- def apply(reader: LineNumberReader, splitter: StringSplitter): Scanner = new Scanner {
- private[this] val tokens = reader.tokens(splitter)
- override def lineNumber() = reader.getLineNumber
- override def nextLine() = reader.readLine()
- override def next() = tokens.next()
- override def hasNext = tokens.hasNext
- override def close() = reader.close()
- }
-
- val stdin: Scanner = Scanner(System.in)
-
- trait Read[A] { // TODO: Move to own subproject when this is fixed https://github.com/typelevel/cats/issues/932
- def apply(s: String): A
- }
-
- object Read {
- def apply[A](f: String => A): Read[A] = new Read[A] {
- override def apply(s: String) = f(s)
- }
- implicit val string : Read[String] = Read(identity)
- implicit val boolean : Read[Boolean] = Read(_.toBoolean)
- implicit val byte : Read[Byte] = Read(_.toByte) //TODO: https://issues.scala-lang.org/browse/SI-9706
- implicit val short : Read[Short] = Read(_.toShort)
- implicit val int : Read[Int] = Read(_.toInt)
- implicit val long : Read[Long] = Read(_.toLong)
- implicit val bigInt : Read[BigInt] = Read(BigInt(_))
- implicit val float : Read[Float] = Read(_.toFloat)
- implicit val double : Read[Double] = Read(_.toDouble)
- implicit val bigDecimal : Read[BigDecimal] = Read(BigDecimal(_))
- implicit def option[A: Read] : Read[Option[A]] = Read(s => when(s.nonEmpty)(implicitly[Read[A]].apply(s)))
-
- // Java's time readers
- import java.time._
- import java.sql.{Date => SqlDate, Time => SqlTime, Timestamp => SqlTimestamp}
-
- implicit val duration : Read[Duration] = Read(Duration.parse(_))
- implicit val instant : Read[Instant] = Read(Instant.parse(_))
- implicit val localDateTime : Read[LocalDateTime] = Read(LocalDateTime.parse(_))
- implicit val localDate : Read[LocalDate] = Read(LocalDate.parse(_))
- implicit val monthDay : Read[MonthDay] = Read(MonthDay.parse(_))
- implicit val offsetDateTime : Read[OffsetDateTime] = Read(OffsetDateTime.parse(_))
- implicit val offsetTime : Read[OffsetTime] = Read(OffsetTime.parse(_))
- implicit val period : Read[Period] = Read(Period.parse(_))
- implicit val year : Read[Year] = Read(Year.parse(_))
- implicit val yearMonth : Read[YearMonth] = Read(YearMonth.parse(_))
- implicit val zonedDateTime : Read[ZonedDateTime] = Read(ZonedDateTime.parse(_))
- implicit val sqlDate : Read[SqlDate] = Read(SqlDate.valueOf)
- implicit val sqlTime : Read[SqlTime] = Read(SqlTime.valueOf)
- implicit val sqlTimestamp : Read[SqlTimestamp] = Read(SqlTimestamp.valueOf)
-
- /**
- * Use this to create custom readers e.g. to read a LocalDate using some custom format
- * val readLocalDate: Read[LocalDate] = Read.temporalQuery(format = myFormat, query = LocalDate.from)
- * @param format
- * @param query
- * @tparam A
- * @return
- */
- def temporalQuery[A](format: DateTimeFormatter, query: temporal.TemporalQuery[A]): Read[A] =
- Read(format.parse(_, query))
- }
-}
-
-/**
- * Implement this trait to make thing parsable
- * In most cases, use Scanner.Read typeclass when you simply need access to one String token
- * Use Scannable typeclass if you need access to the full scanner e.g. to detect encodings etc.
- */
-trait Scannable[A] {
- def apply(scanner: Scanner): A
-}
-
-object Scannable {
- def apply[A](f: Scanner => A): Scannable[A] = new Scannable[A] {
- override def apply(scanner: Scanner) = f(scanner)
- }
-
- implicit def fromRead[A](implicit read: Scanner.Read[A]): Scannable[A] =
- Scannable(s => read(s.next()))
-
- implicit def tuple2[T1, T2](implicit t1: Scannable[T1], t2: Scannable[T2]): Scannable[(T1, T2)] =
- Scannable(s => t1(s) -> t2(s))
-
- implicit def iterator[A](implicit scanner: Scannable[A]): Scannable[Iterator[A]] =
- Scannable(s => Iterator.continually(scanner(s)).withHasNext(s.hasNext))
-}
-
-trait StringSplitter {
- def split(s: String): TraversableOnce[String]
-}
-object StringSplitter {
- val default = StringSplitter.anyOf(" \t\t\n\r")
-
- /**
- * Split string on this character
- * This will return exactly 1 + n number of items where n is the number of occurence of delimiter in String s
- *
- * @param delimiter
- * @return
- */
- def on(delimiter: Char): StringSplitter = new StringSplitter {
- override def split(s: String) = new Iterator[String] {
- private[this] var i = 0
- private[this] var j = -1
- private[this] val c = delimiter.toInt
- _next()
-
- private[this] def _next() = {
- i = j + 1
- val k = s.indexOf(c, i)
- j = if (k < 0) s.length else k
- }
-
- override def hasNext = i <= s.length
-
- override def next() = {
- val res = s.substring(i, j)
- _next()
- res
- }
- }
- }
-
- /**
- * Split this string using ANY of the characters from delimiters
- *
- * @param delimiters
- * @param includeDelimiters
- * @return
- */
- def anyOf(delimiters: String, includeDelimiters: Boolean = false): StringSplitter =
- s => new StringTokenizer(s, delimiters, includeDelimiters)
-
- /**
- * Split string using a regex pattern
- *
- * @param pattern
- * @return
- */
- def regex(pattern: String): StringSplitter =
- s => s.split(pattern, -1)
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala
deleted file mode 100644
index 1da25b09..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package better.files
-
-import java.io.OutputStream
-
-/**
- * Write to multiple outputstreams at once
- * If error happens on any one while doing an operation, only the last error is reported
- * @param outs
- */
-class TeeOutputStream(outs: OutputStream*) extends OutputStream {
- override def write(b: Int) = tryAll(outs)(_.write(b))
- override def flush() = tryAll(outs)(_.flush())
- override def write(b: Array[Byte]) = tryAll(outs)(_.write(b))
- override def write(b: Array[Byte], off: Int, len: Int) = tryAll(outs)(_.write(b, off, len))
- override def close() = tryAll(outs)(_.close())
-}
-
-/**
- * A sink outputstream similar to /dev/null - just consumes everything
- */
-object NullOutputStream extends OutputStream {
- override def write(b: Int) = {}
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala
deleted file mode 100644
index be81f628..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-package better.files
-
-import java.nio.charset._
-import java.nio.{BufferOverflowException, ByteBuffer, CharBuffer}
-
-import scala.collection.JavaConverters._
-
-/**
- * A Unicode charset that handles byte-order markers
- *
- * @param underlyingCharset Use this charset if no known byte-order marker is detected; use this for encoding too
- * @param writeByteOrderMarkers If set, write BOMs while encoding
- */
-class UnicodeCharset(underlyingCharset: Charset, writeByteOrderMarkers: Boolean)
- extends Charset(underlyingCharset.name(), underlyingCharset.aliases().asScala.toArray) {
- override def newDecoder() = new UnicodeDecoder(underlyingCharset)
- override def newEncoder() = if (writeByteOrderMarkers) new BomEncoder(underlyingCharset) else underlyingCharset.newEncoder()
- override def contains(cs: Charset) = underlyingCharset.contains(cs)
-}
-
-/**
- * A Unicode decoder that uses the Unicode byte-order marker (BOM) to auto-detect the encoding
- * (if none detected, falls back on the defaultCharset). This also gets around a bug in the JDK
- * (http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058) where BOM is not consumed for UTF-8.
- * See: https://github.com/pathikrit/better-files/issues/107
- *
- * @param defaultCharset Use this charset if no known byte-order marker is detected
- */
-class UnicodeDecoder(defaultCharset: Charset) extends CharsetDecoder(defaultCharset, 1, 1) {
- import UnicodeCharset.bomTable
-
- private[this] var inferredCharset: Option[Charset] = None
-
- @annotation.tailrec
- private[this] def decode(in: ByteBuffer, out: CharBuffer, candidates: Set[Charset] = Set.empty): CoderResult = {
- if (isCharsetDetected) {
- detectedCharset().newDecoder().decode(in, out, true)
- } else if (candidates.isEmpty || !in.hasRemaining) {
- inferredCharset = Some(defaultCharset)
- in.rewind()
- decode(in, out)
- } else if (candidates.forall(c => bomTable(c).length == in.position())) {
- inferredCharset = candidates.headOption.ensuring(candidates.size == 1, "Ambiguous BOMs found")
- decode(in, out)
- } else {
- val idx = in.position()
- val byte = in.get()
- def isPossible(charset: Charset) = bomTable(charset).lift(idx).contains(byte)
- decode(in, out, candidates.filter(isPossible))
- }
- }
-
- override def decodeLoop(in: ByteBuffer, out: CharBuffer) = decode(in = in, out = out, candidates = bomTable.keySet)
-
- override def isCharsetDetected = inferredCharset.isDefined
-
- override def isAutoDetecting = true
-
- override def implReset() = inferredCharset = None
-
- override def detectedCharset() = inferredCharset.getOrElse(throw new IllegalStateException("Insufficient bytes read to determine charset"))
-}
-
-/**
- * Encoder that writes the BOM for this charset
- * @param charset
- */
-class BomEncoder(charset: Charset) extends CharsetEncoder(charset, 1, 1) {
- private[this] val bom = UnicodeCharset.bomTable.getOrElse(charset, throw new IllegalArgumentException(s"$charset does not support BOMs")).toArray
- private[this] var isBomWritten = false
-
- override def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = {
- if (!isBomWritten) {
- try {
- out.put(bom)
- } catch {
- case _: BufferOverflowException => return CoderResult.OVERFLOW
- } finally {
- isBomWritten = true
- }
- }
- charset.newEncoder().encode(in, out, true)
- }
-
- override def implReset() = isBomWritten = false
-}
-
-object UnicodeCharset {
- private[files] val bomTable: Map[Charset, IndexedSeq[Byte]] = Map(
- "UTF-8" -> IndexedSeq(0xEF, 0xBB, 0xBF),
- "UTF-16BE" -> IndexedSeq(0xFE, 0xFF),
- "UTF-16LE" -> IndexedSeq(0xFF, 0xFE),
- "UTF-32BE" -> IndexedSeq(0x00, 0x00, 0xFE, 0xFF),
- "UTF-32LE" -> IndexedSeq(0xFF, 0xFE, 0x00, 0x00)
- ).collect{case (charset, bytes) if Charset.isSupported(charset) => Charset.forName(charset) -> bytes.map(_.toByte)}
- .ensuring(_.nonEmpty, "No unicode charset detected")
-
- def apply(charset: Charset, writeByteOrderMarkers: Boolean = false): Charset =
- if (bomTable.contains(charset)) new UnicodeCharset(charset, writeByteOrderMarkers) else charset
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala
deleted file mode 100644
index 80cd5fc8..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package better.files
-
-import java.io.{OutputStream, Writer}
-import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction}
-import java.nio.{ByteBuffer, CharBuffer}
-
-import scala.annotation.tailrec
-
-/**
- * Code ported from Java to Scala:
- * https://github.com/apache/commons-io/blob/d357d9d563c4a34fa2ab3cdc68221c851a9de4f5/src/main/java/org/apache/commons/io/output/WriterOutputStream.java
- */
-class WriterOutputStream(writer: Writer, decoder: CharsetDecoder, bufferSize: Int, flushImmediately: Boolean) extends OutputStream {
-
- /**
- * CharBuffer used as output for the decoder
- */
- private[this] val decoderOut = CharBuffer.allocate(bufferSize)
-
- /**
- * ByteBuffer used as output for the decoder. This buffer can be small
- * as it is only used to transfer data from the decoder to the buffer provided by the caller.
- */
- private[this] val decoderIn = ByteBuffer.allocate(bufferSize>>4)
-
- def this(writer: Writer, bufferSize: Int = defaultBufferSize, flushImmediately: Boolean = false)(implicit charset: Charset = defaultCharset) =
- this(writer = writer, decoder = charset.newDecoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE).replaceWith("?"), bufferSize = bufferSize, flushImmediately = flushImmediately)
-
- override def write(b: Array[Byte], off: Int, len: Int) = {
- @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) {
- val c = decoderIn.remaining min len
- decoderIn.put(b, off, c)
- processInput(endOfInput = false)
- loop(off + c, len - c)
- }
- loop(off, len)
- if (flushImmediately) flushOutput()
- }
-
- override def write(b: Int) = write(Array(b.toByte))
-
- override def flush() = {
- flushOutput()
- writer.flush()
- }
-
- override def close() = {
- processInput(endOfInput = true)
- flushOutput()
- writer.close()
- }
-
- private[this] def processInput(endOfInput: Boolean) = {
- decoderIn.flip()
- @tailrec def loop(): Unit = {
- val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput)
- if (coderResult.isOverflow) {
- flushOutput()
- loop()
- } else {
- assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters")
- }
- }
- loop()
- decoderIn.compact()
- }
-
- private[this] def flushOutput(): Unit = {
- if (decoderOut.position > 0) {
- writer.write(decoderOut.array, 0, decoderOut.position)
- val _ = decoderOut.rewind()
- }
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala
deleted file mode 100644
index bef8c1ed..00000000
--- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package better
-
-import java.io.{InputStream, StreamTokenizer}
-import java.nio.charset.Charset
-
-import scala.collection.mutable
-import scala.util.{Failure, Success, Try}
-
-package object files extends Implicits {
-
- /**
- * Default array buffer size
- * Seems like a good value used by JDK: (see: java.io.BufferedInputStream.DEFAULT_BUFFER_SIZE)
- */
- val defaultBufferSize = 8192
-
- /**
- * The default charset used by better-files
- * Note: It uses java.net.charset.Charset.defaultCharset() in general but if the default supports byte-order markers,
- * it uses a more compliant version than the JDK one (see: https://github.com/pathikrit/better-files/issues/107)
- */
- val defaultCharset: Charset =
- UnicodeCharset(Charset.defaultCharset())
-
- val EOF = StreamTokenizer.TT_EOF
-
- type Files = Iterator[File]
-
- /**
- * If bufferSize is set to less than or equal to 0, we don't buffer
- * @param bufferSize
- * @return
- */
- def resourceAsStream(name: String, bufferSize: Int = defaultBufferSize): InputStream =
- currentClassLoader().getResourceAsStream(name).buffered(bufferSize)
-
- // Some utils:
- private[files] def newMultiMap[A, B]: mutable.MultiMap[A, B] = new mutable.HashMap[A, mutable.Set[B]] with mutable.MultiMap[A, B]
-
- @inline private[files] def when[A](condition: Boolean)(f: => A): Option[A] = if (condition) Some(f) else None
-
- @inline private[files] def repeat[U](n: Int)(f: => U): Unit = (1 to n).foreach(_ => f)
-
- private[files] def currentClassLoader() = Thread.currentThread().getContextClassLoader
-
- private[files] def eofReader(read: => Int): Iterator[Int] = Iterator.continually(read).takeWhile(_ != EOF)
-
- /**
- * Utility to apply f on all xs skipping over errors
- * Throws the last error that happened
- * *
- * @param xs
- * @param f
- * @tparam A
- */
- private[files] def tryAll[A](xs: Seq[A])(f: A => Unit): Unit = {
- val res = xs.foldLeft(Option.empty[Throwable]) {
- case (currError, a) =>
- Try(f(a)) match {
- case Success(_) => currError
- case Failure(e) => Some(e)
- }
- }
- res.foreach(throwable => throw throwable)
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala
deleted file mode 100644
index 769cfbf9..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package better.files
-
-import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
-
-import scala.concurrent.duration._
-import scala.language.postfixOps
-import scala.util.Properties.{isLinux, isMac}
-
-trait CommonSpec extends FlatSpec with BeforeAndAfterEach with Matchers {
- val isCI = sys.env.get("CI").exists(_.toBoolean)
-
- val isUnixOS = isLinux || isMac
-
- def sleep(t: FiniteDuration = 2 second) = Thread.sleep(t.toMillis)
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala
deleted file mode 100644
index 36379eec..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package better.files
-
-import scala.concurrent.duration._
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.language.postfixOps
-
-class FileMonitorSpec extends CommonSpec {
- "file watcher" should "watch single files" in {
- assume(isCI)
- val file = File.newTemporaryFile(suffix = ".txt").writeText("Hello world")
-
- var log = List.empty[String]
- def output(msg: String) = synchronized {
- println(msg)
- log = msg :: log
- }
- /***************************************************************************/
- val watcher = new FileMonitor(file) {
- override def onCreate(file: File, count: Int) = output(s"$file got created $count time(s)")
- override def onModify(file: File, count: Int) = output(s"$file got modified $count time(s)")
- override def onDelete(file: File, count: Int) = output(s"$file got deleted $count time(s)")
- }
- watcher.start()
- /***************************************************************************/
- sleep(5 seconds)
- file.writeText("hello world"); sleep()
- file.clear(); sleep()
- file.writeText("howdy"); sleep()
- file.delete(); sleep()
- sleep(5 seconds)
- val sibling = (file.parent / "t1.txt").createIfNotExists(); sleep()
- sibling.writeText("hello world"); sleep()
- sleep(20 seconds)
-
- log.size should be >= 2
- log.exists(_ contains sibling.name) shouldBe false
- log.forall(_ contains file.name) shouldBe true
- }
-
- ignore should "watch directories to configurable depth" in {
- assume(isCI)
- val dir = File.newTemporaryDirectory()
- (dir/"a"/"b"/"c"/"d"/"e").createDirectories()
- var log = List.empty[String]
- def output(msg: String) = synchronized(log = msg :: log)
-
- val watcher = new FileMonitor(dir, maxDepth = 2) {
- override def onCreate(file: File, count: Int) = output(s"Create happened on ${file.name} $count times")
- }
- watcher.start()
-
- sleep(5 seconds)
- (dir/"a"/"b"/"t1").touch().writeText("hello world"); sleep()
- (dir/"a"/"b"/"c"/"d"/"t1").touch().writeText("hello world"); sleep()
- sleep(10 seconds)
-
- withClue(log) {
- log.size shouldEqual 1
- }
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala
deleted file mode 100644
index f197575a..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala
+++ /dev/null
@@ -1,549 +0,0 @@
-package better.files
-
-import java.nio.file.{FileAlreadyExistsException, FileSystems, Files => JFiles}
-
-import better.files.Dsl._
-import better.files.File.{home, root}
-
-import scala.language.postfixOps
-import scala.util.Try
-
-class FileSpec extends CommonSpec {
-
- /** try to cope with windows, which will return e.g. c:\ as root */
- val rootStr = FileSystems.getDefault.getRootDirectories.iterator().next().toString
- import java.io.File.{separator, separatorChar}
-
- /**
- * Helper for unix -> windows path references (as strings).
- *
- * @param path as unix path
- * @return path in native format
- */
- def unixToNative(path: String): String = {
- if (isUnixOS) {
- path
- } else {
- path
- .replaceFirst("^/", rootStr.replaceAllLiterally("\\", "\\\\")) // we must escape '\' in C:\
- .replaceAllLiterally("/", separator)
- }
- }
-
- var testRoot: File = _ //TODO: Get rid of mutable test vars
- var fa: File = _
- var a1: File = _
- var a2: File = _
- var t1: File = _
- var t2: File = _
- var t3: File = _
- var fb: File = _
- var b1: File = _
- var b2: File = _
-
- /**
- * Setup the following directory structure under root
- * /a
- * /a1
- * /a2
- * a21.txt
- * a22.txt
- * /b
- * b1/ --> ../a1
- * b2.txt --> ../a2/a22.txt
- */
-
- override def beforeEach() = {
- testRoot = File.newTemporaryDirectory("better-files")
- fa = testRoot/"a"
- a1 = testRoot/"a"/"a1"
- a2 = testRoot/"a"/"a2"
- t1 = testRoot/"a"/"a1"/"t1.txt"
- t2 = testRoot/"a"/"a1"/"t2.txt"
- t3 = testRoot/"a"/"a1"/"t3.scala.txt"
- fb = testRoot/"b"
- b1 = testRoot/"b"/"b1"
- b2 = testRoot/'b/"b2.txt"
- Seq(a1, a2, fb) foreach mkdirs
- Seq(t1, t2) foreach touch
- }
-
- override def afterEach() = {
- val _ = rm(testRoot)
- }
-
- override def withFixture(test: NoArgTest) = {
- //val before = File.numberOfOpenFileDescriptors()
- val result = super.withFixture(test)
- //val after = File.numberOfOpenFileDescriptors()
- //assert(before == after, s"Resource leakage detected in $test")
- result
- }
-
- "files" can "be instantiated" in {
- import java.io.{File => JFile}
-
- val f = File("/User/johndoe/Documents") // using constructor
- val f1: File = file"/User/johndoe/Documents" // using string interpolator
- val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file
- val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala
- val f4: File = root/"User"/"johndoe"/"Documents" // using root helper to start from root
- //val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"`
- val f6: File = "/User"/"johndoe"/"Documents" // using file separator DSL
- val f7: File = home/"Documents"/"presentations"/`..` // Use `..` to navigate up to parent
- val f8: File = root/"User"/"johndoe"/"Documents"/ `.`
- val f9: File = File(f.uri)
- val f10: File = File("../a") // using a relative path
- Seq(f, f1, f2, f3, f4,/* f5,*/ f6, f7, f8, f9, f10) foreach {f =>
- f.pathAsString should not include ".."
- }
-
- root.toString shouldEqual rootStr
- home.toString.count(_ == separatorChar) should be > 1
- (root/"usr"/"johndoe"/"docs").toString shouldEqual unixToNative("/usr/johndoe/docs")
- Seq(f, f1, f2, f4, /*f5,*/ f6, f8, f9).map(_.toString).toSet shouldBe Set(f.toString)
- }
-
- it can "be instantiated with anchor" in {
- // testRoot / a / a1 / t1.txt
- val basedir = a1
- File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
- File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
-
- File(basedir, "rel/path/to/loc").toString should be (unixToNative(basedir.toString + "/rel/path/to/loc"))
- File(basedir, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
- File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
-
- val baseref = t1
- File(baseref, "/abs/path/to/loc").toString should be (unixToNative("/abs/path/to/loc"))
- File(baseref, "/abs", "path", "to", "loc").toString should be (unixToNative("/abs/path/to/loc"))
-
- File(baseref, "rel/path/to/loc").toString should be (unixToNative(a1.toString + "/rel/path/to/loc"))
- File(baseref, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
- File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
- }
-
- it can "be instantiated with non-existing abs anchor" in {
- val anchorStr = "/abs/to/nowhere"
- val anchorStr_a = anchorStr + "/a"
- val basedir = File(anchorStr_a + "/last")
-
- File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
- File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
-
- File(basedir, "rel/path/to/loc").toString should be (unixToNative(anchorStr_a + "/rel/path/to/loc"))
- File(basedir, "../rel/path/to/loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc"))
- File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc"))
- }
-
- it can "be instantiated with non-existing relative anchor" in {
- val relAnchor = File("rel/anc/b/last")
- val basedir = relAnchor
-
- File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
- File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
-
- File(basedir, "rel/path/to/loc").toString should be (unixToNative(File("rel/anc/b").toString + "/rel/path/to/loc"))
- File(basedir, "../rel/path/to/loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc"))
- File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc"))
- }
-
- it should "do basic I/O" in {
- t1 < "hello"
- t1.contentAsString shouldEqual "hello"
- t1.appendLine() << "world"
- (t1!) shouldEqual String.format("hello%nworld%n")
- t1.chars.toStream should contain theSameElementsInOrderAs String.format("hello%nworld%n").toSeq
- "foo" `>:` t1
- "bar" >>: t1
- t1.contentAsString shouldEqual String.format("foobar%n")
- t1.appendLines("hello", "world")
- t1.contentAsString shouldEqual String.format("foobar%nhello%nworld%n")
- t2.writeText("hello").appendText("world").contentAsString shouldEqual "helloworld"
-
- (testRoot/"diary")
- .createIfNotExists()
- .appendLine()
- .appendLines("My name is", "Inigo Montoya")
- .printLines(Iterator("x", 1))
- .lines.toSeq should contain theSameElementsInOrderAs Seq("", "My name is", "Inigo Montoya", "x", "1")
- }
-
- it should "handle BOM" in {
- val lines = Seq("Line 1", "Line 2")
- val expectedContent = lines.mkString(start = "", sep = "\n", end = "\n")
- File.temporaryFile() foreach {file =>
- file.appendLines(lines: _*)(charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true))
- file.contentAsString(charset = "UTF-8") should not equal expectedContent
- file.contentAsString shouldEqual expectedContent
- }
- }
-
-// TODO: Do not depend on self-referential tests
-// it should "glob" in {
-// assume(isCI)
-// a1.glob("*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
-// //a1.glob("*.txt").map(_.name).toSeq shouldEqual Seq("t1.txt", "t2.txt")
-// testRoot.glob("**/*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
-// val path = testRoot.path.toString.ensuring(testRoot.path.isAbsolute)
-// File(path).glob("**/*.{txt}").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
-// ("benchmarks"/"src").glob("**/*.{scala,java}").map(_.name).toSeq.sorted shouldEqual Seq("ArrayBufferScanner.java", "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
-// ("benchmarks"/"src").glob("**/*.{scala}").map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
-// ("benchmarks"/"src").glob("**/*.scala").map(_.name).toSeq.sorted shouldEqual Seq("Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
-// ("benchmarks"/"src").listRecursively.filter(_.extension.contains(".scala")).map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
-// ls("core"/"src"/"test") should have length 1
-// ("core"/"src"/"test").walk(maxDepth = 1) should have length 2
-// ("core"/"src"/"test").walk(maxDepth = 0) should have length 1
-// ("core"/"src"/"test").walk() should have length (("core"/"src"/"test").listRecursively.length + 1L)
-// ls_r("core"/"src"/"test") should have length 8
-// }
-
- it should "support names/extensions" in {
- assume(isCI)
- fa.extension shouldBe None
- fa.nameWithoutExtension shouldBe fa.name
- t1.extension shouldBe Some(".txt")
- t1.extension(includeDot = false) shouldBe Some("txt")
- t3.extension shouldBe Some(".txt")
- t3.extension(includeAll = true) shouldBe Some(".scala.txt")
- t3.extension(includeDot = false, includeAll = true) shouldBe Some("scala.txt")
- t1.name shouldBe "t1.txt"
- t1.nameWithoutExtension shouldBe "t1"
- t1.changeExtensionTo(".md").name shouldBe "t1.md"
- (t1 < "hello world").changeExtensionTo(".txt").name shouldBe "t1.txt"
- t1.contentType shouldBe Some("text/plain")
- ("src" / "test").toString should include ("better-files")
- (t1 == t1.toString) shouldBe false
- (t1.contentAsString == t1.toString) shouldBe false
- (t1 == t1.contentAsString) shouldBe false
- t1.root shouldEqual fa.root
- file"/tmp/foo.scala.html".extension shouldBe Some(".html")
- file"/tmp/foo.scala.html".nameWithoutExtension shouldBe "foo"
- file"/tmp/foo.scala.html".nameWithoutExtension(includeAll = false) shouldBe "foo.scala"
- root.name shouldBe ""
- }
-
- it should "hide/unhide" in {
- t1.isHidden shouldBe false
- }
-
- it should "support parent/child" in {
- fa isChildOf testRoot shouldBe true
- testRoot isChildOf root shouldBe true
- root isChildOf root shouldBe true
- fa isChildOf fa shouldBe true
- b2 isChildOf b2 shouldBe false
- b2 isChildOf b2.parent shouldBe true
- root.parent shouldBe null
- }
-
- it should "support siblings" in {
- (file"/tmp/foo.txt" sibling "bar.txt").pathAsString shouldBe unixToNative("/tmp/bar.txt")
- fa.siblings.toList.map(_.name) shouldBe List("b")
- fb isSiblingOf fa shouldBe true
- }
-
- it should "support sorting" in {
- testRoot.list.toSeq.sorted(File.Order.byName) should not be empty
- testRoot.list.toSeq.max(File.Order.bySize).isEmpty shouldBe false
- Seq(fa, fb).contains(testRoot.list.toSeq.min(File.Order.byDepth)) shouldBe true
- sleep()
- t2.appendLine("modified!")
- a1.list.toSeq.min(File.Order.byModificationTime) shouldBe t1
- testRoot.list.toSeq.sorted(File.Order.byDirectoriesFirst) should not be empty
- }
-
- it must "have .size" in {
- fb.isEmpty shouldBe true
- t1.size shouldBe 0
- t1.writeText("Hello World")
- t1.size should be > 0L
- testRoot.size should be > (t1.size + t2.size)
- }
-
- it should "set/unset permissions" in {
- assume(isCI)
- import java.nio.file.attribute.PosixFilePermission
- //an[UnsupportedOperationException] should be thrownBy t1.dosAttributes
- t1.permissions()(PosixFilePermission.OWNER_EXECUTE) shouldBe false
-
- chmod_+(PosixFilePermission.OWNER_EXECUTE, t1)
- t1.testPermission(PosixFilePermission.OWNER_EXECUTE) shouldBe true
- t1.permissionsAsString shouldBe "rwxrw-r--"
-
- chmod_-(PosixFilePermission.OWNER_EXECUTE, t1)
- t1.isOwnerExecutable shouldBe false
- t1.permissionsAsString shouldBe "rw-rw-r--"
- }
-
- it should "support equality" in {
- fa shouldEqual (testRoot/"a")
- fa shouldNot equal (testRoot/"b")
- val c1 = fa.md5
- fa.md5 shouldEqual c1
- t1 < "hello"
- t2 < "hello"
- (t1 == t2) shouldBe false
- (t1 === t2) shouldBe true
- t2 < "hello world"
- (t1 == t2) shouldBe false
- (t1 === t2) shouldBe false
- fa.md5 should not equal c1
- }
-
- it should "create if not exist directory structures" in {
- File.usingTemporaryDirectory() {dir =>
- val file = dir / "a" / "b" / "c.txt"
- assert(file.notExists)
- assert(file.parent.notExists)
- file.createIfNotExists(createParents = true)
- assert(file.exists)
- assert(file.parent.exists)
- file.writeText("Hello world")
- assert(file.contentAsString === "Hello world")
- }
- }
-
- it should "treat symlinks transparently in convenience methods" in {
- File.usingTemporaryDirectory() {dir =>
- val realDir = dir / "a"
- val dirSymlink = dir / "b"
- realDir.createDirectory()
- JFiles.createSymbolicLink(dirSymlink.path, realDir.path)
- dirSymlink.createDirectories()
- a[FileAlreadyExistsException] should be thrownBy dirSymlink.createDirectories()(linkOptions = File.LinkOptions.noFollow)
- /*a[FileAlreadyExistsException] shouldNot be thrownBy*/ dirSymlink.createDirectories()
- }
- }
-
- it should "support chown/chgrp" in {
- fa.ownerName should not be empty
- fa.groupName should not be empty
- a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chown("hitler", fa)
- //a[java.nio.file.FileSystemException] should be thrownBy chown("root", fa)
- a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chgrp("cool", fa)
- //a[java.nio.file.FileSystemException] should be thrownBy chown("admin", fa)
- //fa.chown("nobody").chgrp("nobody")
- stat(t1) shouldBe a[java.nio.file.attribute.PosixFileAttributes]
- }
-
- it should "detect file locks" in {
- File.temporaryFile() foreach {file =>
- def lockInfo() = file.isReadLocked() -> file.isWriteLocked()
- // TODO: Why is file.isReadLocked() should be false?
- lockInfo() shouldBe (true -> false)
- val channel = file.newRandomAccess(File.RandomAccessMode.readWrite).getChannel
- val lock = channel.tryLock()
- lockInfo() shouldBe (true -> true)
- lock.release()
- channel.close()
- lockInfo() shouldBe (true -> false)
- }
- }
-
- it should "support ln/cp/mv" in {
- val magicWord = "Hello World"
- t1 writeText magicWord
- // link
- // to relative target
- val b0 = b1.sibling("b0")
- java.nio.file.Files.createSymbolicLink(b0.path, java.nio.file.Paths.get("b1"))
- b0.symbolicLink should not be empty
- b0.symbolicLink.get.path.isAbsolute shouldBe false
- // to absolute target
- b1.linkTo(a1, symbolic = true)
- ln_s(b2, t2)
- (b1 / "t1.txt").contentAsString shouldEqual magicWord
- // copy
- b2.contentAsString shouldBe empty
- t1.md5 should not equal t2.md5
- a[java.nio.file.FileAlreadyExistsException] should be thrownBy (t1 copyTo t2)
- t1.copyTo(t2, overwrite = true)
- t1.exists shouldBe true
- t1.md5 shouldEqual t2.md5
- b2.contentAsString shouldEqual magicWord
- // rename
- t2.name shouldBe "t2.txt"
- t2.exists shouldBe true
- val t3 = t2 renameTo "t3.txt"
- t3.name shouldBe "t3.txt"
- t2.exists shouldBe false
- t3.exists shouldBe true
- // move
- t3 moveTo t2
- t2.exists shouldBe true
- t3.exists shouldBe false
- }
-
- it should "support creating hard links with ln" in {
- assume(isUnixOS)
- val magicWord = "Hello World"
- t1 writeText magicWord
- t1.linkTo(t3, symbolic = false)
- (a1 / "t3.scala.txt").contentAsString shouldEqual magicWord
- }
-
- it should "support custom charset" in {
- import java.nio.charset.Charset
- t1.writeText("你好世界")(charset = "UTF8")
- t1.contentAsString(charset = "ISO-8859-1") should not equal "你好世界"
- t1.contentAsString(charset = "UTF8") shouldEqual "你好世界"
- val c1 = md5(t1)
- val c2 = t1.overwrite("你好世界")(File.OpenOptions.default, Charset.forName("ISO-8859-1")).md5
- c1 should not equal c2
- c2 shouldEqual t1.checksum("md5")
- }
-
- it should "support hashing algos" in {
- implicit val charset = java.nio.charset.StandardCharsets.UTF_8
- t1.writeText("")
- md5(t1) shouldEqual "D41D8CD98F00B204E9800998ECF8427E"
- sha1(t1) shouldEqual "DA39A3EE5E6B4B0D3255BFEF95601890AFD80709"
- sha256(t1) shouldEqual "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"
- sha512(t1) shouldEqual "CF83E1357EEFB8BDF1542850D66D8007D620E4050B5715DC83F4A921D36CE9CE47D0D13C5D85F2B0FF8318D2877EEC2F63B931BD47417A81A538327AF927DA3E"
- }
-
- it should "compute correct checksum for non-zero length string" in {
- implicit val charset = java.nio.charset.StandardCharsets.UTF_8
- t1.writeText("test")
- md5(t1) shouldEqual "098F6BCD4621D373CADE4E832627B4F6"
- sha1(t1) shouldEqual "A94A8FE5CCB19BA61C4C0873D391E987982FBBD3"
- sha256(t1) shouldEqual "9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08"
- sha512(t1) shouldEqual "EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF"
- }
-
- it should "copy" in {
- (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World")
- (fb / "t5" / "t5.txt").createIfNotExists(createParents = true).writeText("Scala Awesome")
- (fb / "t5" / "t3").notExists shouldBe true
- cp(fb / "t3", fb / "t5")
- (fb / "t3").exists shouldBe true
- (fb / "t5" / "t3").exists shouldBe true
- (fb / "t5" / "t5.txt").contentAsString shouldEqual "Scala Awesome"
- assert((fb / "t3") === (fb / "t5" / "t3"))
- }
-
- it should "move" in {
- (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World")
- mv(fb / "t3", fb / "t5")
- (fb / "t5" / "t4.txt").contentAsString shouldEqual "Hello World"
- (fb / "t3").notExists shouldBe true
- }
-
- it should "delete" in {
- fb.exists shouldBe true
- fb.delete()
- fb.exists shouldBe false
- }
-
- it should "touch" in {
- (fb / "z1").exists shouldBe false
- (fb / "z1").isEmpty shouldBe true
- (fb / "z1").touch()
- (fb / "z1").exists shouldBe true
- (fb / "z1").isEmpty shouldBe true
- Thread.sleep(1000)
- (fb / "z1").lastModifiedTime.getEpochSecond should be < (fb / "z1").touch().lastModifiedTime.getEpochSecond
- }
-
- it should "md5" in {
- val h1 = t1.hashCode
- val actual = (t1 < "hello world").md5
- val h2 = t1.hashCode
- h1 shouldEqual h2
- import scala.sys.process._
- val expected = Try(s"md5sum ${t1.path}" !!) getOrElse (s"md5 ${t1.path}" !!)
- expected.toUpperCase should include (actual)
- actual should not equal h1
- }
-
- it should "support file in/out" in {
- t1 < "hello world"
- for {
- in <- t1.inputStream
- out <- t2.outputStream
- } in.pipeTo(out)
- t2.contentAsString shouldEqual "hello world"
- t2.newInputStream.asString() shouldEqual "hello world"
- }
-
- it should "zip/unzip directories" in {
- t1.writeText("hello world")
- val zipFile = testRoot.zip()
- zipFile.size should be > 100L
- zipFile.name should endWith (".zip")
-
- def test(output: File) = {
- (output/"a"/"a1"/"t1.txt").contentAsString shouldEqual "hello world"
- output === testRoot shouldBe true
- (output/"a"/"a1"/"t1.txt").overwrite("hello")
- (output !== testRoot) shouldBe true
- }
-
- test(zipFile.unzip())
- test(zipFile.streamedUnzip())
- }
-
- it should "zip/unzip single files" in {
- t1.writeText("hello world")
- val zipFile = t1.zip()
- zipFile.size should be > 100L
- zipFile.name should endWith (".zip")
- val destination = unzip(zipFile)(File.newTemporaryDirectory())
- (destination/"t1.txt").contentAsString shouldEqual "hello world"
- }
-
- it should "gzip" in {
- for {
- writer <- (testRoot / "test.gz").newOutputStream.buffered.gzipped.writer.buffered.autoClosed
- } writer.write("Hello world")
-
- (testRoot / "test.gz").inputStream.map(_.buffered.gzipped.buffered.lines.toSeq) shouldEqual Seq("Hello world")
- }
-
- it should "read bytebuffers" in {
- t1.writeText("hello world")
- for {
- fileChannel <- t1.newFileChannel.autoClosed
- } fileChannel.toMappedByteBuffer.remaining() shouldEqual t1.bytes.length
-
- (t2 writeBytes t1.bytes).contentAsString shouldEqual t1.contentAsString
- }
-
- it should "convert readers to inputstreams and writers to outputstreams" in {
- File.temporaryFile() foreach {f =>
- val text = List.fill(10000)("hello world")
- for {
- writer <- f.bufferedWriter
- out <- writer.outputstream.autoClosed
- } out.write(text.mkString("\n").getBytes)
- val t = f.bufferedReader.flatMap(_.toInputStream.lines)
- t.toList shouldEqual text
- }
- }
-
- it should "serialize/deserialize" in {
- class Person(val name: String, val age: Int) extends Serializable
- val p1 = new Person("Chris", 34)
-
- File.temporaryFile() foreach {f => //serialization round-trip test
- assert(f.isEmpty)
- f.writeSerialized(p1)
- assert(f.nonEmpty)
- val p2: Person = f.readDeserialized[Person]
- assert(p1.name === p2.name)
- assert(p1.age === p2.age)
-
- val p3 = f.inputStream.map(_.asObjectInputStreamUsingClassLoader().deserialize[Person])
- assert(p3.name === p2.name)
- assert(p3.age === p2.age)
- }
- }
-
- it should "count number of open file descriptors" in {
- val expected = java.lang.management.ManagementFactory.getOperatingSystemMXBean
- .asInstanceOf[com.sun.management.UnixOperatingSystemMXBean]
- .getOpenFileDescriptorCount
- assert((File.numberOfOpenFileDescriptors() - expected).abs <= 10)
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala
deleted file mode 100644
index 1acf7d7b..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala
+++ /dev/null
@@ -1,360 +0,0 @@
-package better.files
-
-import better.files.Dsl._
-
-import java.io.File.separator
-
-import org.scalatest.BeforeAndAfterAll
-
-class GlobSpec extends CommonSpec with BeforeAndAfterAll {
- var testDir: File = _
- var globTree: File = _
- var specialTree: File = _
-
- var regexWildcardPath: File = _
- var globWildcardPath: File = _
- //
- // Test target for glob
- //
- // tests/
- // ├── globtree
- // │   ├── a
- // │   │   ├── a2
- // │   │   │   ├── a2.txt
- // │   │   │   └── x.txt
- // │   │   ├── a.not
- // │   │   ├── a.txt
- // │   │   └── x.txt
- // │   ├── b
- // │   │   ├── a
- // │   │   │   └── ba.txt
- // │   │   └── b.txt
- // │   ├── c
- // │   │   ├── c.txt
- // │   │   └── x.txt
- // │   ├── empty
- // │   ├── link_to_a -> a
- // │   ├── one.txt
- // │   ├── readme.md
- // │   ├── three.txt
- // │   └── two.txt
- // └── special
- // ├── .*
- // │   └── a
- // │ └── a.txt
- // └── **
- // └── a
- // └── a.txt
- //
- override def beforeAll() = {
- testDir = File.newTemporaryDirectory("glob-tests")
- globTree = testDir / "globtree"
-
- mkdir(globTree)
- val a = mkdir(globTree / "a" )
- mkdir(globTree / "a" / "a2")
- touch(globTree / "a" / "a2" / "a2.txt")
- touch(globTree / "a" / "a2" / "x.txt")
- touch(globTree / "a" / "a.not")
- touch(globTree / "a" / "a.txt")
- touch(globTree / "a" / "x.txt")
-
- mkdir(globTree / "b" )
- mkdir(globTree / "b" / "a")
- touch(globTree / "b" / "a" / "ba.txt")
- touch(globTree / "b" / "b.txt")
-
- mkdir(globTree / "c" )
- touch(globTree / "c" / "c.txt")
- touch(globTree / "c" / "x.txt")
-
- mkdir(globTree / "empty" )
-
- if (isUnixOS) {
- ln_s(globTree / "link_to_a", a)
- }
-
- touch(globTree / "one.txt")
- touch(globTree / "two.txt")
- touch(globTree / "three.txt")
- touch(globTree / "readme.md")
-
- // Special target with path name components as wildcards
- specialTree = testDir / "special"
-
- // Windows does not support '*' in file names
- if (isUnixOS) {
- // regex
- mkdir(specialTree)
- regexWildcardPath = mkdir(specialTree / ".*")
- mkdir(specialTree / ".*" / "a")
- touch(specialTree / ".*" / "a" / "a.txt")
-
- // glob
- globWildcardPath = mkdir(specialTree / "**")
- mkdir(specialTree / "**" / "a")
- touch(specialTree / "**" / "a" / "a.txt")
- }
-
- ()
- }
-
- override def afterAll() = {
- val _ = rm(testDir)
- }
-
- /**
- * Helper in case something goes wrong...
- */
- private def debugPaths(files: Seq[File]): String = {
- files
- .sortBy(_.path)
- .map(files => s"PATH: ${files.toString}")
- .mkString(s"SIZE: ${files.size}\n", "\n", "\n")
- }
-
- /**
- * Verity if candidates are equal with references.
- * Does not accept empty sets, use assert(paths.isEmpty) for that.
- *
- * @param pathsIt candidates
- * @param refPaths references
- * @param baseDir basedir to for creating full path of references
- */
- private def verify(pathsIt: Files, refPaths: Seq[String], baseDir: File) = {
- val paths = pathsIt.toSeq
- val refs = refPaths
- .map(refPath => baseDir/refPath)
- .sortBy(_.path)
-
- withClue("Result: " + debugPaths(paths) + "Reference: " + debugPaths(refs)) {
- assert(paths.length === refPaths.length)
- assert(paths.nonEmpty)
- paths.sortBy(_.path).zip(refs).foreach({case (path, refPath) => assert(path === refPath)})
- }
- }
-
- "glob" should "match plain file (e.g. 'file.ext')" in {
- val refPaths = Seq(
- "one.txt"
- )
- val paths = globTree.glob("one.txt")
- verify(paths, refPaths, globTree)
- }
- it should "match path without glob (e.g. 'sub/dir/file.ext')" in {
- val refPaths = Seq(
- "a/a.txt"
- )
- val paths = globTree.glob("a/a.txt")
- verify(paths, refPaths, globTree)
- }
-
- it should "match file-glob (e.g. '*.ext')" in {
- val refPaths = Seq(
- "one.txt",
- "two.txt",
- "three.txt"
- )
- val paths = globTree.glob("*.txt")
- verify(paths, refPaths, globTree)
- assert(globTree.glob("*.txt", includePath = false)(File.PathMatcherSyntax.glob).isEmpty)
- }
-
- it should "match fixed sub dir and file-glob (e.g. '**/subdir/*.ext')" in {
- // TODO: DOC: why top level 'a' is not matched
- val refPaths = List(
- "b/a/ba.txt"
- )
- val paths = globTree.glob("**/a/*.txt")
- verify(paths, refPaths, globTree)
- }
-
- it should "use parent dir for matching (e.g. plain 'subdir/*.ext')" in {
- // e.g. check that b nor c are matched, nor b/a
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt"
- )
- val paths = globTree.glob("a/*.txt")
- verify(paths, refPaths, globTree)
- }
-
- it should "match sub-directory glob with plain file (e.g. 'subdir/*/file.ext')" in {
- val refPaths = Seq(
- "a/x.txt",
- "c/x.txt"
- )
- val paths = testDir.glob("globtree/*/x.txt")
- verify(paths, refPaths, globTree)
- }
-
- it should "match sub-directory glob with file-glob (e.g. 'subdir/*/*.ext')" in {
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt",
- "c/c.txt",
- "c/x.txt",
- "b/b.txt"
- )
- val paths = testDir.glob("globtree/*/*.txt")
- verify(paths, refPaths, globTree)
- }
-
- it should "match deep sub-directory glob with plain file (e.g. 'subdir/**/file.ext')" in {
- val refPaths = Seq(
- "a/a2/x.txt",
- "a/x.txt",
- "c/x.txt"
- )
- val p1s = globTree.glob("**/x.txt")
- verify(p1s, refPaths, globTree)
-
- val p2s = testDir.glob("globtree/**/x.txt")
- verify(p2s, refPaths, globTree)
- }
-
- it should "match deep sub-directory glob with file-glob (e.g. 'subdir/**/*.ext')" in {
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt",
- "a/a2/x.txt",
- "a/a2/a2.txt",
- "c/x.txt",
- "c/c.txt",
- "b/b.txt",
- "b/a/ba.txt"
- )
- val p1s = globTree.glob("**/*.txt")
- verify(p1s, refPaths, globTree)
-
- val p2s = testDir.glob("globtree/**/*.txt")
- verify(p2s, refPaths, globTree)
- }
-
- it should "match deep file-glob (e.g. 'subdir/**.ext')" in {
- val refPaths = Seq(
- "one.txt",
- "two.txt",
- "three.txt",
- "a/a.txt",
- "a/x.txt",
- "a/a2/x.txt",
- "a/a2/a2.txt",
- "b/a/ba.txt",
- "b/b.txt",
- "c/x.txt",
- "c/c.txt"
- )
- val p1s = globTree.glob("**.txt")
- verify(p1s, refPaths, globTree)
-
- val p2s = testDir.glob("globtree/**.txt")
- verify(p2s, refPaths, globTree)
- }
-
- it should "match everything (e.g. 'subdir/**')" in {
- val refPaths = List(
- "a",
- "a/a.not",
- "a/a.txt",
- "a/a2",
- "a/a2/a2.txt",
- "a/a2/x.txt",
- "a/x.txt",
- "b",
- "b/a",
- "b/a/ba.txt",
- "b/b.txt",
- "c",
- "c/c.txt",
- "c/x.txt",
- "empty",
- "one.txt",
- "readme.md",
- "three.txt",
- "two.txt") ++
- when(isUnixOS)("link_to_a")
-
- val paths = testDir.glob("globtree/**")
- verify(paths, refPaths, globTree)
- }
-
- it should "work with links (e.g. 'link_to_a/**.txt')" in {
- assume(isUnixOS)
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt",
- "a/a2/x.txt",
- "a/a2/a2.txt"
- )
-
- // TODO: DOC: File behaviour, links are resolved (abs + normalized path)
-
- val p1s = globTree.glob("link_to_a/**.txt")(visitOptions = File.VisitOptions.follow)
- verify(p1s, refPaths, globTree)
-
- val p2s = globTree.glob("link_to_a/**.txt").toSeq
- assert(p2s.isEmpty)
-
- val p3s = testDir.glob("globtree/link_to_a/**.txt")(visitOptions = File.VisitOptions.follow)
- verify(p3s, refPaths, globTree)
-
- val p4s = testDir.glob("globtree/link_to_a/**.txt")
- assert(p4s.isEmpty)
- }
-
- it should "not use dir name as wildcard (e.g. dirname is **)" in {
- assume(isUnixOS)
- val d = globWildcardPath // "path" / "with" / "**"
- val paths = d.glob("*.txt")
-
- assert(paths.isEmpty)
- }
-
- "Regex" should "match all txt-files under sub-directory (e.g. '.*/.*\\\\.txt')" in {
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt",
- "a/a2/x.txt",
- "a/a2/a2.txt",
- "c/x.txt",
- "c/c.txt",
- "b/b.txt",
- "b/a/ba.txt"
- )
- val paths = globTree.glob(".*" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex)
-
- verify(paths, refPaths, globTree)
- }
-
- it should "match the same if `Regex` is used" in {
- val pattern = (".*" + separator + ".*\\.txt").r
-
- val pathsGlob = globTree.glob(pattern.regex)(File.PathMatcherSyntax.regex)
- val pathsRegex = globTree.globRegex(pattern)
-
- verify(pathsRegex, pathsGlob.toSeq.map(_.toString), globTree)
-
- }
-
- it should "use parent dir for matching (e.g. plain 'subdir/*.ext' instead of '**/subdir/*.ext)" in {
- // e.g. check that b nor c are matched, nor b/a
- val refPaths = Seq(
- "a/a.txt",
- "a/x.txt",
- "a/a2/a2.txt",
- "a/a2/x.txt"
- )
- val paths = globTree.glob("a" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex)
-
- verify(paths, refPaths, globTree)
- assert(globTree.glob("a/.*\\.txt", includePath = false)(File.PathMatcherSyntax.regex).isEmpty)
- }
-
- it should "not use dir name as wildcard (e.g. dirname is .*)" in {
- assume(isUnixOS)
- val d = regexWildcardPath // "path" / "with" / ".*"
- val paths = d.glob("a\\.txt")(File.PathMatcherSyntax.regex)
- assert(paths.isEmpty)
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala
deleted file mode 100644
index 554f5358..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-package better.files
-
-import org.scalatest.matchers.{MatchResult, Matcher}
-
-import scala.reflect.ClassTag
-import scala.util.control.ControlThrowable
-
-class ManagedResourceSpec extends CommonSpec {
- // Test classes
-
- private class TestDisposable extends AutoCloseable {
- var closeCount = 0
-
- override def close(): Unit =
- closeCount += 1
- }
-
- private class TestDisposableThatThrows extends TestDisposable {
- override def close(): Unit = {
- super.close()
- throw new TestDisposeException
- }
- }
-
- private class TestDisposableThatThrowsFatal extends TestDisposable {
- override def close(): Unit = {
- super.close()
- throw new TestDisposeFatalException
- }
- }
-
- private class TestEvalException extends Exception
- private class TestDisposeException extends Exception
- private class TestDisposeFatalException extends Exception with ControlThrowable
-
- // Custom matchers
-
- private class HaveSuppressedMatcher(classes: Class[_ <: Throwable]*) extends Matcher[Throwable] {
- override def apply(left: Throwable): MatchResult = {
- MatchResult(
- (classes corresponds left.getSuppressed) {
- (clazz, suppressed) => clazz isInstance suppressed
- },
- s"had suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}",
- s"had not suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}"
- )
- }
- }
-
- private def haveSuppressed[E <: Throwable](implicit ct: ClassTag[E]) =
- new HaveSuppressedMatcher(ct.runtimeClass.asInstanceOf[Class[_ <: Throwable]])
-
- // Test body
-
- behavior of "managed resources"
-
- it should "map correctly" in {
- val t = new TestDisposable
-
- val result = for {
- tc <- t.autoClosed
- } yield {
- t.closeCount shouldBe 0
- "hello"
- }
-
- result shouldBe "hello"
- t.closeCount shouldBe 1
- }
-
- it should "flatMap correctly" in {
- val t = new TestDisposable
-
- val result = (for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } yield {
- t.closeCount shouldBe 0
- v
- }).toSeq
-
- result should contain inOrder ("one", "two", "three")
- t.closeCount shouldBe 1
- }
-
- it should "handle exceptions correctly" in {
- val t = new TestDisposable
-
- a [TestEvalException] should be thrownBy {
- for {
- tc <- t.autoClosed
- } {
- t.closeCount shouldBe 0
- throw new TestEvalException
- }
- }
- t.closeCount shouldBe 1
-
- var lastSeen = ""
- a [TestEvalException] should be thrownBy {
- for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } {
- t.closeCount shouldBe 1
- lastSeen = v
- if (v == "two") throw new TestEvalException
- }
- }
- t.closeCount shouldBe 2
- lastSeen shouldBe "two"
- }
-
- it should "handle disposal exceptions correctly" in {
- // For some mysterious reason, thrownBy doesn't work here, in this specific test case. No clue why, despite spending an entire day trying to figure it out,
- // including repeatedly stepping through the innards of ScalaTest in a debugger. Catching the exception manually does work, though.
- val messageNoException = "no exception was thrown"
- def messageWrongException(e: Throwable): String =
- s"an exception was thrown, but not a TestDisposeException; instead it's a ${e.getClass.getName}"
-
- val t = new TestDisposableThatThrows
-
- val e1 =
- try {
- for {
- tc <- t.autoClosed
- } {
- t.closeCount shouldBe 0
- }
- None
- }
- catch {
- case e: TestDisposeException =>
- Some(e)
- }
- assert(e1.nonEmpty, messageNoException)
- e1 foreach { e1c => assert(e1c.isInstanceOf[TestDisposeException], messageWrongException(e1c)) }
- t.closeCount shouldBe 1
-
- var lastSeen = ""
- val e2 =
- try {
- val i = for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } yield {
- t.closeCount shouldBe 1
- lastSeen = v
- v
- }
- while (i.hasNext) i.next()
- None
- }
- catch {
- case e: TestDisposeException =>
- Some(e)
- }
- lastSeen shouldBe "three"
- assert(e2.nonEmpty, messageNoException)
- e2 foreach { e2c => assert(e2c.isInstanceOf[TestDisposeException], messageWrongException(e2c)) }
- t.closeCount shouldBe 2
- }
-
- it should "handle non-local returns correctly" in {
- val t = new TestDisposable
-
- def doTheThing(): String = {
- throw the [ControlThrowable] thrownBy {
- for {
- tc <- t.autoClosed
- } {
- t.closeCount shouldBe 0
- return "hello"
- }
- }
- }
- doTheThing() shouldBe "hello"
- t.closeCount shouldBe 1
-
- def doTheThings(): String = {
- throw the [ControlThrowable] thrownBy {
- for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } {
- t.closeCount shouldBe 1
- if (v == "two") return v
- }
- }
- }
- doTheThings() shouldBe "two"
- t.closeCount shouldBe 2
- }
-
- it should "handle multiple exceptions correctly" in {
- val t = new TestDisposableThatThrows
-
- the [TestEvalException] thrownBy {
- for {
- tc <- t.autoClosed
- } {
- t.closeCount shouldBe 0
- throw new TestEvalException
- }
- } should haveSuppressed [TestDisposeException]
- t.closeCount shouldBe 1
-
- var lastSeen = ""
- the [TestEvalException] thrownBy {
- for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } {
- t.closeCount shouldBe 1
- lastSeen = v
- if (v == "two") throw new TestEvalException
- }
- } should haveSuppressed [TestDisposeException]
- lastSeen shouldBe "two"
- t.closeCount shouldBe 2
- }
-
- it should "give fatal exceptions precedence" in {
- val t = new TestDisposableThatThrowsFatal
-
- the [TestDisposeFatalException] thrownBy {
- for {
- tc <- t.autoClosed
- } {
- t.closeCount shouldBe 0
- throw new TestEvalException
- }
- } should haveSuppressed [TestEvalException]
- t.closeCount shouldBe 1
-
- var lastSeen = ""
- the [TestDisposeFatalException] thrownBy {
- for {
- tc <- t.autoClosed
- v <- Iterator("one", "two", "three")
- } {
- t.closeCount shouldBe 1
- lastSeen = v
- if (v == "two") throw new TestEvalException
- }
- } should haveSuppressed [TestEvalException]
- t.closeCount shouldBe 2
- lastSeen shouldBe "two"
- }
-}
diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala
deleted file mode 100644
index 54f0a117..00000000
--- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-package better.files
-
-import Dsl._
-
-import scala.language.existentials
-
-class ScannerSpec extends CommonSpec {
- def t1 = File.newTemporaryFile()
-
-
- "splitter" should "split" in {
- val csvSplitter = StringSplitter.on(',')
- def split(s: String) = csvSplitter.split(s).toList
-
- assert(split(",") === List("", ""))
- assert(split("") === List(""))
- assert(split("Hello World") === List("Hello World"))
- assert(split("Hello,World") === List("Hello", "World"))
-
- assert(split(",,") === List("", "", ""))
- assert(split(",Hello,World,") === List("", "Hello", "World", ""))
- assert(split(",Hello,World") === List("", "Hello", "World"))
- assert(split("Hello,World,") === List("Hello", "World", ""))
- }
-
- "scanner" should "parse files" in {
- val data = t1 << s"""
- | Hello World
- | 1 2 3
- | Ok 23 football
- """.stripMargin
- data.scanner() foreach {scanner =>
- assert(scanner.lineNumber() == 0)
- assert(scanner.next[String] == "Hello")
- assert(scanner.lineNumber() == 2)
- assert(scanner.next[String] == "World")
- assert(scanner.next[Int] == 1)
- assert(scanner.next[Int] == 2)
- assert(scanner.lineNumber() == 3)
- assert(scanner.next[Int] == 3)
- assert(scanner.nextLine() == " Ok 23 football")
- assert(!scanner.hasNext)
- a[NoSuchElementException] should be thrownBy scanner.next()
- assert(!scanner.hasNext)
- }
- data.tokens().toSeq shouldEqual data.newScanner().toSeq
- }
-
- it should "parse longs/booleans" in {
- val data = for {
- scanner <- Scanner("10 false").autoClosed
- } yield scanner.next[(Long, Boolean)]
- data shouldBe ((10L, false))
- }
-
- it should "parse custom parsers" in {
- val file = t1 < """
- |Garfield
- |Woofer
- """.stripMargin
-
- sealed trait Animal
- case class Dog(name: String) extends Animal
- case class Cat(name: String) extends Animal
-
- implicit val animalParser: Scannable[Animal] = Scannable {scanner =>
- val name = scanner.next[String]
- if (name == "Garfield") Cat(name) else Dog(name)
- }
- file.scanner() foreach {scanner =>
- Seq.fill(2)(scanner.next[Animal]) should contain theSameElementsInOrderAs Seq(Cat("Garfield"), Dog("Woofer"))
- }
- }
-
- it should "parse empty tokens" in {
- val scanner = Scanner("hello||world", StringSplitter.on('|'))
- List.fill(3)(scanner.next[Option[String]]) shouldEqual List(Some("hello"), None, Some("world"))
- }
-}