summaryrefslogtreecommitdiff
path: root/scalaplugin/src/test
diff options
context:
space:
mode:
authorLi Haoyi <haoyi.sg@gmail.com>2017-11-29 19:26:27 -0800
committerLi Haoyi <haoyi.sg@gmail.com>2017-11-29 19:26:27 -0800
commita561cb89b202e2cfb1ab548960e25c1743b318d2 (patch)
treebe7bcdc0bd9402a186b08614ebaece88a633fe49 /scalaplugin/src/test
parent5efc40317f4650e2f0f1357af428d5833ccc4613 (diff)
downloadmill-a561cb89b202e2cfb1ab548960e25c1743b318d2.tar.gz
mill-a561cb89b202e2cfb1ab548960e25c1743b318d2.tar.bz2
mill-a561cb89b202e2cfb1ab548960e25c1743b318d2.zip
first pass at building better-files using Mill
Diffstat (limited to 'scalaplugin/src/test')
-rw-r--r--scalaplugin/src/test/resource/better-files/.gitignore206
-rw-r--r--scalaplugin/src/test/resource/better-files/CHANGES.md73
-rw-r--r--scalaplugin/src/test/resource/better-files/LICENSE21
-rw-r--r--scalaplugin/src/test/resource/better-files/README.md637
-rw-r--r--scalaplugin/src/test/resource/better-files/akka/README.md394
-rw-r--r--scalaplugin/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala67
-rw-r--r--scalaplugin/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala101
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/README.md24
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java78
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala158
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala10
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala39
-rw-r--r--scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala66
-rw-r--r--scalaplugin/src/test/resource/better-files/build.sbt163
-rw-r--r--scalaplugin/src/test/resource/better-files/circle.yml21
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala155
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/File.scala1257
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala72
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala324
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala91
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala83
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala183
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala23
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala100
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala74
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/package.scala66
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala15
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala61
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala549
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala360
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala250
-rw-r--r--scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala79
-rw-r--r--scalaplugin/src/test/resource/better-files/project/Dependencies.scala15
-rw-r--r--scalaplugin/src/test/resource/better-files/project/build.properties2
-rw-r--r--scalaplugin/src/test/resource/better-files/project/plugins.sbt9
-rw-r--r--scalaplugin/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala24
-rw-r--r--scalaplugin/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala32
-rw-r--r--scalaplugin/src/test/resource/better-files/site/index.html16
-rw-r--r--scalaplugin/src/test/resource/better-files/site/tech_talk_preview.pngbin0 -> 60942 bytes
-rw-r--r--scalaplugin/src/test/resource/better-files/version.sbt1
-rw-r--r--scalaplugin/src/test/scala/mill/scalaplugin/BetterFilesTests.scala115
41 files changed, 6014 insertions, 0 deletions
diff --git a/scalaplugin/src/test/resource/better-files/.gitignore b/scalaplugin/src/test/resource/better-files/.gitignore
new file mode 100644
index 00000000..6f460f93
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/.gitignore
@@ -0,0 +1,206 @@
+# Created by https://www.gitignore.io/api/linux,osx,windows,intellij,eclipse,sbt,scala
+
+### Eclipse ###
+
+.metadata
+bin/
+tmp/
+*.tmp
+*.bak
+*.swp
+*~.nib
+local.properties
+.settings/
+.loadpath
+.recommenders
+
+# External tool builders
+.externalToolBuilders/
+
+# Locally stored "Eclipse launch configurations"
+*.launch
+
+# PyDev specific (Python IDE for Eclipse)
+*.pydevproject
+
+# CDT-specific (C/C++ Development Tooling)
+.cproject
+
+# Java annotation processor (APT)
+.factorypath
+
+# PDT-specific (PHP Development Tools)
+.buildpath
+
+# sbteclipse plugin
+.target
+
+# Tern plugin
+.tern-project
+
+# TeXlipse plugin
+.texlipse
+
+# STS (Spring Tool Suite)
+.springBeans
+
+# Code Recommenders
+.recommenders/
+
+# Scala IDE specific (Scala & Java development for Eclipse)
+.cache-main
+.scala_dependencies
+.worksheet
+
+### Eclipse Patch ###
+# Eclipse Core
+.project
+
+# JDT-specific (Eclipse Java Development Tools)
+.classpath
+
+### Intellij ###
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+.idea/
+
+# User-specific stuff:
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/dictionaries
+
+# Sensitive or high-churn files:
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.xml
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+
+# Gradle:
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# CMake
+cmake-build-debug/
+
+# Mongo Explorer plugin:
+.idea/**/mongoSettings.xml
+
+## File-based project format:
+*.iws
+
+## Plugin-specific files:
+
+# IntelliJ
+/out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+### Intellij Patch ###
+# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
+
+# *.iml
+# modules.xml
+# .idea/misc.xml
+# *.ipr
+
+# Sonarlint plugin
+.idea/sonarlint
+
+### Linux ###
+*~
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
+
+### OSX ###
+*.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### SBT ###
+# Simple Build Tool
+# http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control
+
+dist/*
+target/
+lib_managed/
+src_managed/
+project/boot/
+project/plugins/project/
+.history
+.cache
+.lib/
+
+### Scala ###
+*.class
+*.log
+
+### Windows ###
+# Windows thumbnail cache files
+Thumbs.db
+ehthumbs.db
+ehthumbs_vista.db
+
+# Folder config file
+Desktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# End of https://www.gitignore.io/api/linux,osx,windows,intellij,eclipse,sbt,scala
diff --git a/scalaplugin/src/test/resource/better-files/CHANGES.md b/scalaplugin/src/test/resource/better-files/CHANGES.md
new file mode 100644
index 00000000..4103ef04
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/CHANGES.md
@@ -0,0 +1,73 @@
+better-files follows the following `MAJOR.MINOR.PATCH` release conventions:
+- **Changes in `PATCH` version**:
+ - Minor functionality changes (usually bug fixes)
+ - No breaking public API changes
+ - New APIs might be added
+- **Change in `MINOR` version**:
+ - In addition to `PATCH` changes
+ - Minor API shape changes e.g. renaming, deprecations
+ - Trivial to modify code to address compilation issues
+- **Change in `MAJOR` version**:
+ - In addition to `MINOR` changes
+ - Significant structural and API changes
+
+-----------
+
+## v4.0.0
+* [Issue #129](https://github.com/pathikrit/better-files/issues/129): JSR-203 and JimFS compatibility
+* [Issue #88](https://github.com/pathikrit/better-files/issues/88): Strongly typed relative and absolute path APIs
+* [Issue #122](https://github.com/pathikrit/better-files/issues/122): Scala Platform Release - Support for Scala 2.13 and 2.11
+* Move Scanner to own module that depends on cats/shapeless
+* Remove implicit options from all APIs
+
+## v3.2.1
+* [Issue #193](https://github.com/pathikrit/better-files/issues/193): Handle fast changing directory watching on Windows
+* [Issue #195](https://github.com/pathikrit/better-files/issues/195): Do not swallow `FileAlreadyExistsException` when creating directory or file
+* [Add](https://github.com/pathikrit/better-files/commit/00f27867ebd0cddec1ace7835dcc2375869fb3ae) method to check verified file existence (or non-existence)
+* [Issue #198](https://github.com/pathikrit/better-files/issues/198): `InputStreamOps#asString` doesn't close the stream on exception
+* [PR #199](https://github.com/pathikrit/better-files/pull/199): Utils for Object I/O
+
+## v3.2.0
+* [Rename](https://github.com/pathikrit/better-files/commit/ec34a6f843fec941b51bdddafc2e07e5bc0e1cbb) PosixFilePermissions.OTHERS* APIs
+* [Issue #186](https://github.com/pathikrit/better-files/issues/186): Splitter based Scanner
+* [Issue #173](https://github.com/pathikrit/better-files/issues/173): Better ARM handling of fatal errors
+* [Issue #182](https://github.com/pathikrit/better-files/issues/182): Move and Copy *into* directory utils
+* [Issue #189](https://github.com/pathikrit/better-files/issues/189): Util to read String from an InputStream
+* [Issue #187](https://github.com/pathikrit/better-files/issues/187): Readers for `java.time.*` and `java.sql.*`
+* [Restore File.usingTemp](https://github.com/pathikrit/better-files/commit/35184a642245db3d1e41fc02c7bfbec0b19a43bb) first introduced in [7c60ca](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca)
+* [Fix](https://github.com/pathikrit/better-files/pull/184) bug in appendBytes
+
+## v3.1.0
+* [Issue #140](https://github.com/pathikrit/better-files/issues/140): Batch up events for file monitoring
+* [Issue #136](https://github.com/pathikrit/better-files/issues/136): Use execution contexts for file monitoring
+* [Issue #152](https://github.com/pathikrit/better-files/issues/152): Streamed unzipping
+* [Issue #150](https://github.com/pathikrit/better-files/issues/150): `ManagedResource[File]` for temp files
+* [Issue #126](https://github.com/pathikrit/better-files/pull/159): New Typeclassed approach to ARM
+* [Issue #160](https://github.com/pathikrit/better-files/issues/160): Ability to convert Reader/Writer to Input/Output streams
+* [Issue #77](https://github.com/pathikrit/better-files/issues/77): Better UNIX-y behaviour for `cp` and `mv` DSL utils
+* [Issue #169](https://github.com/pathikrit/better-files/issues/169): Support for symbols in file DSL
+* [Issue #171](https://github.com/pathikrit/better-files/issues/171): Handle `createDirectories()` on symlinks to existing directories
+
+## v3.0.0
+* [Issue #9](https://github.com/pathikrit/better-files/issues/9): File resource utils
+* [Issue #114](https://github.com/pathikrit/better-files/issues/114): Glob with automatic path
+* [Issue #107](https://github.com/pathikrit/better-files/issues/107): Handle Byte-order markers
+* [PR #113](https://github.com/pathikrit/better-files/pull/113): File anchor util
+* [Issue #105](https://github.com/pathikrit/better-files/issues/105): Remove dependency on scala.io
+* [File.usingTemp](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca)
+* [Optional symbolic operations](https://github.com/pathikrit/better-files/issues/102)
+* [PR #100](https://github.com/pathikrit/better-files/pull/100): Fix issue in unzip of parents
+* [PR #101](https://github.com/pathikrit/better-files/pull/101): Removed File.Type
+* [Issue #96](https://github.com/pathikrit/better-files/issues/96): Teeing outputstreams
+* [File.testPermission](https://github.com/pathikrit/better-files/commit/7b175c582643790e4d2fd21552e47cc9c615dfbb)
+* [File.nonEmpty](https://github.com/pathikrit/better-files/commit/18c9cd51b7b2e503ff4944050ac5119470869e6e)
+* [Update metadata API](https://github.com/pathikrit/better-files/commit/c3d65951d80f09b813e158a9e3a1785c622353b3)
+* [Issue #80](https://github.com/pathikrit/better-files/issues/80): Unzip filters
+* [PR #127](https://github.com/pathikrit/better-files/pull/127): Java serialization utils
+
+## v2.17.1
+* [PR #99](https://github.com/pathikrit/better-files/pull/99): Release for Scala 2.12
+
+## v2.17.0
+* [PR #78](https://github.com/pathikrit/better-files/pull/78): Change `write(Array[Byte])` to `writeByteArray()`. Same for `append`
+* [Issue #76](https://github.com/pathikrit/better-files/issues/76): Move `better.files.Read` typeclass to `better.files.Scanner.Read`
diff --git a/scalaplugin/src/test/resource/better-files/LICENSE b/scalaplugin/src/test/resource/better-files/LICENSE
new file mode 100644
index 00000000..a63964f8
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Pathikrit Bhowmick
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/scalaplugin/src/test/resource/better-files/README.md b/scalaplugin/src/test/resource/better-files/README.md
new file mode 100644
index 00000000..9877c3bc
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/README.md
@@ -0,0 +1,637 @@
+# better-files [![License][licenseImg]][licenseLink] [![CircleCI][circleCiImg]][circleCiLink] [![Codacy][codacyImg]][codacyLink]
+
+`better-files` is a [dependency-free](project/Dependencies.scala) *pragmatic* [thin Scala wrapper](core/src/main/scala/better/files/File.scala) around [Java NIO](https://docs.oracle.com/javase/tutorial/essential/io/fileio.html).
+
+## Talks [![Gitter][gitterImg]][gitterLink]
+ - [ScalaDays NYC 2016][scalaDaysNyc2016Event] ([slides][scalaDaysNyc2016Slides])
+
+ <a href="http://www.youtube.com/watch?feature=player_embedded&v=uaYKkpqs6CE" target="_blank">
+ <img src="site/tech_talk_preview.png" alt="ScalaDays NYC 2016: Introduction to better-files" width="480" height="360" border="10" />
+ </a>
+
+ - [ScalaDays Berlin 2016][scalaDaysBerlin2016Event] ([video][scalaDaysBerlin2016Video], [slides][scalaDaysBerlin2016Slides])
+ - [Scalæ by the Bay 2016][scalæByTheBay2016Event] ([video][scalæByTheBay2016Video], [slides][scalæByTheBay2016Slides])
+
+## Tutorial [![Scaladoc][scaladocImg]][scaladocLink]
+ 0. [Instantiation](#instantiation)
+ 0. [Simple I/O](#file-readwrite)
+ 0. [Streams](#streams)
+ 0. [Encodings](#encodings)
+ 0. [Java serialization utils](#java-serialization-utils)
+ 0. [Java compatibility](#java-interoperability)
+ 0. [Globbing](#globbing)
+ 0. [File system operations](#file-system-operations)
+ 0. [Temporary files](#temporary-files)
+ 0. [UNIX DSL](#unix-dsl)
+ 0. [File attributes](#file-attributes)
+ 0. [File comparison](#file-comparison)
+ 0. [Zip/Unzip](#zip-apis)
+ 0. [Automatic Resource Management](#lightweight-arm)
+ 0. [Scanner](#scanner)
+ 0. [File Monitoring](#file-monitoring)
+ 0. [Reactive File Watcher](#akka-file-watcher)
+
+## sbt [![UpdateImpact][updateImpactImg]][updateImpactLink]
+In your `build.sbt`, add this:
+```scala
+libraryDependencies += "com.github.pathikrit" %% "better-files" % version
+```
+To use the [Akka based file monitor](akka), also add this:
+```scala
+libraryDependencies ++= Seq(
+ "com.github.pathikrit" %% "better-files-akka" % version,
+ "com.typesafe.akka" %% "akka-actor" % "2.5.6"
+)
+```
+Latest `version`: [![Maven][mavenImg]][mavenLink] [![Scaladex][scaladexImg]][scaladexLink]
+
+Although this library is currently only actively developed for Scala 2.12 and 2.13,
+you can find reasonably recent versions of this library for Scala 2.10 and 2.11 [here](https://oss.sonatype.org/#nexus-search;quick~better-files).
+
+## Tests [![codecov][codecovImg]][codecovLink]
+* [FileSpec](core/src/test/scala/better/files/FileSpec.scala)
+* [FileWatcherSpec](akka/src/test/scala/better/files/FileWatcherSpec.scala)
+* [Benchmarks](benchmarks/)
+
+[licenseImg]: https://img.shields.io/github/license/pathikrit/better-files.svg
+[licenseImg2]: https://img.shields.io/:license-mit-blue.svg
+[licenseLink]: LICENSE
+
+[circleCiImg]: https://img.shields.io/circleci/project/pathikrit/better-files/master.svg
+[circleCiImg2]: https://circleci.com/gh/pathikrit/better-files/tree/master.svg
+[circleCiLink]: https://circleci.com/gh/pathikrit/better-files
+
+[codecovImg]: https://img.shields.io/codecov/c/github/pathikrit/better-files/master.svg
+[codecovImg2]: https://codecov.io/github/pathikrit/better-files/coverage.svg?branch=master
+[codecovLink]: http://codecov.io/github/pathikrit/better-files?branch=master
+
+[codacyImg]: https://img.shields.io/codacy/0e2aeb7949bc49e6802afcc43a7a1aa1.svg
+[codacyImg2]: https://api.codacy.com/project/badge/grade/0e2aeb7949bc49e6802afcc43a7a1aa1
+[codacyLink]: https://www.codacy.com/app/pathikrit/better-files/dashboard
+
+[mavenImg]: https://img.shields.io/maven-central/v/com.github.pathikrit/better-files_2.12.svg
+[mavenImg2]: https://maven-badges.herokuapp.com/maven-central/com.github.pathikrit/better-files_2.12/badge.svg
+[mavenLink]: http://search.maven.org/#search%7Cga%7C1%7Cbetter-files
+
+[gitterImg]: https://img.shields.io/gitter/room/pathikrit/better-files.svg
+[gitterImg2]: https://badges.gitter.im/Join%20Chat.svg
+[gitterLink]: https://gitter.im/pathikrit/better-files
+
+[scaladexImg]: https://index.scala-lang.org/pathikrit/better-files/better-files/latest.svg
+[scaladexLink]: https://index.scala-lang.org/pathikrit/better-files
+
+[scaladocImg]: https://www.javadoc.io/badge/com.github.pathikrit/better-files_2.12.svg?color=blue&label=scaladocs
+<!--[scaladocLink]: https://www.javadoc.io/page/com.github.pathikrit/better-files_2.12/latest/better/files/File.html-->
+[scaladocLink]: http://pathikrit.github.io/better-files/latest/api/better/files/File.html
+
+[updateImpactImg]: https://app.updateimpact.com/badge/704376701047672832/root.svg?config=compile
+[updateImpactLink]: https://app.updateimpact.com/latest/704376701047672832/root
+
+[scalaDaysNyc2016Event]: http://event.scaladays.org/scaladays-nyc-2016/#!#schedulePopupExtras-7664
+[scalaDaysNyc2016Video]: https://www.youtube.com/watch?v=uaYKkpqs6CE
+<!--[scalaDaysNyc2016VideoPreview]: http://img.youtube.com/vi/uaYKkpqs6CE/0.jpg-->
+[scalaDaysNyc2016VideoPreview]: site/tech_talk_preview.png
+[scalaDaysNyc2016Slides]: https://slides.com/pathikrit/better-files/
+
+[scalaDaysBerlin2016Event]: http://event.scaladays.org/scaladays-berlin-2016#!#schedulePopupExtras-7668
+[scalaDaysBerlin2016Video]: https://www.youtube.com/watch?v=m2YsD5cgnzI
+[scalaDaysBerlin2016Slides]: https://slides.com/pathikrit/better-files/
+
+[scalæByTheBay2016Event]: http://sched.co/7iUn
+[scalæByTheBay2016Video]: https://www.youtube.com/watch?v=bLiCE6NGjrk&t=251s
+[scalæByTheBay2016Slides]: https://slides.com/pathikrit/better-files/
+
+-------
+### Instantiation
+The following are all equivalent:
+```scala
+import better.files._
+import java.io.{File => JFile}
+
+val f = File("/User/johndoe/Documents") // using constructor
+val f1: File = file"/User/johndoe/Documents" // using string interpolator
+val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file
+val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala
+val f4: File = root/"User"/"johndoe"/"Documents" // using root helper to start from root
+val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"`
+val f6: File = "/User"/"johndoe"/"Documents" // using file separator DSL
+val f7: File = "/User"/'johndoe/'Documents // same as above but using Symbols instead of Strings
+val f8: File = home/"Documents"/"presentations"/`..` // use `..` to navigate up to parent
+```
+
+**Note**: Rename the import if you think the usage of the class `File` may confuse your teammates:
+```scala
+import better.files.{File => ScalaFile, _}
+import java.io.File
+```
+I personally prefer renaming the Java crap instead:
+```scala
+import better.files._
+import java.io.{File => JFile}
+```
+
+### File Read/Write
+Dead simple I/O:
+```scala
+val file = root/"tmp"/"test.txt"
+file.overwrite("hello")
+file.appendLine().append("world")
+assert(file.contentAsString == "hello\nworld")
+```
+If you are someone who likes symbols, then the above code can also be written as:
+```scala
+import better.files.Dsl.SymbolicOperations
+
+file < "hello" // same as file.overwrite("hello")
+file << "world" // same as file.appendLines("world")
+assert(file! == "hello\nworld")
+```
+Or even, right-associatively:
+```scala
+import better.files.Dsl.SymbolicOperations
+
+"hello" `>:` file
+"world" >>: file
+val bytes: Array[Byte] = file.loadBytes
+```
+[Fluent Interface](https://en.wikipedia.org/wiki/Fluent_interface):
+```scala
+ (root/"tmp"/"diary.txt")
+ .createIfNotExists()
+ .appendLine()
+ .appendLines("My name is", "Inigo Montoya")
+ .moveToDirectory(home/"Documents")
+ .renameTo("princess_diary.txt")
+ .changeExtensionTo(".md")
+ .lines
+```
+
+### Streams
+Various ways to slurp a file without loading the contents into memory:
+ ```scala
+val bytes : Iterator[Byte] = file.bytes
+val chars : Iterator[Char] = file.chars
+val lines : Iterator[String] = file.lineIterator //file.lines loads all lines in memory
+```
+Note: The above APIs can be traversed at most once e.g. `file.bytes` is an `Iterator[Byte]` which only allows `TraversableOnce`.
+To traverse it multiple times without creating a new iterator instance, convert it into some other collection e.g. `file.bytes.toStream`
+
+You can write an `Iterator[Byte]` or an `Iterator[String]` back to a file:
+```scala
+file.writeBytes(bytes)
+file.printLines(lines)
+```
+
+### Encodings
+You can supply your own charset too for anything that does a read/write (it assumes `java.nio.charset.Charset.defaultCharset()` if you don't provide one):
+```scala
+val content: String = file.contentAsString // default charset
+
+// custom charset:
+import java.nio.charset.Charset
+file.contentAsString(charset = Charset.forName("US-ASCII"))
+
+//or simply using implicit conversion from Strings
+file.write("hello world")(charset = "US-ASCII")
+ ```
+
+Note: By default, `better-files` [correctly handles BOMs while decoding](core/src/main/scala/better/files/UnicodeCharset.scala).
+If you wish to have the [incorrect JDK behaviour](http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058),
+you would need to supply Java's UTF-8 charset e.g.:
+```scala
+file.contentAsString(charset = Charset.forName("UTF-8")) // Default incorrect JDK behaviour for UTF-8 (see: JDK-4508058)
+```
+
+If you also wish to write BOMs while encoding, you would need to supply it as:
+```scala
+file.write("hello world")(charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true))
+```
+
+### Java serialization utils
+Some common utils to serialize/deserialize using Java's serialization util
+```scala
+case class Person(name: String, age: Int)
+val person = new Person("Chris", 24)
+
+// Write
+file.newOutputStream.buffered.asObjectOutputStream.serialize(person).flush()
+
+// Read
+val person2 = file.newInputStream.buffered.asObjectInputStream.readObject().asInstanceOf[Person]
+assert(person == person2)
+```
+
+The above can be simply written as:
+```scala
+val person2: Person = file.writeSerialized(person).readDeserialized[Person]
+assert(person == person2)
+```
+
+### Java interoperability
+You can always access the Java I/O classes:
+```scala
+val file: File = tmp / "hello.txt"
+val javaFile : java.io.File = file.toJava
+val uri : java.net.URI = file.uri
+val url : java.net.URL = file.url
+val reader : java.io.BufferedReader = file.newBufferedReader
+val outputstream : java.io.OutputStream = file.newOutputStream
+val writer : java.io.BufferedWriter = file.newBufferedWriter
+val inputstream : java.io.InputStream = file.newInputStream
+val path : java.nio.file.Path = file.path
+val fs : java.nio.file.FileSystem = file.fileSystem
+val channel : java.nio.channel.FileChannel = file.newFileChannel
+val ram : java.io.RandomAccessFile = file.newRandomAccess
+val fr : java.io.FileReader = file.newFileReader
+val fw : java.io.FileWriter = file.newFileWriter(append = true)
+val printer : java.io.PrintWriter = file.newPrintWriter
+```
+The library also adds some useful [implicits](http://pathikrit.github.io/better-files/latest/api/better/files/Implicits.html) to above classes e.g.:
+```scala
+file1.reader > file2.writer // pipes a reader to a writer
+System.in > file2.out // pipes an inputstream to an outputstream
+src.pipeTo(sink) // if you don't like symbols
+
+val bytes : Iterator[Byte] = inputstream.bytes
+val bis : BufferedInputStream = inputstream.buffered
+val bos : BufferedOutputStream = outputstream.buffered
+val reader : InputStreamReader = inputstream.reader
+val writer : OutputStreamWriter = outputstream.writer
+val printer : PrintWriter = outputstream.printWriter
+val br : BufferedReader = reader.buffered
+val bw : BufferedWriter = writer.buffered
+val mm : MappedByteBuffer = fileChannel.toMappedByteBuffer
+val str : String = inputstream.asString //Read a string from an InputStream
+```
+`better-files` also supports [certain conversions that are not supported out of the box by the JDK](https://stackoverflow.com/questions/62241/how-to-convert-a-reader-to-inputstream-and-a-writer-to-outputstream)
+
+[`tee`](http://stackoverflow.com/questions/7987395/) multiple outputstreams:
+```scala
+val s3 = s1 tee s2
+s3.printWriter.println(s"Hello world") // gets written to both s1 and s2
+```
+
+### Globbing
+No need to port [this](http://docs.oracle.com/javase/tutorial/essential/io/find.html) to Scala:
+```scala
+val dir = "src"/"test"
+val matches: Iterator[File] = dir.glob("*.{java,scala}")
+// above code is equivalent to:
+dir.listRecursively.filter(f => f.extension == Some(".java") || f.extension == Some(".scala"))
+```
+
+You can even use more advanced regex syntax instead of [glob syntax](http://docs.oracle.com/javase/tutorial/essential/io/fileOps.html#glob):
+```scala
+val matches = dir.globRegex("^\\w*$".r) //equivalent to dir.glob("^\\w*$")(syntax = File.PathMatcherSyntax.regex)
+```
+
+By default, glob syntax in `better-files` is [different from](https://github.com/pathikrit/better-files/issues/114)
+the default JDK glob behaviour since it always includes path. To use the default behaviour:
+```scala
+dir.glob("**/*.txt", includePath = false) // JDK default
+//OR
+dir.glob("*.txt", includePath = true) // better-files default
+```
+You can also extend the `File.PathMatcherSyntax` to create your own matchers.
+
+For custom cases:
+```scala
+dir.collectChildren(_.isSymbolicLink) // collect all symlinks in a directory
+```
+For simpler cases, you can always use `dir.list` or `dir.walk(maxDepth: Int)`
+
+### File system operations
+Utilities to `ls`, `cp`, `rm`, `mv`, `ln`, `md5`, `touch`, `cat` etc:
+```scala
+file.touch()
+file.delete() // unlike the Java API, also works on directories as expected (deletes children recursively)
+file.clear() // If directory, deletes all children; if file clears contents
+file.renameTo(newName: String)
+file.moveTo(destination)
+file.moveToDirectory(destination)
+file.copyTo(destination) // unlike the default API, also works on directories (copies recursively)
+file.copyToDirectory(destination)
+file.linkTo(destination) // ln destination file
+file.symbolicLinkTo(destination) // ln -s destination file
+file.{checksum, md5, sha1, sha256, sha512, digest} // also works for directories
+file.setOwner(user: String) // chown user file
+file.setGroup(group: String) // chgrp group file
+Seq(file1, file2) `>:` file3 // same as cat file1 file2 > file3 (must import import better.files.Dsl.SymbolicOperations)
+Seq(file1, file2) >>: file3 // same as cat file1 file2 >> file3 (must import import better.files.Dsl.SymbolicOperations)
+file.isReadLocked; file.isWriteLocked; file.isLocked
+File.numberOfOpenFileDescriptors // number of open file descriptors
+```
+You can also load resources from your classpath using `File.resource` or `File.copyResource`.
+
+### Temporary files
+Utils to create temporary files:
+```scala
+File.newTemporaryDirectory()
+File.newTemporaryFile()
+```
+The above APIs allow optional specifications of `prefix`, `suffix` and `parentDir`.
+These files are [not deleted automatically on exit by the JVM](http://stackoverflow.com/questions/16691437/when-are-java-temporary-files-deleted) (you have to set `deleteOnExit` which adds to `shutdownHook`).
+
+A cleaner alternative is to use self-deleting file contexts which deletes the file immediately when done:
+```scala
+for {
+ tempFile <- File.temporaryFile()
+} doSomething(tempFile) // tempFile is auto deleted at the end of this block - even if an exception happens
+```
+
+OR equivalently:
+```scala
+File.usingTemporaryFile() {tempFile =>
+ //do something
+} // tempFile is auto deleted at the end of this block - even if an exception happens
+```
+
+You can make any files temporary (i.e. delete after use) by doing this:
+```scala
+val foo = File.home / "Downloads" / "foo.txt"
+
+for {
+ temp <- foo.toTemporary
+} doSomething(temp) // foo is deleted at the end of this block - even if an exception happens
+```
+
+### UNIX DSL
+All the above can also be expressed using [methods](http://pathikrit.github.io/better-files/latest/api/better/files/Dsl$.html) reminiscent of the command line:
+```scala
+import better.files._
+import better.files.Dsl._ // must import Dsl._ to bring in these utils
+
+pwd / cwd // current dir
+cp(file1, file2)
+mv(file1, file2)
+rm(file) /*or*/ del(file)
+ls(file) /*or*/ dir(file)
+ln(file1, file2) // hard link
+ln_s(file1, file2) // soft link
+cat(file1)
+cat(file1) >>: file
+touch(file)
+mkdir(file)
+mkdirs(file) // mkdir -p
+chown(owner, file)
+chgrp(owner, file)
+chmod_+(permission, files) // add permission
+chmod_-(permission, files) // remove permission
+md5(file); sha1(file); sha256(file); sha512(file)
+unzip(zipFile)(targetDir)
+zip(file*)(targetZipFile)
+```
+
+### File attributes
+Query various file attributes e.g.:
+```scala
+file.name // simpler than java.io.File#getName
+file.extension
+file.contentType
+file.lastModifiedTime // returns JSR-310 time
+file.owner
+file.group
+file.isDirectory; file.isSymbolicLink; file.isRegularFile
+file.isHidden
+file.hide(); file.unhide()
+file.isOwnerExecutable; file.isGroupReadable // etc. see file.permissions
+file.size // for a directory, computes the directory size
+file.posixAttributes; file.dosAttributes // see file.attributes
+file.isEmpty // true if file has no content (or no children if directory) or does not exist
+file.isParentOf; file.isChildOf; file.isSiblingOf; file.siblings
+file("dos:system") = true // set custom meta-data for file (similar to Files.setAttribute)
+```
+All the above APIs let you specify the [`LinkOption`](http://docs.oracle.com/javase/8/docs/api/java/nio/file/LinkOption.html) either directly:
+```scala
+file.isDirectory(LinkOption.NOFOLLOW_LINKS)
+```
+Or using the [`File.LinkOptions`](http://pathikrit.github.io/better-files/latest/api/better/files/File$$LinkOptions$.html) helper:
+```scala
+file.isDirectory(File.LinkOptions.noFollow)
+```
+
+`chmod`:
+```scala
+import java.nio.file.attribute.PosixFilePermission
+file.addPermission(PosixFilePermission.OWNER_EXECUTE) // chmod +X file
+file.removePermission(PosixFilePermission.OWNER_WRITE) // chmod -w file
+assert(file.permissionsAsString == "rw-r--r--")
+
+// The following are all equivalent:
+assert(file.permissions contains PosixFilePermission.OWNER_EXECUTE)
+assert(file.testPermission(PosixFilePermission.OWNER_EXECUTE))
+assert(file.isOwnerExecutable)
+```
+
+### File comparison
+Use `==` to check for path-based equality and `===` for content-based equality:
+```scala
+file1 == file2 // equivalent to `file1.isSamePathAs(file2)`
+file1 === file2 // equivalent to `file1.isSameContentAs(file2)` (works for regular-files and directories)
+file1 != file2 // equivalent to `!file1.isSamePathAs(file2)`
+file1 !== file2 // equivalent to `!file1.isSameContentAs(file2)`
+```
+There are also various [`Ordering[File]` instances](http://pathikrit.github.io/better-files/latest/api/better/files/File$$Order$.html) included, e.g.:
+```scala
+val files = myDir.list.toSeq
+files.sorted(File.Order.byName)
+files.max(File.Order.bySize)
+files.min(File.Order.byDepth)
+files.max(File.Order.byModificationTime)
+files.sorted(File.Order.byDirectoriesFirst)
+```
+
+### Zip APIs
+You don't have to lookup on StackOverflow "[How to zip/unzip in Java/Scala?](http://stackoverflow.com/questions/9324933/)":
+```scala
+// Unzipping:
+val zipFile: File = file"path/to/research.zip"
+val research: File = zipFile.unzipTo(destination = home/"Documents"/"research")
+
+// Zipping:
+val zipFile: File = directory.zipTo(destination = home/"Desktop"/"toEmail.zip")
+
+// Zipping in:
+val zipFile = File("countries.zip").zipIn(file"usa.txt", file"russia.txt")
+
+// Zipping/Unzipping to temporary files/directories:
+val someTempZipFile: File = directory.zip()
+val someTempDir: File = zipFile.unzip()
+assert(directory === someTempDir)
+
+// Gzip handling:
+File("countries.gz").newInputStream.gzipped.lines.take(10).foreach(println)
+```
+
+### Lightweight ARM
+Auto-close Java closeables:
+```scala
+for {
+ in <- file1.newInputStream.autoClosed
+ out <- file2.newOutputStream.autoClosed
+} in.pipeTo(out)
+// The input and output streams are auto-closed once out of scope
+```
+`better-files` provides convenient managed versions of all the Java closeables e.g. instead of writing:
+```scala
+for {
+ reader <- file.newBufferedReader.autoClosed
+} foo(reader)
+```
+You can write:
+```scala
+for {
+ reader <- file.bufferedReader // returns ManagedResource[BufferedReader]
+} foo(reader)
+
+// or simply:
+file.bufferedReader.foreach(foo)
+```
+
+You can also define your own custom disposable resources e.g.:
+```scala
+trait Shutdownable {
+ def shutdown(): Unit = ()
+}
+
+object Shutdownable {
+ implicit val disposable: Disposable[Shutdownable] = Disposable(_.shutdown())
+}
+
+val s: Shutdownable = ....
+
+for {
+ instance <- new ManagedResource(s)
+} doSomething(s) // s is disposed after this
+```
+
+### Scanner
+Although [`java.util.Scanner`](http://docs.oracle.com/javase/8/docs/api/java/util/Scanner.html) has a feature-rich API, it only allows parsing primitives.
+It is also [notoriously slow](https://www.cpe.ku.ac.th/~jim/java-io.html) since it uses regexes and does un-Scala things like returning nulls and throwing exceptions.
+
+`better-files` provides a [faster](benchmarks#benchmarks), richer, safer, more idiomatic and composable [Scala replacement](http://pathikrit.github.io/better-files/latest/api/better/files/Scanner.html)
+that [does not use regexes](core/src/main/scala/better/files/Scanner.scala), allows peeking, accessing line numbers, returns `Option`s whenever possible and lets the user mixin custom parsers:
+```scala
+val data = t1 << s"""
+ | Hello World
+ | 1 true 2 3
+""".stripMargin
+val scanner: Scanner = data.newScanner()
+assert(scanner.next[String] == "Hello")
+assert(scanner.lineNumber == 1)
+assert(scanner.next[String] == "World")
+assert(scanner.next[(Int, Boolean)] == (1, true))
+assert(scanner.tillEndOfLine() == " 2 3")
+assert(!scanner.hasNext)
+```
+If you are simply interested in tokens, you can use `file.tokens()`
+
+Writing your own custom scanners:
+```scala
+sealed trait Animal
+case class Dog(name: String) extends Animal
+case class Cat(name: String) extends Animal
+
+implicit val animalParser: Scannable[Animal] = Scannable {scanner =>
+ val name = scanner.next[String]
+ if (name == "Garfield") Cat(name) else Dog(name)
+}
+
+val scanner = file.newScanner()
+println(scanner.next[Animal])
+```
+
+The [shapeless-scanner](shapeless/src/main/scala/better/files/ShapelessScanner.scala) module lets you scan [`HList`s](https://github.com/milessabin/shapeless/blob/master/core/src/main/scala/shapeless/hlists.scala):
+```scala
+val in = Scanner("""
+ 12 Bob True
+ 13 Mary False
+ 26 Rick True
+""")
+
+import shapeless._
+
+type Row = Int :: String :: Boolean :: HNil
+
+val out = Seq.fill(3)(in.next[Row])
+assert(out == Seq(
+ 12 :: "Bob" :: true :: HNil,
+ 13 :: "Mary" :: false :: HNil,
+ 26 :: "Rick" :: true :: HNil
+))
+```
+
+[and case-classes](https://meta.plasm.us/posts/2015/11/08/type-classes-and-generic-derivation/):
+
+```scala
+case class Person(id: Int, name: String, isMale: Boolean)
+val out2 = Seq.fill(3)(in.next[Person])
+```
+
+Simple CSV reader:
+```scala
+val file = """
+ 23,foo
+ 42,bar
+"""
+val csvScanner = file.newScanner(StringSplitter.on(','))
+csvScanner.next[Int] //23
+csvScanner.next[String] //foo
+```
+
+### File Monitoring
+Vanilla Java watchers:
+```scala
+import java.nio.file.{StandardWatchEventKinds => EventType}
+val service: java.nio.file.WatchService = myDir.newWatchService
+myDir.register(service, events = Seq(EventType.ENTRY_CREATE, EventType.ENTRY_DELETE))
+```
+The above APIs are [cumbersome to use](https://docs.oracle.com/javase/tutorial/essential/io/notification.html#process) (they involve a lot of type-casting and null-checking),
+are based on a blocking [polling-based model](http://docs.oracle.com/javase/8/docs/api/java/nio/file/WatchKey.html),
+do not easily allow [recursive watching of directories](https://docs.oracle.com/javase/tutorial/displayCode.html?code=https://docs.oracle.com/javase/tutorial/essential/io/examples/WatchDir.java)
+nor do they easily allow [watching regular files](http://stackoverflow.com/questions/16251273/) without writing a lot of Java boilerplate.
+
+`better-files` abstracts all the above ugliness behind a [simple interface](core/src/main/scala/better/files/File.scala#1100):
+```scala
+val watcher = new FileMonitor(myDir, recursive = true) {
+ override def onCreate(file: File, count: Int) = println(s"$file got created")
+ override def onModify(file: File, count: Int) = println(s"$file got modified $count times")
+ override def onDelete(file: File, count: Int) = println(s"$file got deleted")
+}
+watcher.start()
+```
+Sometimes, instead of overwriting each of the 3 methods above, it is more convenient to override the dispatcher itself:
+```scala
+import java.nio.file.{Path, StandardWatchEventKinds => EventType, WatchEvent}
+
+val watcher = new FileMonitor(myDir, recursive = true) {
+ override def onEvent(eventType: WatchEvent.Kind[Path], file: File, count: Int) = eventType match {
+ case EventType.ENTRY_CREATE => println(s"$file got created")
+ case EventType.ENTRY_MODIFY => println(s"$file got modified $count")
+ case EventType.ENTRY_DELETE => println(s"$file got deleted")
+ }
+}
+```
+
+### Akka File Watcher
+`better-files` also provides a powerful yet concise [reactive file watcher](akka/src/main/scala/better/files/FileWatcher.scala)
+based on [Akka actors](http://doc.akka.io/docs/akka/snapshot/scala/actors.html) that supports dynamic dispatches:
+```scala
+import akka.actor.{ActorRef, ActorSystem}
+import better.files._, FileWatcher._
+
+implicit val system = ActorSystem("mySystem")
+
+val watcher: ActorRef = (home/"Downloads").newWatcher(recursive = true)
+
+// register partial function for an event
+watcher ! on(EventType.ENTRY_DELETE) {
+ case file if file.isDirectory => println(s"$file got deleted")
+}
+
+// watch for multiple events
+watcher ! when(events = EventType.ENTRY_CREATE, EventType.ENTRY_MODIFY) {
+ case (EventType.ENTRY_CREATE, file, count) => println(s"$file got created")
+ case (EventType.ENTRY_MODIFY, file, count) => println(s"$file got modified $count times")
+}
+```
diff --git a/scalaplugin/src/test/resource/better-files/akka/README.md b/scalaplugin/src/test/resource/better-files/akka/README.md
new file mode 100644
index 00000000..391cec2e
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/akka/README.md
@@ -0,0 +1,394 @@
+Reproduction of [this Java Advent article](http://www.javaadvent.com/2015/12/reactive-file-system-monitoring-using-akka-actors.html)
+
+-----
+
+In this article, we will discuss:
+
+0. File system monitoring using [Java NIO.2][nio2]
+1. Common pitfalls of the default Java library
+2. Design a simple thread-based file system monitor
+3. Use the above to design a reactive file system monitor using the [actor][akka] [model][actorModel]
+
+**Note**: Although all the code samples here are in Scala, it can be rewritten in simple Java too. To quickly familiarize yourself with Scala syntax, [here is a very short and nice Scala cheatsheet][cheatsheet]. For a more comprehensive guide to Scala for Java programmers, [consult this][cheatsheet2] (not needed to follow this article).
+
+For the absolute shortest cheatsheet, the following Java code:
+
+```java
+public void foo(int x, int y) {
+ int z = x + y;
+ if (z == 1) {
+ System.out.println(x);
+ } else {
+ System.out.println(y);
+ }
+}
+```
+
+is equivalent to the following Scala code:
+
+```scala
+def foo(x: Int, y: Int): Unit = {
+ val z: Int = x + y
+ z match {
+ case 1 => println(x)
+ case _ => println(y)
+ }
+}
+```
+
+
+All the code presented here is available under MIT license as part of the [better-files][better-files-watcher] library on [GitHub][better-files].
+
+-----------
+
+Let's say you are tasked to build a cross-platform desktop file-search engine. You quickly realize that after the initial indexing of all the files, you need to also quickly reindex any new files (or directories) that got created or updated. A naive way would be to simply rescan the entire file system every few minutes; but that would be incredibly inefficient since most operating systems expose file system notification APIs that allow the application programmer to register callbacks for changes e.g. [inotify][ionotify-wiki] in Linux, [FSEvents][fsevents-wiki] in Mac and [FindFirstChangeNotification][FindFirstChangeNotification] in Windows.
+
+But now you are stuck dealing with OS-specific APIs! Thankfully, beginning Java SE 7, we have a platform independent abstraction for watching file system changes via the [WatchService API][javadoc-watchservice]. The WatchService API was developed as part of [Java NIO.2][nio2-wiki], under [JSR-51][jsr-51] and here is a "hello world" example of using it to watch a given [Path][javadoc-path]:
+
+```scala
+import java.nio.file._
+import java.nio.file.StandardWatchEventKinds._
+import scala.collection.JavaConversions._
+
+def watch(directory: Path): Unit = {
+ // First create the service
+ val service: WatchService = directory.getFileSystem.newWatchService()
+
+ // Register the service to the path and also specify which events we want to be notified about
+ directory.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY)
+
+ while (true) {
+ val key: WatchKey = service.take() // Wait for this key to be signalled
+ for {event <- key.pollEvents()} {
+ // event.context() is the path to the file that got changed
+ event.kind() match {
+ case ENTRY_CREATE => println(s"${event.context()} got created")
+ case ENTRY_MODIFY => println(s"${event.context()} got modified")
+ case ENTRY_DELETE => println(s"${event.context()} got deleted")
+ case _ =>
+ // This can happen when OS discards or loses an event.
+ // See: http://docs.oracle.com/javase/8/docs/api/java/nio/file/StandardWatchEventKinds.html#OVERFLOW
+ println(s"Unknown event $event happened at ${event.context()}")
+ }
+ }
+ key.reset() // Do not forget to do this!! See: http://stackoverflow.com/questions/20180547/
+ }
+}
+```
+
+Although the above is a good first attempt, it lacks in several aspects:
+
+0. **Bad Design**: The above code looks unnatural and you probably had to [look it up on StackOverflow][so-down] to get it right. Can we do better?
+1. **Bad Design**: The code does not do a very good job of handling errors. What happens when we encounter a file we could not open?
+2. **Gotcha**: The Java API only allows us to watch the directory for changes to its direct children; it [does not recursively watch a directory][so-recursive-watching] for you.
+3. **Gotcha**: The Java API [does not allow us to watch a single file][so-only-watch-dirs], only a directory.
+4. **Gotcha**: Even if we resolve the aforementioned issues, the Java API [does not automatically start watching a new child file][so-autowatch] or directory created under the root.
+5. **Bad Design**: The code as implemented above exposes a blocking/polling, thread-based model. Can we use a better concurrency abstraction?
+
+-----------
+
+
+Let's start with each of the above concerns.
+
+* **A better interface**: Here is what *my ideal* interface would look like:
+
+```scala
+abstract class FileMonitor(root: Path) {
+ def start(): Unit
+ def onCreate(path: Path): Unit
+ def onModify(path: Path): Unit
+ def onDelete(path: Path): Unit
+ def stop(): Unit
+}
+```
+
+That way, I can simply write the example code as:
+
+```scala
+val watcher = new FileMonitor(myFile) {
+ override def onCreate(path: Path) = println(s"$path got created")
+ override def onModify(path: Path) = println(s"$path got modified")
+ override def onDelete(path: Path) = println(s"$path got deleted")
+}
+watcher.start()
+```
+
+Ok, let's try to adapt the first example using a Java `Thread` so that we can expose "my ideal interface":
+
+```scala
+trait FileMonitor { // My ideal interface
+ val root: Path // starting file
+ def start(): Unit // start the monitor
+ def onCreate(path: Path) = {} // on-create callback
+ def onModify(path: Path) = {} // on-modify callback
+ def onDelete(path: Path) = {} // on-delete callback
+ def onUnknownEvent(event: WatchEvent[_]) = {} // handle lost/discarded events
+ def onException(e: Throwable) = {} // handle errors e.g. a read error
+ def stop(): Unit // stop the monitor
+}
+```
+
+And here is a very basic thread-based implementation:
+
+```scala
+class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor {
+ setDaemon(true) // daemonize this thread
+ setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler {
+ override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception)
+ })
+
+ val service = root.getFileSystem.newWatchService()
+
+ override def run() = Iterator.continually(service.take()).foreach(process)
+
+ override def interrupt() = {
+ service.close()
+ super.interrupt()
+ }
+
+ override def start() = {
+ watch(root)
+ super.start()
+ }
+
+ protected[this] def watch(file: Path): Unit = {
+ file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY)
+ }
+
+ protected[this] def process(key: WatchKey) = {
+ key.pollEvents() foreach {
+ case event: WatchEvent[Path] => dispatch(event.kind(), event.context())
+ case event => onUnknownEvent(event)
+ }
+ key.reset()
+ }
+
+ def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = {
+ eventType match {
+ case ENTRY_CREATE => onCreate(file)
+ case ENTRY_MODIFY => onModify(file)
+ case ENTRY_DELETE => onDelete(file)
+ }
+ }
+}
+```
+
+The above looks much cleaner! Now we can watch files to our heart's content without poring over the details of JavaDocs by simply implementing the `onCreate(path)`, `onModify(path)`, `onDelete(path)` etc.
+
+* **Exception handling**: This is already done above. `onException` gets called whenever we encounter an exception and the invoker can decide what to do next by implementing it.
+
+* **Recursive watching**: The Java API **does not allow recursive watching of directories**. We need to modify the `watch(file)` to recursively attach the watcher:
+
+```scala
+def watch(file: Path, recursive: Boolean = true): Unit = {
+ if (Files.isDirectory(file)) {
+ file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY)
+ // recursively call watch on children of this file
+ if (recursive) {
+ Files.list(file).iterator() foreach {f => watch(f, recursive)}
+ }
+ }
+}
+```
+
+* **Watching regular files**: As mentioned before, the Java API **can only watch directories**. One hack we can do to watch single files is to set a watcher on its parent directory and only react if the event is triggered on the file itself.
+
+```scala
+override def start() = {
+ if (Files.isDirectory(root)) {
+ watch(root, recursive = true)
+ } else {
+ watch(root.getParent, recursive = false)
+ }
+ super.start()
+}
+```
+
+And, now in `process(key)`, we make sure we react to either a directory or that file only:
+
+```scala
+def reactTo(target: Path) = Files.isDirectory(root) || (root == target)
+```
+
+And, we check before `dispatch` now:
+
+```scala
+case event: WatchEvent[Path] =>
+ val target = event.context()
+ if (reactTo(target)) {
+ dispatch(event.kind(), target)
+ }
+```
+
+* **Auto-watching new items**: The Java API, **does not auto-watch any new sub-files**. We can address this by attaching the watcher ourselves in `process(key)` when an `ENTRY_CREATE` event is fired:
+
+```scala
+if (reactTo(target)) {
+ if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) {
+ watch(root.resolve(target))
+ }
+ dispatch(event.kind(), target)
+}
+```
+
+Putting it all together, we have our final [`FileMonitor.scala`][FileMonitor.scala]:
+
+```scala
+class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor {
+ setDaemon(true) // daemonize this thread
+ setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler {
+ override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception)
+ })
+
+ val service = root.getFileSystem.newWatchService()
+
+ override def run() = Iterator.continually(service.take()).foreach(process)
+
+ override def interrupt() = {
+ service.close()
+ super.interrupt()
+ }
+
+ override def start() = {
+ if (Files.isDirectory(root)) {
+ watch(root, recursive = true)
+ } else {
+ watch(root.getParent, recursive = false)
+ }
+ super.start()
+ }
+
+ protected[this] def watch(file: Path, recursive: Boolean = true): Unit = {
+ if (Files.isDirectory(file)) {
+ file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY)
+ if (recursive) {
+ Files.list(file).iterator() foreach {f => watch(f, recursive)}
+ }
+ }
+ }
+
+ private[this] def reactTo(target: Path) = Files.isDirectory(root) || (root == target)
+
+ protected[this] def process(key: WatchKey) = {
+ key.pollEvents() foreach {
+ case event: WatchEvent[Path] =>
+ val target = event.context()
+ if (reactTo(target)) {
+ if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) {
+ watch(root.resolve(target))
+ }
+ dispatch(event.kind(), target)
+ }
+ case event => onUnknownEvent(event)
+ }
+ key.reset()
+ }
+
+ def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = {
+ eventType match {
+ case ENTRY_CREATE => onCreate(file)
+ case ENTRY_MODIFY => onModify(file)
+ case ENTRY_DELETE => onDelete(file)
+ }
+ }
+}
+```
+
+-----
+Now, that we have addressed all the gotchas and distanced ourselves from the intricacies of the WatchService API, we are still tightly coupled to the thread-based API.
+We will use the above class to expose a different concurrency model, namely, the [actor model][actorModel2] instead to design a reactive, dynamic and resilient file-system watcher using [Akka][akka-docs]. Although the [construction of Akka actors][akka-actors] is beyond the scope of this article, we will present a very simple actor that uses the `ThreadFileMonitor`:
+
+```scala
+import java.nio.file.{Path, WatchEvent}
+
+import akka.actor._
+
+class FileWatcher(file: Path) extends ThreadFileMonitor(file) with Actor {
+ import FileWatcher._
+
+ // MultiMap from Events to registered callbacks
+ protected[this] val callbacks = newMultiMap[Event, Callback]
+
+ // Override the dispatcher from ThreadFileMonitor to inform the actor of a new event
+ override def dispatch(event: Event, file: Path) = self ! Message.NewEvent(event, file)
+
+ // Override the onException from the ThreadFileMonitor
+ override def onException(exception: Throwable) = self ! Status.Failure(exception)
+
+ // when actor starts, start the ThreadFileMonitor
+ override def preStart() = super.start()
+
+ // before actor stops, stop the ThreadFileMonitor
+ override def postStop() = super.interrupt()
+
+ override def receive = {
+ case Message.NewEvent(event, target) if callbacks contains event =>
+ callbacks(event) foreach {f => f(event -> target)}
+
+ case Message.RegisterCallback(events, callback) =>
+ events foreach {event => callbacks.addBinding(event, callback)}
+
+ case Message.RemoveCallback(event, callback) =>
+ callbacks.removeBinding(event, callback)
+ }
+}
+
+object FileWatcher {
+ type Event = WatchEvent.Kind[Path]
+ type Callback = PartialFunction[(Event, Path), Unit]
+
+ sealed trait Message
+ object Message {
+ case class NewEvent(event: Event, file: Path) extends Message
+ case class RegisterCallback(events: Seq[Event], callback: Callback) extends Message
+ case class RemoveCallback(event: Event, callback: Callback) extends Message
+ }
+}
+```
+
+This allows us to dynamically register and remove callbacks to react to file system events:
+
+```scala
+// initialize the actor instance
+val system = ActorSystem("mySystem")
+val watcher: ActorRef = system.actorOf(Props(new FileWatcher(Paths.get("/home/pathikrit"))))
+
+// util to create a RegisterCallback message for the actor
+def when(events: Event*)(callback: Callback): Message = {
+ Message.RegisterCallback(events.distinct, callback)
+}
+
+// send the register callback message for create/modify events
+watcher ! when(events = ENTRY_CREATE, ENTRY_MODIFY) {
+ case (ENTRY_CREATE, file) => println(s"$file got created")
+ case (ENTRY_MODIFY, file) => println(s"$file got modified")
+}
+```
+
+Full source: [`FileWatcher.scala`][FileWatcher.scala]
+
+-----
+
+[actorModel]: https://en.wikipedia.org/wiki/Actor_model
+[actorModel2]: http://berb.github.io/diploma-thesis/original/054_actors.html
+[akka]: http://akka.io
+[akka-actors]: http://doc.akka.io/docs/akka/snapshot/scala/actors.html
+[akka-docs]: http://doc.akka.io/docs/akka/2.4.1/java.html
+[better-files]: https://github.com/pathikrit/better-files
+[better-files-watcher]: https://github.com/pathikrit/better-files#akka-file-watcher
+[cheatsheet]: http://learnxinyminutes.com/docs/scala/
+[cheatsheet2]: http://techblog.realestate.com.au/java-to-scala-cheatsheet/
+[FileWatcher.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/akka/src/main/scala/better/files/FileWatcher.scala
+[FileMonitor.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/core/src/main/scala/better/files/FileMonitor.scala
+[FindFirstChangeNotification]: https://msdn.microsoft.com/en-us/library/aa364417(VS.85).aspx
+[fsevents-wiki]: https://en.wikipedia.org/wiki/FSEvents
+[ionotify-wiki]: https://en.wikipedia.org/wiki/Inotify
+[nio2]: https://docs.oracle.com/javase/tutorial/essential/io/fileio.html
+[nio2-wiki]: https://en.wikipedia.org/wiki/Non-blocking_I/O_(Java)
+[jsr-51]: https://www.jcp.org/en/jsr/detail?id=51
+[javadoc-path]: https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html
+[javadoc-watchservice]: https://docs.oracle.com/javase/8/docs/api/java/nio/file/WatchService.html
+[so-autowatch]: https://github.com/lloydmeta/schwatcher/issues/44
+[so-down]: http://memecrunch.com/meme/YBHZ/stackoverflow-is-down/image.jpg
+[so-recursive-watching]: http://stackoverflow.com/questions/18701242/how-to-watch-a-folder-and-subfolders-for-changes
+[so-only-watch-dirs]: http://stackoverflow.com/questions/16251273/can-i-watch-for-single-file-change-with-watchservice-not-the-whole-directory
diff --git a/scalaplugin/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala b/scalaplugin/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala
new file mode 100644
index 00000000..66594d20
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala
@@ -0,0 +1,67 @@
+package better.files
+
+import akka.actor._
+
+/**
+ * An actor that can watch a file or a directory
+ * Instead of directly calling the constructor of this, call file.newWatcher to create the actor
+ *
+ * @param file watch this file (or directory)
+ * @param maxDepth In case of directories, how much depth should we watch
+ */
+class FileWatcher(file: File, maxDepth: Int) extends Actor {
+ import FileWatcher._
+
+ def this(file: File, recursive: Boolean = true) = this(file, if (recursive) Int.MaxValue else 0)
+
+ protected[this] val callbacks = newMultiMap[Event, Callback]
+
+ protected[this] val monitor: File.Monitor = new FileMonitor(file, maxDepth) {
+ override def onEvent(event: Event, file: File, count: Int) = self ! Message.NewEvent(event, file, count)
+ override def onException(exception: Throwable) = self ! Status.Failure(exception)
+ }
+
+ override def preStart() = monitor.start()(executionContext = context.dispatcher)
+
+ override def receive = {
+ case Message.NewEvent(event, target, count) if callbacks.contains(event) => callbacks(event).foreach(f => repeat(count)(f(event -> target)))
+ case Message.RegisterCallback(events, callback) => events.foreach(event => callbacks.addBinding(event, callback))
+ case Message.RemoveCallback(event, callback) => callbacks.removeBinding(event, callback)
+ }
+
+ override def postStop() = monitor.stop()
+}
+
+object FileWatcher {
+ import java.nio.file.{Path, WatchEvent}
+
+ type Event = WatchEvent.Kind[Path]
+ type Callback = PartialFunction[(Event, File), Unit]
+
+ sealed trait Message
+ object Message {
+ case class NewEvent(event: Event, file: File, count: Int) extends Message
+ case class RegisterCallback(events: Traversable[Event], callback: Callback) extends Message
+ case class RemoveCallback(event: Event, callback: Callback) extends Message
+ }
+
+ implicit val disposeActorSystem: Disposable[ActorSystem] =
+ Disposable(_.terminate())
+
+ implicit class FileWatcherOps(file: File) {
+ def watcherProps(recursive: Boolean): Props =
+ Props(new FileWatcher(file, recursive))
+
+ def newWatcher(recursive: Boolean = true)(implicit system: ActorSystem): ActorRef =
+ system.actorOf(watcherProps(recursive))
+ }
+
+ def when(events: Event*)(callback: Callback): Message =
+ Message.RegisterCallback(events, callback)
+
+ def on(event: Event)(callback: File => Unit): Message =
+ when(event) { case (`event`, file) => callback(file) }
+
+ def stop(event: Event, callback: Callback): Message =
+ Message.RemoveCallback(event, callback)
+}
diff --git a/scalaplugin/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala b/scalaplugin/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala
new file mode 100644
index 00000000..014373cd
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala
@@ -0,0 +1,101 @@
+package better.files
+
+import Dsl._
+
+import scala.concurrent.duration._
+import scala.collection.mutable
+import scala.language.postfixOps
+
+class FileWatcherSpec extends CommonSpec {
+ "file watcher" should "watch directories" in {
+ assume(isCI)
+ File.usingTemporaryDirectory() {dir =>
+ (dir / "a" / "b" / "c.txt").createIfNotExists(createParents = true)
+
+ var actualEvents = List.empty[String]
+ def output(file: File, event: String) = synchronized {
+ val msg = s"${dir.path relativize file.path} got $event"
+ println(msg)
+ actualEvents = msg :: actualEvents
+ }
+ /***************************************************************************/
+ import java.nio.file.{StandardWatchEventKinds => Events}
+ import FileWatcher._
+
+ import akka.actor.{ActorRef, ActorSystem}
+ implicit val system = ActorSystem()
+
+ val watcher: ActorRef = dir.newWatcher()
+
+ watcher ! when(events = Events.ENTRY_CREATE, Events.ENTRY_MODIFY) { // watch for multiple events
+ case (Events.ENTRY_CREATE, file) => output(file, "created")
+ case (Events.ENTRY_MODIFY, file) => output(file, "modified")
+ }
+
+ watcher ! on(Events.ENTRY_DELETE)(file => output(file, "deleted")) // register partial function for single event
+ /***************************************************************************/
+ sleep(5 seconds)
+
+ val expectedEvents = mutable.ListBuffer.empty[String]
+
+ def doIO[U](logs: String*)(f: => U): Unit = {
+ expectedEvents ++= logs
+ f
+ sleep()
+ }
+
+ doIO("a/b/c.txt got modified") {
+ (dir / "a" / "b" / "c.txt").writeText("Hello world")
+ }
+ doIO("a/b got deleted", "a/b/c.txt got deleted") {
+ rm(dir / "a" / "b")
+ }
+ doIO("d got created") {
+ mkdir(dir / "d")
+ }
+ doIO("d/e.txt got created") {
+ touch(dir / "d" / "e.txt")
+ }
+ doIO("d/f got created") {
+ mkdirs(dir / "d" / "f" / "g")
+ }
+ doIO("d/f/g/e.txt got created") {
+ touch(dir / "d" / "f" / "g" / "e.txt")
+ }
+
+ doIO("a/e.txt got created", "d/f/g/e.txt got deleted") {
+ (dir / "d" / "f" / "g" / "e.txt") moveTo (dir / "a" / "e.txt")
+ }
+
+ sleep(10 seconds)
+
+ println(
+ s"""
+ |Expected=${expectedEvents.sorted}
+ |Actual=${actualEvents.sorted}
+ |""".stripMargin)
+
+ expectedEvents.diff(actualEvents) shouldBe empty
+
+ def checkNotWatching[U](msg: String)(f: => U) = {
+ val before = List(actualEvents : _*)
+ f
+ sleep()
+ val after = List(actualEvents : _*)
+ assert(before === after, msg)
+ }
+
+ system.stop(watcher)
+ sleep()
+ checkNotWatching("stop watching after actor is stopped") {
+ mkdirs(dir / "e")
+ }
+
+ system.terminate()
+ sleep()
+ checkNotWatching("stop watching after actor-system is stopped") {
+ mkdirs(dir / "f")
+ }
+ }
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/README.md b/scalaplugin/src/test/resource/better-files/benchmarks/README.md
new file mode 100644
index 00000000..ed092ece
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/README.md
@@ -0,0 +1,24 @@
+Benchmarks
+====
+* [Scanner benchmarks](src/main/scala/better/files/Scanners.scala):
+```
+> sbt "benchmarks/test"
+JavaScanner : 2191 ms
+StringBuilderScanner : 1325 ms
+CharBufferScanner : 1117 ms
+StreamingScanner : 212 ms
+IterableScanner : 365 ms
+IteratorScanner : 297 ms
+BetterFilesScanner : 272 ms
+ArrayBufferScanner : 220 ms
+FastJavaIOScanner2 : 181 ms
+FastJavaIOScanner : 179 ms
+```
+
+----
+
+[![YourKit](https://www.yourkit.com/images/yklogo.png)](https://www.yourkit.com/)
+
+YourKit supports better-files with its full-featured Java Profiler.
+YourKit, LLC is the creator of [YourKit Java Profiler](https://www.yourkit.com/java/profiler/) and [YourKit .NET Profiler](https://www.yourkit.com/.net/profiler/),
+innovative and intelligent tools for profiling Java and .NET applications.
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java b/scalaplugin/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java
new file mode 100644
index 00000000..50550704
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java
@@ -0,0 +1,78 @@
+package better.files;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Arrays;
+
+/**
+ * Hand built using a char buffer.
+ *
+ * Tokens are accumulated into a growable char[] (doubled when full);
+ * ' ', '\n', '\t', '\r' and '\f' delimit tokens.
+ */
+public class ArrayBufferScanner extends AbstractScanner {
+ // Token buffer; grown by doubling in loadBuffer() when full.
+ private char[] buffer = new char[1 << 4];
+ // Length of the current token held in `buffer`.
+ // Starts at 1 (not 0) so hasNext() is true before the first read; it
+ // becomes 0 only when loadBuffer() reaches EOF without finding a token.
+ private int pos = 1;
+
+ private BufferedReader reader;
+
+ public ArrayBufferScanner(BufferedReader reader) {
+ super(reader);
+ this.reader = reader;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return pos > 0;
+ }
+
+ // Reads the next whitespace-delimited token into `buffer`, setting `pos`
+ // to its length (0 if EOF was hit before any token character was seen).
+ private void loadBuffer() {
+ pos = 0;
+ try {
+ for (int i; (i = reader.read()) != -1; ) {
+ char c = (char) i;
+ if (c != ' ' && c != '\n' && c != '\t' && c != '\r' && c != '\f') {
+ if (pos == buffer.length) buffer = Arrays.copyOf(buffer, 2 * pos);
+ buffer[pos++] = c;
+ } else if (pos != 0) break; // whitespace after a token terminates it
+ }
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ // The current token as a String (copy of buffer[0..pos)).
+ public String current() {
+ return String.copyValueOf(buffer, 0, pos);
+ }
+
+ @Override
+ public String next() {
+ loadBuffer();
+ return current();
+ }
+
+ @Override
+ public String nextLine() {
+ try {
+ return reader.readLine();
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ // Manual base-10 parse of the next token; accepts a leading '+' or '-'.
+ // Throws NumberFormatException on an empty token or a non-digit character.
+ @Override
+ public int nextInt() {
+ loadBuffer();
+ final int radix = 10;
+ int result = 0;
+ int i = buffer[0] == '-' || buffer[0] == '+' ? 1 : 0;
+ for (checkValidNumber(pos > i); i < pos; i++) {
+ int digit = buffer[i] - '0';
+ checkValidNumber(0 <= digit && digit <= 9);
+ result = result * radix + digit;
+ }
+ return buffer[0] == '-' ? -result : result;
+ }
+
+ // Fails with NumberFormatException carrying the offending token.
+ private void checkValidNumber(boolean condition) {
+ if (!condition) throw new NumberFormatException(current());
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala b/scalaplugin/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala
new file mode 100644
index 00000000..791e6039
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala
@@ -0,0 +1,158 @@
+package better.files
+
+import java.io.BufferedReader
+
+/**
+ * Base interface to test.
+ *
+ * Wraps a BufferedReader and exposes token / int / line reads.
+ * Defaults: nextInt parses via next().toInt; nextLine delegates to the reader.
+ */
+abstract class AbstractScanner(protected[this] val reader: BufferedReader) {
+ def hasNext: Boolean
+ def next(): String
+ def nextInt() = next().toInt
+ def nextLine() = reader.readLine()
+ def close() = reader.close()
+}
+
+/**
+ * Based on java.util.Scanner
+ */
+class JavaScanner(reader: BufferedReader) extends AbstractScanner(reader) {
+ private[this] val scanner = new java.util.Scanner(reader)
+ override def hasNext = scanner.hasNext
+ override def next() = scanner.next()
+ override def nextInt() = scanner.nextInt()
+ override def nextLine() = {
+ // Deliberately called twice: after a token read, java.util.Scanner's first
+ // nextLine() returns only the remainder of the current line; the second
+ // call returns the following full line (matches the canary check in
+ // ScannerBenchmark, which calls nextLine() right after next()).
+ scanner.nextLine()
+ scanner.nextLine()
+ }
+ override def close() = scanner.close()
+}
+
+/**
+ * Based on StringTokenizer + resetting the iterator
+ */
+class IterableScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterable[String] {
+ // A fresh iterator over all remaining tokens: lines until EOF (null),
+ // each split on whitespace by a StringTokenizer.
+ override def iterator = for {
+ line <- Iterator.continually(reader.readLine()).takeWhile(_ != null)
+ tokenizer = new java.util.StringTokenizer(line)
+ _ <- Iterator.continually(tokenizer).takeWhile(_.hasMoreTokens)
+ } yield tokenizer.nextToken()
+
+ private[this] var current = iterator
+ override def hasNext = current.hasNext
+ override def next() = current.next()
+ override def nextLine() = {
+ current = iterator // discard buffered tokens; resume from the reader's position
+ super.nextLine()
+ }
+}
+
+/**
+ * Based on a mutating var StringTokenizer
+ */
+class IteratorScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] {
+ import java.util.StringTokenizer
+ // Lazily produces one tokenizer per line that actually contains tokens.
+ private[this] val tokenizers = Iterator.continually(reader.readLine()).takeWhile(_ != null).map(new StringTokenizer(_)).filter(_.hasMoreTokens)
+ private[this] var current: Option[StringTokenizer] = None
+
+ // Tokenizer for the current line while it has tokens left, otherwise
+ // advances `current` to the next non-empty line's tokenizer (or None at EOF).
+ @inline private[this] def tokenizer(): Option[StringTokenizer] = current.find(_.hasMoreTokens) orElse {
+ current = if (tokenizers.hasNext) Some(tokenizers.next()) else None
+ current
+ }
+ override def hasNext = tokenizer().nonEmpty
+ override def next() = tokenizer().get.nextToken()
+ override def nextLine() = {
+ current = None // drop the partially consumed line
+ super.nextLine()
+ }
+}
+
+/**
+ * Based on java.io.StreamTokenizer
+ */
+class StreamingScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] {
+ import java.io.StreamTokenizer
+ private[this] val in = new StreamTokenizer(reader)
+
+ override def hasNext = in.ttype != StreamTokenizer.TT_EOF
+ override def next() = {
+ in.nextToken()
+ in.sval // string value of the token just read
+ }
+ // StreamTokenizer parses numbers as doubles (nval); truncate for ints.
+ override def nextInt() = nextDouble().toInt
+ def nextDouble() = {
+ in.nextToken()
+ in.nval
+ }
+}
+
+/**
+ * Based on a reusable StringBuilder
+ */
+class StringBuilderScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] {
+ private[this] val chars = reader.chars
+ private[this] val buffer = new StringBuilder()
+
+ override def next() = {
+ buffer.clear()
+ // The loop skips leading whitespace: takeWhile yields nothing while the
+ // stream is positioned on whitespace, then collects one token's chars.
+ while (buffer.isEmpty && hasNext) {
+ chars.takeWhile(c => !c.isWhitespace).foreach(buffer += _)
+ }
+ buffer.toString()
+ }
+ override def hasNext = chars.hasNext
+}
+
+/**
+ * Scala version of the ArrayBufferScanner
+ */
+class CharBufferScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] {
+ private[this] val chars = reader.chars
+ private[this] var buffer = Array.ofDim[Char](1<<4) // doubled when full
+
+ override def next() = {
+ var pos = 0
+ // NOTE(review): unlike ArrayBufferScanner this treats only ' ' and '\n'
+ // as delimiters (not '\t', '\r', '\f') - confirm that is intentional.
+ while (pos == 0 && hasNext) {
+ for {
+ c <- chars.takeWhile(c => c != ' ' && c != '\n')
+ } {
+ if (pos == buffer.length) buffer = java.util.Arrays.copyOf(buffer, 2 * pos)
+ buffer(pos) = c
+ pos += 1
+ }
+ }
+ String.copyValueOf(buffer, 0, pos)
+ }
+ override def hasNext = chars.hasNext
+}
+
+/**
+ * Scanner using https://github.com/williamfiset/FastJavaIO
+ */
+class FastJavaIOScanner(reader: BufferedReader) extends AbstractScanner(reader) {
+ // fastjavaio reads bytes, so the Reader is adapted back to an InputStream.
+ protected def is: java.io.InputStream = new org.apache.commons.io.input.ReaderInputStream(reader, defaultCharset)
+
+ private[this] val fastReader = new fastjavaio.InputReader(is)
+
+ override def hasNext = true //TODO: https://github.com/williamfiset/FastJavaIO/issues/3
+ override def next() = fastReader.readStr()
+ override def nextInt() = fastReader.readInt()
+ override def nextLine() = fastReader.readLine()
+}
+
+/**
+ * Same as FastJavaIOScanner but uses better-files's Reader => InputStream
+ */
+class FastJavaIOScanner2(reader: BufferedReader) extends FastJavaIOScanner(reader) {
+ override def is = reader.toInputStream
+}
+
+/**
+ * Based on the better-files implementation
+ */
+class BetterFilesScanner(reader: BufferedReader) extends AbstractScanner(reader) {
+ private[this] val scanner = Scanner(reader)
+ override def hasNext = scanner.hasNext
+ override def next() = scanner.next
+ override def nextLine() = scanner.nextLine()
+}
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala
new file mode 100644
index 00000000..68b734e1
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala
@@ -0,0 +1,10 @@
+package better.files
+
+import org.scalatest.FunSuite
+
+/** Mixin for benchmark suites: adds simple wall-clock profiling to FunSuite. */
+trait Benchmark extends FunSuite {
+ /** Evaluates `f` once, returning its result paired with elapsed time in ms. */
+ def profile[A](f: => A): (A, Long) = {
+ val t = System.nanoTime()
+ (f, ((System.nanoTime() - t)/1e6).toLong)
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala
new file mode 100644
index 00000000..aa09bc77
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala
@@ -0,0 +1,39 @@
+package better.files
+
+import java.nio.charset.Charset
+
+import scala.util.Random
+
+/**
+ * Compares write/read times for the same data under a plain UTF-8 Charset
+ * versus better-files's UnicodeCharset wrapper.
+ */
+class EncodingBenchmark extends Benchmark {
+
+ // Writes 1000 random 10k-char strings (newline-terminated); returns (_, ms).
+ def testWrite(file: File, charset: Charset) = profile {
+ for {
+ writer <- file.bufferedWriter(charset)
+ content <- Iterator.continually(Random.nextString(10000)).take(1000)
+ } writer.write(content + "\n")
+ }
+
+ // Streams every line back through an auto-closed reader; returns (_, ms).
+ def testRead(file: File, charset: Charset) = profile {
+ for {
+ reader <- file.bufferedReader(charset)
+ line <- reader.lines().autoClosed
+ } line
+ }
+
+ // Runs the write-then-read round trip against a fresh temporary file.
+ def run(charset: Charset) = {
+ File.temporaryFile() foreach {file =>
+ val (_, w) = testWrite(file, charset)
+ info(s"Charset=$charset, write=$w ms")
+
+ val (_, r) = testRead(file, charset)
+ info(s"Charset=$charset, read=$r ms")
+ }
+ }
+
+ test("encoding") {
+ val utf8 = Charset.forName("UTF-8")
+ run(charset = utf8)
+ info("-------------")
+ run(charset = UnicodeCharset(utf8))
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala
new file mode 100644
index 00000000..83082b9a
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala
@@ -0,0 +1,66 @@
+package better.files
+
+import java.io.{BufferedReader, StringReader}
+
+/**
+ * Times every AbstractScanner implementation over the same temporary file,
+ * after first sanity-checking each one against a small canary input.
+ */
+class ScannerBenchmark extends Benchmark {
+ // Shared fixture: n blocks of (a line of ints -n..n, a "hello" line, a "world" line).
+ val file = File.newTemporaryFile()
+ val n = 1000
+ repeat(n) {
+ file.appendLine(-n to n mkString " ")
+ .appendLine("hello " * n)
+ .appendLine("world " * n)
+ }
+ // One builder per scanner implementation under test.
+ val scanners: Seq[BufferedReader => AbstractScanner] = Seq(
+ new JavaScanner(_),
+ new StringBuilderScanner(_),
+ new CharBufferScanner(_),
+ new StreamingScanner(_),
+ new IterableScanner(_),
+ new IteratorScanner(_),
+ new BetterFilesScanner(_),
+ new ArrayBufferScanner(_),
+ new FastJavaIOScanner2(_),
+ new FastJavaIOScanner(_)
+ )
+
+ // Times one full run over the fixture and logs "<name>: <ms>".
+ def runTest(scanner: AbstractScanner) = {
+ val (_, time) = profile(run(scanner))
+ scanner.close()
+ info(f"${scanner.getClass.getSimpleName.padTo(25, ' ')}: $time%4d ms")
+ }
+
+ // Per fixture block: 2n+1 ints then 2n words; nextLine is deliberately skipped.
+ def run(scanner: AbstractScanner): Unit = repeat(n) {
+ assert(scanner.hasNext)
+ val ints = List.fill(2 * n + 1)(scanner.nextInt())
+ val line = "" //scanner.nextLine()
+ val words = IndexedSeq.fill(2 * n)(scanner.next())
+ (line, ints, words)
+ }
+
+ test("scanner") {
+ info("Warming up ...")
+ // Correctness canary: ints, words, a line read, then another int.
+ scanners foreach { scannerBuilder =>
+ val canaryData =
+ """
+ |10 -23
+ |Hello World
+ |Hello World
+ |19
+ """.stripMargin
+ val scanner = scannerBuilder(new BufferedReader(new StringReader(canaryData)))
+ info(s"Testing ${scanner.getClass.getSimpleName} for correctness")
+ assert(scanner.hasNext)
+ assert(scanner.nextInt() == 10)
+ assert(scanner.nextInt() == -23)
+ assert(scanner.next() == "Hello")
+ assert(scanner.next() == "World")
+ val l = scanner.nextLine()
+ assert(l == "Hello World", l)
+ assert(scanner.nextInt() == 19)
+ //assert(!scanner.hasNext)
+ }
+
+ info("Running benchmark ...")
+ scanners foreach { scanner => runTest(scanner(file.newBufferedReader)) }
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/build.sbt b/scalaplugin/src/test/resource/better-files/build.sbt
new file mode 100644
index 00000000..a3ae7a81
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/build.sbt
@@ -0,0 +1,163 @@
+val username = "pathikrit"
+val repo = "better-files"
+
+// Settings shared by every subproject: Scala 2.12 build, strict compiler
+// flags (fatal warnings + extensive lints), and scalatest on the classpath.
+lazy val commonSettings = Seq(
+ organization := s"com.github.$username",
+ scalaVersion := "2.12.3",
+ crossScalaVersions := Seq("2.12.3"),
+ crossVersion := CrossVersion.binary,
+ javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint"),
+ scalacOptions ++= Seq(
+ "-deprecation", // Emit warning and location for usages of deprecated APIs.
+ "-encoding", "utf-8", // Specify character encoding used by source files.
+ "-explaintypes", // Explain type errors in more detail.
+ "-feature", // Emit warning and location for usages of features that should be imported explicitly.
+ "-language:existentials", // Existential types (besides wildcard types) can be written and inferred
+ "-language:experimental.macros", // Allow macro definition (besides implementation and application)
+ "-language:higherKinds", // Allow higher-kinded types
+ "-language:implicitConversions", // Allow definition of implicit functions called views
+ "-unchecked", // Enable additional warnings where generated code depends on assumptions.
+ "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
+ "-Xfatal-warnings", // Fail the compilation if there are any warnings.
+ "-Xfuture", // Turn on future language features.
+ "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
+ "-Xlint:by-name-right-associative", // By-name parameter of right associative operator.
+ "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
+ "-Xlint:delayedinit-select", // Selecting member of DelayedInit.
+ "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
+ "-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
+ "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
+ "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
+ "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
+ "-Xlint:nullary-unit", // Warn when nullary methods return Unit.
+ "-Xlint:option-implicit", // Option.apply used implicit view.
+ "-Xlint:package-object-classes", // Class or object defined in package object.
+ "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
+ "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
+ "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
+ "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
+ "-Xlint:unsound-match", // Pattern match may not be typesafe.
+ "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
+ "-Ypartial-unification", // Enable partial unification in type constructor inference
+ "-Ywarn-dead-code", // Warn when dead code is identified.
+ "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
+ "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
+ "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
+ "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
+ "-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
+ "-Ywarn-numeric-widen", // Warn when numerics are widened.
+ "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused.
+ "-Ywarn-unused:imports", // Warn if an import selector is not referenced.
+ "-Ywarn-unused:locals", // Warn if a local definition is unused.
+ "-Ywarn-unused:params", // Warn if a value parameter is unused.
+ "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused.
+ "-Ywarn-unused:privates", // Warn if a private member is unused.
+ "-Ywarn-value-discard" // Warn when non-Unit expression results are unused.
+ ),
+ libraryDependencies += Dependencies.scalatest,
+ updateImpactOpenBrowser := false
+)
+
+// Main library artifact.
+lazy val core = (project in file("core"))
+ .settings(commonSettings: _*)
+ .settings(publishSettings: _*)
+ .settings(
+ name := repo,
+ description := "Simple, safe and intuitive I/O in Scala"
+ )
+
+// Akka-actor based file watcher; shares core's test classes via test->test.
+lazy val akka = (project in file("akka"))
+ .settings(commonSettings: _*)
+ .settings(publishSettings: _*)
+ .settings(
+ name := s"$repo-akka",
+ description := "Reactive file watcher using Akka actors",
+ libraryDependencies += Dependencies.akka
+ )
+ .dependsOn(core % "test->test;compile->compile")
+
+// Shapeless-based scanner experiment; never published.
+lazy val shapelessScanner = (project in file("shapeless"))
+ .settings(commonSettings: _*)
+ .settings(noPublishSettings: _*)
+ .settings(
+ name := s"shapeless-scanner",
+ description := "Shapeless Scanner",
+ libraryDependencies += Dependencies.shapeless
+ )
+ .dependsOn(core % "test->test;compile->compile")
+
+// Benchmark suite (see benchmarks/README.md); never published.
+lazy val benchmarks = (project in file("benchmarks"))
+ .settings(commonSettings: _*)
+ .settings(noPublishSettings: _*)
+ .settings(
+ name := s"$repo-benchmarks",
+ libraryDependencies ++= Seq(
+ Dependencies.commonsio,
+ Dependencies.fastjavaio
+ )
+ )
+ .dependsOn(core % "test->test;compile->compile")
+
+// Aggregating root: builds unified docs and drives releases; publishes nothing.
+lazy val root = (project in file("."))
+ .settings(commonSettings: _*)
+ .settings(docSettings: _*)
+ .settings(noPublishSettings: _*)
+ .settings(releaseSettings: _*)
+ .aggregate(core, akka, shapelessScanner, benchmarks)
+
+import UnidocKeys._
+// Unified scaladoc for core + akka, pushed to gh-pages under latest/api.
+lazy val docSettings = unidocSettings ++ site.settings ++ ghpages.settings ++ Seq(
+ autoAPIMappings := true,
+ unidocProjectFilter in (ScalaUnidoc, unidoc) := inProjects(core, akka),
+ SiteKeys.siteSourceDirectory := file("site"),
+ site.addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), "latest/api"),
+ git.remoteRepo := s"git@github.com:$username/$repo.git"
+)
+
+import ReleaseTransformations._
+// sbt-release pipeline; note that runClean is intentionally commented out.
+lazy val releaseSettings = Seq(
+ releaseProcess := Seq[ReleaseStep](
+ checkSnapshotDependencies,
+ inquireVersions,
+ //runClean,
+ runTest,
+ setReleaseVersion,
+ commitReleaseVersion,
+ tagRelease,
+ publishArtifacts,
+ setNextVersion,
+ commitNextVersion,
+ releaseStepCommand("sonatypeReleaseAll"),
+ pushChanges
+ )
+)
+
+// Turns publishing into a no-op for non-released subprojects.
+lazy val noPublishSettings = Seq(
+ publish := (),
+ publishLocal := (),
+ publishArtifact := false
+)
+
+// POM metadata and signed publishing to Sonatype; credentials come from the
+// SONATYPE_USERNAME / SONATYPE_PASSWORD environment variables when present.
+lazy val publishSettings = Seq(
+ homepage := Some(url(s"https://github.com/$username/$repo")),
+ licenses += "MIT" -> url(s"https://github.com/$username/$repo/blob/master/LICENSE"),
+ scmInfo := Some(ScmInfo(url(s"https://github.com/$username/$repo"), s"git@github.com:$username/$repo.git")),
+ apiURL := Some(url(s"https://$username.github.io/$repo/latest/api/")),
+ releaseCrossBuild := true,
+ releasePublishArtifactsAction := PgpKeys.publishSigned.value,
+ publishMavenStyle := true,
+ publishArtifact in Test := false,
+ publishTo := Some(if (isSnapshot.value) Opts.resolver.sonatypeSnapshots else Opts.resolver.sonatypeStaging),
+ credentials ++= (for {
+ username <- sys.env.get("SONATYPE_USERNAME")
+ password <- sys.env.get("SONATYPE_PASSWORD")
+ } yield Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", username, password)).toSeq,
+ pomExtra :=
+ <developers>
+ <developer>
+ <id>{username}</id>
+ <name>Pathikrit Bhowmick</name>
+ <url>http://github.com/{username}</url>
+ </developer>
+ </developers>
+)
diff --git a/scalaplugin/src/test/resource/better-files/circle.yml b/scalaplugin/src/test/resource/better-files/circle.yml
new file mode 100644
index 00000000..0ca8d9b9
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/circle.yml
@@ -0,0 +1,21 @@
+# CircleCI 1.x configuration for better-files.
+machine:
+ environment:
+ SBT_GHPAGES_COMMIT_MESSAGE: 'Publishing Scaladoc [ci skip]'
+ java:
+ version: oraclejdk8
+
+test:
+ override:
+ # Cross-built test suite with coverage instrumentation enabled.
+ - sbt clean coverage +test
+
+# Only on master: upload coverage, publish scaladoc to gh-pages, and publish artifacts.
+deployment:
+ master:
+ branch: master
+ owner: pathikrit
+ commands:
+ - sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage
+ - bash <(curl -s https://codecov.io/bash)
+ - git config --global user.email "pathikritbhowmick@msn.com"
+ - git config --global user.name "circle-ci"
+ - git config --global push.default simple
+ - sbt ghpagesPushSite +publish
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala
new file mode 100644
index 00000000..3bacd91d
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala
@@ -0,0 +1,155 @@
+package better.files
+
+import java.nio.charset.Charset
+import java.nio.file.attribute.{PosixFileAttributes, PosixFilePermission, PosixFilePermissions}
+import java.util.zip.Deflater
+
+import scala.collection.JavaConverters._
+
+/**
+ * Do file ops using a UNIX command line DSL
+ */
+object Dsl {
+ // ~ is the user's home directory, as in the shell.
+ def ~ : File =
+ File.home
+
+ def pwd: File =
+ File.currentWorkingDirectory
+
+ def cwd: File =
+ pwd
+
+ // Navigation values for use with the / operator, e.g. file / `..` / `..`
+ val `..`: File => File =
+ _.parent
+
+ val `.`: File => File =
+ identity
+
+ /**
+ * Adds some symbolic operations to file
+ * @param file the file the operators act on
+ */
+ implicit class SymbolicOperations(val file: File) {
+ /**
+ * Allows navigation up e.g. file / .. / ..
+ *
+ * @param f navigation function (e.g. `..` or `.`)
+ * @return the file it navigates to
+ */
+ def /(f: File => File): File =
+ f(file)
+
+ // << and >>: both APPEND a line to the file.
+ def <<(line: String)(implicit charset: Charset = defaultCharset): file.type =
+ file.appendLines(line)(charset)
+
+ def >>:(line: String)(implicit charset: Charset = defaultCharset): file.type =
+ file.appendLines(line)(charset)
+
+ // < and >: both WRITE (overwrite under default open options) the file's content.
+ def <(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type =
+ file.write(text)(openOptions, charset)
+
+ def `>:`(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type =
+ file.write(text)(openOptions, charset)
+
+ // ! reads the whole file as a String.
+ def `!`(implicit charset: Charset = defaultCharset): String =
+ file.contentAsString(charset)
+
+ // === / !== compare file CONTENTS, not paths.
+ def `===`(that: File): Boolean =
+ file.isSameContentAs(that)
+
+ def !==(that: File): Boolean =
+ !(file === that)
+ }
+
+ // Copies INTO `to` when it is a directory, else copies over it (overwrite).
+ def cp(from: File, to: File): File = {
+ if (to.isDirectory) {
+ from.copyToDirectory(to)
+ } else {
+ from.copyTo(to, overwrite = true)
+ }
+ }
+
+ // Moves INTO `to` when it is a directory, else replaces it (overwrite).
+ def mv(from: File, to: File): File = {
+ if (to.isDirectory) {
+ from.moveToDirectory(to)
+ } else {
+ from.moveTo(to, overwrite = true)
+ }
+ }
+
+ // Best-effort delete: IO exceptions are swallowed.
+ def rm(file: File): File =
+ file.delete(swallowIOExceptions = true)
+
+ def del(file: File): File =
+ rm(file)
+
+ def ln(file1: File, file2: File): File =
+ file1.linkTo(file2)
+
+ def ln_s(file1: File, file2: File): File =
+ file1.symbolicLinkTo(file2)
+
+ def cat(files: File*): Seq[Iterator[Byte]] =
+ files.map(_.bytes)
+
+ def ls(file: File): Files =
+ file.list
+
+ def dir(file: File): Files =
+ ls(file)
+
+ def ls_r(file: File): Files =
+ file.listRecursively
+
+ def touch(file: File): File =
+ file.touch()
+
+ def mkdir(file: File): File =
+ file.createDirectory()
+
+ def md5(file: File): String =
+ file.md5
+
+ def sha1(file: File): String =
+ file.sha1
+
+ def sha256(file: File): String =
+ file.sha256
+
+ def sha512(file: File): String =
+ file.sha512
+
+ // Like mkdir -p: also creates missing parents.
+ def mkdirs(file: File): File =
+ file.createDirectories()
+
+ def chown(owner: String, file: File): File =
+ file.setOwner(owner)
+
+ def chgrp(group: String, file: File): File =
+ file.setGroup(group)
+
+ /**
+ * Update permission of this file
+ *
+ * @param permissions Must be 9 character POSIX permission representation e.g. "rwxr-x---"
+ * @param file the file whose permissions are replaced
+ * @return file
+ */
+ def chmod(permissions: String, file: File): File =
+ file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet)
+
+ def chmod_+(permission: PosixFilePermission, file: File): File =
+ file.addPermission(permission)
+
+ def chmod_-(permission: PosixFilePermission, file: File): File =
+ file.removePermission(permission)
+
+ def stat(file: File): PosixFileAttributes =
+ file.posixAttributes
+
+ def unzip(zipFile: File)(destination: File)(implicit charset: Charset = defaultCharset): destination.type =
+ zipFile.unzipTo(destination)(charset)
+
+ def zip(files: File*)(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type =
+ destination.zipIn(files.iterator, compressionLevel)(charset)
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/File.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/File.scala
new file mode 100644
index 00000000..eb11cd93
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/File.scala
@@ -0,0 +1,1257 @@
+package better.files
+
+import java.io.{File => JFile, _}
+import java.net.{URI, URL}
+import java.nio.charset.Charset
+import java.nio.channels._
+import java.nio.file._
+import java.nio.file.attribute._
+import java.security.{DigestInputStream, MessageDigest}
+import java.time.Instant
+import java.util.regex.Pattern
+import java.util.zip._
+import javax.xml.bind.DatatypeConverter
+
+import scala.collection.JavaConverters._
+import scala.concurrent.ExecutionContext
+import scala.util.Properties
+import scala.util.matching.Regex
+
+/**
+ * Scala wrapper around java.nio.files.Path
+ */
+class File private(val path: Path)(implicit val fileSystem: FileSystem = path.getFileSystem) {
+ //TODO: LinkOption?
+
+ def pathAsString: String =
+ path.toString
+
+ def toJava: JFile =
+ new JFile(path.toAbsolutePath.toString)
+
+ /**
+ * Name of file
+ * Certain files may not have a name e.g. root directory - returns empty string in that case
+ *
+ * @return
+ */
+ def name: String =
+ nameOption.getOrElse("")
+
+ /**
+ * Certain files may not have a name e.g. root directory - returns None in that case
+ *
+ * @return
+ */
+ def nameOption: Option[String] =
+ Option(path.getFileName).map(_.toString)
+
+ def root: File =
+ path.getRoot
+
+ def nameWithoutExtension: String =
+ nameWithoutExtension(includeAll = true)
+
+ /**
+ * @param includeAll
+ * For files with multiple extensions e.g. "bundle.tar.gz"
+ * nameWithoutExtension(includeAll = true) returns "bundle"
+ * nameWithoutExtension(includeAll = false) returns "bundle.tar"
+ * @return
+ */
+ def nameWithoutExtension(includeAll: Boolean): String =
+ if (hasExtension) name.substring(0, indexOfExtension(includeAll)) else name
+
+ /**
+ * @return extension (including the dot) of this file if it is a regular file and has an extension, else None
+ */
+ def extension: Option[String] =
+ extension()
+
+ /**
+ * @param includeDot whether the dot should be included in the extension or not
+ * @param includeAll whether all extension tokens should be included, or just the last one e.g. for bundle.tar.gz should it be .tar.gz or .gz
+ * @param toLowerCase to lowercase the extension or not e.g. foo.HTML should have .html or .HTML
+ * @return extension of this file if it is a regular file and has an extension, else None
+ */
+ def extension(includeDot: Boolean = true, includeAll: Boolean = false, toLowerCase: Boolean = true): Option[String] =
+ when(hasExtension) {
+ val dot = indexOfExtension(includeAll)
+ val index = if (includeDot) dot else dot + 1
+ val extension = name.substring(index)
+ if (toLowerCase) extension.toLowerCase else extension
+ }
+
+ private[this] def indexOfExtension(includeAll: Boolean) =
+ if (includeAll) name.indexOf(".") else name.lastIndexOf(".")
+
+ /**
+ * Returns the extension if file is a regular file
+ * If file is unreadable or does not exist, it is assumed to be not a regular file
+ * See: https://github.com/pathikrit/better-files/issues/89
+ *
+ * @return
+ */
+ def hasExtension: Boolean =
+ (isRegularFile || notExists) && name.contains(".")
+
+ /**
+ * Changes the file-extension by renaming this file; if file does not have an extension, it adds the extension
+ * Example usage file"foo.java".changeExtensionTo(".scala")
+ */
+ def changeExtensionTo(extension: String): File =
+ if (isRegularFile) renameTo(s"$nameWithoutExtension$extension") else this
+
+ def contentType: Option[String] =
+ Option(Files.probeContentType(path))
+
+ /**
+ * Return parent of this file
+ * NOTE: This API returns null if this file is the root;
+ * please use parentOption if you expect to handle roots
+ *
+ * @see parentOption
+ * @return
+ */
+ def parent: File =
+ parentOption.orNull
+
+ /**
+ *
+ * @return Some(parent) of this file or None if this is the root and thus has no parent
+ */
+ def parentOption: Option[File] =
+ Option(path.getParent).map(File.apply)
+
+ def /(child: String): File =
+ path.resolve(child)
+
+ def /(child: Symbol): File =
+ this / child.name
+
+ def createChild(child: String, asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): File =
+ (this / child).createIfNotExists(asDirectory, createParents)(attributes, linkOptions)
+
+ /**
+ * Create this file. If it exists, don't do anything
+ *
+ * @param asDirectory If you want this file to be created as a directory instead, set this to true (false by default)
+ * @param createParents If you also want all the parents to be created from root to this file (false by defailt)
+ * @param attributes
+ * @param linkOptions
+ * @return
+ */
+ def createIfNotExists(asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
+ if (exists(linkOptions)) {
+ this
+ } else if (asDirectory) {
+ createDirectories()(attributes)
+ } else {
+ if (createParents) parent.createDirectories()(attributes)
+ try {
+ createFile()(attributes)
+ } catch {
+ case _: FileAlreadyExistsException if isRegularFile(linkOptions) => // We don't really care if it exists already
+ }
+ this
+ }
+ }
+
+ /**
+ * Create this file
+ *
+ * @param attributes
+ * @return
+ */
+ def createFile()(implicit attributes: File.Attributes = File.Attributes.default): this.type = {
+ Files.createFile(path, attributes: _*)
+ this
+ }
+
+ def exists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ Files.exists(path, linkOptions: _*)
+
+ def notExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ Files.notExists(path, linkOptions: _*)
+
+ def sibling(name: String): File =
+ path.resolveSibling(name)
+
+ def isSiblingOf(sibling: File): Boolean =
+ sibling.isChildOf(parent)
+
+ def siblings: Files =
+ parent.list.filterNot(_ == this)
+
+ def isChildOf(parent: File): Boolean =
+ parent.isParentOf(this)
+
+ /**
+ * Check if this directory contains this file
+ *
+ * @param file
+ * @return true if this is a directory and it contains this file
+ */
+ def contains(file: File): Boolean =
+ isDirectory && (file.path startsWith path)
+
+ def isParentOf(child: File): Boolean =
+ contains(child)
+
+ def bytes: Iterator[Byte] =
+ newInputStream.buffered.bytes //TODO: ManagedResource here?
+
+ def loadBytes: Array[Byte] =
+ Files.readAllBytes(path)
+
+ def byteArray: Array[Byte] =
+ loadBytes
+
+ /**
+ * Create this directory
+ *
+ * @param attributes
+ * @return
+ */
+ def createDirectory()(implicit attributes: File.Attributes = File.Attributes.default): this.type = {
+ Files.createDirectory(path, attributes: _*)
+ this
+ }
+
+ /**
+ * Create this directory and all its parents
+ * Unlike the JDK, this by default sanely handles the JDK-8130464 bug
+ * If you want default Java behaviour, use File.LinkOptions.noFollow
+ *
+ * @param attributes
+ * @return
+ */
+ def createDirectories()(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
+ try {
+ Files.createDirectories(path, attributes: _*)
+ } catch {
+ case _: FileAlreadyExistsException if isDirectory(linkOptions) => // work around for JDK-8130464
+ }
+ this
+ }
+
+ def chars(implicit charset: Charset = defaultCharset): Iterator[Char] =
+ newBufferedReader(charset).chars //TODO: ManagedResource here?
+
+ /**
+ * Load all lines from this file
+ * Note: Large files may cause an OutOfMemory in which case, use the streaming version @see lineIterator
+ *
+ * @param charset
+ * @return all lines in this file
+ */
+ def lines(implicit charset: Charset = defaultCharset): Traversable[String] =
+ Files.readAllLines(path, charset).asScala
+
+ /**
+ * Iterate over lines in a file (auto-close stream on complete)
+ * NOTE: If the iteration is partial, it may leave a stream open
+ * If you want partial iteration use @see lines()
+ *
+ * @param charset
+ * @return
+ */
+ def lineIterator(implicit charset: Charset = defaultCharset): Iterator[String] =
+ Files.lines(path, charset).toAutoClosedIterator
+
+ def tokens(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Iterator[String] =
+ newBufferedReader(charset).tokens(splitter)
+
+ def contentAsString(implicit charset: Charset = defaultCharset): String =
+ new String(byteArray, charset)
+
+ /**
+ * Print each element of the iterator to this file, one per line, streaming
+ * (suitable for inputs too large to hold in memory).
+ *
+ * NOTE: unlike the write* methods, the default open mode here is APPEND.
+ *
+ * @param lines values to print; each is written via PrintWriter#println
+ * @param openOptions defaults to File.OpenOptions.append
+ * @return this
+ */
+ def printLines(lines: Iterator[Any])(implicit openOptions: File.OpenOptions = File.OpenOptions.append): this.type = {
+ for {
+ pw <- printWriter()(openOptions)
+ line <- lines
+ } pw.println(line)
+ this
+ }
+
+ /**
+ * For large number of lines that may not fit in memory, use printLines
+ *
+ * @param lines
+ * @param charset
+ * @return
+ */
+ def appendLines(lines: String*)(implicit charset: Charset = defaultCharset): this.type = {
+ Files.write(path, lines.asJava, charset, File.OpenOptions.append: _*)
+ this
+ }
+
+ def appendLine(line: String = "")(implicit charset: Charset = defaultCharset): this.type =
+ appendLines(line)(charset)
+
+ def append(text: String)(implicit charset: Charset = defaultCharset): this.type =
+ appendByteArray(text.getBytes(charset))
+
+ def appendText(text: String)(implicit charset: Charset = defaultCharset): this.type =
+ append(text)(charset)
+
+ def appendByteArray(bytes: Array[Byte]): this.type = {
+ Files.write(path, bytes, File.OpenOptions.append: _*)
+ this
+ }
+
+ def appendBytes(bytes: Iterator[Byte]): this.type =
+ writeBytes(bytes)(openOptions = File.OpenOptions.append)
+
+ /**
+ * Write byte array to file. For large contents consider using the writeBytes
+ *
+ * @param bytes
+ * @return this
+ */
+ def writeByteArray(bytes: Array[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
+ Files.write(path, bytes, openOptions: _*)
+ this
+ }
+
+ def writeBytes(bytes: Iterator[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
+ outputStream(openOptions).foreach(_.buffered write bytes)
+ this
+ }
+
+ def write(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
+ writeByteArray(text.getBytes(charset))(openOptions)
+
+ def writeText(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
+ write(text)(openOptions, charset)
+
+ def overwrite(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type =
+ write(text)(openOptions, charset)
+
+ def newRandomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): RandomAccessFile =
+ new RandomAccessFile(toJava, mode.value)
+
+ def randomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): ManagedResource[RandomAccessFile] =
+ newRandomAccess(mode).autoClosed //TODO: Mode enum?
+
+ def newBufferedReader(implicit charset: Charset = defaultCharset): BufferedReader =
+ Files.newBufferedReader(path, charset)
+
+ def bufferedReader(implicit charset: Charset = defaultCharset): ManagedResource[BufferedReader] =
+ newBufferedReader(charset).autoClosed
+
+ def newBufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): BufferedWriter =
+ Files.newBufferedWriter(path, charset, openOptions: _*)
+
+ def bufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[BufferedWriter] =
+ newBufferedWriter(charset, openOptions).autoClosed
+
+ def newFileReader: FileReader =
+ new FileReader(toJava)
+
+ def fileReader: ManagedResource[FileReader] =
+ newFileReader.autoClosed
+
+ def newFileWriter(append: Boolean = false): FileWriter =
+ new FileWriter(toJava, append)
+
+ def fileWriter(append: Boolean = false): ManagedResource[FileWriter] =
+ newFileWriter(append).autoClosed
+
+ def newPrintWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): PrintWriter =
+ new PrintWriter(newOutputStream(openOptions), autoFlush)
+
+ def printWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[PrintWriter] =
+ newPrintWriter(autoFlush)(openOptions).autoClosed
+
+ def newInputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): InputStream =
+ Files.newInputStream(path, openOptions: _*)
+
+ def inputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[InputStream] =
+ newInputStream(openOptions).autoClosed
+
+ //TODO: Move this to inputstream implicit
+ def newDigestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): DigestInputStream =
+ new DigestInputStream(newInputStream(openOptions), digest)
+
+ def digestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[DigestInputStream] =
+ newDigestInputStream(digest)(openOptions).autoClosed
+
+ def newScanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Scanner =
+ Scanner(newBufferedReader(charset), splitter)
+
+ def scanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): ManagedResource[Scanner] =
+ newScanner(splitter)(charset).autoClosed
+
+ def newOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): OutputStream =
+ Files.newOutputStream(path, openOptions: _*)
+
+ def outputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[OutputStream] =
+ newOutputStream(openOptions).autoClosed
+
+ def newZipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ZipOutputStream =
+ new ZipOutputStream(newOutputStream(openOptions), charset)
+
+ def zipInputStream(implicit charset: Charset = defaultCharset): ManagedResource[ZipInputStream] =
+ newZipInputStream(charset).autoClosed
+
+ def newZipInputStream(implicit charset: Charset = defaultCharset): ZipInputStream =
+ new ZipInputStream(new FileInputStream(toJava).buffered, charset)
+
+ def zipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ManagedResource[ZipOutputStream] =
+ newZipOutputStream(openOptions, charset).autoClosed
+
+ def newFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): FileChannel =
+ FileChannel.open(path, openOptions.toSet.asJava, attributes: _*)
+
+ def fileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): ManagedResource[FileChannel] =
+ newFileChannel(openOptions, attributes).autoClosed
+
+ def newAsynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): AsynchronousFileChannel =
+ AsynchronousFileChannel.open(path, openOptions: _*)
+
+ def asynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[AsynchronousFileChannel] =
+ newAsynchronousFileChannel(openOptions).autoClosed
+
+ def newWatchService: WatchService =
+ fileSystem.newWatchService()
+
+ def watchService: ManagedResource[WatchService] =
+ newWatchService.autoClosed
+
+ /**
+ * Serialize a object using Java's serializer into this file
+ *
+ * @param obj
+ * @return
+ */
+ def writeSerialized(obj: Serializable)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = {
+ createIfNotExists().outputStream(openOptions).foreach(_.asObjectOutputStream().serialize(obj).flush())
+ this
+ }
+
+ /**
+ * Deserialize a object using Java's default serialization from this file
+ *
+ * @return
+ */
+ def readDeserialized[A](implicit openOptions: File.OpenOptions = File.OpenOptions.default): A =
+ inputStream(openOptions).map(_.asObjectInputStream().deserialize[A])
+
+ def register(service: WatchService, events: File.Events = File.Events.all): this.type = {
+ path.register(service, events.toArray)
+ this
+ }
+
+ /**
+ * Compute a digest of this file or directory tree.
+ * Entries are folded in sorted relative-path order so that structurally
+ * equivalent trees produce equal digests: directories contribute their
+ * relative path string, regular files contribute their byte content.
+ *
+ * NOTE(review): relativePath.toString.getBytes uses the platform default
+ * charset, so directory digests may differ across platforms for non-ASCII
+ * names — consider getBytes(UTF_8); confirm intended.
+ *
+ * @param algorithm the MessageDigest to fold into (mutated in place)
+ * @return the raw digest bytes
+ */
+ def digest(algorithm: MessageDigest): Array[Byte] = {
+ listRelativePaths.toSeq.sorted foreach { relativePath =>
+ val file: File = path.resolve(relativePath) // implicit Path -> File conversion
+ if(file.isDirectory) {
+ algorithm.update(relativePath.toString.getBytes)
+ } else {
+ file.digestInputStream(algorithm).foreach(_.pipeTo(NullOutputStream))
+ }
+ }
+ algorithm.digest()
+ }
+
+ /**
+ * Set a file attribute e.g. file("dos:system") = true
+ *
+ * @param attribute
+ * @param value
+ * @param linkOptions
+ * @return
+ */
+ def update(attribute: String, value: Any)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
+ Files.setAttribute(path, attribute, value, linkOptions : _*)
+ this
+ }
+
+ /**
+ * @return checksum of this file (or directory) in hex format
+ */
+ def checksum(algorithm: MessageDigest): String =
+ DatatypeConverter.printHexBinary(digest(algorithm))
+
+ def md5: String =
+ checksum("MD5")
+
+ def sha1: String =
+ checksum("SHA-1")
+
+ def sha256: String =
+ checksum("SHA-256")
+
+ def sha512: String =
+ checksum("SHA-512")
+
+ /**
+ * @return Some(target) if this is a symbolic link (to target) else None
+ */
+ def symbolicLink: Option[File] =
+ when(isSymbolicLink)(new File(Files.readSymbolicLink(path)))
+
+ /**
+ * @return true if this file (or the file found by following symlink) is a directory
+ */
+ def isDirectory(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ Files.isDirectory(path, linkOptions: _*)
+
+ /**
+ * @return true if this file (or the file found by following symlink) is a regular file
+ */
+ def isRegularFile(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ Files.isRegularFile(path, linkOptions: _*)
+
+ def isSymbolicLink: Boolean =
+ Files.isSymbolicLink(path)
+
+ def isHidden: Boolean =
+ Files.isHidden(path)
+
+ /**
+ * Check if a file is locked.
+ *
+ * @param mode The random access mode.
+ * @param position The position at which the locked region is to start; must be non-negative.
+ * @param size The size of the locked region; must be non-negative, and the sum position + size must be non-negative.
+ * @param isShared true to request a shared lock, false to request an exclusive lock.
+ * @return True if the file is locked, false otherwise.
+ */
+ def isLocked(mode: File.RandomAccessMode, position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ try {
+ usingLock(mode) {channel =>
+ channel.tryLock(position, size, isShared).release()
+ false
+ }
+ } catch {
+ case _: OverlappingFileLockException | _: NonWritableChannelException | _: NonReadableChannelException => true
+
+ // Windows throws a `FileNotFoundException` if the file is locked (see: https://github.com/pathikrit/better-files/pull/194)
+ case _: FileNotFoundException if verifiedExists(linkOptions).getOrElse(true) => true
+ }
+
+ /**
+ * Tri-state existence check: distinguishes "verifiably present",
+ * "verifiably absent", and "cannot tell" (e.g. unreadable path).
+ *
+ * @see https://docs.oracle.com/javase/tutorial/essential/io/check.html
+ * @see https://stackoverflow.com/questions/30520179/why-does-file-exists-return-true-even-though-files-exists-in-the-nio-files
+ *
+ * @return Some(true) if the file is guaranteed to exist,
+ * Some(false) if it is guaranteed to not exist,
+ * None if its status is unknown
+ */
+ def verifiedExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Option[Boolean] =
+ if (exists(linkOptions)) Some(true)
+ else if (notExists(linkOptions)) Some(false)
+ else None
+
+ def usingLock[U](mode: File.RandomAccessMode)(f: FileChannel => U): U =
+ newRandomAccess(mode).getChannel.autoClosed.map(f)
+
+ def isReadLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) =
+ isLocked(File.RandomAccessMode.read, position, size, isShared)
+
+ def isWriteLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) =
+ isLocked(File.RandomAccessMode.readWrite, position, size, isShared)
+
+ def list: Files =
+ Files.list(path)
+
+ def children: Files = list
+
+ def entries: Files = list
+
+ def listRecursively(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
+ walk()(visitOptions).filterNot(isSamePathAs)
+
+ /**
+ * Walk the directory tree recursively upto maxDepth
+ *
+ * @param maxDepth
+ * @return List of children in BFS maxDepth level deep (includes self since self is at depth = 0)
+ */
+ def walk(maxDepth: Int = Int.MaxValue)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
+ Files.walk(path, maxDepth, visitOptions: _*) //TODO: that ignores I/O errors?
+
+ def pathMatcher(syntax: File.PathMatcherSyntax, includePath: Boolean)(pattern: String): PathMatcher =
+ syntax(this, pattern, includePath)
+
+ /**
+ * Util to glob from this file's path
+ *
+ *
+ * @param includePath If true, we don't need to set path glob patterns
+ * e.g. instead of **//*.txt we just use *.txt
+ * @return Set of files that matched
+ */
+ //TODO: Consider removing `syntax` as implicit. You often want to control this on a per method call basis
+ def glob(pattern: String, includePath: Boolean = true)(implicit syntax: File.PathMatcherSyntax = File.PathMatcherSyntax.default, visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
+ pathMatcher(syntax, includePath)(pattern).matches(this)(visitOptions)
+
+ /**
+ * Util to match from this file's path using Regex
+ *
+ * @param includePath If true, we don't need to set path glob patterns
+ * e.g. instead of **//*.txt we just use *.txt
+ * @see glob
+ * @return Set of files that matched
+ */
+ def globRegex(pattern: Regex, includePath: Boolean = true)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
+ glob(pattern.regex, includePath)(syntax = File.PathMatcherSyntax.regex, visitOptions = visitOptions)
+
+ /**
+ * More Scala friendly way of doing Files.walk
+ * Note: This is lazy (returns an Iterator) and won't evaluate till we reify the iterator (e.g. using .toList)
+ *
+ * @param matchFilter
+ * @return
+ */
+ def collectChildren(matchFilter: File => Boolean)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files =
+ walk()(visitOptions).filter(matchFilter)
+
+ def uri: URI =
+ path.toUri
+
+ def url: URL =
+ uri.toURL
+
+ /**
+ * @return file size (for directories, return size of the directory) in bytes
+ */
+ def size(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Long =
+ walk()(visitOptions).map(f => Files.size(f.path)).sum
+
+ def permissions(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Set[PosixFilePermission] =
+ Files.getPosixFilePermissions(path, linkOptions: _*).asScala.toSet
+
+ def permissionsAsString(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
+ PosixFilePermissions.toString(permissions(linkOptions).asJava)
+
+ def setPermissions(permissions: Set[PosixFilePermission]): this.type = {
+ Files.setPosixFilePermissions(path, permissions.asJava)
+ this
+ }
+
+ def addPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type =
+ setPermissions(permissions(linkOptions) + permission)
+
+ def removePermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type =
+ setPermissions(permissions(linkOptions) - permission)
+
+ /**
+ * test if file has this permission
+ */
+ def testPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ permissions(linkOptions)(permission)
+
+ def isOwnerReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OWNER_READ)(linkOptions)
+
+ def isOwnerWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OWNER_WRITE)(linkOptions)
+
+ def isOwnerExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OWNER_EXECUTE)(linkOptions)
+
+ def isGroupReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.GROUP_READ)(linkOptions)
+
+ def isGroupWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.GROUP_WRITE)(linkOptions)
+
+ def isGroupExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.GROUP_EXECUTE)(linkOptions)
+
+ def isOthersReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OTHERS_READ)(linkOptions)
+
+ def isOthersWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OTHERS_WRITE)(linkOptions)
+
+ def isOthersExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ testPermission(PosixFilePermission.OTHERS_EXECUTE)(linkOptions)
+
+ /**
+ * This differs from the above as this checks if the JVM can read this file even though the OS cannot in certain platforms
+ *
+ * @see isOwnerReadable
+ * @return
+ */
+ def isReadable: Boolean =
+ toJava.canRead
+
+ def isWriteable: Boolean =
+ toJava.canWrite
+
+ def isExecutable: Boolean =
+ toJava.canExecute
+
+ def attributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): BasicFileAttributes =
+ Files.readAttributes(path, classOf[BasicFileAttributes], linkOptions: _*)
+
+ def posixAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): PosixFileAttributes =
+ Files.readAttributes(path, classOf[PosixFileAttributes], linkOptions: _*)
+
+ def dosAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): DosFileAttributes =
+ Files.readAttributes(path, classOf[DosFileAttributes], linkOptions: _*)
+
+ def owner(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): UserPrincipal =
+ Files.getOwner(path, linkOptions: _*)
+
+ def ownerName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
+ owner(linkOptions).getName
+
+ def group(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): GroupPrincipal =
+ posixAttributes(linkOptions).group()
+
+ def groupName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String =
+ group(linkOptions).getName
+
+ def setOwner(owner: String): this.type = {
+ Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByName(owner))
+ this
+ }
+
+ def setGroup(group: String): this.type = {
+ Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByGroupName(group))
+ this
+ }
+
+ /**
+ * Similar to the UNIX command touch - create this file if it does not exist and set its last modification time
+ */
+ def touch(time: Instant = Instant.now())(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
+ Files.setLastModifiedTime(createIfNotExists()(attributes, linkOptions).path, FileTime.from(time))
+ this
+ }
+
+ def lastModifiedTime(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Instant =
+ Files.getLastModifiedTime(path, linkOptions: _*).toInstant
+
+ /**
+ * Deletes this file or directory (children are deleted recursively, depth-first,
+ * before the directory itself).
+ *
+ * NOTE(review): isDirectory here uses the default (symlink-following) link options,
+ * so calling delete on a symlink that points at a directory will recurse into and
+ * delete the TARGET's contents — confirm this is intended.
+ *
+ * @param swallowIOExceptions If this is set to true, any IOException thrown is swallowed
+ * (other exception types still propagate)
+ */
+ def delete(swallowIOExceptions: Boolean = false): this.type = {
+ try {
+ if (isDirectory) list.foreach(_.delete(swallowIOExceptions))
+ Files.delete(path)
+ } catch {
+ case _: IOException if swallowIOExceptions => //e.printStackTrace() //swallow
+ }
+ this
+ }
+
+ def renameTo(newName: String): File =
+ moveTo(path.resolveSibling(newName))
+
+ /**
+ *
+ * @param destination
+ * @param overwrite
+ * @return destination
+ */
+ def moveTo(destination: File, overwrite: Boolean = false): destination.type = {
+ Files.move(path, destination.path, File.CopyOptions(overwrite): _*)
+ destination
+ }
+
+ /**
+ * Moves this file into the given directory
+ * @param directory
+ *
+ * @return the File referencing the new file created under destination
+ */
+ def moveToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): File = {
+ require(directory.isDirectory(linkOptions), s"$directory must be a directory")
+ moveTo(directory / this.name)
+ }
+
+ /**
+ * Copy this file — or, for directories, the whole tree recursively — to destination.
+ *
+ * @param destination target path
+ * @param overwrite when true (and no explicit copyOptions are supplied), REPLACE_EXISTING is added
+ * @param copyOptions defaults to File.CopyOptions(overwrite); an explicitly passed value
+ * takes precedence over the overwrite flag
+ * @return destination
+ */
+ def copyTo(destination: File, overwrite: Boolean = false)(implicit copyOptions: File.CopyOptions = File.CopyOptions(overwrite)): destination.type = {
+ if (isDirectory) {//TODO: maxDepth?
+ Files.walkFileTree(path, new SimpleFileVisitor[Path] {
+ // Maps a path inside the source tree to its mirrored path under destination
+ def newPath(subPath: Path): Path = destination.path.resolve(path.relativize(subPath))
+
+ override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes) = {
+ // Create the mirrored directory before any of its children are visited
+ Files.createDirectories(newPath(dir))
+ super.preVisitDirectory(dir, attrs)
+ }
+
+ override def visitFile(file: Path, attrs: BasicFileAttributes) = {
+ // copyOptions applies to regular files only; directories are created above
+ Files.copy(file, newPath(file), copyOptions: _*)
+ super.visitFile(file, attrs)
+ }
+ })
+ } else {
+ Files.copy(path, destination.path, copyOptions: _*)
+ }
+ destination
+ }
+
+ /**
+ * Copies this file into the given directory
+ * @param directory
+ *
+ * @return the File referencing the new file created under destination
+ */
+ def copyToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default, copyOptions: File.CopyOptions = File.CopyOptions.default): File = {
+ require(directory.isDirectory(linkOptions), s"$directory must be a directory")
+ copyTo(directory / this.name)(copyOptions)
+ }
+
+ def symbolicLinkTo(destination: File)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = {
+ Files.createSymbolicLink(path, destination.path, attributes: _*)
+ destination
+ }
+
+ /**
+ * Link this file and destination.
+ *
+ * NOTE(review): the two branches create the link at OPPOSITE ends:
+ * - symbolic = true: a symlink is created at THIS path pointing to destination (via symbolicLinkTo)
+ * - symbolic = false: a hard link is created at DESTINATION pointing to this file
+ * Confirm this asymmetry is intended by callers.
+ *
+ * @param destination the other end of the link
+ * @param symbolic create a symbolic link instead of a hard link
+ * @param attributes attributes for symlink creation (unused for hard links)
+ * @return destination
+ */
+ def linkTo(destination: File, symbolic: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = {
+ if (symbolic) {
+ symbolicLinkTo(destination)(attributes)
+ } else {
+ Files.createLink(destination.path, path)
+ destination
+ }
+ }
+
+ def listRelativePaths(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Iterator[Path] =
+ walk()(visitOptions).map(relativize)
+
+ def relativize(destination: File): Path =
+ path.relativize(destination.path)
+
+ def isSamePathAs(that: File): Boolean =
+ this.path == that.path
+
+ def isSameFileAs(that: File): Boolean =
+ Files.isSameFile(this.path, that.path)
+
+ /**
+ * @return true if this file is exactly same as that file
+ * For directories, it checks for equivalent directory structure
+ */
+ def isSameContentAs(that: File): Boolean =
+ isSimilarContentAs(that)
+
+ /**
+ * Almost same as isSameContentAs but uses faster md5 hashing to compare (and thus small chance of false positive)
+ * Also works for directories
+ *
+ * @param that
+ * @return
+ */
+ def isSimilarContentAs(that: File): Boolean =
+ this.md5 == that.md5
+
+ override def equals(obj: Any) = {
+ obj match {
+ case file: File => isSamePathAs(file)
+ case _ => false
+ }
+ }
+
+ /**
+ * Whether this path is "empty":
+ * - directory: it has no children
+ * - regular file: it is zero bytes long
+ * - otherwise: it is verifiably absent from the filesystem
+ *
+ * @param linkOptions how symlinks are resolved for the directory/file checks
+ * @return true if file is not present, an empty directory, or a 0-byte file
+ */
+ def isEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ if (isDirectory(linkOptions)) children.isEmpty
+ else if (isRegularFile(linkOptions)) toJava.length() == 0
+ else notExists(linkOptions)
+
+ /**
+ *
+ * @param linkOptions
+ * @return for directories, true if it has no children, false otherwise
+ * for files, true if it is a 0-byte file, false otherwise
+ * else true if it exists, false otherwise
+ */
+ def nonEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean =
+ !isEmpty(linkOptions)
+
+ /**
+ * If this is a directory, remove all its children
+ * If its a file, empty the contents
+ *
+ * @return this
+ */
+ def clear()(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = {
+ if (isDirectory(linkOptions)) {
+ children.foreach(_.delete())
+ } else {
+ writeByteArray(Array.emptyByteArray)(File.OpenOptions.default)
+ }
+ this
+ }
+
+ def deleteOnExit(): this.type = {
+ toJava.deleteOnExit()
+ this
+ }
+
+ override def hashCode =
+ path.hashCode()
+
+ override def toString =
+ pathAsString
+
+ /**
+ * Zips this file (or, for a directory, its children) into destination
+ *
+ * @param destination The destination zip file; created if it does not exist
+ * @param compressionLevel Deflater compression level (default: Deflater.DEFAULT_COMPRESSION)
+ * @param charset charset used for zip entry names
+ * @return The destination zip file
+ */
+ def zipTo(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type = {
+ val files = if (isDirectory) children else Iterator(this)
+ destination.zipIn(files, compressionLevel)(charset)
+ }
+
+ /**
+ * Zip this file (or directory) into a newly created temporary zip FILE
+ * (the previous doc claimed a directory was returned — it is a file).
+ *
+ * @param compressionLevel Deflater compression level
+ * @return the temporary zip file, named after this file with a ".zip" suffix
+ */
+ def zip(compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): File =
+ zipTo(destination = File.newTemporaryFile(name, ".zip"), compressionLevel)(charset)
+
+ /**
+ * Unzips this zip file
+ *
+ * @param destination destination folder; Creates this if it does not exist
+ * @param zipFilter An optional param to reject or accept unzipping a file
+ * @return The destination where contents are unzipped
+ */
+ def unzipTo(destination: File, zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): destination.type = {
+ for {
+ zipFile <- new ZipFile(toJava, charset).autoClosed
+ entry <- zipFile.entries().asScala if zipFilter(entry)
+ } entry.extractTo(destination, zipFile.getInputStream(entry))
+ destination
+ }
+
+ /**
+ * Streamed unzipping is slightly slower but supports larger files and more encodings
+ * @see https://github.com/pathikrit/better-files/issues/152
+ *
+ * @param destinationDirectory destination folder; Creates this if it does not exist
+ * @return The destination where contents are unzipped
+ */
+ def streamedUnzip(destinationDirectory: File = File.newTemporaryDirectory(name))(implicit charset: Charset = defaultCharset): destinationDirectory.type = {
+ for {
+ zipIn <- zipInputStream(charset)
+ } zipIn.mapEntries(_.extractTo(destinationDirectory, zipIn)).size
+ destinationDirectory
+ }
+
+ /**
+ * Un-gzip this file's contents into the given destination.
+ *
+ * Fix: the previous implementation piped the raw bytes straight through,
+ * so the "un-gzipped" output was still gzip-compressed; the input is now
+ * wrapped in a GZIPInputStream so the bytes are actually decompressed.
+ *
+ * NOTE(review): despite the parameter name, `destinationDirectory` is opened
+ * directly as the output stream — the uncompressed bytes are written to that
+ * path itself, not to an entry inside it; confirm the name/intent.
+ *
+ * @param destinationDirectory path the uncompressed bytes are written to
+ * @param openOptions options used for both the input and output streams
+ * @return the destination
+ */
+ def unGzipTo(destinationDirectory: File = File.newTemporaryDirectory())(implicit openOptions: File.OpenOptions = File.OpenOptions.default): destinationDirectory.type = {
+ import java.util.zip.GZIPInputStream
+ for {
+ in <- inputStream(openOptions)
+ out <- destinationDirectory.outputStream(openOptions)
+ } new GZIPInputStream(in.buffered).pipeTo(out.buffered) // decompress while piping
+ destinationDirectory
+ }
+
+ /**
+ * Adds these files into this zip file
+ * Example usage: File("test.zip").zipIn(Seq(file"hello.txt", file"hello2.txt"))
+ *
+ * @param files
+ * @param compressionLevel
+ * @param charset
+ * @return this
+ */
+ def zipIn(files: Files, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(charset: Charset = defaultCharset): this.type = {
+ for {
+ output <- newZipOutputStream(File.OpenOptions.default, charset).withCompressionLevel(compressionLevel).autoClosed
+ input <- files
+ file <- input.walk()
+ name = input.parent relativize file
+ } output.add(file, name.toString)
+ this
+ }
+
+ /**
+ * Unzip this zip file into a newly created temporary DIRECTORY
+ * (the previous doc claimed a zip file was returned — it is the extraction directory).
+ *
+ * @param zipFilter optional predicate to accept or reject individual zip entries
+ * @return the temporary directory containing the extracted entries
+ */
+ def unzip(zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): File =
+ unzipTo(destination = File.newTemporaryDirectory(name), zipFilter)(charset)
+
+ /**
+ * Java's temporary files/directories are not cleaned up by default.
+ * If we explicitly call `.deleteOnExit()`, it gets added to shutdown handler which is not ideal
+ * for long running systems with millions of temporary files as:
+ * a) it would slowdown shutdown and
+ * b) occupy unnecessary disk-space during app lifetime
+ *
+ * This util auto-deletes the resource when done using the ManagedResource facility
+ *
+ * Example usage:
+ * File.temporaryDirectory().foreach(tempDir => doSomething(tempDir)
+ *
+ * @return
+ */
+ def toTemporary: ManagedResource[File] =
+ new ManagedResource(this)(Disposable.fileDisposer)
+
+ //TODO: add features from https://github.com/sbt/io
+}
+
+object File {
+ /**
+ * Get a file from a resource
+ * Note: Use resourceToFile instead as this may not actually always load the file
+ * See: http://stackoverflow.com/questions/676250/different-ways-of-loading-a-file-as-an-inputstream
+ *
+ * @param name
+ * @return
+ */
+ def resource(name: String): File =
+ File(currentClassLoader().getResource(name))
+
+ /**
+ * Copies a classpath resource into a file
+ *
+ * @param name resource name as understood by the current class loader
+ * @param destination File where resource is copied into, if not specified a temp file is created
+ * @return destination
+ */
+ def copyResource(name: String)(destination: File = File.newTemporaryFile(prefix = name)): destination.type = {
+ for {
+ in <- resourceAsStream(name).autoClosed
+ out <- destination.outputStream
+ } in.pipeTo(out)
+ destination
+ }
+
+ def newTemporaryDirectory(prefix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = {
+ parent match {
+ case Some(dir) => Files.createTempDirectory(dir.path, prefix, attributes: _*)
+ case _ => Files.createTempDirectory(prefix, attributes: _*)
+ }
+ }
+
+ def temporaryDirectory(prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] =
+ newTemporaryDirectory(prefix, parent)(attributes).toTemporary
+
+ def usingTemporaryDirectory[U](prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit =
+ temporaryDirectory(prefix, parent, attributes).foreach(f)
+
+ def newTemporaryFile(prefix: String = "", suffix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = {
+ parent match {
+ case Some(dir) => Files.createTempFile(dir.path, prefix, suffix, attributes: _*)
+ case _ => Files.createTempFile(prefix, suffix, attributes: _*)
+ }
+ }
+
+ def temporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] =
+ newTemporaryFile(prefix, suffix, parent)(attributes).toTemporary
+
+ def usingTemporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit =
+ temporaryFile(prefix, suffix, parent, attributes).foreach(f)
+
+ implicit def apply(path: Path): File =
+ new File(path.toAbsolutePath.normalize())
+
+ def apply(path: String, fragments: String*): File =
+ Paths.get(path, fragments: _*)
+
+ /**
+ * Get File to path with help of reference anchor.
+ *
+ * Anchor is used as a reference in case that path is not absolute.
+ * Anchor could be path to directory or path to file.
+ * If anchor is file, then file's parent dir is used as an anchor.
+ *
+ * If anchor itself is relative, then anchor is used together with current working directory.
+ *
+ * NOTE: If anchor is non-existing path on filesystem, then it's always treated as file,
+ * e.g. its last component is removed when it is used as an anchor.
+ *
+ * @param anchor path to be used as anchor
+ * @param path as string
+ * @param fragments optional path fragments
+ * @return absolute, normalized path
+ */
+ def apply(anchor: File, path: String, fragments: String*): File = {
+ val p = Paths.get(path, fragments: _*)
+ if (p.isAbsolute) {
+ p
+ } else if (anchor.isDirectory) {
+ anchor / p.toString
+ } else {
+ anchor.parent / p.toString
+ }
+ }
+
+ def apply(url: URL): File =
+ apply(url.toURI)
+
+ def apply(uri: URI): File =
+ Paths.get(uri)
+
+ def roots: Iterable[File] =
+ FileSystems.getDefault.getRootDirectories.asScala.map(File.apply)
+
+ def root: File =
+ roots.head
+
+ def home: File =
+ Properties.userHome.toFile
+
+ def temp: File =
+ Properties.tmpDir.toFile
+
+ def currentWorkingDirectory: File =
+ File("")
+
+ type Attributes = Seq[FileAttribute[_]]
+ object Attributes {
+ val default : Attributes = Seq.empty
+ }
+
+ type CopyOptions = Seq[CopyOption]
+ object CopyOptions {
+ def apply(overwrite: Boolean) : CopyOptions = (if (overwrite) Seq(StandardCopyOption.REPLACE_EXISTING) else default) ++ LinkOptions.default
+ val default : CopyOptions = Seq.empty //Seq(StandardCopyOption.COPY_ATTRIBUTES)
+ }
+
+ type Events = Seq[WatchEvent.Kind[_]]
+ object Events {
+ val all : Events = Seq(StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_DELETE)
+ val default : Events = all
+ }
+
+ type OpenOptions = Seq[OpenOption]
+ object OpenOptions {
+ val append : OpenOptions = Seq(StandardOpenOption.APPEND, StandardOpenOption.CREATE)
+ val default : OpenOptions = Seq.empty
+ }
+
+ type LinkOptions = Seq[LinkOption]
+ object LinkOptions {
+ val follow : LinkOptions = Seq.empty
+ val noFollow : LinkOptions = Seq(LinkOption.NOFOLLOW_LINKS)
+ val default : LinkOptions = follow
+ }
+
+ type VisitOptions = Seq[FileVisitOption]
+ object VisitOptions {
+ val follow : VisitOptions = Seq(FileVisitOption.FOLLOW_LINKS)
+ val default : VisitOptions = Seq.empty
+ }
+
+ type Order = Ordering[File]
+ object Order {
+ val bySize : Order = Ordering.by(_.size)
+ val byName : Order = Ordering.by(_.name)
+ val byDepth : Order = Ordering.by(_.path.getNameCount)
+ val byModificationTime : Order = Ordering.by(_.lastModifiedTime)
+ val byDirectoriesLast : Order = Ordering.by(_.isDirectory)
+ val byDirectoriesFirst : Order = byDirectoriesLast.reverse
+ val default : Order = byDirectoriesFirst.andThenBy(byName)
+ }
+
+ abstract class PathMatcherSyntax(name: String) {
+
+ /**
+ * Return PathMatcher from this file
+ *
+ * @param file
+ * @param pattern
+ * @param includePath If this is true, no need to include path matchers
+ * e.g. instead of "**//*.txt" we can simply use *.txt
+ * @return
+ */
+ def apply(file: File, pattern: String, includePath: Boolean): PathMatcher = {
+ val escapedPath = if (includePath) escapePath(file.path.toString + file.fileSystem.getSeparator) else ""
+ file.fileSystem.getPathMatcher(s"$name:$escapedPath$pattern")
+ }
+
+ def escapePath(path: String): String
+ }
+ object PathMatcherSyntax {
+ val glob: PathMatcherSyntax = new PathMatcherSyntax("glob") {
+ override def escapePath(path: String) = path
+ .replaceAllLiterally("\\", "\\\\")
+ .replaceAllLiterally("*", "\\*")
+ .replaceAllLiterally("?", "\\?")
+ .replaceAllLiterally("{", "\\{")
+ .replaceAllLiterally("}", "\\}")
+ .replaceAllLiterally("[", "\\[")
+ .replaceAllLiterally("]", "\\]")
+ }
+
+ val regex: PathMatcherSyntax = new PathMatcherSyntax("regex") {
+ override def escapePath(path: String) = Pattern.quote(path)
+ }
+
+ val default: PathMatcherSyntax = glob
+ }
+
+ class RandomAccessMode private(val value: String)
+ object RandomAccessMode {
+ val read = new RandomAccessMode("r")
+ val readWrite = new RandomAccessMode("rw")
+ val readWriteMetadataSynchronous = new RandomAccessMode("rws")
+ val readWriteContentSynchronous = new RandomAccessMode("rwd")
+ }
+
+ def numberOfOpenFileDescriptors(): Long = {
+ java.lang.management.ManagementFactory
+ .getPlatformMBeanServer
+ .getAttribute(new javax.management.ObjectName("java.lang:type=OperatingSystem"), "OpenFileDescriptorCount")
+ .asInstanceOf[Long]
+ }
+
+ /**
+ * Implement this interface to monitor the root file
+ */
+ trait Monitor extends AutoCloseable {
+ val root: File
+
+ /**
+ * Dispatch a StandardWatchEventKind to an appropriate callback
+ * Override this if you don't want to manually handle onDelete/onCreate/onModify separately
+ *
+ * @param eventType
+ * @param file
+ */
+ def onEvent(eventType: WatchEvent.Kind[Path], file: File, count: Int): Unit = eventType match {
+ case StandardWatchEventKinds.ENTRY_CREATE => onCreate(file, count)
+ case StandardWatchEventKinds.ENTRY_MODIFY => onModify(file, count)
+ case StandardWatchEventKinds.ENTRY_DELETE => onDelete(file, count)
+ }
+
+ def start()(implicit executionContext: ExecutionContext): Unit
+
+ def onCreate(file: File, count: Int): Unit
+
+ def onModify(file: File, count: Int): Unit
+
+ def onDelete(file: File, count: Int): Unit
+
+ def onUnknownEvent(event: WatchEvent[_], count: Int): Unit
+
+ def onException(exception: Throwable): Unit
+
+ def stop(): Unit = close()
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala
new file mode 100644
index 00000000..f6f139f2
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala
@@ -0,0 +1,72 @@
+package better.files
+
+import java.nio.file._
+
+import scala.concurrent.ExecutionContext
+import scala.util.Try
+import scala.util.control.NonFatal
+
+/**
+ * Implementation of File.Monitor
+ *
+ * @param root
+ * @param maxDepth
+ */
+abstract class FileMonitor(val root: File, maxDepth: Int) extends File.Monitor {
+ protected[this] val service = root.newWatchService
+
+ def this(root: File, recursive: Boolean = true) = this(root, if (recursive) Int.MaxValue else 0)
+
+ /**
+ * If watching non-directory, don't react to siblings
+ * @param target
+ * @return
+ */
+ protected[this] def reactTo(target: File) = root.isDirectory || root.isSamePathAs(target)
+
+ protected[this] def process(key: WatchKey) = {
+ val path = key.watchable().asInstanceOf[Path]
+
+ import scala.collection.JavaConverters._
+ key.pollEvents().asScala foreach {
+ case event: WatchEvent[Path] @unchecked =>
+ val target: File = path.resolve(event.context())
+ if (reactTo(target)) {
+ if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE) {
+ val depth = root.relativize(target).getNameCount
+ watch(target, (maxDepth - depth) max 0) // auto-watch new files in a directory
+ }
+ onEvent(event.kind(), target, event.count())
+ }
+ case event => if (reactTo(path)) onUnknownEvent(event, event.count())
+ }
+ key.reset()
+ }
+
+ protected[this] def watch(file: File, depth: Int): Unit = {
+ def toWatch: Files = if (file.isDirectory) {
+ file.walk(depth).filter(f => f.isDirectory && f.exists)
+ } else {
+ when(file.exists)(file.parent).iterator // There is no way to watch a regular file; so watch its parent instead
+ }
+ try {
+ toWatch.foreach(f => Try[Unit](f.register(service)).recover(PartialFunction(onException)).get)
+ } catch {
+ case NonFatal(e) => onException(e)
+ }
+ }
+
+ override def start()(implicit executionContext: ExecutionContext) = {
+ watch(root, maxDepth)
+ executionContext.execute(() => Iterator.continually(service.take()).foreach(process))
+ }
+
+ override def close() = service.close()
+
+ // Although this class is abstract, we provide default implementations so users can choose to implement only a subset of these
+ override def onCreate(file: File, count: Int) = {}
+ override def onModify(file: File, count: Int) = {}
+ override def onDelete(file: File, count: Int) = {}
+ override def onUnknownEvent(event: WatchEvent[_], count: Int) = {}
+ override def onException(exception: Throwable) = {}
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala
new file mode 100644
index 00000000..322b5f40
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala
@@ -0,0 +1,324 @@
+package better.files
+
+import java.io.{File => JFile, _}
+import java.nio.MappedByteBuffer
+import java.nio.channels.FileChannel
+import java.nio.charset.Charset
+import java.nio.file.{Path, PathMatcher}
+import java.security.MessageDigest
+import java.util.StringTokenizer
+import java.util.stream.{Stream => JStream}
+import java.util.zip._
+
+import scala.annotation.tailrec
+import scala.collection.JavaConverters._
+import scala.util.Try
+
+/**
+ * Container for various implicits
+ */
+trait Implicits {
+
+ //TODO: Rename all Ops to Extensions
+
+ implicit class StringInterpolations(sc: StringContext) {
+ def file(args: Any*): File =
+ value(args).toFile
+
+ private[this] def value(args: Seq[Any]) =
+ sc.s(args: _*)
+ }
+
+ implicit class StringOps(str: String) {
+ def toFile: File =
+ File(str)
+
+ def /(child: String): File =
+ toFile / child
+ }
+
+ implicit class FileOps(file: JFile) {
+ def toScala: File =
+ File(file.getPath)
+ }
+
+ implicit class SymbolExtensions(symbol: Symbol) {
+ def /(child: Symbol): File =
+ File(symbol.name) / child
+ }
+
+ implicit class IteratorExtensions[A](it: Iterator[A]) {
+ def withHasNext(f: => Boolean): Iterator[A] = new Iterator[A] {
+ override def hasNext = f && it.hasNext
+ override def next() = it.next()
+ }
+ }
+
+ implicit class InputStreamOps(in: InputStream) {
+ def pipeTo(out: OutputStream, bufferSize: Int = defaultBufferSize): out.type =
+ pipeTo(out, Array.ofDim[Byte](bufferSize))
+
+ /**
+ * Pipe an input stream to an output stream using a byte buffer
+ */
+ @tailrec final def pipeTo(out: OutputStream, buffer: Array[Byte]): out.type = {
+ val n = in.read(buffer)
+ if (n > 0) {
+ out.write(buffer, 0, n)
+ pipeTo(out, buffer)
+ } else {
+ out
+ }
+ }
+
+ def asString(closeStream: Boolean = true, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset): String = {
+ try {
+ new ByteArrayOutputStream(bufferSize).autoClosed
+ .map(pipeTo(_, bufferSize = bufferSize).toString(charset.displayName()))
+ } finally {
+ if (closeStream) in.close()
+ }
+ }
+
+ def buffered: BufferedInputStream =
+ new BufferedInputStream(in)
+
+ def buffered(bufferSize: Int): BufferedInputStream =
+ new BufferedInputStream(in, bufferSize)
+
+ def gzipped: GZIPInputStream =
+ new GZIPInputStream(in)
+
+ /**
+ * If bufferSize is set to less than or equal to 0, we don't buffer
+ * @param bufferSize
+ * @return
+ */
+ def asObjectInputStream(bufferSize: Int = defaultBufferSize): ObjectInputStream =
+ new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize))
+
+ /**
+ * @param bufferSize If bufferSize is set to less than or equal to 0, we don't buffer
+ * Code adapted from:
+ * https://github.com/apache/commons-io/blob/master/src/main/java/org/apache/commons/io/input/ClassLoaderObjectInputStream.java
+ *
+ * @return A special ObjectInputStream that loads a class based on a specified ClassLoader rather than the default.
+ * This is useful in dynamic container environments.
+ */
+ def asObjectInputStreamUsingClassLoader(classLoader: ClassLoader = getClass.getClassLoader, bufferSize: Int = defaultBufferSize): ObjectInputStream =
+ new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) {
+ override protected def resolveClass(objectStreamClass: ObjectStreamClass): Class[_] =
+ try {
+ Class.forName(objectStreamClass.getName, false, classLoader)
+ } catch {
+ case _: ClassNotFoundException ⇒ super.resolveClass(objectStreamClass)
+ }
+
+ override protected def resolveProxyClass(interfaces: Array[String]): Class[_] = {
+ try {
+ java.lang.reflect.Proxy.getProxyClass(
+ classLoader,
+ interfaces.map(interface => Class.forName(interface, false, classLoader)) : _*
+ )
+ } catch {
+ case _: ClassNotFoundException | _: IllegalArgumentException => super.resolveProxyClass(interfaces)
+ }
+ }
+ }
+
+ def reader(implicit charset: Charset = defaultCharset): InputStreamReader =
+ new InputStreamReader(in, charset)
+
+ def lines(implicit charset: Charset = defaultCharset): Iterator[String] =
+ reader(charset).buffered.lines().toAutoClosedIterator
+
+ def bytes: Iterator[Byte] =
+ in.autoClosed.flatMap(res => eofReader(res.read()).map(_.toByte))
+ }
+
+ implicit class OutputStreamOps(val out: OutputStream) {
+ def buffered: BufferedOutputStream =
+ new BufferedOutputStream(out)
+
+ def buffered(bufferSize: Int): BufferedOutputStream =
+ new BufferedOutputStream(out, bufferSize)
+
+ def gzipped: GZIPOutputStream =
+ new GZIPOutputStream(out)
+
+ def writer(implicit charset: Charset = defaultCharset): OutputStreamWriter =
+ new OutputStreamWriter(out, charset)
+
+ def printWriter(autoFlush: Boolean = false): PrintWriter =
+ new PrintWriter(out, autoFlush)
+
+ def write(bytes: Iterator[Byte], bufferSize: Int = defaultBufferSize): out.type = {
+ bytes.grouped(bufferSize).foreach(buffer => out.write(buffer.toArray))
+ out.flush()
+ out
+ }
+
+ def tee(out2: OutputStream): OutputStream =
+ new TeeOutputStream(out, out2)
+
+ /**
+ * If bufferSize is set to less than or equal to 0, we don't buffer
+ * @param bufferSize
+ * @return
+ */
+ def asObjectOutputStream(bufferSize: Int = defaultBufferSize): ObjectOutputStream =
+ new ObjectOutputStream(if (bufferSize <= 0) out else buffered(bufferSize))
+ }
+
+ implicit class ReaderOps(reader: Reader) {
+ def buffered: BufferedReader =
+ new BufferedReader(reader)
+
+ def toInputStream(implicit charset: Charset = defaultCharset): InputStream =
+ new ReaderInputStream(reader)(charset)
+ }
+
+ implicit class BufferedReaderOps(reader: BufferedReader) {
+ def chars: Iterator[Char] =
+ reader.autoClosed.flatMap(res => eofReader(res.read()).map(_.toChar))
+
+ def tokens(splitter: StringSplitter = StringSplitter.default): Iterator[String] =
+ reader.lines().toAutoClosedIterator.flatMap(splitter.split)
+ }
+
+ implicit class WriterOps(writer: Writer) {
+ def buffered: BufferedWriter =
+ new BufferedWriter(writer)
+
+ def outputstream(implicit charset: Charset = defaultCharset): OutputStream =
+ new WriterOutputStream(writer)(charset)
+ }
+
+ implicit class FileChannelOps(fc: FileChannel) {
+ def toMappedByteBuffer: MappedByteBuffer =
+ fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size())
+ }
+
+ implicit class PathMatcherOps(matcher: PathMatcher) {
+ def matches(file: File)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default) =
+ file.collectChildren(child => matcher.matches(child.path))(visitOptions)
+ }
+
+ implicit class ObjectInputStreamOps(ois: ObjectInputStream) {
+ def deserialize[A]: A =
+ ois.readObject().asInstanceOf[A]
+ }
+
+ implicit class ObjectOutputStreamOps(val oos: ObjectOutputStream) {
+ def serialize(obj: Serializable): oos.type = {
+ oos.writeObject(obj)
+ oos
+ }
+ }
+
+ implicit class ZipOutputStreamOps(val out: ZipOutputStream) {
+
+ /**
+ * Correctly set the compression level
+ * See: http://stackoverflow.com/questions/1206970/creating-zip-using-zip-utility
+ *
+ * @param level
+ * @return
+ */
+ def withCompressionLevel(level: Int): out.type = {
+ out.setLevel(level)
+ if (level == Deflater.NO_COMPRESSION) out.setMethod(ZipOutputStream.DEFLATED)
+ out
+ }
+
+ def add(file: File, name: String): out.type = {
+ val relativeName = name.stripSuffix(file.fileSystem.getSeparator)
+ val entryName = if (file.isDirectory) s"$relativeName/" else relativeName // make sure to end directories in ZipEntry with "/"
+ out.putNextEntry(new ZipEntry(entryName))
+ if (file.isRegularFile) file.inputStream.foreach(_.pipeTo(out))
+ out.closeEntry()
+ out
+ }
+
+ def +=(file: File): out.type =
+ add(file, file.name)
+ }
+
+ implicit class ZipInputStreamOps(val in: ZipInputStream) {
+ def mapEntries[A](f: ZipEntry => A): Iterator[A] = new Iterator[A] {
+ private[this] var entry = in.getNextEntry
+
+ override def hasNext = entry != null
+
+ override def next() = {
+ val result = Try(f(entry))
+ Try(in.closeEntry())
+ entry = in.getNextEntry
+ result.get
+ }
+ }
+ }
+
+ implicit class ZipEntryOps(val entry: ZipEntry) {
+ /**
+ * Extract this ZipEntry under this rootDir
+ *
+ * @param rootDir directory under which this entry is extracted
+ * @param inputStream use this inputStream when this entry is a file
+ * @return the extracted file
+ */
+ def extractTo(rootDir: File, inputStream: => InputStream): File = {
+ val child = rootDir.createChild(entry.getName, asDirectory = entry.isDirectory, createParents = true)
+ if (!entry.isDirectory) child.outputStream.foreach(inputStream.pipeTo(_))
+ child
+ }
+ }
+
+ implicit class CloseableOps[A <: AutoCloseable](resource: A) {
+ /**
+ * Lightweight automatic resource management
+ * Closes the resource when done e.g.
+ * <pre>
+ * for {
+ * in <- file.newInputStream.autoClosed
+ * } in.write(bytes)
+ * // in is closed now
+ * </pre>
+ *
+ * @return
+ */
+ def autoClosed: ManagedResource[A] =
+ new ManagedResource(resource)(Disposable.closableDisposer)
+ }
+
+ implicit class JStreamOps[A](stream: JStream[A]) {
+ /**
+ * Closes this stream when iteration is complete
+ * It will NOT close the stream if it is not depleted!
+ *
+ * @return
+ */
+ def toAutoClosedIterator: Iterator[A] =
+ stream.autoClosed.flatMap(_.iterator().asScala)
+ }
+
+ private[files] implicit class OrderingOps[A](order: Ordering[A]) {
+ def andThenBy(order2: Ordering[A]): Ordering[A] =
+ Ordering.comparatorToOrdering(order.thenComparing(order2))
+ }
+
+ implicit def stringToMessageDigest(algorithmName: String): MessageDigest =
+ MessageDigest.getInstance(algorithmName)
+
+ implicit def stringToCharset(charsetName: String): Charset =
+ Charset.forName(charsetName)
+
+ implicit def tokenizerToIterator(s: StringTokenizer): Iterator[String] =
+ Iterator.continually(s.nextToken()).withHasNext(s.hasMoreTokens)
+
+ //implicit def posixPermissionToFileAttribute(perm: PosixFilePermission) =
+ // PosixFilePermissions.asFileAttribute(Set(perm))
+
+ private[files] implicit def pathStreamToFiles(files: JStream[Path]): Files =
+ files.toAutoClosedIterator.map(File.apply)
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala
new file mode 100644
index 00000000..dad5ecb8
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala
@@ -0,0 +1,91 @@
+package better.files
+
+import java.util.concurrent.atomic.AtomicBoolean
+
+import scala.util.Try
+import scala.util.control.NonFatal
+
+/**
+ * A typeclass to denote a disposable resource
+ * @tparam A
+ */
+trait Disposable[-A] {
+ def dispose(resource: A): Unit
+
+ def disposeSilently(resource: A): Unit = {
+ val _ = Try(dispose(resource))
+ }
+}
+
+object Disposable {
+ def apply[A](disposeMethod: A => Any): Disposable[A] = new Disposable[A] {
+ override def dispose(resource: A) = {
+ val _ = disposeMethod(resource)
+ }
+ }
+
+ implicit val closableDisposer: Disposable[AutoCloseable] =
+ Disposable(_.close())
+
+ val fileDisposer: Disposable[File] =
+ Disposable(_.delete(swallowIOExceptions = true))
+}
+
+class ManagedResource[A](resource: A)(implicit disposer: Disposable[A]) {
+ private[this] val isDisposed = new AtomicBoolean(false)
+ private[this] def disposeOnce() = if (!isDisposed.getAndSet(true)) disposer.dispose(resource)
+
+ // This is the Scala equivalent of how javac compiles try-with-resources,
+ // Except that fatal exceptions while disposing take precedence over exceptions thrown previously
+ private[this] def disposeOnceAndThrow(e1: Throwable) = {
+ try {
+ disposeOnce()
+ } catch {
+ case NonFatal(e2) => e1.addSuppressed(e2)
+ case e2: Throwable =>
+ e2.addSuppressed(e1)
+ throw e2
+ }
+ throw e1
+ }
+
+ def foreach[U](f: A => U): Unit = {
+ val _ = map(f)
+ }
+
+ def map[B](f: A => B): B = {
+ try {
+ f(resource)
+ } catch {
+ case e1: Throwable => disposeOnceAndThrow(e1)
+ } finally {
+ disposeOnce()
+ }
+ }
+
+ def withFilter(f: A => Boolean): this.type = {
+ if (!f(resource)) disposeOnce()
+ this
+ }
+
+ /**
+ * This handles lazy operations (e.g. Iterators)
+ * for which resource needs to be disposed only after iteration is done
+ *
+ * @param f
+ * @tparam B
+ * @return
+ */
+ def flatMap[B](f: A => Iterator[B]): Iterator[B] = {
+ val it = f(resource)
+ it withHasNext {
+ try {
+ val result = it.hasNext
+ if (!result) disposeOnce()
+ result
+ } catch {
+ case e1: Throwable => disposeOnceAndThrow(e1)
+ }
+ }
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala
new file mode 100644
index 00000000..f9b792cc
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala
@@ -0,0 +1,83 @@
+package better.files
+
+import java.io.{InputStream, Reader}
+import java.nio.{ByteBuffer, CharBuffer}
+import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction}
+
+import scala.annotation.tailrec
+
+/**
+ * Code ported from Java to Scala:
+ * https://github.com/apache/commons-io/blob/c0eb48f7e83987c5ed112b82f0d651aff5149ae4/src/main/java/org/apache/commons/io/input/ReaderInputStream.java
+ */
+class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int) extends InputStream {
+
+ def this(reader: Reader, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset) =
+ this(reader = reader, encoder = charset.newEncoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE), bufferSize = bufferSize)
+
+ /**
+ * CharBuffer used as input for the decoder. It should be reasonably
+ * large as we read data from the underlying Reader into this buffer.
+ */
+ private[this] val encoderIn = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer]
+
+ /**
+ * ByteBuffer used as output for the decoder. This buffer can be small
+ * as it is only used to transfer data from the decoder to the buffer provided by the caller.
+ */
+ private[this] val encoderOut = ByteBuffer.allocate(bufferSize>>4).flip().asInstanceOf[ByteBuffer]
+
+ private[this] var lastCoderResult = CoderResult.UNDERFLOW
+ private[this] var endOfInput = false
+
+ private[this] def fillBuffer() = {
+ assert(!endOfInput)
+ if (lastCoderResult.isUnderflow) {
+ val position = encoderIn.compact().position
+ // We don't use Reader#read(CharBuffer) here because it is more efficient to write directly to the underlying char array
+ // since the default implementation copies data to a temporary char array anyway
+ reader.read(encoderIn.array, position, encoderIn.remaining) match {
+ case EOF => endOfInput = true
+ case c => encoderIn.position(position + c)
+ }
+ encoderIn.flip()
+ }
+ lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput)
+ encoderOut.flip()
+ }
+
+ override def read(b: Array[Byte], off: Int, len: Int) = {
+ if (len < 0 || off < 0 || (off + len) > b.length) throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len)
+ if (len == 0) {
+ 0 // Always return 0 if len == 0
+ } else {
+ var read = 0
+ @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) {
+ if (encoderOut.hasRemaining) {
+ val c = encoderOut.remaining min len
+ encoderOut.get(b, off, c)
+ read += c
+ loop(off + c, len - c)
+ } else if (!endOfInput) {
+ fillBuffer()
+ loop(off, len)
+ }
+ }
+ loop(off, len)
+ if (read == 0 && endOfInput) EOF else read
+ }
+ }
+
+ @tailrec final override def read() = {
+ if (encoderOut.hasRemaining) {
+ encoderOut.get & 0xFF
+ } else if (endOfInput) {
+ EOF
+ } else {
+ fillBuffer()
+ read()
+ }
+ }
+
+ override def close() = reader.close()
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala
new file mode 100644
index 00000000..be6ebb3f
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala
@@ -0,0 +1,183 @@
+package better.files
+
+import java.io.{InputStream, LineNumberReader, Reader, StringReader}
+import java.nio.charset.Charset
+import java.time.format.DateTimeFormatter
+import java.util.StringTokenizer
+
+trait Scanner extends Iterator[String] with AutoCloseable {
+ def lineNumber(): Int
+
+ def next[A](implicit scan: Scannable[A]): A = scan(this)
+
+ def nextLine(): String
+
+ def lines: Iterator[String] = Iterator.continually(nextLine()).withHasNext(hasNext)
+}
+
+/**
+ * Faster, safer and more idiomatic Scala replacement for java.util.Scanner
+ * See: http://codeforces.com/blog/entry/7018
+ */
+object Scanner {
+
+ def apply(str: String): Scanner =
+ Scanner(str, StringSplitter.default)
+
+ def apply(str: String, splitter: StringSplitter): Scanner =
+ Scanner(new StringReader(str), splitter)
+
+ def apply(reader: Reader): Scanner =
+ Scanner(reader, StringSplitter.default)
+
+ def apply(reader: Reader, splitter: StringSplitter): Scanner =
+ Scanner(new LineNumberReader(reader.buffered), splitter)
+
+ def apply(inputStream: InputStream)(implicit charset: Charset = defaultCharset): Scanner =
+ Scanner(inputStream, StringSplitter.default)(charset)
+
+ def apply(inputStream: InputStream, splitter: StringSplitter)(implicit charset: Charset): Scanner =
+ Scanner(inputStream.reader(charset), splitter)
+
+ def apply(reader: LineNumberReader, splitter: StringSplitter): Scanner = new Scanner {
+ private[this] val tokens = reader.tokens(splitter)
+ override def lineNumber() = reader.getLineNumber
+ override def nextLine() = reader.readLine()
+ override def next() = tokens.next()
+ override def hasNext = tokens.hasNext
+ override def close() = reader.close()
+ }
+
+ val stdin: Scanner = Scanner(System.in)
+
+ trait Read[A] { // TODO: Move to own subproject when this is fixed https://github.com/typelevel/cats/issues/932
+ def apply(s: String): A
+ }
+
+ object Read {
+ def apply[A](f: String => A): Read[A] = new Read[A] {
+ override def apply(s: String) = f(s)
+ }
+ implicit val string : Read[String] = Read(identity)
+ implicit val boolean : Read[Boolean] = Read(_.toBoolean)
+ implicit val byte : Read[Byte] = Read(_.toByte) //TODO: https://issues.scala-lang.org/browse/SI-9706
+ implicit val short : Read[Short] = Read(_.toShort)
+ implicit val int : Read[Int] = Read(_.toInt)
+ implicit val long : Read[Long] = Read(_.toLong)
+ implicit val bigInt : Read[BigInt] = Read(BigInt(_))
+ implicit val float : Read[Float] = Read(_.toFloat)
+ implicit val double : Read[Double] = Read(_.toDouble)
+ implicit val bigDecimal : Read[BigDecimal] = Read(BigDecimal(_))
+ implicit def option[A: Read] : Read[Option[A]] = Read(s => when(s.nonEmpty)(implicitly[Read[A]].apply(s)))
+
+ // Java's time readers
+ import java.time._
+ import java.sql.{Date => SqlDate, Time => SqlTime, Timestamp => SqlTimestamp}
+
+ implicit val duration : Read[Duration] = Read(Duration.parse(_))
+ implicit val instant : Read[Instant] = Read(Instant.parse(_))
+ implicit val localDateTime : Read[LocalDateTime] = Read(LocalDateTime.parse(_))
+ implicit val localDate : Read[LocalDate] = Read(LocalDate.parse(_))
+ implicit val monthDay : Read[MonthDay] = Read(MonthDay.parse(_))
+ implicit val offsetDateTime : Read[OffsetDateTime] = Read(OffsetDateTime.parse(_))
+ implicit val offsetTime : Read[OffsetTime] = Read(OffsetTime.parse(_))
+ implicit val period : Read[Period] = Read(Period.parse(_))
+ implicit val year : Read[Year] = Read(Year.parse(_))
+ implicit val yearMonth : Read[YearMonth] = Read(YearMonth.parse(_))
+ implicit val zonedDateTime : Read[ZonedDateTime] = Read(ZonedDateTime.parse(_))
+ implicit val sqlDate : Read[SqlDate] = Read(SqlDate.valueOf)
+ implicit val sqlTime : Read[SqlTime] = Read(SqlTime.valueOf)
+ implicit val sqlTimestamp : Read[SqlTimestamp] = Read(SqlTimestamp.valueOf)
+
+ /**
+ * Use this to create custom readers e.g. to read a LocalDate using some custom format
+ * val readLocalDate: Read[LocalDate] = Read.temporalQuery(format = myFormat, query = LocalDate.from)
+ * @param format
+ * @param query
+ * @tparam A
+ * @return
+ */
+ def temporalQuery[A](format: DateTimeFormatter, query: temporal.TemporalQuery[A]): Read[A] =
+ Read(format.parse(_, query))
+ }
+}
+
+/**
+ * Implement this trait to make things parsable
+ * In most cases, use Scanner.Read typeclass when you simply need access to one String token
+ * Use Scannable typeclass if you need access to the full scanner e.g. to detect encodings etc.
+ */
+trait Scannable[A] {
+ def apply(scanner: Scanner): A
+}
+
+object Scannable {
+ def apply[A](f: Scanner => A): Scannable[A] = new Scannable[A] {
+ override def apply(scanner: Scanner) = f(scanner)
+ }
+
+ implicit def fromRead[A](implicit read: Scanner.Read[A]): Scannable[A] =
+ Scannable(s => read(s.next()))
+
+ implicit def tuple2[T1, T2](implicit t1: Scannable[T1], t2: Scannable[T2]): Scannable[(T1, T2)] =
+ Scannable(s => t1(s) -> t2(s))
+
+ implicit def iterator[A](implicit scanner: Scannable[A]): Scannable[Iterator[A]] =
+ Scannable(s => Iterator.continually(scanner(s)).withHasNext(s.hasNext))
+}
+
+trait StringSplitter {
+ def split(s: String): TraversableOnce[String]
+}
+object StringSplitter {
+ val default = StringSplitter.anyOf(" \t\t\n\r")
+
+ /**
+ * Split string on this character
+ * This will return exactly n + 1 items where n is the number of occurrences of the delimiter in String s
+ *
+ * @param delimiter
+ * @return
+ */
+ def on(delimiter: Char): StringSplitter = new StringSplitter {
+ override def split(s: String) = new Iterator[String] {
+ private[this] var i = 0
+ private[this] var j = -1
+ private[this] val c = delimiter.toInt
+ _next()
+
+ private[this] def _next() = {
+ i = j + 1
+ val k = s.indexOf(c, i)
+ j = if (k < 0) s.length else k
+ }
+
+ override def hasNext = i <= s.length
+
+ override def next() = {
+ val res = s.substring(i, j)
+ _next()
+ res
+ }
+ }
+ }
+
+ /**
+ * Split this string using ANY of the characters from delimiters
+ *
+ * @param delimiters
+ * @param includeDelimiters
+ * @return
+ */
+ def anyOf(delimiters: String, includeDelimiters: Boolean = false): StringSplitter =
+ s => new StringTokenizer(s, delimiters, includeDelimiters)
+
+ /**
+ * Split string using a regex pattern
+ *
+ * @param pattern
+ * @return
+ */
+ def regex(pattern: String): StringSplitter =
+ s => s.split(pattern, -1)
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala
new file mode 100644
index 00000000..1da25b09
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala
@@ -0,0 +1,23 @@
+package better.files
+
+import java.io.OutputStream
+
+/**
+ * Write to multiple outputstreams at once
+ * If an error happens on any one of them while doing an operation, only the last error is reported
+ * @param outs
+ */
+class TeeOutputStream(outs: OutputStream*) extends OutputStream {
+ override def write(b: Int) = tryAll(outs)(_.write(b))
+ override def flush() = tryAll(outs)(_.flush())
+ override def write(b: Array[Byte]) = tryAll(outs)(_.write(b))
+ override def write(b: Array[Byte], off: Int, len: Int) = tryAll(outs)(_.write(b, off, len))
+ override def close() = tryAll(outs)(_.close())
+}
+
+/**
+ * A sink outputstream similar to /dev/null - just consumes everything
+ */
+object NullOutputStream extends OutputStream {
+ override def write(b: Int) = {}
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala
new file mode 100644
index 00000000..be81f628
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala
@@ -0,0 +1,100 @@
+package better.files
+
+import java.nio.charset._
+import java.nio.{BufferOverflowException, ByteBuffer, CharBuffer}
+
+import scala.collection.JavaConverters._
+
+/**
+ * A Unicode charset that handles byte-order markers
+ *
+ * @param underlyingCharset Use this charset if no known byte-order marker is detected; use this for encoding too
+ * @param writeByteOrderMarkers If set, write BOMs while encoding
+ */
+class UnicodeCharset(underlyingCharset: Charset, writeByteOrderMarkers: Boolean)
+ extends Charset(underlyingCharset.name(), underlyingCharset.aliases().asScala.toArray) {
+ override def newDecoder() = new UnicodeDecoder(underlyingCharset)
+ override def newEncoder() = if (writeByteOrderMarkers) new BomEncoder(underlyingCharset) else underlyingCharset.newEncoder()
+ override def contains(cs: Charset) = underlyingCharset.contains(cs)
+}
+
+/**
+ * A Unicode decoder that uses the Unicode byte-order marker (BOM) to auto-detect the encoding
+ * (if none detected, falls back on the defaultCharset). This also gets around a bug in the JDK
+ * (http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058) where BOM is not consumed for UTF-8.
+ * See: https://github.com/pathikrit/better-files/issues/107
+ *
+ * @param defaultCharset Use this charset if no known byte-order marker is detected
+ */
+class UnicodeDecoder(defaultCharset: Charset) extends CharsetDecoder(defaultCharset, 1, 1) {
+ import UnicodeCharset.bomTable
+
+ private[this] var inferredCharset: Option[Charset] = None
+
+ @annotation.tailrec
+ private[this] def decode(in: ByteBuffer, out: CharBuffer, candidates: Set[Charset] = Set.empty): CoderResult = {
+ if (isCharsetDetected) {
+ detectedCharset().newDecoder().decode(in, out, true)
+ } else if (candidates.isEmpty || !in.hasRemaining) {
+ inferredCharset = Some(defaultCharset)
+ in.rewind()
+ decode(in, out)
+ } else if (candidates.forall(c => bomTable(c).length == in.position())) {
+ inferredCharset = candidates.headOption.ensuring(candidates.size == 1, "Ambiguous BOMs found")
+ decode(in, out)
+ } else {
+ val idx = in.position()
+ val byte = in.get()
+ def isPossible(charset: Charset) = bomTable(charset).lift(idx).contains(byte)
+ decode(in, out, candidates.filter(isPossible))
+ }
+ }
+
+ override def decodeLoop(in: ByteBuffer, out: CharBuffer) = decode(in = in, out = out, candidates = bomTable.keySet)
+
+ override def isCharsetDetected = inferredCharset.isDefined
+
+ override def isAutoDetecting = true
+
+ override def implReset() = inferredCharset = None
+
+ override def detectedCharset() = inferredCharset.getOrElse(throw new IllegalStateException("Insufficient bytes read to determine charset"))
+}
+
+/**
+ * Encoder that writes the BOM for this charset
+ * @param charset
+ */
+class BomEncoder(charset: Charset) extends CharsetEncoder(charset, 1, 1) {
+ private[this] val bom = UnicodeCharset.bomTable.getOrElse(charset, throw new IllegalArgumentException(s"$charset does not support BOMs")).toArray
+ private[this] var isBomWritten = false
+
+ override def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = {
+ if (!isBomWritten) {
+ try {
+ out.put(bom)
+ } catch {
+ case _: BufferOverflowException => return CoderResult.OVERFLOW
+ } finally {
+ isBomWritten = true
+ }
+ }
+ charset.newEncoder().encode(in, out, true)
+ }
+
+ override def implReset() = isBomWritten = false
+}
+
+object UnicodeCharset {
+ private[files] val bomTable: Map[Charset, IndexedSeq[Byte]] = Map(
+ "UTF-8" -> IndexedSeq(0xEF, 0xBB, 0xBF),
+ "UTF-16BE" -> IndexedSeq(0xFE, 0xFF),
+ "UTF-16LE" -> IndexedSeq(0xFF, 0xFE),
+ "UTF-32BE" -> IndexedSeq(0x00, 0x00, 0xFE, 0xFF),
+ "UTF-32LE" -> IndexedSeq(0xFF, 0xFE, 0x00, 0x00)
+ ).collect{case (charset, bytes) if Charset.isSupported(charset) => Charset.forName(charset) -> bytes.map(_.toByte)}
+ .ensuring(_.nonEmpty, "No unicode charset detected")
+
+ def apply(charset: Charset, writeByteOrderMarkers: Boolean = false): Charset =
+ if (bomTable.contains(charset)) new UnicodeCharset(charset, writeByteOrderMarkers) else charset
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala
new file mode 100644
index 00000000..80cd5fc8
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala
@@ -0,0 +1,74 @@
+package better.files
+
+import java.io.{OutputStream, Writer}
+import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction}
+import java.nio.{ByteBuffer, CharBuffer}
+
+import scala.annotation.tailrec
+
+/**
+ * Code ported from Java to Scala:
+ * https://github.com/apache/commons-io/blob/d357d9d563c4a34fa2ab3cdc68221c851a9de4f5/src/main/java/org/apache/commons/io/output/WriterOutputStream.java
+ */
+class WriterOutputStream(writer: Writer, decoder: CharsetDecoder, bufferSize: Int, flushImmediately: Boolean) extends OutputStream {
+
+ /**
+ * CharBuffer used as output for the decoder
+ */
+ private[this] val decoderOut = CharBuffer.allocate(bufferSize)
+
+ /**
+ * ByteBuffer used as output for the decoder. This buffer can be small
+ * as it is only used to transfer data from the decoder to the buffer provided by the caller.
+ */
+ private[this] val decoderIn = ByteBuffer.allocate(bufferSize>>4)
+
+ def this(writer: Writer, bufferSize: Int = defaultBufferSize, flushImmediately: Boolean = false)(implicit charset: Charset = defaultCharset) =
+ this(writer = writer, decoder = charset.newDecoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE).replaceWith("?"), bufferSize = bufferSize, flushImmediately = flushImmediately)
+
+ override def write(b: Array[Byte], off: Int, len: Int) = {
+ @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) {
+ val c = decoderIn.remaining min len
+ decoderIn.put(b, off, c)
+ processInput(endOfInput = false)
+ loop(off + c, len - c)
+ }
+ loop(off, len)
+ if (flushImmediately) flushOutput()
+ }
+
+ override def write(b: Int) = write(Array(b.toByte))
+
+ override def flush() = {
+ flushOutput()
+ writer.flush()
+ }
+
+ override def close() = {
+ processInput(endOfInput = true)
+ flushOutput()
+ writer.close()
+ }
+
+ private[this] def processInput(endOfInput: Boolean) = {
+ decoderIn.flip()
+ @tailrec def loop(): Unit = {
+ val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput)
+ if (coderResult.isOverflow) {
+ flushOutput()
+ loop()
+ } else {
+ assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters")
+ }
+ }
+ loop()
+ decoderIn.compact()
+ }
+
+ private[this] def flushOutput(): Unit = {
+ if (decoderOut.position > 0) {
+ writer.write(decoderOut.array, 0, decoderOut.position)
+ val _ = decoderOut.rewind()
+ }
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/package.scala b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/package.scala
new file mode 100644
index 00000000..bef8c1ed
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/main/scala/better/files/package.scala
@@ -0,0 +1,66 @@
+package better
+
+import java.io.{InputStream, StreamTokenizer}
+import java.nio.charset.Charset
+
+import scala.collection.mutable
+import scala.util.{Failure, Success, Try}
+
+package object files extends Implicits {
+
+ /**
+ * Default array buffer size
+ * Seems like a good value used by JDK: (see: java.io.BufferedInputStream.DEFAULT_BUFFER_SIZE)
+ */
+ val defaultBufferSize = 8192
+
+ /**
+ * The default charset used by better-files
+ * Note: It uses java.nio.charset.Charset.defaultCharset() in general but if the default supports byte-order markers,
+ * it uses a more compliant version than the JDK one (see: https://github.com/pathikrit/better-files/issues/107)
+ */
+ val defaultCharset: Charset =
+ UnicodeCharset(Charset.defaultCharset())
+
+ val EOF = StreamTokenizer.TT_EOF
+
+ type Files = Iterator[File]
+
+ /**
+ * If bufferSize is set to less than or equal to 0, we don't buffer
+ * @param bufferSize
+ * @return
+ */
+ def resourceAsStream(name: String, bufferSize: Int = defaultBufferSize): InputStream =
+ currentClassLoader().getResourceAsStream(name).buffered(bufferSize)
+
+ // Some utils:
+ private[files] def newMultiMap[A, B]: mutable.MultiMap[A, B] = new mutable.HashMap[A, mutable.Set[B]] with mutable.MultiMap[A, B]
+
+ @inline private[files] def when[A](condition: Boolean)(f: => A): Option[A] = if (condition) Some(f) else None
+
+ @inline private[files] def repeat[U](n: Int)(f: => U): Unit = (1 to n).foreach(_ => f)
+
+ private[files] def currentClassLoader() = Thread.currentThread().getContextClassLoader
+
+ private[files] def eofReader(read: => Int): Iterator[Int] = Iterator.continually(read).takeWhile(_ != EOF)
+
+ /**
+ * Utility to apply f on all xs skipping over errors
+ * Throws the last error that happened
+ *
+ * @param xs
+ * @param f
+ * @tparam A
+ */
+ private[files] def tryAll[A](xs: Seq[A])(f: A => Unit): Unit = {
+ val res = xs.foldLeft(Option.empty[Throwable]) {
+ case (currError, a) =>
+ Try(f(a)) match {
+ case Success(_) => currError
+ case Failure(e) => Some(e)
+ }
+ }
+ res.foreach(throwable => throw throwable)
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala
new file mode 100644
index 00000000..769cfbf9
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala
@@ -0,0 +1,15 @@
+package better.files
+
+import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
+
+import scala.concurrent.duration._
+import scala.language.postfixOps
+import scala.util.Properties.{isLinux, isMac}
+
+trait CommonSpec extends FlatSpec with BeforeAndAfterEach with Matchers {
+ val isCI = sys.env.get("CI").exists(_.toBoolean)
+
+ val isUnixOS = isLinux || isMac
+
+ def sleep(t: FiniteDuration = 2 second) = Thread.sleep(t.toMillis)
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala
new file mode 100644
index 00000000..36379eec
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala
@@ -0,0 +1,61 @@
+package better.files
+
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.language.postfixOps
+
+class FileMonitorSpec extends CommonSpec {
+ "file watcher" should "watch single files" in {
+ assume(isCI)
+ val file = File.newTemporaryFile(suffix = ".txt").writeText("Hello world")
+
+ var log = List.empty[String]
+ def output(msg: String) = synchronized {
+ println(msg)
+ log = msg :: log
+ }
+ /***************************************************************************/
+ val watcher = new FileMonitor(file) {
+ override def onCreate(file: File, count: Int) = output(s"$file got created $count time(s)")
+ override def onModify(file: File, count: Int) = output(s"$file got modified $count time(s)")
+ override def onDelete(file: File, count: Int) = output(s"$file got deleted $count time(s)")
+ }
+ watcher.start()
+ /***************************************************************************/
+ sleep(5 seconds)
+ file.writeText("hello world"); sleep()
+ file.clear(); sleep()
+ file.writeText("howdy"); sleep()
+ file.delete(); sleep()
+ sleep(5 seconds)
+ val sibling = (file.parent / "t1.txt").createIfNotExists(); sleep()
+ sibling.writeText("hello world"); sleep()
+ sleep(20 seconds)
+
+ log.size should be >= 2
+ log.exists(_ contains sibling.name) shouldBe false
+ log.forall(_ contains file.name) shouldBe true
+ }
+
+ ignore should "watch directories to configurable depth" in {
+ assume(isCI)
+ val dir = File.newTemporaryDirectory()
+ (dir/"a"/"b"/"c"/"d"/"e").createDirectories()
+ var log = List.empty[String]
+ def output(msg: String) = synchronized(log = msg :: log)
+
+ val watcher = new FileMonitor(dir, maxDepth = 2) {
+ override def onCreate(file: File, count: Int) = output(s"Create happened on ${file.name} $count times")
+ }
+ watcher.start()
+
+ sleep(5 seconds)
+ (dir/"a"/"b"/"t1").touch().writeText("hello world"); sleep()
+ (dir/"a"/"b"/"c"/"d"/"t1").touch().writeText("hello world"); sleep()
+ sleep(10 seconds)
+
+ withClue(log) {
+ log.size shouldEqual 1
+ }
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala
new file mode 100644
index 00000000..f197575a
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala
@@ -0,0 +1,549 @@
+package better.files
+
+import java.nio.file.{FileAlreadyExistsException, FileSystems, Files => JFiles}
+
+import better.files.Dsl._
+import better.files.File.{home, root}
+
+import scala.language.postfixOps
+import scala.util.Try
+
+class FileSpec extends CommonSpec {
+
+ /** try to cope with windows, which will return e.g. c:\ as root */
+ val rootStr = FileSystems.getDefault.getRootDirectories.iterator().next().toString
+ import java.io.File.{separator, separatorChar}
+
+ /**
+ * Helper for unix -> windows path references (as strings).
+ *
+ * @param path as unix path
+ * @return path in native format
+ */
+ def unixToNative(path: String): String = {
+ if (isUnixOS) {
+ path
+ } else {
+ path
+ .replaceFirst("^/", rootStr.replaceAllLiterally("\\", "\\\\")) // we must escape '\' in C:\
+ .replaceAllLiterally("/", separator)
+ }
+ }
+
+ var testRoot: File = _ //TODO: Get rid of mutable test vars
+ var fa: File = _
+ var a1: File = _
+ var a2: File = _
+ var t1: File = _
+ var t2: File = _
+ var t3: File = _
+ var fb: File = _
+ var b1: File = _
+ var b2: File = _
+
+ /**
+ * Setup the following directory structure under root
+ * /a
+ * /a1
+ * /a2
+ * a21.txt
+ * a22.txt
+ * /b
+ * b1/ --> ../a1
+ * b2.txt --> ../a2/a22.txt
+ */
+
+ override def beforeEach() = {
+ testRoot = File.newTemporaryDirectory("better-files")
+ fa = testRoot/"a"
+ a1 = testRoot/"a"/"a1"
+ a2 = testRoot/"a"/"a2"
+ t1 = testRoot/"a"/"a1"/"t1.txt"
+ t2 = testRoot/"a"/"a1"/"t2.txt"
+ t3 = testRoot/"a"/"a1"/"t3.scala.txt"
+ fb = testRoot/"b"
+ b1 = testRoot/"b"/"b1"
+ b2 = testRoot/'b/"b2.txt"
+ Seq(a1, a2, fb) foreach mkdirs
+ Seq(t1, t2) foreach touch
+ }
+
+ override def afterEach() = {
+ val _ = rm(testRoot)
+ }
+
+ override def withFixture(test: NoArgTest) = {
+ //val before = File.numberOfOpenFileDescriptors()
+ val result = super.withFixture(test)
+ //val after = File.numberOfOpenFileDescriptors()
+ //assert(before == after, s"Resource leakage detected in $test")
+ result
+ }
+
+ "files" can "be instantiated" in {
+ import java.io.{File => JFile}
+
+ val f = File("/User/johndoe/Documents") // using constructor
+ val f1: File = file"/User/johndoe/Documents" // using string interpolator
+ val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file
+ val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala
+ val f4: File = root/"User"/"johndoe"/"Documents" // using root helper to start from root
+ //val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"`
+ val f6: File = "/User"/"johndoe"/"Documents" // using file separator DSL
+ val f7: File = home/"Documents"/"presentations"/`..` // Use `..` to navigate up to parent
+ val f8: File = root/"User"/"johndoe"/"Documents"/ `.`
+ val f9: File = File(f.uri)
+ val f10: File = File("../a") // using a relative path
+ Seq(f, f1, f2, f3, f4,/* f5,*/ f6, f7, f8, f9, f10) foreach {f =>
+ f.pathAsString should not include ".."
+ }
+
+ root.toString shouldEqual rootStr
+ home.toString.count(_ == separatorChar) should be > 1
+ (root/"usr"/"johndoe"/"docs").toString shouldEqual unixToNative("/usr/johndoe/docs")
+ Seq(f, f1, f2, f4, /*f5,*/ f6, f8, f9).map(_.toString).toSet shouldBe Set(f.toString)
+ }
+
+ it can "be instantiated with anchor" in {
+ // testRoot / a / a1 / t1.txt
+ val basedir = a1
+ File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
+ File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
+
+ File(basedir, "rel/path/to/loc").toString should be (unixToNative(basedir.toString + "/rel/path/to/loc"))
+ File(basedir, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
+ File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
+
+ val baseref = t1
+ File(baseref, "/abs/path/to/loc").toString should be (unixToNative("/abs/path/to/loc"))
+ File(baseref, "/abs", "path", "to", "loc").toString should be (unixToNative("/abs/path/to/loc"))
+
+ File(baseref, "rel/path/to/loc").toString should be (unixToNative(a1.toString + "/rel/path/to/loc"))
+ File(baseref, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
+ File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc"))
+ }
+
+ it can "be instantiated with non-existing abs anchor" in {
+ val anchorStr = "/abs/to/nowhere"
+ val anchorStr_a = anchorStr + "/a"
+ val basedir = File(anchorStr_a + "/last")
+
+ File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
+ File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
+
+ File(basedir, "rel/path/to/loc").toString should be (unixToNative(anchorStr_a + "/rel/path/to/loc"))
+ File(basedir, "../rel/path/to/loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc"))
+ File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc"))
+ }
+
+ it can "be instantiated with non-existing relative anchor" in {
+ val relAnchor = File("rel/anc/b/last")
+ val basedir = relAnchor
+
+ File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc"))
+ File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc"))
+
+ File(basedir, "rel/path/to/loc").toString should be (unixToNative(File("rel/anc/b").toString + "/rel/path/to/loc"))
+ File(basedir, "../rel/path/to/loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc"))
+ File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc"))
+ }
+
+ it should "do basic I/O" in {
+ t1 < "hello"
+ t1.contentAsString shouldEqual "hello"
+ t1.appendLine() << "world"
+ (t1!) shouldEqual String.format("hello%nworld%n")
+ t1.chars.toStream should contain theSameElementsInOrderAs String.format("hello%nworld%n").toSeq
+ "foo" `>:` t1
+ "bar" >>: t1
+ t1.contentAsString shouldEqual String.format("foobar%n")
+ t1.appendLines("hello", "world")
+ t1.contentAsString shouldEqual String.format("foobar%nhello%nworld%n")
+ t2.writeText("hello").appendText("world").contentAsString shouldEqual "helloworld"
+
+ (testRoot/"diary")
+ .createIfNotExists()
+ .appendLine()
+ .appendLines("My name is", "Inigo Montoya")
+ .printLines(Iterator("x", 1))
+ .lines.toSeq should contain theSameElementsInOrderAs Seq("", "My name is", "Inigo Montoya", "x", "1")
+ }
+
+ it should "handle BOM" in {
+ val lines = Seq("Line 1", "Line 2")
+ val expectedContent = lines.mkString(start = "", sep = "\n", end = "\n")
+ File.temporaryFile() foreach {file =>
+ file.appendLines(lines: _*)(charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true))
+ file.contentAsString(charset = "UTF-8") should not equal expectedContent
+ file.contentAsString shouldEqual expectedContent
+ }
+ }
+
+// TODO: Do not depend on self-referential tests
+// it should "glob" in {
+// assume(isCI)
+// a1.glob("*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
+// //a1.glob("*.txt").map(_.name).toSeq shouldEqual Seq("t1.txt", "t2.txt")
+// testRoot.glob("**/*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
+// val path = testRoot.path.toString.ensuring(testRoot.path.isAbsolute)
+// File(path).glob("**/*.{txt}").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt")
+// ("benchmarks"/"src").glob("**/*.{scala,java}").map(_.name).toSeq.sorted shouldEqual Seq("ArrayBufferScanner.java", "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
+// ("benchmarks"/"src").glob("**/*.{scala}").map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
+// ("benchmarks"/"src").glob("**/*.scala").map(_.name).toSeq.sorted shouldEqual Seq("Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
+// ("benchmarks"/"src").listRecursively.filter(_.extension.contains(".scala")).map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala")
+// ls("core"/"src"/"test") should have length 1
+// ("core"/"src"/"test").walk(maxDepth = 1) should have length 2
+// ("core"/"src"/"test").walk(maxDepth = 0) should have length 1
+// ("core"/"src"/"test").walk() should have length (("core"/"src"/"test").listRecursively.length + 1L)
+// ls_r("core"/"src"/"test") should have length 8
+// }
+
+ it should "support names/extensions" in {
+ assume(isCI)
+ fa.extension shouldBe None
+ fa.nameWithoutExtension shouldBe fa.name
+ t1.extension shouldBe Some(".txt")
+ t1.extension(includeDot = false) shouldBe Some("txt")
+ t3.extension shouldBe Some(".txt")
+ t3.extension(includeAll = true) shouldBe Some(".scala.txt")
+ t3.extension(includeDot = false, includeAll = true) shouldBe Some("scala.txt")
+ t1.name shouldBe "t1.txt"
+ t1.nameWithoutExtension shouldBe "t1"
+ t1.changeExtensionTo(".md").name shouldBe "t1.md"
+ (t1 < "hello world").changeExtensionTo(".txt").name shouldBe "t1.txt"
+ t1.contentType shouldBe Some("text/plain")
+ ("src" / "test").toString should include ("better-files")
+ (t1 == t1.toString) shouldBe false
+ (t1.contentAsString == t1.toString) shouldBe false
+ (t1 == t1.contentAsString) shouldBe false
+ t1.root shouldEqual fa.root
+ file"/tmp/foo.scala.html".extension shouldBe Some(".html")
+ file"/tmp/foo.scala.html".nameWithoutExtension shouldBe "foo"
+ file"/tmp/foo.scala.html".nameWithoutExtension(includeAll = false) shouldBe "foo.scala"
+ root.name shouldBe ""
+ }
+
+ it should "hide/unhide" in {
+ t1.isHidden shouldBe false
+ }
+
+ it should "support parent/child" in {
+ fa isChildOf testRoot shouldBe true
+ testRoot isChildOf root shouldBe true
+ root isChildOf root shouldBe true
+ fa isChildOf fa shouldBe true
+ b2 isChildOf b2 shouldBe false
+ b2 isChildOf b2.parent shouldBe true
+ root.parent shouldBe null
+ }
+
+ it should "support siblings" in {
+ (file"/tmp/foo.txt" sibling "bar.txt").pathAsString shouldBe unixToNative("/tmp/bar.txt")
+ fa.siblings.toList.map(_.name) shouldBe List("b")
+ fb isSiblingOf fa shouldBe true
+ }
+
+ it should "support sorting" in {
+ testRoot.list.toSeq.sorted(File.Order.byName) should not be empty
+ testRoot.list.toSeq.max(File.Order.bySize).isEmpty shouldBe false
+ Seq(fa, fb).contains(testRoot.list.toSeq.min(File.Order.byDepth)) shouldBe true
+ sleep()
+ t2.appendLine("modified!")
+ a1.list.toSeq.min(File.Order.byModificationTime) shouldBe t1
+ testRoot.list.toSeq.sorted(File.Order.byDirectoriesFirst) should not be empty
+ }
+
+ it must "have .size" in {
+ fb.isEmpty shouldBe true
+ t1.size shouldBe 0
+ t1.writeText("Hello World")
+ t1.size should be > 0L
+ testRoot.size should be > (t1.size + t2.size)
+ }
+
+ it should "set/unset permissions" in {
+ assume(isCI)
+ import java.nio.file.attribute.PosixFilePermission
+ //an[UnsupportedOperationException] should be thrownBy t1.dosAttributes
+ t1.permissions()(PosixFilePermission.OWNER_EXECUTE) shouldBe false
+
+ chmod_+(PosixFilePermission.OWNER_EXECUTE, t1)
+ t1.testPermission(PosixFilePermission.OWNER_EXECUTE) shouldBe true
+ t1.permissionsAsString shouldBe "rwxrw-r--"
+
+ chmod_-(PosixFilePermission.OWNER_EXECUTE, t1)
+ t1.isOwnerExecutable shouldBe false
+ t1.permissionsAsString shouldBe "rw-rw-r--"
+ }
+
+ it should "support equality" in {
+ fa shouldEqual (testRoot/"a")
+ fa shouldNot equal (testRoot/"b")
+ val c1 = fa.md5
+ fa.md5 shouldEqual c1
+ t1 < "hello"
+ t2 < "hello"
+ (t1 == t2) shouldBe false
+ (t1 === t2) shouldBe true
+ t2 < "hello world"
+ (t1 == t2) shouldBe false
+ (t1 === t2) shouldBe false
+ fa.md5 should not equal c1
+ }
+
+ it should "create if not exist directory structures" in {
+ File.usingTemporaryDirectory() {dir =>
+ val file = dir / "a" / "b" / "c.txt"
+ assert(file.notExists)
+ assert(file.parent.notExists)
+ file.createIfNotExists(createParents = true)
+ assert(file.exists)
+ assert(file.parent.exists)
+ file.writeText("Hello world")
+ assert(file.contentAsString === "Hello world")
+ }
+ }
+
+ it should "treat symlinks transparently in convenience methods" in {
+ File.usingTemporaryDirectory() {dir =>
+ val realDir = dir / "a"
+ val dirSymlink = dir / "b"
+ realDir.createDirectory()
+ JFiles.createSymbolicLink(dirSymlink.path, realDir.path)
+ dirSymlink.createDirectories()
+ a[FileAlreadyExistsException] should be thrownBy dirSymlink.createDirectories()(linkOptions = File.LinkOptions.noFollow)
+ /*a[FileAlreadyExistsException] shouldNot be thrownBy*/ dirSymlink.createDirectories()
+ }
+ }
+
+ it should "support chown/chgrp" in {
+ fa.ownerName should not be empty
+ fa.groupName should not be empty
+ a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chown("hitler", fa)
+ //a[java.nio.file.FileSystemException] should be thrownBy chown("root", fa)
+ a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chgrp("cool", fa)
+ //a[java.nio.file.FileSystemException] should be thrownBy chown("admin", fa)
+ //fa.chown("nobody").chgrp("nobody")
+ stat(t1) shouldBe a[java.nio.file.attribute.PosixFileAttributes]
+ }
+
+ it should "detect file locks" in {
+ File.temporaryFile() foreach {file =>
+ def lockInfo() = file.isReadLocked() -> file.isWriteLocked()
+ // TODO: Why should file.isReadLocked() be false here?
+ lockInfo() shouldBe (true -> false)
+ val channel = file.newRandomAccess(File.RandomAccessMode.readWrite).getChannel
+ val lock = channel.tryLock()
+ lockInfo() shouldBe (true -> true)
+ lock.release()
+ channel.close()
+ lockInfo() shouldBe (true -> false)
+ }
+ }
+
+ it should "support ln/cp/mv" in {
+ val magicWord = "Hello World"
+ t1 writeText magicWord
+ // link
+ // to relative target
+ val b0 = b1.sibling("b0")
+ java.nio.file.Files.createSymbolicLink(b0.path, java.nio.file.Paths.get("b1"))
+ b0.symbolicLink should not be empty
+ b0.symbolicLink.get.path.isAbsolute shouldBe false
+ // to absolute target
+ b1.linkTo(a1, symbolic = true)
+ ln_s(b2, t2)
+ (b1 / "t1.txt").contentAsString shouldEqual magicWord
+ // copy
+ b2.contentAsString shouldBe empty
+ t1.md5 should not equal t2.md5
+ a[java.nio.file.FileAlreadyExistsException] should be thrownBy (t1 copyTo t2)
+ t1.copyTo(t2, overwrite = true)
+ t1.exists shouldBe true
+ t1.md5 shouldEqual t2.md5
+ b2.contentAsString shouldEqual magicWord
+ // rename
+ t2.name shouldBe "t2.txt"
+ t2.exists shouldBe true
+ val t3 = t2 renameTo "t3.txt"
+ t3.name shouldBe "t3.txt"
+ t2.exists shouldBe false
+ t3.exists shouldBe true
+ // move
+ t3 moveTo t2
+ t2.exists shouldBe true
+ t3.exists shouldBe false
+ }
+
+ it should "support creating hard links with ln" in {
+ assume(isUnixOS)
+ val magicWord = "Hello World"
+ t1 writeText magicWord
+ t1.linkTo(t3, symbolic = false)
+ (a1 / "t3.scala.txt").contentAsString shouldEqual magicWord
+ }
+
+ it should "support custom charset" in {
+ import java.nio.charset.Charset
+ t1.writeText("你好世界")(charset = "UTF8")
+ t1.contentAsString(charset = "ISO-8859-1") should not equal "你好世界"
+ t1.contentAsString(charset = "UTF8") shouldEqual "你好世界"
+ val c1 = md5(t1)
+ val c2 = t1.overwrite("你好世界")(File.OpenOptions.default, Charset.forName("ISO-8859-1")).md5
+ c1 should not equal c2
+ c2 shouldEqual t1.checksum("md5")
+ }
+
+ it should "support hashing algos" in {
+ implicit val charset = java.nio.charset.StandardCharsets.UTF_8
+ t1.writeText("")
+ md5(t1) shouldEqual "D41D8CD98F00B204E9800998ECF8427E"
+ sha1(t1) shouldEqual "DA39A3EE5E6B4B0D3255BFEF95601890AFD80709"
+ sha256(t1) shouldEqual "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"
+ sha512(t1) shouldEqual "CF83E1357EEFB8BDF1542850D66D8007D620E4050B5715DC83F4A921D36CE9CE47D0D13C5D85F2B0FF8318D2877EEC2F63B931BD47417A81A538327AF927DA3E"
+ }
+
+ it should "compute correct checksum for non-zero length string" in {
+ implicit val charset = java.nio.charset.StandardCharsets.UTF_8
+ t1.writeText("test")
+ md5(t1) shouldEqual "098F6BCD4621D373CADE4E832627B4F6"
+ sha1(t1) shouldEqual "A94A8FE5CCB19BA61C4C0873D391E987982FBBD3"
+ sha256(t1) shouldEqual "9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08"
+ sha512(t1) shouldEqual "EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF"
+ }
+
+ it should "copy" in {
+ (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World")
+ (fb / "t5" / "t5.txt").createIfNotExists(createParents = true).writeText("Scala Awesome")
+ (fb / "t5" / "t3").notExists shouldBe true
+ cp(fb / "t3", fb / "t5")
+ (fb / "t3").exists shouldBe true
+ (fb / "t5" / "t3").exists shouldBe true
+ (fb / "t5" / "t5.txt").contentAsString shouldEqual "Scala Awesome"
+ assert((fb / "t3") === (fb / "t5" / "t3"))
+ }
+
+ it should "move" in {
+ (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World")
+ mv(fb / "t3", fb / "t5")
+ (fb / "t5" / "t4.txt").contentAsString shouldEqual "Hello World"
+ (fb / "t3").notExists shouldBe true
+ }
+
+ it should "delete" in {
+ fb.exists shouldBe true
+ fb.delete()
+ fb.exists shouldBe false
+ }
+
+ it should "touch" in {
+ (fb / "z1").exists shouldBe false
+ (fb / "z1").isEmpty shouldBe true
+ (fb / "z1").touch()
+ (fb / "z1").exists shouldBe true
+ (fb / "z1").isEmpty shouldBe true
+ Thread.sleep(1000)
+ (fb / "z1").lastModifiedTime.getEpochSecond should be < (fb / "z1").touch().lastModifiedTime.getEpochSecond
+ }
+
+ it should "md5" in {
+ val h1 = t1.hashCode
+ val actual = (t1 < "hello world").md5
+ val h2 = t1.hashCode
+ h1 shouldEqual h2
+ import scala.sys.process._
+ val expected = Try(s"md5sum ${t1.path}" !!) getOrElse (s"md5 ${t1.path}" !!)
+ expected.toUpperCase should include (actual)
+ actual should not equal h1
+ }
+
+ it should "support file in/out" in {
+ t1 < "hello world"
+ for {
+ in <- t1.inputStream
+ out <- t2.outputStream
+ } in.pipeTo(out)
+ t2.contentAsString shouldEqual "hello world"
+ t2.newInputStream.asString() shouldEqual "hello world"
+ }
+
+ it should "zip/unzip directories" in {
+ t1.writeText("hello world")
+ val zipFile = testRoot.zip()
+ zipFile.size should be > 100L
+ zipFile.name should endWith (".zip")
+
+ def test(output: File) = {
+ (output/"a"/"a1"/"t1.txt").contentAsString shouldEqual "hello world"
+ output === testRoot shouldBe true
+ (output/"a"/"a1"/"t1.txt").overwrite("hello")
+ (output !== testRoot) shouldBe true
+ }
+
+ test(zipFile.unzip())
+ test(zipFile.streamedUnzip())
+ }
+
+ it should "zip/unzip single files" in {
+ t1.writeText("hello world")
+ val zipFile = t1.zip()
+ zipFile.size should be > 100L
+ zipFile.name should endWith (".zip")
+ val destination = unzip(zipFile)(File.newTemporaryDirectory())
+ (destination/"t1.txt").contentAsString shouldEqual "hello world"
+ }
+
+ it should "gzip" in {
+ for {
+ writer <- (testRoot / "test.gz").newOutputStream.buffered.gzipped.writer.buffered.autoClosed
+ } writer.write("Hello world")
+
+ (testRoot / "test.gz").inputStream.map(_.buffered.gzipped.buffered.lines.toSeq) shouldEqual Seq("Hello world")
+ }
+
+ it should "read bytebuffers" in {
+ t1.writeText("hello world")
+ for {
+ fileChannel <- t1.newFileChannel.autoClosed
+ } fileChannel.toMappedByteBuffer.remaining() shouldEqual t1.bytes.length
+
+ (t2 writeBytes t1.bytes).contentAsString shouldEqual t1.contentAsString
+ }
+
+ it should "convert readers to inputstreams and writers to outputstreams" in {
+ File.temporaryFile() foreach {f =>
+ val text = List.fill(10000)("hello world")
+ for {
+ writer <- f.bufferedWriter
+ out <- writer.outputstream.autoClosed
+ } out.write(text.mkString("\n").getBytes)
+ val t = f.bufferedReader.flatMap(_.toInputStream.lines)
+ t.toList shouldEqual text
+ }
+ }
+
+ it should "serialize/deserialize" in {
+ class Person(val name: String, val age: Int) extends Serializable
+ val p1 = new Person("Chris", 34)
+
+ File.temporaryFile() foreach {f => //serialization round-trip test
+ assert(f.isEmpty)
+ f.writeSerialized(p1)
+ assert(f.nonEmpty)
+ val p2: Person = f.readDeserialized[Person]
+ assert(p1.name === p2.name)
+ assert(p1.age === p2.age)
+
+ val p3 = f.inputStream.map(_.asObjectInputStreamUsingClassLoader().deserialize[Person])
+ assert(p3.name === p2.name)
+ assert(p3.age === p2.age)
+ }
+ }
+
+ it should "count number of open file descriptors" in {
+ val expected = java.lang.management.ManagementFactory.getOperatingSystemMXBean
+ .asInstanceOf[com.sun.management.UnixOperatingSystemMXBean]
+ .getOpenFileDescriptorCount
+ assert((File.numberOfOpenFileDescriptors() - expected).abs <= 10)
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala
new file mode 100644
index 00000000..1acf7d7b
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala
@@ -0,0 +1,360 @@
+package better.files
+
+import better.files.Dsl._
+
+import java.io.File.separator
+
+import org.scalatest.BeforeAndAfterAll
+
+class GlobSpec extends CommonSpec with BeforeAndAfterAll {
+ var testDir: File = _
+ var globTree: File = _
+ var specialTree: File = _
+
+ var regexWildcardPath: File = _
+ var globWildcardPath: File = _
+ //
+ // Test target for glob
+ //
+ // tests/
+ // ├── globtree
+ // │   ├── a
+ // │   │   ├── a2
+ // │   │   │   ├── a2.txt
+ // │   │   │   └── x.txt
+ // │   │   ├── a.not
+ // │   │   ├── a.txt
+ // │   │   └── x.txt
+ // │   ├── b
+ // │   │   ├── a
+ // │   │   │   └── ba.txt
+ // │   │   └── b.txt
+ // │   ├── c
+ // │   │   ├── c.txt
+ // │   │   └── x.txt
+ // │   ├── empty
+ // │   ├── link_to_a -> a
+ // │   ├── one.txt
+ // │   ├── readme.md
+ // │   ├── three.txt
+ // │   └── two.txt
+ // └── special
+ // ├── .*
+ // │   └── a
+ // │ └── a.txt
+ // └── **
+ // └── a
+ // └── a.txt
+ //
+ override def beforeAll() = {
+ testDir = File.newTemporaryDirectory("glob-tests")
+ globTree = testDir / "globtree"
+
+ mkdir(globTree)
+ val a = mkdir(globTree / "a" )
+ mkdir(globTree / "a" / "a2")
+ touch(globTree / "a" / "a2" / "a2.txt")
+ touch(globTree / "a" / "a2" / "x.txt")
+ touch(globTree / "a" / "a.not")
+ touch(globTree / "a" / "a.txt")
+ touch(globTree / "a" / "x.txt")
+
+ mkdir(globTree / "b" )
+ mkdir(globTree / "b" / "a")
+ touch(globTree / "b" / "a" / "ba.txt")
+ touch(globTree / "b" / "b.txt")
+
+ mkdir(globTree / "c" )
+ touch(globTree / "c" / "c.txt")
+ touch(globTree / "c" / "x.txt")
+
+ mkdir(globTree / "empty" )
+
+ if (isUnixOS) {
+ ln_s(globTree / "link_to_a", a)
+ }
+
+ touch(globTree / "one.txt")
+ touch(globTree / "two.txt")
+ touch(globTree / "three.txt")
+ touch(globTree / "readme.md")
+
+ // Special target with path name components as wildcards
+ specialTree = testDir / "special"
+
+ // Windows does not support '*' in file names
+ if (isUnixOS) {
+ // regex
+ mkdir(specialTree)
+ regexWildcardPath = mkdir(specialTree / ".*")
+ mkdir(specialTree / ".*" / "a")
+ touch(specialTree / ".*" / "a" / "a.txt")
+
+ // glob
+ globWildcardPath = mkdir(specialTree / "**")
+ mkdir(specialTree / "**" / "a")
+ touch(specialTree / "**" / "a" / "a.txt")
+ }
+
+ ()
+ }
+
+ override def afterAll() = {
+ val _ = rm(testDir)
+ }
+
+ /**
+ * Helper in case something goes wrong...
+ */
+ private def debugPaths(files: Seq[File]): String = {
+ files
+ .sortBy(_.path)
+ .map(files => s"PATH: ${files.toString}")
+ .mkString(s"SIZE: ${files.size}\n", "\n", "\n")
+ }
+
+ /**
+ * Verify that candidates are equal to references.
+ * Does not accept empty sets, use assert(paths.isEmpty) for that.
+ *
+ * @param pathsIt candidates
+ * @param refPaths references
+ * @param baseDir basedir for creating the full paths of the references
+ */
+ private def verify(pathsIt: Files, refPaths: Seq[String], baseDir: File) = {
+ val paths = pathsIt.toSeq
+ val refs = refPaths
+ .map(refPath => baseDir/refPath)
+ .sortBy(_.path)
+
+ withClue("Result: " + debugPaths(paths) + "Reference: " + debugPaths(refs)) {
+ assert(paths.length === refPaths.length)
+ assert(paths.nonEmpty)
+ paths.sortBy(_.path).zip(refs).foreach({case (path, refPath) => assert(path === refPath)})
+ }
+ }
+
+ "glob" should "match plain file (e.g. 'file.ext')" in {
+ val refPaths = Seq(
+ "one.txt"
+ )
+ val paths = globTree.glob("one.txt")
+ verify(paths, refPaths, globTree)
+ }
+ it should "match path without glob (e.g. 'sub/dir/file.ext')" in {
+ val refPaths = Seq(
+ "a/a.txt"
+ )
+ val paths = globTree.glob("a/a.txt")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "match file-glob (e.g. '*.ext')" in {
+ val refPaths = Seq(
+ "one.txt",
+ "two.txt",
+ "three.txt"
+ )
+ val paths = globTree.glob("*.txt")
+ verify(paths, refPaths, globTree)
+ assert(globTree.glob("*.txt", includePath = false)(File.PathMatcherSyntax.glob).isEmpty)
+ }
+
+ it should "match fixed sub dir and file-glob (e.g. '**/subdir/*.ext')" in {
+ // TODO: DOC: why top level 'a' is not matched
+ val refPaths = List(
+ "b/a/ba.txt"
+ )
+ val paths = globTree.glob("**/a/*.txt")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "use parent dir for matching (e.g. plain 'subdir/*.ext')" in {
+ // e.g. check that neither b nor c is matched, nor b/a
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt"
+ )
+ val paths = globTree.glob("a/*.txt")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "match sub-directory glob with plain file (e.g. 'subdir/*/file.ext')" in {
+ val refPaths = Seq(
+ "a/x.txt",
+ "c/x.txt"
+ )
+ val paths = testDir.glob("globtree/*/x.txt")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "match sub-directory glob with file-glob (e.g. 'subdir/*/*.ext')" in {
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt",
+ "c/c.txt",
+ "c/x.txt",
+ "b/b.txt"
+ )
+ val paths = testDir.glob("globtree/*/*.txt")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "match deep sub-directory glob with plain file (e.g. 'subdir/**/file.ext')" in {
+ val refPaths = Seq(
+ "a/a2/x.txt",
+ "a/x.txt",
+ "c/x.txt"
+ )
+ val p1s = globTree.glob("**/x.txt")
+ verify(p1s, refPaths, globTree)
+
+ val p2s = testDir.glob("globtree/**/x.txt")
+ verify(p2s, refPaths, globTree)
+ }
+
+ it should "match deep sub-directory glob with file-glob (e.g. 'subdir/**/*.ext')" in {
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt",
+ "a/a2/x.txt",
+ "a/a2/a2.txt",
+ "c/x.txt",
+ "c/c.txt",
+ "b/b.txt",
+ "b/a/ba.txt"
+ )
+ val p1s = globTree.glob("**/*.txt")
+ verify(p1s, refPaths, globTree)
+
+ val p2s = testDir.glob("globtree/**/*.txt")
+ verify(p2s, refPaths, globTree)
+ }
+
+ it should "match deep file-glob (e.g. 'subdir/**.ext')" in {
+ val refPaths = Seq(
+ "one.txt",
+ "two.txt",
+ "three.txt",
+ "a/a.txt",
+ "a/x.txt",
+ "a/a2/x.txt",
+ "a/a2/a2.txt",
+ "b/a/ba.txt",
+ "b/b.txt",
+ "c/x.txt",
+ "c/c.txt"
+ )
+ val p1s = globTree.glob("**.txt")
+ verify(p1s, refPaths, globTree)
+
+ val p2s = testDir.glob("globtree/**.txt")
+ verify(p2s, refPaths, globTree)
+ }
+
+ it should "match everything (e.g. 'subdir/**')" in {
+ val refPaths = List(
+ "a",
+ "a/a.not",
+ "a/a.txt",
+ "a/a2",
+ "a/a2/a2.txt",
+ "a/a2/x.txt",
+ "a/x.txt",
+ "b",
+ "b/a",
+ "b/a/ba.txt",
+ "b/b.txt",
+ "c",
+ "c/c.txt",
+ "c/x.txt",
+ "empty",
+ "one.txt",
+ "readme.md",
+ "three.txt",
+ "two.txt") ++
+ when(isUnixOS)("link_to_a")
+
+ val paths = testDir.glob("globtree/**")
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "work with links (e.g. 'link_to_a/**.txt')" in {
+ assume(isUnixOS)
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt",
+ "a/a2/x.txt",
+ "a/a2/a2.txt"
+ )
+
+ // TODO: DOC: File behaviour, links are resolved (abs + normalized path)
+
+ val p1s = globTree.glob("link_to_a/**.txt")(visitOptions = File.VisitOptions.follow)
+ verify(p1s, refPaths, globTree)
+
+ val p2s = globTree.glob("link_to_a/**.txt").toSeq
+ assert(p2s.isEmpty)
+
+ val p3s = testDir.glob("globtree/link_to_a/**.txt")(visitOptions = File.VisitOptions.follow)
+ verify(p3s, refPaths, globTree)
+
+ val p4s = testDir.glob("globtree/link_to_a/**.txt")
+ assert(p4s.isEmpty)
+ }
+
+ it should "not use dir name as wildcard (e.g. dirname is **)" in {
+ assume(isUnixOS)
+ val d = globWildcardPath // "path" / "with" / "**"
+ val paths = d.glob("*.txt")
+
+ assert(paths.isEmpty)
+ }
+
+ "Regex" should "match all txt-files under sub-directory (e.g. '.*/.*\\\\.txt')" in {
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt",
+ "a/a2/x.txt",
+ "a/a2/a2.txt",
+ "c/x.txt",
+ "c/c.txt",
+ "b/b.txt",
+ "b/a/ba.txt"
+ )
+ val paths = globTree.glob(".*" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex)
+
+ verify(paths, refPaths, globTree)
+ }
+
+ it should "match the same if `Regex` is used" in {
+ val pattern = (".*" + separator + ".*\\.txt").r
+
+ val pathsGlob = globTree.glob(pattern.regex)(File.PathMatcherSyntax.regex)
+ val pathsRegex = globTree.globRegex(pattern)
+
+ verify(pathsRegex, pathsGlob.toSeq.map(_.toString), globTree)
+
+ }
+
+ it should "use parent dir for matching (e.g. plain 'subdir/*.ext' instead of '**/subdir/*.ext)" in {
+ // e.g. check that neither b nor c is matched, nor b/a
+ val refPaths = Seq(
+ "a/a.txt",
+ "a/x.txt",
+ "a/a2/a2.txt",
+ "a/a2/x.txt"
+ )
+ val paths = globTree.glob("a" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex)
+
+ verify(paths, refPaths, globTree)
+ assert(globTree.glob("a/.*\\.txt", includePath = false)(File.PathMatcherSyntax.regex).isEmpty)
+ }
+
+ it should "not use dir name as wildcard (e.g. dirname is .*)" in {
+ assume(isUnixOS)
+ val d = regexWildcardPath // "path" / "with" / ".*"
+ val paths = d.glob("a\\.txt")(File.PathMatcherSyntax.regex)
+ assert(paths.isEmpty)
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala
new file mode 100644
index 00000000..554f5358
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala
@@ -0,0 +1,250 @@
+package better.files
+
+import org.scalatest.matchers.{MatchResult, Matcher}
+
+import scala.reflect.ClassTag
+import scala.util.control.ControlThrowable
+
+class ManagedResourceSpec extends CommonSpec {
+ // Test classes
+
+ private class TestDisposable extends AutoCloseable {
+ var closeCount = 0
+
+ override def close(): Unit =
+ closeCount += 1
+ }
+
+ private class TestDisposableThatThrows extends TestDisposable {
+ override def close(): Unit = {
+ super.close()
+ throw new TestDisposeException
+ }
+ }
+
+ private class TestDisposableThatThrowsFatal extends TestDisposable {
+ override def close(): Unit = {
+ super.close()
+ throw new TestDisposeFatalException
+ }
+ }
+
+ private class TestEvalException extends Exception
+ private class TestDisposeException extends Exception
+ private class TestDisposeFatalException extends Exception with ControlThrowable
+
+ // Custom matchers
+
+ private class HaveSuppressedMatcher(classes: Class[_ <: Throwable]*) extends Matcher[Throwable] {
+ override def apply(left: Throwable): MatchResult = {
+ MatchResult(
+ (classes corresponds left.getSuppressed) {
+ (clazz, suppressed) => clazz isInstance suppressed
+ },
+ s"had suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}",
+ s"had not suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}"
+ )
+ }
+ }
+
+ private def haveSuppressed[E <: Throwable](implicit ct: ClassTag[E]) =
+ new HaveSuppressedMatcher(ct.runtimeClass.asInstanceOf[Class[_ <: Throwable]])
+
+ // Test body
+
+ behavior of "managed resources"
+
+ it should "map correctly" in {
+ val t = new TestDisposable
+
+ val result = for {
+ tc <- t.autoClosed
+ } yield {
+ t.closeCount shouldBe 0
+ "hello"
+ }
+
+ result shouldBe "hello"
+ t.closeCount shouldBe 1
+ }
+
+ it should "flatMap correctly" in {
+ val t = new TestDisposable
+
+ val result = (for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } yield {
+ t.closeCount shouldBe 0
+ v
+ }).toSeq
+
+ result should contain inOrder ("one", "two", "three")
+ t.closeCount shouldBe 1
+ }
+
+ it should "handle exceptions correctly" in {
+ val t = new TestDisposable
+
+ a [TestEvalException] should be thrownBy {
+ for {
+ tc <- t.autoClosed
+ } {
+ t.closeCount shouldBe 0
+ throw new TestEvalException
+ }
+ }
+ t.closeCount shouldBe 1
+
+ var lastSeen = ""
+ a [TestEvalException] should be thrownBy {
+ for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } {
+ t.closeCount shouldBe 1
+ lastSeen = v
+ if (v == "two") throw new TestEvalException
+ }
+ }
+ t.closeCount shouldBe 2
+ lastSeen shouldBe "two"
+ }
+
+ it should "handle disposal exceptions correctly" in {
+ // For some mysterious reason, thrownBy doesn't work here, in this specific test case. No clue why, despite spending an entire day trying to figure it out,
+ // including repeatedly stepping through the innards of ScalaTest in a debugger. Catching the exception manually does work, though.
+ val messageNoException = "no exception was thrown"
+ def messageWrongException(e: Throwable): String =
+ s"an exception was thrown, but not a TestDisposeException; instead it's a ${e.getClass.getName}"
+
+ val t = new TestDisposableThatThrows
+
+ val e1 =
+ try {
+ for {
+ tc <- t.autoClosed
+ } {
+ t.closeCount shouldBe 0
+ }
+ None
+ }
+ catch {
+ case e: TestDisposeException =>
+ Some(e)
+ }
+ assert(e1.nonEmpty, messageNoException)
+ e1 foreach { e1c => assert(e1c.isInstanceOf[TestDisposeException], messageWrongException(e1c)) }
+ t.closeCount shouldBe 1
+
+ var lastSeen = ""
+ val e2 =
+ try {
+ val i = for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } yield {
+ t.closeCount shouldBe 1
+ lastSeen = v
+ v
+ }
+ while (i.hasNext) i.next()
+ None
+ }
+ catch {
+ case e: TestDisposeException =>
+ Some(e)
+ }
+ lastSeen shouldBe "three"
+ assert(e2.nonEmpty, messageNoException)
+ e2 foreach { e2c => assert(e2c.isInstanceOf[TestDisposeException], messageWrongException(e2c)) }
+ t.closeCount shouldBe 2
+ }
+
+ it should "handle non-local returns correctly" in {
+ val t = new TestDisposable
+
+ def doTheThing(): String = {
+ throw the [ControlThrowable] thrownBy {
+ for {
+ tc <- t.autoClosed
+ } {
+ t.closeCount shouldBe 0
+ return "hello"
+ }
+ }
+ }
+ doTheThing() shouldBe "hello"
+ t.closeCount shouldBe 1
+
+ def doTheThings(): String = {
+ throw the [ControlThrowable] thrownBy {
+ for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } {
+ t.closeCount shouldBe 1
+ if (v == "two") return v
+ }
+ }
+ }
+ doTheThings() shouldBe "two"
+ t.closeCount shouldBe 2
+ }
+
+ it should "handle multiple exceptions correctly" in {
+ val t = new TestDisposableThatThrows
+
+ the [TestEvalException] thrownBy {
+ for {
+ tc <- t.autoClosed
+ } {
+ t.closeCount shouldBe 0
+ throw new TestEvalException
+ }
+ } should haveSuppressed [TestDisposeException]
+ t.closeCount shouldBe 1
+
+ var lastSeen = ""
+ the [TestEvalException] thrownBy {
+ for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } {
+ t.closeCount shouldBe 1
+ lastSeen = v
+ if (v == "two") throw new TestEvalException
+ }
+ } should haveSuppressed [TestDisposeException]
+ lastSeen shouldBe "two"
+ t.closeCount shouldBe 2
+ }
+
+ it should "give fatal exceptions precedence" in {
+ val t = new TestDisposableThatThrowsFatal
+
+ the [TestDisposeFatalException] thrownBy {
+ for {
+ tc <- t.autoClosed
+ } {
+ t.closeCount shouldBe 0
+ throw new TestEvalException
+ }
+ } should haveSuppressed [TestEvalException]
+ t.closeCount shouldBe 1
+
+ var lastSeen = ""
+ the [TestDisposeFatalException] thrownBy {
+ for {
+ tc <- t.autoClosed
+ v <- Iterator("one", "two", "three")
+ } {
+ t.closeCount shouldBe 1
+ lastSeen = v
+ if (v == "two") throw new TestEvalException
+ }
+ } should haveSuppressed [TestEvalException]
+ t.closeCount shouldBe 2
+ lastSeen shouldBe "two"
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala
new file mode 100644
index 00000000..54f0a117
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala
@@ -0,0 +1,79 @@
+package better.files
+
+import Dsl._
+
+import scala.language.existentials
+
+class ScannerSpec extends CommonSpec {
+ def t1 = File.newTemporaryFile()
+
+
+ "splitter" should "split" in {
+ val csvSplitter = StringSplitter.on(',')
+ def split(s: String) = csvSplitter.split(s).toList
+
+ assert(split(",") === List("", ""))
+ assert(split("") === List(""))
+ assert(split("Hello World") === List("Hello World"))
+ assert(split("Hello,World") === List("Hello", "World"))
+
+ assert(split(",,") === List("", "", ""))
+ assert(split(",Hello,World,") === List("", "Hello", "World", ""))
+ assert(split(",Hello,World") === List("", "Hello", "World"))
+ assert(split("Hello,World,") === List("Hello", "World", ""))
+ }
+
+ "scanner" should "parse files" in {
+ val data = t1 << s"""
+ | Hello World
+ | 1 2 3
+ | Ok 23 football
+ """.stripMargin
+ data.scanner() foreach {scanner =>
+ assert(scanner.lineNumber() == 0)
+ assert(scanner.next[String] == "Hello")
+ assert(scanner.lineNumber() == 2)
+ assert(scanner.next[String] == "World")
+ assert(scanner.next[Int] == 1)
+ assert(scanner.next[Int] == 2)
+ assert(scanner.lineNumber() == 3)
+ assert(scanner.next[Int] == 3)
+ assert(scanner.nextLine() == " Ok 23 football")
+ assert(!scanner.hasNext)
+ a[NoSuchElementException] should be thrownBy scanner.next()
+ assert(!scanner.hasNext)
+ }
+ data.tokens().toSeq shouldEqual data.newScanner().toSeq
+ }
+
+ it should "parse longs/booleans" in {
+ val data = for {
+ scanner <- Scanner("10 false").autoClosed
+ } yield scanner.next[(Long, Boolean)]
+ data shouldBe ((10L, false))
+ }
+
+ it should "parse custom parsers" in {
+ val file = t1 < """
+ |Garfield
+ |Woofer
+ """.stripMargin
+
+ sealed trait Animal
+ case class Dog(name: String) extends Animal
+ case class Cat(name: String) extends Animal
+
+ implicit val animalParser: Scannable[Animal] = Scannable {scanner =>
+ val name = scanner.next[String]
+ if (name == "Garfield") Cat(name) else Dog(name)
+ }
+ file.scanner() foreach {scanner =>
+ Seq.fill(2)(scanner.next[Animal]) should contain theSameElementsInOrderAs Seq(Cat("Garfield"), Dog("Woofer"))
+ }
+ }
+
+ it should "parse empty tokens" in {
+ val scanner = Scanner("hello||world", StringSplitter.on('|'))
+ List.fill(3)(scanner.next[Option[String]]) shouldEqual List(Some("hello"), None, Some("world"))
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/project/Dependencies.scala b/scalaplugin/src/test/resource/better-files/project/Dependencies.scala
new file mode 100644
index 00000000..5e0a037d
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/project/Dependencies.scala
@@ -0,0 +1,15 @@
+import sbt._
+
+object Dependencies {
+ val scalatest = "org.scalatest" %% "scalatest" % "3.0.4" % Test
+
+ // Used in Akka file watcher
+ val akka = "com.typesafe.akka" %% "akka-actor" % "2.5.6"
+
+ // For shapeless based Reader/Scanner
+ val shapeless = "com.chuusai" %% "shapeless" % "2.3.2"
+
+ // Used in Benchmarks only
+ val commonsio = "commons-io" % "commons-io" % "2.5"
+ val fastjavaio = "fastjavaio" % "fastjavaio" % "1.0" from "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar"
+}
diff --git a/scalaplugin/src/test/resource/better-files/project/build.properties b/scalaplugin/src/test/resource/better-files/project/build.properties
new file mode 100644
index 00000000..74e2336b
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/project/build.properties
@@ -0,0 +1,2 @@
+# This can only be up to the version supported by CircleCI. See: https://circleci.com/docs/1.0/language-scala/
+sbt.version=0.13.9
diff --git a/scalaplugin/src/test/resource/better-files/project/plugins.sbt b/scalaplugin/src/test/resource/better-files/project/plugins.sbt
new file mode 100644
index 00000000..dba2a1da
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/project/plugins.sbt
@@ -0,0 +1,9 @@
+addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.2")
+addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0")
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")
+addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.1.9")
+addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.4")
+addSbtPlugin("com.updateimpact" % "updateimpact-sbt-plugin" % "2.1.1")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "0.5.1")
+addSbtPlugin("com.codacy" % "sbt-codacy-coverage" % "1.3.8")
diff --git a/scalaplugin/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala b/scalaplugin/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala
new file mode 100644
index 00000000..d60487d9
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala
@@ -0,0 +1,24 @@
+package better.files
+
+import better.files.Scanner.Read
+
+import shapeless._
+
+import scala.util.Try
+
+object ShapelessScanner {
+ implicit val hNilScannable: Scannable[HNil] =
+ Scannable(_ => HNil)
+
+ implicit def hListScannable[H, T <: HList](implicit h: Lazy[Scannable[H]], t: Scannable[T]): Scannable[H :: T] =
+ Scannable(s => h.value(s) :: t(s))
+
+ implicit def genericScannable[A, R](implicit gen: Generic.Aux[A, R], reprScannable: Lazy[Scannable[R]]): Scannable[A] =
+ Scannable(s => gen.from(reprScannable.value(s)))
+
+ implicit val cnilReader: Read[CNil] =
+ Read(s => throw new RuntimeException(s"Could not read $s into this coproduct"))
+
+ implicit def coproductReader[H, T <: Coproduct](implicit h: Read[H], t: Read[T]): Read[H :+: T] =
+ Read(s => Try(Inl(h(s))).getOrElse(Inr(t(s))))
+}
diff --git a/scalaplugin/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala b/scalaplugin/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala
new file mode 100644
index 00000000..34557261
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala
@@ -0,0 +1,32 @@
+package better.files
+
+import shapeless._
+
+class ShapelessScannerSpec extends CommonSpec {
+ import ShapelessScanner._
+
+ val text = """
+ 12 Bob True
+ 13 Mary False
+ 26 Rick True
+ """
+
+ "Shapeless Scanner" should "parse HList" in {
+ val in = Scanner(text)
+
+ type Row = Int :: String :: Boolean :: HNil
+ val out = Seq.fill(3)(in.next[Row])
+ assert(out == Seq(
+ 12 :: "Bob" :: true :: HNil,
+ 13 :: "Mary" :: false :: HNil,
+ 26 :: "Rick" :: true :: HNil
+ ))
+ }
+
+ "Shapeless Scanner" should "parse case class" in {
+ val in = Scanner(text)
+
+ case class Person(id: Int, name: String, isMale: Boolean)
+ assert(in.next[Iterator[Person]].map(_.id).sum == 51)
+ }
+}
diff --git a/scalaplugin/src/test/resource/better-files/site/index.html b/scalaplugin/src/test/resource/better-files/site/index.html
new file mode 100644
index 00000000..29931115
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/site/index.html
@@ -0,0 +1,16 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8">
+ <title>Better Files</title>
+ <script language="JavaScript">
+ function doRedirect() {
+ window.location.replace("latest/api/better/files/File.html");
+ }
+ doRedirect();
+ </script>
+ </head>
+ <body>
+ <a href="latest/api/better/files/File.html">ScalaDoc</a>
+ </body>
+</html>
diff --git a/scalaplugin/src/test/resource/better-files/site/tech_talk_preview.png b/scalaplugin/src/test/resource/better-files/site/tech_talk_preview.png
new file mode 100644
index 00000000..9f9f7599
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/site/tech_talk_preview.png
Binary files differ
diff --git a/scalaplugin/src/test/resource/better-files/version.sbt b/scalaplugin/src/test/resource/better-files/version.sbt
new file mode 100644
index 00000000..0750fecd
--- /dev/null
+++ b/scalaplugin/src/test/resource/better-files/version.sbt
@@ -0,0 +1 @@
+version in ThisBuild := "3.2.1-SNAPSHOT" \ No newline at end of file
diff --git a/scalaplugin/src/test/scala/mill/scalaplugin/BetterFilesTests.scala b/scalaplugin/src/test/scala/mill/scalaplugin/BetterFilesTests.scala
new file mode 100644
index 00000000..f2f3a832
--- /dev/null
+++ b/scalaplugin/src/test/scala/mill/scalaplugin/BetterFilesTests.scala
@@ -0,0 +1,115 @@
+package mill.scalaplugin
+
+import ammonite.ops.ImplicitWd._
+import ammonite.ops._
+import mill.define.{Cross,Task}
+import mill.discover.Discovered
+import mill.eval.Result
+import utest._
+import mill.util.JsonFormatters._
+
+object BetterFilesBuild{
+ trait BetterFilesModule extends ScalaModule{ outer =>
+ def scalaVersion = "2.12.4"
+ override def sources = basePath/'src/'main/'scala
+ override def scalacOptions = Seq(
+ "-deprecation", // Emit warning and location for usages of deprecated APIs.
+ "-encoding", "utf-8", // Specify character encoding used by source files.
+ "-explaintypes", // Explain type errors in more detail.
+ "-feature", // Emit warning and location for usages of features that should be imported explicitly.
+ "-language:existentials", // Existential types (besides wildcard types) can be written and inferred
+ "-language:experimental.macros", // Allow macro definition (besides implementation and application)
+ "-language:higherKinds", // Allow higher-kinded types
+ "-language:implicitConversions", // Allow definition of implicit functions called views
+ "-unchecked", // Enable additional warnings where generated code depends on assumptions.
+ "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
+ "-Xfatal-warnings", // Fail the compilation if there are any warnings.
+ "-Xfuture", // Turn on future language features.
+ "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
+ "-Xlint:by-name-right-associative", // By-name parameter of right associative operator.
+ "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
+ "-Xlint:delayedinit-select", // Selecting member of DelayedInit.
+ "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
+ "-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
+ "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
+ "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
+ "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
+ "-Xlint:nullary-unit", // Warn when nullary methods return Unit.
+ "-Xlint:option-implicit", // Option.apply used implicit view.
+ "-Xlint:package-object-classes", // Class or object defined in package object.
+ "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
+ "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
+ "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
+ "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
+ "-Xlint:unsound-match", // Pattern match may not be typesafe.
+ "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
+ "-Ypartial-unification", // Enable partial unification in type constructor inference
+ "-Ywarn-dead-code", // Warn when dead code is identified.
+ "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
+ "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
+ "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
+ "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
+ "-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
+ "-Ywarn-numeric-widen", // Warn when numerics are widened.
+ "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused.
+ "-Ywarn-unused:imports", // Warn if an import selector is not referenced.
+ "-Ywarn-unused:locals", // Warn if a local definition is unused.
+ "-Ywarn-unused:params", // Warn if a value parameter is unused.
+ "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused.
+ "-Ywarn-unused:privates", // Warn if a private member is unused.
+ "-Ywarn-value-discard" // Warn when non-Unit expression results are unused.
+ )
+ override def javacOptions = Seq("-source", "1.8", "-target", "1.8", "-Xlint")
+ object test extends this.Tests{
+ override def projectDeps =
+ if (this == Core.test) Seq(Core)
+ else Seq(outer, Core.test)
+ def basePath = outer.basePath
+ override def ivyDeps = Seq(Dep("org.scalatest", "scalatest", "3.0.4"))
+ override def sources = basePath/'src/'test/'scala
+ def testFramework = "org.scalatest.tools.Framework"
+ }
+ }
+ object Core extends BetterFilesModule{
+ def basePath = BetterFilesTests.srcPath/"core"
+ }
+ object Akka extends BetterFilesModule{
+ override def projectDeps = Seq(Core)
+ def basePath = BetterFilesTests.srcPath/"akka"
+ override def ivyDeps = Seq(Dep("com.typesafe.akka", "akka-actor", "2.5.6"))
+ }
+ object ShapelessScanner extends BetterFilesModule{
+ override def projectDeps = Seq(Core)
+ def basePath = BetterFilesTests.srcPath/"shapeless"
+ override def ivyDeps = Seq(Dep("com.chuusai", "shapeless", "2.3.2"))
+ }
+ object Benchmarks extends BetterFilesModule{
+ override def projectDeps = Seq(Core)
+ def basePath = BetterFilesTests.srcPath/"benchmarks"
+ override def ivyDeps = Seq(
+ Dep.Java("commons-io", "commons-io", "2.5")
+ // "fastjavaio" % "fastjavaio" % "1.0" from "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar"
+ )
+ }
+}
+object BetterFilesTests extends TestSuite{
+ val workspacePath = pwd / 'target / 'workspace / "better-files"
+ val srcPath = pwd / 'scalaplugin / 'src / 'test / 'resource / "better-files"
+ val tests = Tests{
+ rm(workspacePath)
+ mkdir(workspacePath/up)
+ cp(srcPath, workspacePath)
+ val mapping = Discovered.mapping(BetterFilesBuild)
+ def eval[T](t: Task[T]) = TestEvaluator.eval(mapping, workspacePath)(t)
+
+ 'test - {
+
+ val Right(_) = eval(BetterFilesBuild.Core.test.test())
+ val Right(_) = eval(BetterFilesBuild.Akka.test.test())
+ val Right(_) = eval(BetterFilesBuild.ShapelessScanner.test.test())
+ // Doesn't work yet, need to support curling down a jar and caching it
+ val Left(_) = eval(BetterFilesBuild.Benchmarks.test.test())
+ }
+
+ }
+}