From cf5fb5fdfd477e0bb4ffa0e4fec3a8ec01bf5cf1 Mon Sep 17 00:00:00 2001
From: Li Haoyi
Date: Sun, 31 Dec 2017 22:56:45 -0800
Subject: Split Acyclic/Jawn/BetterFiles tests into their own `integration/`
 test suite.

Those tests now download a snapshot of the relevant git repo rather than
vendoring the files, and use a bare `build.sc` instead of having the build
object be included in the test classpath. Tests pass using `sbt
integration/test`, but `mill integration.test` still doesn't work
---
 scalalib/src/main/scala/mill/scalalib/Lib.scala | 2 +-
 scalalib/src/main/scala/mill/scalalib/Module.scala | 12 +-
 .../src/main/scala/mill/scalalib/TestRunner.scala | 5 +-
 scalalib/src/test/resource/acyclic/build.sbt | 49 -
 .../test/resource/acyclic/project/build.properties | 1 -
 .../src/test/resource/acyclic/project/build.sbt | 2 -
 .../acyclic/src/main/resources/scalac-plugin.xml | 4 -
 .../acyclic/src/main/scala/acyclic/package.scala | 23 -
 .../acyclic/plugin/DependencyExtraction.scala | 100 --
 .../main/scala/acyclic/plugin/GraphAnalysis.scala | 103 --
 .../src/main/scala/acyclic/plugin/Plugin.scala | 26 -
 .../main/scala/acyclic/plugin/PluginPhase.scala | 180 ---
 .../src/test/resources/fail/cyclicgraph/A.scala | 6 -
 .../src/test/resources/fail/cyclicgraph/B.scala | 6 -
 .../src/test/resources/fail/cyclicgraph/C.scala | 6 -
 .../src/test/resources/fail/cyclicgraph/D.scala | 7 -
 .../src/test/resources/fail/cyclicgraph/E.scala | 7 -
 .../test/resources/fail/cyclicpackage/a/A1.scala | 7 -
 .../test/resources/fail/cyclicpackage/a/A2.scala | 4 -
 .../resources/fail/cyclicpackage/a/package.scala | 5 -
 .../test/resources/fail/cyclicpackage/b/B1.scala | 3 -
 .../test/resources/fail/cyclicpackage/b/B2.scala | 5 -
 .../resources/fail/cyclicpackage/b/package.scala | 5 -
 .../test/resources/fail/halfpackagecycle/A.scala | 5 -
 .../test/resources/fail/halfpackagecycle/B.scala | 3 -
 .../resources/fail/halfpackagecycle/c/C1.scala | 3 -
 .../resources/fail/halfpackagecycle/c/C2.scala | 6 -
 .../fail/halfpackagecycle/c/package.scala | 5 -
 .../src/test/resources/fail/indirect/A.scala | 7 -
 .../src/test/resources/fail/indirect/B.scala | 3 -
 .../src/test/resources/fail/indirect/C.scala | 5 -
 .../acyclic/src/test/resources/fail/simple/A.scala | 7 -
 .../acyclic/src/test/resources/fail/simple/B.scala | 6 -
 .../src/test/resources/force/simple/A.scala | 7 -
 .../src/test/resources/force/simple/B.scala | 6 -
 .../acyclic/src/test/resources/force/skip/A.scala | 7 -
 .../acyclic/src/test/resources/force/skip/B.scala | 6 -
 .../test/resources/success/cyclicunmarked/A.scala | 5 -
 .../test/resources/success/cyclicunmarked/B.scala | 6 -
 .../acyclic/src/test/resources/success/dag/A.scala | 4 -
 .../acyclic/src/test/resources/success/dag/B.scala | 5 -
 .../acyclic/src/test/resources/success/dag/C.scala | 3 -
 .../acyclic/src/test/resources/success/dag/D.scala | 6 -
 .../acyclic/src/test/resources/success/dag/E.scala | 6 -
 .../src/test/resources/success/java/SomeJava.java | 4 -
 .../resources/success/pkg/halfacyclic/a/A1.scala | 6 -
 .../resources/success/pkg/halfacyclic/a/A2.scala | 5 -
 .../success/pkg/halfacyclic/a/package.scala | 5 -
 .../resources/success/pkg/halfacyclic/b/B1.scala | 5 -
 .../resources/success/pkg/halfacyclic/b/B2.scala | 6 -
 .../resources/success/pkg/innercycle/a/A1.scala | 6 -
 .../resources/success/pkg/innercycle/a/A2.scala | 6 -
 .../success/pkg/innercycle/a/package.scala | 6 -
 .../resources/success/pkg/mutualcyclic/a/A1.scala | 7 -
 .../resources/success/pkg/mutualcyclic/a/A2.scala | 6 -
 .../resources/success/pkg/mutualcyclic/b/B1.scala | 6
- .../resources/success/pkg/mutualcyclic/b/B2.scala | 7 - .../resources/success/pkg/single/pkg/package.scala | 5 - .../src/test/resources/success/simple/A.scala | 4 - .../src/test/resources/success/simple/B.scala | 5 - .../src/test/scala/acyclic/CycleTests.scala | 68 -- .../acyclic/src/test/scala/acyclic/TestUtils.scala | 92 -- scalalib/src/test/resource/better-files/.gitignore | 206 ---- scalalib/src/test/resource/better-files/CHANGES.md | 73 -- scalalib/src/test/resource/better-files/LICENSE | 21 - scalalib/src/test/resource/better-files/README.md | 637 ---------- .../src/test/resource/better-files/akka/README.md | 394 ------ .../src/main/scala/better/files/FileWatcher.scala | 67 -- .../test/scala/better/files/FileWatcherSpec.scala | 101 -- .../resource/better-files/benchmarks/README.md | 24 - .../main/java/better/files/ArrayBufferScanner.java | 78 -- .../src/main/scala/better/files/Scanners.scala | 158 --- .../src/test/scala/better/files/Benchmark.scala | 10 - .../scala/better/files/EncodingBenchmark.scala | 39 - .../test/scala/better/files/ScannerBenchmark.scala | 66 - scalalib/src/test/resource/better-files/build.sbt | 163 --- scalalib/src/test/resource/better-files/circle.yml | 21 - .../core/src/main/scala/better/files/Dsl.scala | 155 --- .../core/src/main/scala/better/files/File.scala | 1257 -------------------- .../src/main/scala/better/files/FileMonitor.scala | 72 -- .../src/main/scala/better/files/Implicits.scala | 324 ----- .../main/scala/better/files/ManagedResource.scala | 91 -- .../scala/better/files/ReaderInputStream.scala | 83 -- .../core/src/main/scala/better/files/Scanner.scala | 183 --- .../main/scala/better/files/TeeOutputStream.scala | 23 - .../main/scala/better/files/UnicodeCharset.scala | 100 -- .../scala/better/files/WriterOutputStream.scala | 74 -- .../core/src/main/scala/better/files/package.scala | 66 - .../src/test/scala/better/files/CommonSpec.scala | 15 - .../test/scala/better/files/FileMonitorSpec.scala | 61 - .../src/test/scala/better/files/FileSpec.scala | 549 --------- .../src/test/scala/better/files/GlobSpec.scala | 360 ------ .../scala/better/files/ManagedResourceSpec.scala | 250 ---- .../src/test/scala/better/files/ScannerSpec.scala | 79 -- .../better-files/project/Dependencies.scala | 15 - .../resource/better-files/project/build.properties | 2 - .../test/resource/better-files/project/plugins.sbt | 9 - .../main/scala/better/files/ShapelessScanner.scala | 24 - .../scala/better/files/ShapelessScannerSpec.scala | 32 - .../src/test/resource/better-files/site/index.html | 16 - .../better-files/site/tech_talk_preview.png | Bin 60942 -> 0 bytes .../src/test/resource/better-files/version.sbt | 1 - .../resource/hello-world/src/main/scala/Main.scala | 2 +- scalalib/src/test/resource/jawn/.gitignore | 20 - scalalib/src/test/resource/jawn/.travis.yml | 6 - scalalib/src/test/resource/jawn/README.md | 427 ------- .../jawn/ast/src/main/scala/jawn/ast/JParser.scala | 35 - .../jawn/ast/src/main/scala/jawn/ast/JValue.scala | 314 ----- .../ast/src/main/scala/jawn/ast/JawnFacade.scala | 51 - .../ast/src/main/scala/jawn/ast/Renderer.scala | 101 -- .../ast/src/test/scala/jawn/ArbitraryUtil.scala | 49 - .../jawn/ast/src/test/scala/jawn/AstTest.scala | 79 -- .../jawn/ast/src/test/scala/jawn/ParseCheck.scala | 169 --- .../src/test/resource/jawn/benchmark/build.sbt | 21 - .../src/main/scala/jawn/JmhBenchmarks.scala | 120 -- .../benchmark/src/main/scala/jawn/Parboiled.scala | 105 -- .../src/main/scala/jawn/ParseLongBench.scala | 133 --- scalalib/src/test/resource/jawn/build.sbt | 
162 --- .../jawn/parser/src/main/resources/utf8.json | 7 - .../parser/src/main/scala/jawn/AsyncParser.scala | 319 ----- .../src/main/scala/jawn/ByteBasedParser.scala | 104 -- .../src/main/scala/jawn/ByteBufferParser.scala | 42 - .../parser/src/main/scala/jawn/ChannelParser.scala | 164 --- .../src/main/scala/jawn/CharBasedParser.scala | 98 -- .../parser/src/main/scala/jawn/CharBuilder.scala | 56 - .../src/main/scala/jawn/CharSequenceParser.scala | 18 - .../jawn/parser/src/main/scala/jawn/Facade.scala | 34 - .../parser/src/main/scala/jawn/MutableFacade.scala | 35 - .../parser/src/main/scala/jawn/NullFacade.scala | 30 - .../jawn/parser/src/main/scala/jawn/Parser.scala | 507 -------- .../parser/src/main/scala/jawn/SimpleFacade.scala | 42 - .../parser/src/main/scala/jawn/StringParser.scala | 25 - .../parser/src/main/scala/jawn/SupportParser.scala | 31 - .../parser/src/main/scala/jawn/SyncParser.scala | 37 - .../jawn/parser/src/main/scala/jawn/Syntax.scala | 27 - .../parser/src/test/scala/jawn/ChannelSpec.scala | 25 - .../src/test/scala/jawn/CharBuilderSpec.scala | 23 - .../src/test/scala/jawn/JNumIndexCheck.scala | 81 -- .../parser/src/test/scala/jawn/SyntaxCheck.scala | 131 -- .../jawn/parser/src/test/scala/jawn/TestUtil.scala | 18 - .../test/resource/jawn/project/ReleaseHelper.scala | 34 - .../test/resource/jawn/project/build.properties | 1 - .../src/test/resource/jawn/project/plugins.sbt | 6 - scalalib/src/test/resource/jawn/randjson.py | 78 -- scalalib/src/test/resource/jawn/randjson2.py | 53 - .../support/argonaut/src/main/scala/Parser.scala | 45 - .../argonaut/src/test/scala/ParserSpec.scala | 41 - .../support/json4s/src/main/scala/Parser.scala | 59 - .../jawn/support/play/src/main/scala/Parser.scala | 20 - .../support/rojoma-v3/src/main/scala/Parser.scala | 18 - .../support/rojoma/src/main/scala/Parser.scala | 18 - .../jawn/support/spray/src/main/scala/Parser.scala | 17 - .../src/main/scala/jawn/util/InvalidLong.scala | 7 - .../jawn/util/src/main/scala/jawn/util/Slice.scala | 95 -- .../util/src/main/scala/jawn/util/package.scala | 96 -- .../src/test/scala/jawn/util/ParseLongCheck.scala | 72 -- .../util/src/test/scala/jawn/util/SliceCheck.scala | 131 -- scalalib/src/test/resource/jawn/version.sbt | 1 - .../resolve-deps/src/main/scala/Main.scala | 3 - .../test/scala/mill/scalalib/AcyclicTests.scala | 78 -- .../scala/mill/scalalib/BetterFilesTests.scala | 111 -- .../test/scala/mill/scalalib/HelloWorldTests.scala | 8 +- .../src/test/scala/mill/scalalib/JawnTests.scala | 92 -- 163 files changed, 15 insertions(+), 11544 deletions(-) delete mode 100644 scalalib/src/test/resource/acyclic/build.sbt delete mode 100644 scalalib/src/test/resource/acyclic/project/build.properties delete mode 100644 scalalib/src/test/resource/acyclic/project/build.sbt delete mode 100644 scalalib/src/test/resource/acyclic/src/main/resources/scalac-plugin.xml delete mode 100644 scalalib/src/test/resource/acyclic/src/main/scala/acyclic/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/DependencyExtraction.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/GraphAnalysis.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/Plugin.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/PluginPhase.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/A.scala delete mode 100644 
scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/C.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/D.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/E.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/C.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/force/simple/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/force/simple/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/force/skip/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/force/skip/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/dag/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/dag/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/dag/C.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/dag/D.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/dag/E.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/java/SomeJava.java delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/package.scala delete mode 100644 
scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B1.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B2.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/single/pkg/package.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/simple/A.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/resources/success/simple/B.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/scala/acyclic/CycleTests.scala delete mode 100644 scalalib/src/test/resource/acyclic/src/test/scala/acyclic/TestUtils.scala delete mode 100644 scalalib/src/test/resource/better-files/.gitignore delete mode 100644 scalalib/src/test/resource/better-files/CHANGES.md delete mode 100644 scalalib/src/test/resource/better-files/LICENSE delete mode 100644 scalalib/src/test/resource/better-files/README.md delete mode 100644 scalalib/src/test/resource/better-files/akka/README.md delete mode 100644 scalalib/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala delete mode 100644 scalalib/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/README.md delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala delete mode 100644 scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala delete mode 100644 scalalib/src/test/resource/better-files/build.sbt delete mode 100644 scalalib/src/test/resource/better-files/circle.yml delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala delete mode 100644 
scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/project/Dependencies.scala delete mode 100644 scalalib/src/test/resource/better-files/project/build.properties delete mode 100644 scalalib/src/test/resource/better-files/project/plugins.sbt delete mode 100644 scalalib/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala delete mode 100644 scalalib/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala delete mode 100644 scalalib/src/test/resource/better-files/site/index.html delete mode 100644 scalalib/src/test/resource/better-files/site/tech_talk_preview.png delete mode 100644 scalalib/src/test/resource/better-files/version.sbt delete mode 100644 scalalib/src/test/resource/jawn/.gitignore delete mode 100644 scalalib/src/test/resource/jawn/.travis.yml delete mode 100644 scalalib/src/test/resource/jawn/README.md delete mode 100644 scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JParser.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JValue.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JawnFacade.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/Renderer.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ArbitraryUtil.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/AstTest.scala delete mode 100644 scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ParseCheck.scala delete mode 100644 scalalib/src/test/resource/jawn/benchmark/build.sbt delete mode 100644 scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/JmhBenchmarks.scala delete mode 100644 scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/Parboiled.scala delete mode 100644 scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/ParseLongBench.scala delete mode 100644 scalalib/src/test/resource/jawn/build.sbt delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/resources/utf8.json delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/AsyncParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBasedParser.scala delete mode 100644 
scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBufferParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ChannelParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBasedParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBuilder.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharSequenceParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Facade.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/MutableFacade.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/NullFacade.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SimpleFacade.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/StringParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SupportParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SyncParser.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Syntax.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/ChannelSpec.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/CharBuilderSpec.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/JNumIndexCheck.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/SyntaxCheck.scala delete mode 100644 scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/TestUtil.scala delete mode 100644 scalalib/src/test/resource/jawn/project/ReleaseHelper.scala delete mode 100644 scalalib/src/test/resource/jawn/project/build.properties delete mode 100644 scalalib/src/test/resource/jawn/project/plugins.sbt delete mode 100644 scalalib/src/test/resource/jawn/randjson.py delete mode 100644 scalalib/src/test/resource/jawn/randjson2.py delete mode 100644 scalalib/src/test/resource/jawn/support/argonaut/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/support/argonaut/src/test/scala/ParserSpec.scala delete mode 100644 scalalib/src/test/resource/jawn/support/json4s/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/support/play/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/support/rojoma-v3/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/support/rojoma/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/support/spray/src/main/scala/Parser.scala delete mode 100644 scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/InvalidLong.scala delete mode 100644 scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/Slice.scala delete mode 100644 scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/package.scala delete mode 100644 scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/ParseLongCheck.scala delete mode 100644 scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/SliceCheck.scala delete mode 100644 scalalib/src/test/resource/jawn/version.sbt delete mode 100644 scalalib/src/test/resource/resolve-deps/src/main/scala/Main.scala delete mode 100644 scalalib/src/test/scala/mill/scalalib/AcyclicTests.scala delete 
mode 100644 scalalib/src/test/scala/mill/scalalib/BetterFilesTests.scala
delete mode 100644 scalalib/src/test/scala/mill/scalalib/JawnTests.scala

(limited to 'scalalib')

diff --git a/scalalib/src/main/scala/mill/scalalib/Lib.scala b/scalalib/src/main/scala/mill/scalalib/Lib.scala
index 55c28a06..098da786 100644
--- a/scalalib/src/main/scala/mill/scalalib/Lib.scala
+++ b/scalalib/src/main/scala/mill/scalalib/Lib.scala
@@ -206,7 +206,7 @@ object Lib{
       .unsafePerformSync
       .flatMap(_.toOption)
 
-    localArtifacts.map(p => PathRef(Path(p), quick = true))
+    localArtifacts.map(p => PathRef(Path(p), quick = true)).filter(_.path.ext == "jar")
   }
 }
 def scalaCompilerIvyDeps(scalaVersion: String) = Seq(
diff --git a/scalalib/src/main/scala/mill/scalalib/Module.scala b/scalalib/src/main/scala/mill/scalalib/Module.scala
index 1ff4c240..c444aeaa 100644
--- a/scalalib/src/main/scala/mill/scalalib/Module.scala
+++ b/scalalib/src/main/scala/mill/scalalib/Module.scala
@@ -36,7 +36,7 @@ trait TestModule extends Module with TaskModule {
       jvmOptions = forkArgs(),
       options = Seq(
         testFramework(),
-        (runDepClasspath().map(_.path) :+ compile().classes.path).mkString(" "),
+        (runDepClasspath().map(_.path) :+ compile().classes.path).distinct.mkString(" "),
         Seq(compile().classes.path).mkString(" "),
         args.mkString(" "),
         outputPath.toString
@@ -210,12 +210,14 @@ trait Module extends mill.Module with TaskModule { outer =>
     )
   }
   def assemblyClasspath = T{
-    (runDepClasspath().filter(_.path.ext != "pom") ++
-      Seq(resources(), compile().classes)).map(_.path).filter(exists)
+    runDepClasspath() ++ Seq(resources(), compile().classes)
   }
 
   def assembly = T{
-    createAssembly(assemblyClasspath(), prependShellScript = prependShellScript())
+    createAssembly(
+      assemblyClasspath().map(_.path).filter(exists),
+      prependShellScript = prependShellScript()
+    )
   }
   def classpath = T{ Seq(resources(), compile().classes) }
 
@@ -263,7 +265,7 @@ trait Module extends mill.Module with TaskModule { outer =>
   def console() = T.command{
     interactiveSubprocess(
       mainClass = "scala.tools.nsc.MainGenericRunner",
-      classPath = externalCompileDepClasspath().map(_.path) :+ compile().classes.path,
+      classPath = assemblyClasspath().map(_.path),
       options = Seq("-usejavacp")
     )
   }
diff --git a/scalalib/src/main/scala/mill/scalalib/TestRunner.scala b/scalalib/src/main/scala/mill/scalalib/TestRunner.scala
index d92a9deb..0a3df35e 100644
--- a/scalalib/src/main/scala/mill/scalalib/TestRunner.scala
+++ b/scalalib/src/main/scala/mill/scalalib/TestRunner.scala
@@ -62,6 +62,7 @@ object TestRunner {
             args: Seq[String])
            (implicit ctx: LogCtx): (String, Seq[Result]) = {
     val outerClassLoader = getClass.getClassLoader
+    pprint.log(entireClasspath.map(_.toIO.toURI.toURL).toArray, height=9999)
     val cl = new URLClassLoader(
       entireClasspath.map(_.toIO.toURI.toURL).toArray,
       ClassLoader.getSystemClassLoader().getParent()){
@@ -84,9 +85,7 @@ object TestRunner {
 
     val tasks = runner.tasks(
       for((cls, fingerprint) <- testClasses.toArray)
-      yield {
-        new TaskDef(cls.getName.stripSuffix("$"), fingerprint, true, Array())
-      }
+      yield new TaskDef(cls.getName.stripSuffix("$"), fingerprint, true, Array())
     )
     val events = mutable.Buffer.empty[Event]
     for(t <- tasks){
diff --git a/scalalib/src/test/resource/acyclic/build.sbt b/scalalib/src/test/resource/acyclic/build.sbt
deleted file mode 100644
index 3fd0f8e4..00000000
--- a/scalalib/src/test/resource/acyclic/build.sbt
+++ /dev/null
@@ -1,49 +0,0 @@
-
-organization := "com.lihaoyi"
-
-name := "acyclic"
-
-version := "0.1.7"
-
-scalaVersion := "2.11.8"
-
-crossScalaVersions := Seq("2.10.6", "2.11.8", "2.12.0") - -resolvers += Resolver.sonatypeRepo("releases") - -libraryDependencies ++= Seq( - "com.lihaoyi" %% "utest" % "0.4.4" % "test", - "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided" -) - -testFrameworks += new TestFramework("utest.runner.Framework") - -unmanagedSourceDirectories in Test <+= baseDirectory(_ / "src" / "test" / "resources") - -// Sonatype -publishArtifact in Test := false - -publishTo <<= version { (v: String) => - Some("releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2") -} - -pomExtra := ( - https://github.com/lihaoyi/acyclic - - - MIT license - http://www.opensource.org/licenses/mit-license.php - - - - git://github.com/lihaoyi/utest.git - scm:git://github.com/lihaoyi/acyclic.git - - - - lihaoyi - Li Haoyi - https://github.com/lihaoyi - - - ) diff --git a/scalalib/src/test/resource/acyclic/project/build.properties b/scalalib/src/test/resource/acyclic/project/build.properties deleted file mode 100644 index 817bc38d..00000000 --- a/scalalib/src/test/resource/acyclic/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=0.13.9 diff --git a/scalalib/src/test/resource/acyclic/project/build.sbt b/scalalib/src/test/resource/acyclic/project/build.sbt deleted file mode 100644 index 7a1f37db..00000000 --- a/scalalib/src/test/resource/acyclic/project/build.sbt +++ /dev/null @@ -1,2 +0,0 @@ - -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/main/resources/scalac-plugin.xml b/scalalib/src/test/resource/acyclic/src/main/resources/scalac-plugin.xml deleted file mode 100644 index 7fd6e95b..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/resources/scalac-plugin.xml +++ /dev/null @@ -1,4 +0,0 @@ - - acyclic - acyclic.plugin.RuntimePlugin - \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/package.scala b/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/package.scala deleted file mode 100644 index 0d656160..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/package.scala +++ /dev/null @@ -1,23 +0,0 @@ -import scala.reflect.internal.annotations.compileTimeOnly -package object acyclic { - /** - * Import this within a file to make Acyclic verify that the file does not - * have any circular dependencies with other files. - */ - @compileTimeOnly("acyclic.file is just a marker and not a real value") - def file = () - - /** - * - */ - @compileTimeOnly("acyclic.file is just a marker and not a real value") - def skipped = () - - /** - * Import this within a package object to make Acyclic verify that the entire - * package does not have any circular dependencies with other files or - * packages. Circular dependencies *within* the package are Ok. 
- */ - @compileTimeOnly("acyclic.pkg is just a marker and not a real value") - def pkg = () -} diff --git a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/DependencyExtraction.scala b/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/DependencyExtraction.scala deleted file mode 100644 index 46aacc2b..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/DependencyExtraction.scala +++ /dev/null @@ -1,100 +0,0 @@ -//acyclic -package acyclic.plugin -import acyclic.file -import scala.tools.nsc.Global -object DependencyExtraction{ - def apply(global: Global)(unit: global.CompilationUnit): Seq[(global.Symbol, global.Tree)] = { - import global._ - - class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser { - var collected: List[T] = Nil - def traverse(tpe: Type): Unit = { - if (pf.isDefinedAt(tpe)) - collected = pf(tpe) :: collected - mapOver(tpe) - } - } - - class ExtractDependenciesTraverser extends Traverser { - protected val depBuf = collection.mutable.ArrayBuffer.empty[(Symbol, Tree)] - protected def addDependency(sym: Symbol, tree: Tree): Unit = depBuf += ((sym, tree)) - def dependencies: collection.immutable.Set[(Symbol, Tree)] = { - // convert to immutable set and remove NoSymbol if we have one - depBuf.toSet - } - - } - - class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser { - override def traverse(tree: Tree): Unit = { - tree match { - case i @ Import(expr, selectors) => - - selectors.foreach { - case ImportSelector(nme.WILDCARD, _, null, _) => - // in case of wildcard import we do not rely on any particular name being defined - // on `expr`; all symbols that are being used will get caught through selections - case ImportSelector(name: Name, _, _, _) => - def lookupImported(name: Name) = expr.symbol.info.member(name) - // importing a name means importing both a term and a type (if they exist) - addDependency(lookupImported(name.toTermName), tree) - addDependency(lookupImported(name.toTypeName), tree) - } - case select: Select => - addDependency(select.symbol, tree) - /* - * Idents are used in number of situations: - * - to refer to local variable - * - to refer to a top-level package (other packages are nested selections) - * - to refer to a term defined in the same package as an enclosing class; - * this looks fishy, see this thread: - * https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion - */ - case ident: Ident => - addDependency(ident.symbol, tree) - case typeTree: TypeTree => - val typeSymbolCollector = new CollectTypeTraverser({ - case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol - }) - typeSymbolCollector.traverse(typeTree.tpe) - val deps = typeSymbolCollector.collected.toSet - deps.foreach(addDependency(_, tree)) - case Template(parents, self, body) => - traverseTrees(body) - case other => () - } - super.traverse(tree) - } - } - - def byMembers(): collection.immutable.Set[(Symbol, Tree)] = { - val traverser = new ExtractDependenciesByMemberRefTraverser - if (!unit.isJava) - traverser.traverse(unit.body) - traverser.dependencies - } - - - class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser { - override def traverse(tree: Tree): Unit = tree match { - case Template(parents, self, body) => - // we are using typeSymbol and not typeSymbolDirect because we want - // type aliases to be expanded - val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet - debuglog("Parent type symbols for " + 
tree.pos + ": " + parentTypeSymbols.map(_.fullName)) - parentTypeSymbols.foreach(addDependency(_, tree)) - traverseTrees(body) - case tree => super.traverse(tree) - } - } - - def byInheritence(): collection.immutable.Set[(Symbol, Tree)] = { - val traverser = new ExtractDependenciesByInheritanceTraverser - if (!unit.isJava) - traverser.traverse(unit.body) - traverser.dependencies - } - - (byMembers() | byInheritence()).toSeq - } -} diff --git a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/GraphAnalysis.scala b/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/GraphAnalysis.scala deleted file mode 100644 index bf72ce39..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/GraphAnalysis.scala +++ /dev/null @@ -1,103 +0,0 @@ -package acyclic.plugin -import acyclic.file -import scala.tools.nsc.Global -import collection.mutable - -sealed trait Value{ - def pkg: List[String] - def prettyPrint: String -} -object Value{ - case class File(path: String, pkg: List[String] = Nil) extends Value{ - def prettyPrint = s"file $path" - } - case class Pkg(pkg: List[String]) extends Value{ - def prettyPrint = s"package ${pkg.mkString(".")}" - } - object Pkg{ - def apply(s: String): Pkg = apply(s.split('.').toList) - } -} - -trait GraphAnalysis{ - val global: Global - import global._ - - case class Node[+T <: Value](value: T, dependencies: Map[Value, Seq[Tree]]){ - override def toString = s"DepNode(\n $value, \n ${dependencies.keys}\n)" - } - - type DepNode = Node[Value] - type FileNode = Node[Value.File] - type PkgNode = Node[Value.Pkg] - - object DepNode{ - /** - * Does a double Breadth-First-Search to find the shortest cycle starting - * from `from` within the DepNodes in `among`. - */ - def smallestCycle(from: DepNode, among: Seq[DepNode]): Seq[DepNode] = { - val nodeMap = among.map(n => n.value -> n).toMap - val distances = mutable.Map(from -> 0) - val queue = mutable.Queue(from) - while(queue.nonEmpty){ - val next = queue.dequeue() - val children = next.dependencies - .keys - .collect(nodeMap) - .filter(!distances.contains(_)) - - children.foreach(distances(_) = distances(next) + 1) - queue.enqueue(children.toSeq:_*) - } - var route = List(from) - while(route.length == 1 || route.head != from){ - route ::= among.filter(x => x.dependencies.keySet.contains(route.head.value)) - .minBy(distances) - } - route.tail - } - - /** - * Finds the strongly-connected components of the directed DepNode graph - * by finding cycles in a Depth-First manner and collapsing any components - * whose nodes are involved in the cycle. - */ - def stronglyConnectedComponents(nodes: Seq[DepNode]): Seq[Seq[DepNode]] = { - - val nodeMap = nodes.map(n => n.value -> n).toMap - - val components = mutable.Map.empty[DepNode, Int] ++ nodes.zipWithIndex.toMap - val visited = mutable.Set.empty[DepNode] - - nodes.foreach(n => rec(n, Nil)) - - def rec(node: DepNode, path: List[DepNode]): Unit = { - if (path.exists(components(_) == components(node))) { - val cycle = path.reverse - .dropWhile(components(_) != components(node)) - - val involved = cycle.map(components) - val firstIndex = involved.head - for ((n, i) <- components.toSeq){ - if (involved.contains(i)){ - components(n) = firstIndex - } - } - } else if (!visited(node)) { - visited.add(node) - // sketchy sorting to make sure we're doing this deterministically... 
- for((key, lines) <- node.dependencies.toSeq.sortBy(_._1.toString)){ - rec(nodeMap(key), node :: path) - } - } - } - - components.groupBy{case (node, i) => i} - .toSeq - .sortBy(_._1) - .map(_._2.keys.toSeq) - } - } - -} \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/Plugin.scala b/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/Plugin.scala deleted file mode 100644 index 257894c9..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/Plugin.scala +++ /dev/null @@ -1,26 +0,0 @@ -package acyclic.plugin -import acyclic.file -import tools.nsc.Global -import scala.collection.SortedSet - -class RuntimePlugin(global: Global) extends TestPlugin(global) -class TestPlugin(val global: Global, - cycleReporter: Seq[(Value, SortedSet[Int])] => Unit = _ => ()) - extends tools.nsc.plugins.Plugin { - - val name = "acyclic" - - var force = false - // Yeah processOptions is deprecated but keep using it anyway for 2.10.x compatibility - override def processOptions(options: List[String], error: String => Unit): Unit = { - if (options.contains("force")) { - force = true - } - } - val description = "Allows the developer to prohibit inter-file dependencies" - - - val components = List[tools.nsc.plugins.PluginComponent]( - new PluginPhase(this.global, cycleReporter, force) - ) -} diff --git a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/PluginPhase.scala b/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/PluginPhase.scala deleted file mode 100644 index eaee91a7..00000000 --- a/scalalib/src/test/resource/acyclic/src/main/scala/acyclic/plugin/PluginPhase.scala +++ /dev/null @@ -1,180 +0,0 @@ - -package acyclic.plugin -import acyclic.file -import scala.collection.{SortedSet, mutable} -import scala.tools.nsc.{Global, Phase} -import tools.nsc.plugins.PluginComponent - -/** - * - Break dependency graph into strongly connected components - * - Turn acyclic packages into virtual "files" in the dependency graph, as - * aggregates of all the files within them - * - Any strongly connected component which includes an acyclic.file or - * acyclic.pkg is a failure - * - Pick an arbitrary cycle and report it - * - Don't report more than one cycle per file/pkg, to avoid excessive spam - */ -class PluginPhase(val global: Global, - cycleReporter: Seq[(Value, SortedSet[Int])] => Unit, - force: => Boolean) - extends PluginComponent - with GraphAnalysis { t => - - import global._ - - val runsAfter = List("typer") - - override val runsBefore = List("patmat") - - val phaseName = "acyclic" - def pkgName(unit: CompilationUnit) = { - unit.body - .collect{case x: PackageDef => x.pid.toString} - .flatMap(_.split('.')) - } - - def units = global.currentRun - .units - .toSeq - .sortBy(_.source.content.mkString.hashCode()) - - def findAcyclics() = { - val acyclicNodePaths = for { - unit <- units - if unit.body.children.collect{ - case Import(expr, List(sel)) => - expr.symbol.toString == "package acyclic" && sel.name.toString == "file" - }.exists(x => x) - } yield { - Value.File(unit.source.path, pkgName(unit)) - } - val skipNodePaths = for { - unit <- units - if unit.body.children.collect{ - case Import(expr, List(sel)) => - expr.symbol.toString == "package acyclic" && sel.name.toString == "skipped" - }.exists(x => x) - } yield { - Value.File(unit.source.path, pkgName(unit)) - } - - val acyclicPkgNames = for { - unit <- units - pkgObject <- unit.body.collect{case x: ModuleDef if x.name.toString == "package" 
=> x } - if pkgObject.impl.children.collect{case Import(expr, List(sel)) => - expr.symbol.toString == "package acyclic" && sel.name.toString == "pkg" - }.exists(x => x) - } yield { - Value.Pkg( - pkgObject.symbol - .enclosingPackageClass - .fullName - .split('.') - .toList - ) - } - (skipNodePaths, acyclicNodePaths, acyclicPkgNames) - } - - override def newPhase(prev: Phase): Phase = new Phase(prev) { - override def run() { - val unitMap = units.map(u => u.source.path -> u).toMap - val nodes = for (unit <- units) yield { - - val deps = DependencyExtraction(t.global)(unit) - - val connections = for{ - (sym, tree) <- deps - if sym != NoSymbol - if sym.sourceFile != null - if sym.sourceFile.path != unit.source.path - } yield (sym.sourceFile.path, tree) - - Node[Value.File]( - Value.File(unit.source.path, pkgName(unit)), - connections.groupBy(c => Value.File(c._1, pkgName(unitMap(c._1))): Value) - .mapValues(_.map(_._2)) - ) - } - - val nodeMap = nodes.map(n => n.value -> n).toMap - - val (skipNodePaths, acyclicFiles, acyclicPkgs) = findAcyclics() - - val allAcyclics = acyclicFiles ++ acyclicPkgs - - // synthetic nodes for packages, which aggregate the dependencies of - // their contents - val pkgNodes = acyclicPkgs.map{ value => - Node( - value, - nodes.filter(_.value.pkg.startsWith(value.pkg)) - .flatMap(_.dependencies.toSeq) - .groupBy(_._1) - .mapValues(_.flatMap(_._2)) - ) - } - - val linkedNodes: Seq[DepNode] = (nodes ++ pkgNodes).map{ d => - val extraLinks = for{ - (value: Value.File, pos) <- d.dependencies - acyclicPkg <- acyclicPkgs - if nodeMap(value).value.pkg.startsWith(acyclicPkg.pkg) - if !d.value.pkg.startsWith(acyclicPkg.pkg) - } yield (acyclicPkg, pos) - d.copy(dependencies = d.dependencies ++ extraLinks) - } - - // only care about cycles with size > 1 here - val components = DepNode.stronglyConnectedComponents(linkedNodes) - .filter(_.size > 1) - - val usedNodes = mutable.Set.empty[DepNode] - for{ - c <- components - n <- c - if !usedNodes.contains(n) - if (!force && allAcyclics.contains(n.value)) || (force && !skipNodePaths.contains(n.value)) - }{ - val cycle = DepNode.smallestCycle(n, c) - val cycleInfo = - (cycle :+ cycle.head).sliding(2) - .map{ case Seq(a, b) => (a.value, a.dependencies(b.value))} - .toSeq - cycleReporter( - cycleInfo.map{ case (a, b) => a -> b.map(_.pos.line).to[SortedSet]} - ) - - global.error("Unwanted cyclic dependency") - for (Seq((value, locs), (nextValue, _)) <- (cycleInfo :+ cycleInfo.head).sliding(2)){ - global.inform("") - value match{ - case Value.Pkg(pkg) => global.inform(s"package ${pkg.mkString(".")}") - case Value.File(_, _) => - } - - units.find(_.source.path == locs.head.pos.source.path) - .get - .echo(locs.head.pos, "") - - val otherLines = locs.tail - .map(_.pos.line) - .filter(_ != locs.head.pos.line) - - global.inform("symbol: " + locs.head.symbol.toString) - - if (!otherLines.isEmpty){ - global.inform("More dependencies at lines " + otherLines.mkString(" ")) - } - - } - global.inform("") - usedNodes ++= cycle - } - } - - def name: String = "acyclic" - } - - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/A.scala deleted file mode 100644 index a0ff0100..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/A.scala +++ /dev/null @@ -1,6 +0,0 @@ -package fail.cyclicgraph -import acyclic.file - -class A{ - val e = new E -} diff --git 
a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/B.scala deleted file mode 100644 index d1004f5a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/B.scala +++ /dev/null @@ -1,6 +0,0 @@ -package fail.cyclicgraph -import acyclic.file - -class B { - val a: A = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/C.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/C.scala deleted file mode 100644 index 9aebe3a0..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/C.scala +++ /dev/null @@ -1,6 +0,0 @@ -package fail.cyclicgraph -import acyclic.file - -object C extends A{ - val a: A = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/D.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/D.scala deleted file mode 100644 index 9c148b0a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/D.scala +++ /dev/null @@ -1,7 +0,0 @@ -package fail.cyclicgraph -import acyclic.file - -class D { - val b: A = null - val c = C -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/E.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/E.scala deleted file mode 100644 index 00551a06..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicgraph/E.scala +++ /dev/null @@ -1,7 +0,0 @@ -package fail.cyclicgraph -import acyclic.file - -class E { - val a: A = null - val d = new D -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A1.scala deleted file mode 100644 index 530e7820..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A1.scala +++ /dev/null @@ -1,7 +0,0 @@ -package fail.cyclicpackage -package a -import acyclic.file - -class A1 extends b.B1{ - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A2.scala deleted file mode 100644 index 95606566..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -package fail.cyclicpackage.a -class A2 { - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/package.scala deleted file mode 100644 index 9ee69111..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/a/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.cyclicpackage - -package object a { - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B1.scala deleted file mode 100644 index 9b9de725..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B1.scala +++ /dev/null @@ -1,3 +0,0 @@ -package fail.cyclicpackage.b -import acyclic.file -class B1 \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B2.scala 
b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B2.scala deleted file mode 100644 index 87cabd93..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/B2.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.cyclicpackage -package b -import acyclic.file - -class B2 extends a.A2 \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/package.scala deleted file mode 100644 index 5f6d9041..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/cyclicpackage/b/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.cyclicpackage - -package object b { - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/A.scala deleted file mode 100644 index d8d118b6..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.halfpackagecycle - -class A { - val thing = c.C1 -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/B.scala deleted file mode 100644 index 114d6197..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -package fail.halfpackagecycle - -class B extends A diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C1.scala deleted file mode 100644 index be4eecf8..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C1.scala +++ /dev/null @@ -1,3 +0,0 @@ -package fail.halfpackagecycle.c - -object C1 diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C2.scala deleted file mode 100644 index be3e0c63..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/C2.scala +++ /dev/null @@ -1,6 +0,0 @@ -package fail.halfpackagecycle -package c - -class C2 { - lazy val b = new B -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/package.scala deleted file mode 100644 index 295a9e7a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/halfpackagecycle/c/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.halfpackagecycle - -package object c { - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/A.scala deleted file mode 100644 index ec4fa106..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/A.scala +++ /dev/null @@ -1,7 +0,0 @@ -package fail.indirect -import acyclic.file - -object A -class A { - val b: B = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/B.scala deleted file mode 100644 index 
f9f8450a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -package fail.indirect - -class B extends C diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/C.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/C.scala deleted file mode 100644 index 986baaf3..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/indirect/C.scala +++ /dev/null @@ -1,5 +0,0 @@ -package fail.indirect - -class C { - val a = A -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/A.scala deleted file mode 100644 index e1f95ae9..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/A.scala +++ /dev/null @@ -1,7 +0,0 @@ -package fail.simple -import acyclic.file - - -class A { - val b: B = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/B.scala deleted file mode 100644 index fa9ee63f..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/fail/simple/B.scala +++ /dev/null @@ -1,6 +0,0 @@ -package fail.simple - -class B { - val a1: A = new A - val a2: A = new A -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/A.scala deleted file mode 100644 index 24a2a633..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/A.scala +++ /dev/null @@ -1,7 +0,0 @@ -package force.simple - - - -class A { - val b: B = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/B.scala deleted file mode 100644 index 50c5d305..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/force/simple/B.scala +++ /dev/null @@ -1,6 +0,0 @@ -package force.simple - -class B { - val a1: A = new A - val a2: A = new A -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/A.scala deleted file mode 100644 index 3f2464cd..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/A.scala +++ /dev/null @@ -1,7 +0,0 @@ -package force.skip -import acyclic.skipped - - -class A { - val b: B = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/B.scala deleted file mode 100644 index b00c6db2..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/force/skip/B.scala +++ /dev/null @@ -1,6 +0,0 @@ -package force.skip -import acyclic.skipped -class B { - val a1: A = new A - val a2: A = new A -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/A.scala deleted file mode 100644 index 902ee5fe..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.cyclicunmarked - -class A { - val b: B = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/B.scala 
b/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/B.scala deleted file mode 100644 index 203707ed..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/cyclicunmarked/B.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.cyclicunmarked - -class B { - val a1: A = new A - val a2: A = new A -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/A.scala deleted file mode 100644 index c9a27490..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -package success.dag - -class A { -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/B.scala deleted file mode 100644 index 3858e677..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/B.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.dag - -class B { - val a: A = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/C.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/C.scala deleted file mode 100644 index c4635adf..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/C.scala +++ /dev/null @@ -1,3 +0,0 @@ -package success.dag - -object C extends A diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/D.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/D.scala deleted file mode 100644 index 3ab67e39..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/D.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.dag - -class D { - val b: A = null - val c = C -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/E.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/E.scala deleted file mode 100644 index 4148d75a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/dag/E.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.dag - -class E { - val a: A = null - val d = new D -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/java/SomeJava.java b/scalalib/src/test/resource/acyclic/src/test/resources/success/java/SomeJava.java deleted file mode 100644 index cad93696..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/java/SomeJava.java +++ /dev/null @@ -1,4 +0,0 @@ - -public interface SomeJava { - -} \ No newline at end of file diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A1.scala deleted file mode 100644 index 3d5bc5b3..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A1.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.halfacyclicpackage -package a - -class A1 extends b.B1{ - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A2.scala deleted file mode 100644 index 88ee4a03..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.halfacyclicpackage.a - -class A2 { - - } diff --git 
a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/package.scala deleted file mode 100644 index 54f98aff..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/a/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.halfacyclicpackage - -package object a { - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B1.scala deleted file mode 100644 index 074f808a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B1.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.halfacyclicpackage.b - -class B1 { - - } diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B2.scala deleted file mode 100644 index 6e4dfdd5..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/halfacyclic/b/B2.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.halfacyclicpackage -package b - -class B2 extends a.A2{ - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A1.scala deleted file mode 100644 index 583e6c68..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A1.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.pkg.innercycle.a - -class A1 { - val x: A2 = null - def y = p -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A2.scala deleted file mode 100644 index 65f656a4..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/A2.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.pkg.innercycle.a - -class A2 { - val x: A1 = null - def z = p -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/package.scala deleted file mode 100644 index 165fda66..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/innercycle/a/package.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.pkg.innercycle - -package object a { - val p: A1 with A2 = null - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A1.scala deleted file mode 100644 index 3158f120..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A1.scala +++ /dev/null @@ -1,7 +0,0 @@ -package success.cyclicpackage -package a - - -class A1 extends b.B1{ - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A2.scala deleted file mode 100644 index 1c36fe2a..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/a/A2.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.cyclicpackage.a 
- - -class A2 { - - } diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B1.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B1.scala deleted file mode 100644 index 33e10fc1..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B1.scala +++ /dev/null @@ -1,6 +0,0 @@ -package success.cyclicpackage.b - - -class B1 { - - } diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B2.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B2.scala deleted file mode 100644 index 57e324ce..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/mutualcyclic/b/B2.scala +++ /dev/null @@ -1,7 +0,0 @@ -package success.cyclicpackage -package b - - -class B2 extends a.A2{ - -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/single/pkg/package.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/single/pkg/package.scala deleted file mode 100644 index c39b5e62..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/pkg/single/pkg/package.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.singlepackage - -package object pkg { - import acyclic.pkg -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/A.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/A.scala deleted file mode 100644 index 24b9d0d3..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -package success.simple - -class A { -} diff --git a/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/B.scala b/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/B.scala deleted file mode 100644 index b7ca5335..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/resources/success/simple/B.scala +++ /dev/null @@ -1,5 +0,0 @@ -package success.simple - -class B { - val a: A = null -} diff --git a/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/CycleTests.scala b/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/CycleTests.scala deleted file mode 100644 index ff831aad..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/CycleTests.scala +++ /dev/null @@ -1,68 +0,0 @@ -package acyclic - -import utest._ -import TestUtils.{make, makeFail} -import scala.tools.nsc.util.ScalaClassLoader.URLClassLoader -import acyclic.plugin.Value.{Pkg, File} -import scala.collection.SortedSet -import acyclic.file - -object CycleTests extends TestSuite{ - - def tests = TestSuite{ - 'fail{ - 'simple-makeFail("fail/simple")(Seq( - File("B.scala") -> SortedSet(4, 5), - File("A.scala") -> SortedSet(6) - )) - - 'indirect-makeFail("fail/indirect")(Seq( - File("A.scala") -> SortedSet(6), - File("B.scala") -> SortedSet(3), - File("C.scala") -> SortedSet(4) - )) - 'cyclicgraph-makeFail("fail/cyclicgraph")( - Seq( - File("A.scala") -> SortedSet(5), - File("E.scala") -> SortedSet(6), - File("D.scala") -> SortedSet(6), - File("C.scala") -> SortedSet(4, 5) - ) - ) - 'cyclicpackage-makeFail("fail/cyclicpackage")( - Seq( - Pkg("fail.cyclicpackage.b") -> SortedSet(5), - Pkg("fail.cyclicpackage.a") -> SortedSet(5) - ) - ) - 'halfpackagecycle-makeFail("fail/halfpackagecycle")(Seq( - File("B.scala") -> SortedSet(3), - File("A.scala") -> SortedSet(4), - Pkg("fail.halfpackagecycle.c") -> SortedSet(5) - )) - 
} - 'success{ - 'simple-make("success/simple") - 'ignorejava-make("success/java") - 'cyclicunmarked-make("success/cyclicunmarked") - 'dag-make("success/dag") - 'pkg{ - "single" - make("success/pkg/single") - "mutualcyclic" - make("success/pkg/mutualcyclic") - "halfacyclic" - make("success/pkg/halfacyclic") - "innercycle" - make("success/pkg/innercycle") - } - } - 'self-make("../../main/scala", extraIncludes = Nil) - 'force{ - 'fail-makeFail("force/simple", force = true)(Seq( - File("B.scala") -> SortedSet(4, 5), - File("A.scala") -> SortedSet(6) - )) - 'pass-make("force/simple") - 'skip-make("force/skip", force = true) - } - } -} - - diff --git a/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/TestUtils.scala b/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/TestUtils.scala deleted file mode 100644 index 7bff8248..00000000 --- a/scalalib/src/test/resource/acyclic/src/test/scala/acyclic/TestUtils.scala +++ /dev/null @@ -1,92 +0,0 @@ -package acyclic - -import tools.nsc.{Global, Settings} -import tools.nsc.reporters.ConsoleReporter -import tools.nsc.plugins.Plugin - -import java.net.URLClassLoader -import scala.tools.nsc.util.ClassPath -import utest._, asserts._ -import scala.reflect.io.VirtualDirectory -import acyclic.plugin.Value -import scala.collection.SortedSet - -object TestUtils { - def getFilePaths(src: String): List[String] = { - val f = new java.io.File(src) - if (f.isDirectory) f.list.toList.flatMap(x => getFilePaths(src + "/" + x)) - else List(src) - } - - /** - * Attempts to compile a resource folder as a compilation run, in order - * to test whether it succeeds or fails correctly. - */ - def make(path: String, - extraIncludes: Seq[String] = Seq("src/main/scala/acyclic/package.scala"), - force: Boolean = false) = { - val src = "src/test/resources/" + path - val sources = getFilePaths(src) ++ extraIncludes - - val vd = new VirtualDirectory("(memory)", None) - lazy val settings = new Settings - val loader = getClass.getClassLoader.asInstanceOf[URLClassLoader] - val entries = loader.getURLs map(_.getPath) - settings.outputDirs.setSingleOutput(vd) - - // annoyingly, the Scala library is not in our classpath, so we have to add it manually - val sclpath = entries.map( - _.replaceAll("scala-compiler.jar", "scala-library.jar") - ) - - settings.classpath.value = ClassPath.join(entries ++ sclpath : _*) - - if (force) settings.pluginOptions.value = List("acyclic:force") - - var cycles: Option[Seq[Seq[(acyclic.plugin.Value, SortedSet[Int])]]] = None - lazy val compiler = new Global(settings, new ConsoleReporter(settings)){ - override protected def loadRoughPluginsList(): List[Plugin] = { - List(new plugin.TestPlugin(this, foundCycles => cycles = cycles match{ - case None => Some(Seq(foundCycles)) - case Some(oldCycles) => Some(oldCycles :+ foundCycles) - })) - } - } - val run = new compiler.Run() - run.compile(sources) - - if (vd.toList.isEmpty) throw CompilationException(cycles.get) - } - - def makeFail(path: String, force: Boolean = false)(expected: Seq[(Value, SortedSet[Int])]*) = { - def canonicalize(cycle: Seq[(Value, SortedSet[Int])]): Seq[(Value, SortedSet[Int])] = { - val startIndex = cycle.indexOf(cycle.minBy(_._1.toString)) - cycle.toList.drop(startIndex) ++ cycle.toList.take(startIndex) - } - - val ex = intercept[CompilationException]{ make(path, force = force) } - val cycles = ex.cycles - .map(canonicalize) - .map( - _.map{ - case (Value.File(p, pkg), v) => (Value.File(p, Nil), v) - case x => x - } - ) - .toSet - - def expand(v: Value) = v match{ - case 
Value.File(filePath, pkg) => Value.File("src/test/resources/" + path + "/" + filePath, Nil) - case v => v - } - - val fullExpected = expected.map(_.map(x => x.copy(_1 = expand(x._1)))) - .map(canonicalize) - .toSet - - assert(fullExpected.forall(cycles.contains)) - } - - case class CompilationException(cycles: Seq[Seq[(Value, SortedSet[Int])]]) extends Exception - -} diff --git a/scalalib/src/test/resource/better-files/.gitignore b/scalalib/src/test/resource/better-files/.gitignore deleted file mode 100644 index 6f460f93..00000000 --- a/scalalib/src/test/resource/better-files/.gitignore +++ /dev/null @@ -1,206 +0,0 @@ -# Created by https://www.gitignore.io/api/linux,osx,windows,intellij,eclipse,sbt,scala - -### Eclipse ### - -.metadata -bin/ -tmp/ -*.tmp -*.bak -*.swp -*~.nib -local.properties -.settings/ -.loadpath -.recommenders - -# External tool builders -.externalToolBuilders/ - -# Locally stored "Eclipse launch configurations" -*.launch - -# PyDev specific (Python IDE for Eclipse) -*.pydevproject - -# CDT-specific (C/C++ Development Tooling) -.cproject - -# Java annotation processor (APT) -.factorypath - -# PDT-specific (PHP Development Tools) -.buildpath - -# sbteclipse plugin -.target - -# Tern plugin -.tern-project - -# TeXlipse plugin -.texlipse - -# STS (Spring Tool Suite) -.springBeans - -# Code Recommenders -.recommenders/ - -# Scala IDE specific (Scala & Java development for Eclipse) -.cache-main -.scala_dependencies -.worksheet - -### Eclipse Patch ### -# Eclipse Core -.project - -# JDT-specific (Eclipse Java Development Tools) -.classpath - -### Intellij ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 -.idea/ - -# User-specific stuff: -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/dictionaries - -# Sensitive or high-churn files: -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.xml -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml - -# Gradle: -.idea/**/gradle.xml -.idea/**/libraries - -# CMake -cmake-build-debug/ - -# Mongo Explorer plugin: -.idea/**/mongoSettings.xml - -## File-based project format: -*.iws - -## Plugin-specific files: - -# IntelliJ -/out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -### Intellij Patch ### -# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 - -# *.iml -# modules.xml -# .idea/misc.xml -# *.ipr - -# Sonarlint plugin -.idea/sonarlint - -### Linux ### -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -### OSX ### -*.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP 
share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### SBT ### -# Simple Build Tool -# http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control - -dist/* -target/ -lib_managed/ -src_managed/ -project/boot/ -project/plugins/project/ -.history -.cache -.lib/ - -### Scala ### -*.class -*.log - -### Windows ### -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Folder config file -Desktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msm -*.msp - -# Windows shortcuts -*.lnk - -# End of https://www.gitignore.io/api/linux,osx,windows,intellij,eclipse,sbt,scala diff --git a/scalalib/src/test/resource/better-files/CHANGES.md b/scalalib/src/test/resource/better-files/CHANGES.md deleted file mode 100644 index 4103ef04..00000000 --- a/scalalib/src/test/resource/better-files/CHANGES.md +++ /dev/null @@ -1,73 +0,0 @@ -better-files follows the following `MAJOR.MINOR.PATCH` release conventions: -- **Changes in `PATCH` version**: - - Minor functionality changes (usually bug fixes) - - No breaking public API changes - - New APIs might be added -- **Change in `MINOR` version**: - - In addition to `PATCH` changes - - Minor API shape changes e.g. renaming, deprecations - - Trivial to modify code to address compilation issues -- **Change in `MAJOR` version**: - - In addition to `MINOR` changes - - Significant structural and API changes - ------------ - -## v4.0.0 -* [Issue #129](https://github.com/pathikrit/better-files/issues/129): JSR-203 and JimFS compatibility -* [Issue #88](https://github.com/pathikrit/better-files/issues/88): Strongly typed relative and absolute path APIs -* [Issue #122](https://github.com/pathikrit/better-files/issues/122): Scala Platform Release - Support for Scala 2.13 and 2.11 -* Move Scanner to own module that depends on cats/shapeless -* Remove implicit options from all APIs - -## v3.2.1 -* [Issue #193](https://github.com/pathikrit/better-files/issues/193): Handle fast changing directory watching on Windows -* [Issue #195](https://github.com/pathikrit/better-files/issues/195): Do not swallow `FileAlreadyExistsException` when creating directory or file -* [Add](https://github.com/pathikrit/better-files/commit/00f27867ebd0cddec1ace7835dcc2375869fb3ae) method to check verified file existence (or non-existence) -* [Issue #198](https://github.com/pathikrit/better-files/issues/198): `InputStreamOps#asString` doesn't close the stream on exception -* [PR #199](https://github.com/pathikrit/better-files/pull/199): Utils for Object I/O - -## v3.2.0 -* [Rename](https://github.com/pathikrit/better-files/commit/ec34a6f843fec941b51bdddafc2e07e5bc0e1cbb) PosixFilePermissions.OTHERS* APIs -* [Issue #186](https://github.com/pathikrit/better-files/issues/186): Splitter based Scanner -* [Issue #173](https://github.com/pathikrit/better-files/issues/173): Better ARM handling of fatal errors -* [Issue #182](https://github.com/pathikrit/better-files/issues/182): Move and Copy *into* directory utils -* [Issue #189](https://github.com/pathikrit/better-files/issues/189): Util to read String from an InputStream -* [Issue #187](https://github.com/pathikrit/better-files/issues/187): Readers for `java.time.*` and `java.sql.*` -* [Restore File.usingTemp](https://github.com/pathikrit/better-files/commit/35184a642245db3d1e41fc02c7bfbec0b19a43bb) first introduced in 
[7c60ca](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca) -* [Fix](https://github.com/pathikrit/better-files/pull/184) bug in appendBytes - -## v3.1.0 -* [Issue #140](https://github.com/pathikrit/better-files/issues/140): Batch up events for file monitoring -* [Issue #136](https://github.com/pathikrit/better-files/issues/136): Use execution contexts for file monitoring -* [Issue #152](https://github.com/pathikrit/better-files/issues/152): Streamed unzipping -* [Issue #150](https://github.com/pathikrit/better-files/issues/150): `ManagedResource[File]` for temp files -* [Issue #126](https://github.com/pathikrit/better-files/pull/159): New Typeclassed approach to ARM -* [Issue #160](https://github.com/pathikrit/better-files/issues/160): Ability to convert Reader/Writer to Input/Output streams -* [Issue #77](https://github.com/pathikrit/better-files/issues/77): Better UNIX-y behaviour for `cp` and `mv` DSL utils -* [Issue #169](https://github.com/pathikrit/better-files/issues/169): Support for symbols in file DSL -* [Issue #171](https://github.com/pathikrit/better-files/issues/171): Handle `createDirectories()` on symlinks to existing directories - -## v3.0.0 -* [Issue #9](https://github.com/pathikrit/better-files/issues/9): File resource utils -* [Issue #114](https://github.com/pathikrit/better-files/issues/114): Glob with automatic path -* [Issue #107](https://github.com/pathikrit/better-files/issues/107): Handle Byte-order markers -* [PR #113](https://github.com/pathikrit/better-files/pull/113): File anchor util -* [Issue #105](https://github.com/pathikrit/better-files/issues/105): Remove dependency on scala.io -* [File.usingTemp](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca) -* [Optional symbolic operations](https://github.com/pathikrit/better-files/issues/102) -* [PR #100](https://github.com/pathikrit/better-files/pull/100): Fix issue in unzip of parents -* [PR #101](https://github.com/pathikrit/better-files/pull/101): Removed File.Type -* [Issue #96](https://github.com/pathikrit/better-files/issues/96): Teeing outputstreams -* [File.testPermission](https://github.com/pathikrit/better-files/commit/7b175c582643790e4d2fd21552e47cc9c615dfbb) -* [File.nonEmpty](https://github.com/pathikrit/better-files/commit/18c9cd51b7b2e503ff4944050ac5119470869e6e) -* [Update metadata API](https://github.com/pathikrit/better-files/commit/c3d65951d80f09b813e158a9e3a1785c622353b3) -* [Issue #80](https://github.com/pathikrit/better-files/issues/80): Unzip filters -* [PR #107](https://github.com/pathikrit/better-files/pull/127): Java serialization utils - -## v2.17.1 -* [PR #99](https://github.com/pathikrit/better-files/pull/99): Release for Scala 2.12 - -## v2.17.0 -* [PR #78](https://github.com/pathikrit/better-files/pull/78): Change `write(Array[Byte])` to `writeByteArray()`. 
Same for `append` -* [Issue #76](https://github.com/pathikrit/better-files/issues/76): Move `better.files.Read` typeclass to `better.files.Scanner.Read` diff --git a/scalalib/src/test/resource/better-files/LICENSE b/scalalib/src/test/resource/better-files/LICENSE deleted file mode 100644 index a63964f8..00000000 --- a/scalalib/src/test/resource/better-files/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2017 Pathikrit Bhowmick - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/scalalib/src/test/resource/better-files/README.md b/scalalib/src/test/resource/better-files/README.md deleted file mode 100644 index 9877c3bc..00000000 --- a/scalalib/src/test/resource/better-files/README.md +++ /dev/null @@ -1,637 +0,0 @@ -# better-files [![License][licenseImg]][licenseLink] [![CircleCI][circleCiImg]][circleCiLink] [![Codacy][codacyImg]][codacyLink] - -`better-files` is a [dependency-free](project/Dependencies.scala) *pragmatic* [thin Scala wrapper](core/src/main/scala/better/files/File.scala) around [Java NIO](https://docs.oracle.com/javase/tutorial/essential/io/fileio.html). - -## Talks [![Gitter][gitterImg]][gitterLink] - - [ScalaDays NYC 2016][scalaDaysNyc2016Event] ([slides][scalaDaysNyc2016Slides]) - - - ScalaDays NYC 2016: Introduction to better-files - - - - [ScalaDays Berlin 2016][scalaDaysBerlin2016Event] ([video][scalaDaysBerlin2016Video], [slides][scalaDaysBerlin2016Slides]) - - [Scalæ by the Bay 2016][scalæByTheBay2016Event] ([video][scalæByTheBay2016Video], [slides][scalæByTheBay2016Slides]) - -## Tutorial [![Scaladoc][scaladocImg]][scaladocLink] - 0. [Instantiation](#instantiation) - 0. [Simple I/O](#file-readwrite) - 0. [Streams](#streams) - 0. [Encodings](#encodings) - 0. [Java serialization utils](#java-serialization-utils) - 0. [Java compatibility](#java-interoperability) - 0. [Globbing](#globbing) - 0. [File system operations](#file-system-operations) - 0. [Temporary files](#temporary-files) - 0. [UNIX DSL](#unix-dsl) - 0. [File attributes](#file-attributes) - 0. [File comparison](#file-comparison) - 0. [Zip/Unzip](#zip-apis) - 0. [Automatic Resource Management](#lightweight-arm) - 0. [Scanner](#scanner) - 0. [File Monitoring](#file-monitoring) - 0. 
[Reactive File Watcher](#akka-file-watcher) - -## sbt [![UpdateImpact][updateImpactImg]][updateImpactLink] -In your `build.sbt`, add this: -```scala -libraryDependencies += "com.github.pathikrit" %% "better-files" % version -``` -To use the [Akka based file monitor](akka), also add this: -```scala -libraryDependencies ++= Seq( - "com.github.pathikrit" %% "better-files-akka" % version, - "com.typesafe.akka" %% "akka-actor" % "2.5.6" -) -``` -Latest `version`: [![Maven][mavenImg]][mavenLink] [![Scaladex][scaladexImg]][scaladexLink] - -Although this library is currently only actively developed for Scala 2.12 and 2.13, -you can find reasonably recent versions of this library for Scala 2.10 and 2.11 [here](https://oss.sonatype.org/#nexus-search;quick~better-files). - -## Tests [![codecov][codecovImg]][codecovLink] -* [FileSpec](core/src/test/scala/better/files/FileSpec.scala) -* [FileWatcherSpec](akka/src/test/scala/better/files/FileWatcherSpec.scala) -* [Benchmarks](benchmarks/) - -[licenseImg]: https://img.shields.io/github/license/pathikrit/better-files.svg -[licenseImg2]: https://img.shields.io/:license-mit-blue.svg -[licenseLink]: LICENSE - -[circleCiImg]: https://img.shields.io/circleci/project/pathikrit/better-files/master.svg -[circleCiImg2]: https://circleci.com/gh/pathikrit/better-files/tree/master.svg -[circleCiLink]: https://circleci.com/gh/pathikrit/better-files - -[codecovImg]: https://img.shields.io/codecov/c/github/pathikrit/better-files/master.svg -[codecovImg2]: https://codecov.io/github/pathikrit/better-files/coverage.svg?branch=master -[codecovLink]: http://codecov.io/github/pathikrit/better-files?branch=master - -[codacyImg]: https://img.shields.io/codacy/0e2aeb7949bc49e6802afcc43a7a1aa1.svg -[codacyImg2]: https://api.codacy.com/project/badge/grade/0e2aeb7949bc49e6802afcc43a7a1aa1 -[codacyLink]: https://www.codacy.com/app/pathikrit/better-files/dashboard - -[mavenImg]: https://img.shields.io/maven-central/v/com.github.pathikrit/better-files_2.12.svg -[mavenImg2]: https://maven-badges.herokuapp.com/maven-central/com.github.pathikrit/better-files_2.12/badge.svg -[mavenLink]: http://search.maven.org/#search%7Cga%7C1%7Cbetter-files - -[gitterImg]: https://img.shields.io/gitter/room/pathikrit/better-files.svg -[gitterImg2]: https://badges.gitter.im/Join%20Chat.svg -[gitterLink]: https://gitter.im/pathikrit/better-files - -[scaladexImg]: https://index.scala-lang.org/pathikrit/better-files/better-files/latest.svg -[scaladexLink]: https://index.scala-lang.org/pathikrit/better-files - -[scaladocImg]: https://www.javadoc.io/badge/com.github.pathikrit/better-files_2.12.svg?color=blue&label=scaladocs - -[scaladocLink]: http://pathikrit.github.io/better-files/latest/api/better/files/File.html - -[updateImpactImg]: https://app.updateimpact.com/badge/704376701047672832/root.svg?config=compile -[updateImpactLink]: https://app.updateimpact.com/latest/704376701047672832/root - -[scalaDaysNyc2016Event]: http://event.scaladays.org/scaladays-nyc-2016/#!#schedulePopupExtras-7664 -[scalaDaysNyc2016Video]: https://www.youtube.com/watch?v=uaYKkpqs6CE - -[scalaDaysNyc2016VideoPreview]: site/tech_talk_preview.png -[scalaDaysNyc2016Slides]: https://slides.com/pathikrit/better-files/ - -[scalaDaysBerlin2016Event]: http://event.scaladays.org/scaladays-berlin-2016#!#schedulePopupExtras-7668 -[scalaDaysBerlin2016Video]: https://www.youtube.com/watch?v=m2YsD5cgnzI -[scalaDaysBerlin2016Slides]: https://slides.com/pathikrit/better-files/ - -[scalæByTheBay2016Event]: http://sched.co/7iUn 
-[scalæByTheBay2016Video]: https://www.youtube.com/watch?v=bLiCE6NGjrk&t=251s -[scalæByTheBay2016Slides]: https://slides.com/pathikrit/better-files/ - -------- -### Instantiation -The following are all equivalent: -```scala -import better.files._ -import java.io.{File => JFile} - -val f = File("/User/johndoe/Documents") // using constructor -val f1: File = file"/User/johndoe/Documents" // using string interpolator -val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file -val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala -val f4: File = root/"User"/"johndoe"/"Documents" // using root helper to start from root -val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"` -val f6: File = "/User"/"johndoe"/"Documents" // using file separator DSL -val f7: File = "/User"/'johndoe/'Documents // same as above but using Symbols instead of Strings -val f8: File = home/"Documents"/"presentations"/`..` // use `..` to navigate up to parent -``` - -**Note**: Rename the import if you think the usage of the class `File` may confuse your teammates: -```scala -import better.files.{File => ScalaFile, _} -import java.io.File -``` -I personally prefer renaming the Java crap instead: -```scala -import better.files._ -import java.io.{File => JFile} -``` - -### File Read/Write -Dead simple I/O: -```scala -val file = root/"tmp"/"test.txt" -file.overwrite("hello") -file.appendLine().append("world") -assert(file.contentAsString == "hello\nworld") -``` -If you are someone who likes symbols, then the above code can also be written as: -```scala -import better.files.Dsl.SymbolicOperations - -file < "hello" // same as file.overwrite("hello") -file << "world" // same as file.appendLines("world") -assert(file! == "hello\nworld") -``` -Or even, right-associatively: -```scala -import better.files.Dsl.SymbolicOperations - -"hello" `>:` file -"world" >>: file -val bytes: Array[Byte] = file.loadBytes -``` -[Fluent Interface](https://en.wikipedia.org/wiki/Fluent_interface): -```scala - (root/"tmp"/"diary.txt") - .createIfNotExists() - .appendLine() - .appendLines("My name is", "Inigo Montoya") - .moveToDirectory(home/"Documents") - .renameTo("princess_diary.txt") - .changeExtensionTo(".md") - .lines -``` - -### Streams -Various ways to slurp a file without loading the contents into memory: - ```scala -val bytes : Iterator[Byte] = file.bytes -val chars : Iterator[Char] = file.chars -val lines : Iterator[String] = file.lineIterator //file.lines loads all lines in memory -``` -Note: The above APIs can be traversed at most once e.g. `file.bytes` is a `Iterator[Byte]` which only allows `TraversableOnce`. -To traverse it multiple times without creating a new iterator instance, convert it into some other collection e.g. 
`file.bytes.toStream` - -You can write an `Iterator[Byte]` or an `Iterator[String]` back to a file: -```scala -file.writeBytes(bytes) -file.printLines(lines) -``` - -### Encodings -You can supply your own charset too for anything that does a read/write (it assumes `java.nio.charset.Charset.defaultCharset()` if you don't provide one): -```scala -val content: String = file.contentAsString // default charset - -// custom charset: -import java.nio.charset.Charset -file.contentAsString(charset = Charset.forName("US-ASCII")) - -//or simply using implicit conversion from Strings -file.write("hello world")(charset = "US-ASCII") - ``` - -Note: By default, `better-files` [correctly handles BOMs while decoding](core/src/main/scala/better/files/UnicodeCharset.scala). -If you wish to have the [incorrect JDK behaviour](http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058), -you would need to supply Java's UTF-8 charset e.g.: -```scala -file.contentAsString(charset = Charset.forName("UTF-8")) // Default incorrect JDK behaviour for UTF-8 (see: JDK-4508058) -``` - -If you also wish to write BOMs while encoding, you would need to supply it as: -```scala -file.write("hello world")(charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true)) -``` - -### Java serialization utils -Some common utils to serialize/deserialize using Java's serialization util -```scala -case class Person(name: String, age: Int) -val person = new Person("Chris", 24) - -// Write -file.newOutputStream.buffered.asObjectOutputStream.serialize(obj).flush() - -// Read -val person2 = file.newInputStream.buffered.asObjectInputStream.readObject().asInstanceOf[Person] -assert(person == person2) -``` - -The above can be simply written as: -```scala -val person2: Person = file.writeSerialized(person).readDeserialized[Person] -assert(person == person2) -``` - -### Java interoperability -You can always access the Java I/O classes: -```scala -val file: File = tmp / "hello.txt" -val javaFile : java.io.File = file.toJava -val uri : java.net.URI = file.uri -val url : java.net.URL = file.url -val reader : java.io.BufferedReader = file.newBufferedReader -val outputstream : java.io.OutputStream = file.newOutputStream -val writer : java.io.BufferedWriter = file.newBufferedWriter -val inputstream : java.io.InputStream = file.newInputStream -val path : java.nio.file.Path = file.path -val fs : java.nio.file.FileSystem = file.fileSystem -val channel : java.nio.channel.FileChannel = file.newFileChannel -val ram : java.io.RandomAccessFile = file.newRandomAccess -val fr : java.io.FileReader = file.newFileReader -val fw : java.io.FileWriter = file.newFileWriter(append = true) -val printer : java.io.PrintWriter = file.newPrintWriter -``` -The library also adds some useful [implicits](http://pathikrit.github.io/better-files/latest/api/better/files/Implicits.html) to above classes e.g.: -```scala -file1.reader > file2.writer // pipes a reader to a writer -System.in > file2.out // pipes an inputstream to an outputstream -src.pipeTo(sink) // if you don't like symbols - -val bytes : Iterator[Byte] = inputstream.bytes -val bis : BufferedInputStream = inputstream.buffered -val bos : BufferedOutputStream = outputstream.buffered -val reader : InputStreamReader = inputstream.reader -val writer : OutputStreamWriter = outputstream.writer -val printer : PrintWriter = outputstream.printWriter -val br : BufferedReader = reader.buffered -val bw : BufferedWriter = writer.buffered -val mm : MappedByteBuffer = fileChannel.toMappedByteBuffer -val str : String = 
inputstream.asString //Read a string from an InputStream -``` -`better-files` also supports [certain conversions that are not supported out of the box by the JDK](https://stackoverflow.com/questions/62241/how-to-convert-a-reader-to-inputstream-and-a-writer-to-outputstream) - -[`tee`](http://stackoverflow.com/questions/7987395/) multiple outputstreams: -```scala -val s3 = s1 tee s2 -s3.printWriter.println(s"Hello world") // gets written to both s1 and s2 -``` - -### Globbing -No need to port [this](http://docs.oracle.com/javase/tutorial/essential/io/find.html) to Scala: -```scala -val dir = "src"/"test" -val matches: Iterator[File] = dir.glob("*.{java,scala}") -// above code is equivalent to: -dir.listRecursively.filter(f => f.extension == Some(".java") || f.extension == Some(".scala")) -``` - -You can even use more advanced regex syntax instead of [glob syntax](http://docs.oracle.com/javase/tutorial/essential/io/fileOps.html#glob): -```scala -val matches = dir.globRegex("^\\w*$".r) //equivalent to dir.glob("^\\w*$")(syntax = File.PathMatcherSyntax.regex) -``` - -By default, glob syntax in `better-files` is [different from](https://github.com/pathikrit/better-files/issues/114) -the default JDK glob behaviour since it always includes path. To use the default behaviour: -```scala -dir.glob("**/*.txt", includePath = false) // JDK default -//OR -dir.glob("*.txt", includePath = true) // better-files default -``` -You can also extend the `File.PathMatcherSyntax` to create your own matchers. - -For custom cases: -```scala -dir.collectChildren(_.isSymbolicLink) // collect all symlinks in a directory -``` -For simpler cases, you can always use `dir.list` or `dir.walk(maxDepth: Int)` - -### File system operations -Utilities to `ls`, `cp`, `rm`, `mv`, `ln`, `md5`, `touch`, `cat` etc: -```scala -file.touch() -file.delete() // unlike the Java API, also works on directories as expected (deletes children recursively) -file.clear() // If directory, deletes all children; if file clears contents -file.renameTo(newName: String) -file.moveTo(destination) -file.moveToDirectory(destination) -file.copyTo(destination) // unlike the default API, also works on directories (copies recursively) -file.copyToDirectory(destination) -file.linkTo(destination) // ln destination file -file.symbolicLinkTo(destination) // ln -s destination file -file.{checksum, md5, sha1, sha256, sha512, digest} // also works for directories -file.setOwner(user: String) // chown user file -file.setGroup(group: String) // chgrp group file -Seq(file1, file2) `>:` file3 // same as cat file1 file2 > file3 (must import import better.files.Dsl.SymbolicOperations) -Seq(file1, file2) >>: file3 // same as cat file1 file2 >> file3 (must import import better.files.Dsl.SymbolicOperations) -file.isReadLocked; file.isWriteLocked; file.isLocked -File.numberOfOpenFileDescriptors // number of open file descriptors -``` -You can also load resources from your classpath using `File.resource` or `File.copyResource`. - -### Temporary files -Utils to create temporary files: -```scala -File.newTemporaryDirectory() -File.newTemporaryFile() -``` -The above APIs allow optional specifications of `prefix`, `suffix` and `parentDir`. -These files are [not deleted automatically on exit by the JVM](http://stackoverflow.com/questions/16691437/when-are-java-temporary-files-deleted) (you have to set `deleteOnExit` which adds to `shutdownHook`). 
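A minimal sketch of the manual clean-up the note above refers to, assuming the `File.newTemporaryFile()` and `toJava` APIs shown earlier; the JVM only removes the file at a clean exit, and the registration lives in a shutdown hook for the whole life of the process:
```scala
import better.files._

// Hypothetical illustration: create a temp file and lean on the JVM shutdown hook.
// Nothing is deleted until the process exits normally, however long that takes.
val tmp: File = File.newTemporaryFile(prefix = "report-", suffix = ".txt")
tmp.toJava.deleteOnExit() // java.io.File#deleteOnExit, via the toJava bridge shown above
tmp.overwrite("scratch data")
```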
- -A cleaner alternative is to use self-deleting file contexts which deletes the file immediately when done: -```scala -for { - tempFile <- File.temporaryFile() -} doSomething(tempFile) // tempFile is auto deleted at the end of this block - even if an exception happens -``` - -OR equivalently: -```scala -File.usingTemporaryFile() {tempFile => - //do something -} // tempFile is auto deleted at the end of this block - even if an exception happens -``` - -You can make any files temporary (i.e. delete after use) by doing this: -```scala -val foo = File.home / "Downloads" / "foo.txt" - -for { - temp <- foo.toTemporary -} doSomething(temp) // foo is deleted at the end of this block - even if an exception happens -``` - -### UNIX DSL -All the above can also be expressed using [methods](http://pathikrit.github.io/better-files/latest/api/better/files/Dsl$.html) reminiscent of the command line: -```scala -import better.files._ -import better.files.Dsl._ // must import Dsl._ to bring in these utils - -pwd / cwd // current dir -cp(file1, file2) -mv(file1, file2) -rm(file) /*or*/ del(file) -ls(file) /*or*/ dir(file) -ln(file1, file2) // hard link -ln_s(file1, file2) // soft link -cat(file1) -cat(file1) >>: file -touch(file) -mkdir(file) -mkdirs(file) // mkdir -p -chown(owner, file) -chgrp(owner, file) -chmod_+(permission, files) // add permission -chmod_-(permission, files) // remove permission -md5(file); sha1(file); sha256(file); sha512(file) -unzip(zipFile)(targetDir) -zip(file*)(targetZipFile) -``` - -### File attributes -Query various file attributes e.g.: -```scala -file.name // simpler than java.io.File#getName -file.extension -file.contentType -file.lastModifiedTime // returns JSR-310 time -file.owner -file.group -file.isDirectory; file.isSymbolicLink; file.isRegularFile -file.isHidden -file.hide(); file.unhide() -file.isOwnerExecutable; file.isGroupReadable // etc. 
see file.permissions -file.size // for a directory, computes the directory size -file.posixAttributes; file.dosAttributes // see file.attributes -file.isEmpty // true if file has no content (or no children if directory) or does not exist -file.isParentOf; file.isChildOf; file.isSiblingOf; file.siblings -file("dos:system") = true // set custom meta-data for file (similar to Files.setAttribute) -``` -All the above APIs let you specify the [`LinkOption`](http://docs.oracle.com/javase/8/docs/api/java/nio/file/LinkOption.html) either directly: -```scala -file.isDirectory(LinkOption.NOFOLLOW_LINKS) -``` -Or using the [`File.LinkOptions`](http://pathikrit.github.io/better-files/latest/api/better/files/File$$LinkOptions$.html) helper: -```scala -file.isDirectory(File.LinkOptions.noFollow) -``` - -`chmod`: -```scala -import java.nio.file.attribute.PosixFilePermission -file.addPermission(PosixFilePermission.OWNER_EXECUTE) // chmod +X file -file.removePermission(PosixFilePermission.OWNER_WRITE) // chmod -w file -assert(file.permissionsAsString == "rw-r--r--") - -// The following are all equivalent: -assert(file.permissions contains PosixFilePermission.OWNER_EXECUTE) -assert(file.testPermission(PosixFilePermission.OWNER_EXECUTE)) -assert(file.isOwnerExecutable) -``` - -### File comparison -Use `==` to check for path-based equality and `===` for content-based equality: -```scala -file1 == file2 // equivalent to `file1.isSamePathAs(file2)` -file1 === file2 // equivalent to `file1.isSameContentAs(file2)` (works for regular-files and directories) -file1 != file2 // equivalent to `!file1.isSamePathAs(file2)` -file1 !== file2 // equivalent to `!file1.isSameContentAs(file2)` -``` -There are also various [`Ordering[File]` instances](http://pathikrit.github.io/better-files/latest/api/better/files/File$$Order$.html) included, e.g.: -```scala -val files = myDir.list.toSeq -files.sorted(File.Order.byName) -files.max(File.Order.bySize) -files.min(File.Order.byDepth) -files.max(File.Order.byModificationTime) -files.sorted(File.Order.byDirectoriesFirst) -``` - -### Zip APIs -You don't have to lookup on StackOverflow "[How to zip/unzip in Java/Scala?](http://stackoverflow.com/questions/9324933/)": -```scala -// Unzipping: -val zipFile: File = file"path/to/research.zip" -val research: File = zipFile.unzipTo(destination = home/"Documents"/"research") - -// Zipping: -val zipFile: File = directory.zipTo(destination = home/"Desktop"/"toEmail.zip") - -// Zipping in: -val zipFile = File("countries.zip").zipIn(file"usa.txt", file"russia.txt") - -// Zipping/Unzipping to temporary files/directories: -val someTempZipFile: File = directory.zip() -val someTempDir: File = zipFile.unzip() -assert(directory === someTempDir) - -// Gzip handling: -File("countries.gz").newInputStream.gzipped.lines.take(10).foreach(println) -``` - -### Lightweight ARM -Auto-close Java closeables: -```scala -for { - in <- file1.newInputStream.autoClosed - out <- file2.newOutputStream.autoClosed -} in.pipeTo(out) -// The input and output streams are auto-closed once out of scope -``` -`better-files` provides convenient managed versions of all the Java closeables e.g. 
instead of writing: -```scala -for { - reader <- file.newBufferedReader.autoClosed -} foo(reader) -``` -You can write: -```scala -for { - reader <- file.bufferedReader // returns ManagedResource[BufferedReader] -} foo(reader) - -// or simply: -file.bufferedReader.foreach(foo) -``` - -You can also define your own custom disposable resources e.g.: -```scala -trait Shutdownable { - def shutdown(): Unit = () -} - -object Shutdownable { - implicit val disposable: Disposable[Shutdownable] = Disposable(_.shutdown()) -} - -val s: Shutdownable = .... - -for { - instance <- new ManagedResource(s) -} doSomething(s) // s is disposed after this -``` - -### Scanner -Although [`java.util.Scanner`](http://docs.oracle.com/javase/8/docs/api/java/util/Scanner.html) has a feature-rich API, it only allows parsing primitives. -It is also [notoriously slow](https://www.cpe.ku.ac.th/~jim/java-io.html) since it uses regexes and does un-Scala things like returns nulls and throws exceptions. - -`better-files` provides a [faster](benchmarks#benchmarks), richer, safer, more idiomatic and compossible [Scala replacement](http://pathikrit.github.io/better-files/latest/api/better/files/Scanner.html) -that [does not use regexes](core/src/main/scala/better/files/Scanner.scala), allows peeking, accessing line numbers, returns `Option`s whenever possible and lets the user mixin custom parsers: -```scala -val data = t1 << s""" - | Hello World - | 1 true 2 3 -""".stripMargin -val scanner: Scanner = data.newScanner() -assert(scanner.next[String] == "Hello") -assert(scanner.lineNumber == 1) -assert(scanner.next[String] == "World") -assert(scanner.next[(Int, Boolean)] == (1, true)) -assert(scanner.tillEndOfLine() == " 2 3") -assert(!scanner.hasNext) -``` -If you are simply interested in tokens, you can use `file.tokens()` - -Writing your own custom scanners: -```scala -sealed trait Animal -case class Dog(name: String) extends Animal -case class Cat(name: String) extends Animal - -implicit val animalParser: Scannable[Animal] = Scannable {scanner => - val name = scanner.next[String] - if (name == "Garfield") Cat(name) else Dog(name) -} - -val scanner = file.newScanner() -println(scanner.next[Animal]) -``` - -The [shapeless-scanner](shapeless/src/main/scala/better/files/ShapelessScanner.scala) module lets you scan [`HList`s](https://github.com/milessabin/shapeless/blob/master/core/src/main/scala/shapeless/hlists.scala): -```scala -val in = Scanner(""" - 12 Bob True - 13 Mary False - 26 Rick True -""") - -import shapeless._ - -type Row = Int :: String :: Boolean :: HNil - -val out = Seq.fill(3)(in.next[Row]) -assert(out == Seq( - 12 :: "Bob" :: true :: HNil, - 13 :: "Mary" :: false :: HNil, - 26 :: "Rick" :: true :: HNil -)) -``` - -[and case-classes](https://meta.plasm.us/posts/2015/11/08/type-classes-and-generic-derivation/): - -```scala -case class Person(id: Int, name: String, isMale: Boolean) -val out2 = Seq.fill(3)(in.next[Person]) -``` - -Simple CSV reader: -```scala -val file = """ - 23,foo - 42,bar -""" -val csvScanner = file.newScanner(StringSpliiter.on(',')) -csvScanner.next[Int] //23 -csvScanner.next[String] //foo -``` - -### File Monitoring -Vanilla Java watchers: -```scala -import java.nio.file.{StandardWatchEventKinds => EventType} -val service: java.nio.file.WatchService = myDir.newWatchService -myDir.register(service, events = Seq(EventType.ENTRY_CREATE, EventType.ENTRY_DELETE)) -``` -The above APIs are [cumbersome to use](https://docs.oracle.com/javase/tutorial/essential/io/notification.html#process) (involves a lot 
of type-casting and null-checking), -are based on a blocking [polling-based model](http://docs.oracle.com/javase/8/docs/api/java/nio/file/WatchKey.html), -does not easily allow [recursive watching of directories](https://docs.oracle.com/javase/tutorial/displayCode.html?code=https://docs.oracle.com/javase/tutorial/essential/io/examples/WatchDir.java) -and nor does it easily allow [watching regular files](http://stackoverflow.com/questions/16251273/) without writing a lot of Java boilerplate. - -`better-files` abstracts all the above ugliness behind a [simple interface](core/src/main/scala/better/files/File.scala#1100): -```scala -val watcher = new FileMonitor(myDir, recursive = true) { - override def onCreate(file: File, count: Int) = println(s"$file got created") - override def onModify(file: File, count: Int) = println(s"$file got modified $count times") - override def onDelete(file: File, count: Int) = println(s"$file got deleted") -} -watcher.start() -``` -Sometimes, instead of overwriting each of the 3 methods above, it is more convenient to override the dispatcher itself: -```scala -import java.nio.file.{Path, StandardWatchEventKinds => EventType, WatchEvent} - -val watcher = new FileMonitor(myDir, recursive = true) { - override def onEvent(eventType: WatchEvent.Kind[Path], file: File, count: Int) = eventType match { - case EventType.ENTRY_CREATE => println(s"$file got created") - case EventType.ENTRY_MODIFY => println(s"$file got modified $count") - case EventType.ENTRY_DELETE => println(s"$file got deleted") - } -} -``` - -### Akka File Watcher -`better-files` also provides a powerful yet concise [reactive file watcher](akka/src/main/scala/better/files/FileWatcher.scala) -based on [Akka actors](http://doc.akka.io/docs/akka/snapshot/scala/actors.html) that supports dynamic dispatches: - ```scala -import akka.actor.{ActorRef, ActorSystem} -import better.files._, FileWatcher._ - -implicit val system = ActorSystem("mySystem") - -val watcher: ActorRef = (home/"Downloads").newWatcher(recursive = true) - -// register partial function for an event -watcher ! on(EventType.ENTRY_DELETE) { - case file if file.isDirectory => println(s"$file got deleted") -} - -// watch for multiple events -watcher ! when(events = EventType.ENTRY_CREATE, EventType.ENTRY_MODIFY) { - case (EventType.ENTRY_CREATE, file, count) => println(s"$file got created") - case (EventType.ENTRY_MODIFY, file, count) => println(s"$file got modified $count times") -} -``` diff --git a/scalalib/src/test/resource/better-files/akka/README.md b/scalalib/src/test/resource/better-files/akka/README.md deleted file mode 100644 index 391cec2e..00000000 --- a/scalalib/src/test/resource/better-files/akka/README.md +++ /dev/null @@ -1,394 +0,0 @@ -Reproduction of [this Java Advent article](http://www.javaadvent.com/2015/12/reactive-file-system-monitoring-using-akka-actors.html) - ------ - -In this article, we will discuss: - -0. File system monitoring using [Java NIO.2][nio2] -1. Common pitfalls of the default Java library -2. Design a simple thread-based file system monitor -3. Use the above to design a reactive file system monitor using the [actor][akka] [model][actorModel] - -**Note**: Although all the code samples here are in Scala, it can be rewritten in simple Java too. To quickly familiarize yourself with Scala syntax, [here is a very short and nice Scala cheatsheet][cheatsheet]. For a more comprehensive guide to Scala for Java programmers, [consult this][cheatsheet2] (not needed to follow this article). 
- -For the absolute shortest cheatsheet, the following Java code: - -```java -public void foo(int x, int y) { - int z = x + y - if (z == 1) { - System.out.println(x); - } else { - System.out.println(y); - } -} -``` - -is equivalent to the following Scala code: - -```scala -def foo(x: Int, y: Int): Unit = { - val z: Int = x + y - z match { - case 1 => println(x) - case _ => println(y) - } -} -``` - - -All the code presented here is available under MIT license as part of the [better-files][better-files-watcher] library on [GitHub][better-files]. - ------------ - -Let's say you are tasked to build a cross-platform desktop file-search engine. You quickly realize that after the initial indexing of all the files, you need to also quickly reindex any new files (or directories) that got created or updated. A naive way would be to simply rescan the entire file system every few minutes; but that would be incredibly inefficient since most operating systems expose file system notification APIs that allow the application programmer to register callbacks for changes e.g. [ionotify][ionotify-wiki] in Linux, [FSEvenets][fsevents-wiki] in Mac and [FindFirstChangeNotification][FindFirstChangeNotification] in Windows. - -But now you are stuck dealing with OS-specific APIs! Thankfully, beginning Java SE 7, we have a platform independent abstraction for watching file system changes via the [WatchService API][javadoc-watchservice]. The WatchService API was developed as part of [Java NIO.2][nio2-wiki], under [JSR-51][jsr-51] and here is a "hello world" example of using it to watch a given [Path][javadoc-path]: - -```scala -import java.nio.file._ -import java.nio.file.StandardWatchEventKinds._ -import scala.collection.JavaConversions._ - -def watch(directory: Path): Unit = { - // First create the service - val service: WatchService = directory.getFileSystem.newWatchService() - - // Register the service to the path and also specify which events we want to be notified about - directory.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) - - while (true) { - val key: WatchKey = service.take() // Wait for this key to be signalled - for {event <- key.pollEvents()} { - // event.context() is the path to the file that got changed - event.kind() match { - case ENTRY_CREATE => println(s"${event.context()} got created") - case ENTRY_MODIFY => println(s"${event.context()} got modified") - case ENTRY_DELETE => println(s"${event.context()} got deleted") - case _ => - // This can happen when OS discards or loses an event. - // See: http://docs.oracle.com/javase/8/docs/api/java/nio/file/StandardWatchEventKinds.html#OVERFLOW - println(s"Unknown event $event happened at ${event.context()}") - } - } - key.reset() // Do not forget to do this!! See: http://stackoverflow.com/questions/20180547/ - } -} -``` - -Although the above is a good first attempt, it lacks in several aspects: - -0. **Bad Design**: The above code looks unnatural and you probably had to [look it up on StackOverflow][so-down] to get it right. Can we do better? -2. **Bad Design**: The code does not do a very good job of handling errors. What happens when we encounter a file we could not open? -3. **Gotcha**: The Java API only allows us to watch the directory for changes to its direct children; it [does not recursively watch a directory][so-recursive-watching] for you. -4. **Gotcha**: The Java API [does not allow us to watch a single file][so-only-watch-dirs], only a directory. -5. 
**Gotcha**: Even if we resolve the aformentioned issues, the Java API [does not automatically start watching a new child file][so-autowatch] or directory created under the root. -6. **Bad Design**: The code as implemented above, exposes a blocking/polling, thread-based model. Can we use a better concurrency abstraction? - ------------ - - -Let's start with each of the above concerns. - -* **A better interface**: Here is what *my ideal* interface would look like: - -```scala -abstract class FileMonitor(root: Path) { - def start(): Unit - def onCreate(path: Path): Unit - def onModify(path: Path): Unit - def onDelete(path: Path): Unit - def stop(): Unit -} -``` - -That way, I can simply write the example code as: - -```scala -val watcher = new FileMonitor(myFile) { - override def onCreate(path: Path) = println(s"$path got created") - override def onModify(path: Path) = println(s"$path got modified") - override def onDelete(path: Path) = println(s"$path got deleted") -} -watcher.start() -``` - -Ok, let's try to adapt the first example using a Java `Thread` so that we can expose "my ideal interface": - -```scala -trait FileMonitor { // My ideal interface - val root: Path // starting file - def start(): Unit // start the monitor - def onCreate(path: Path) = {} // on-create callback - def onModify(path: Path) = {} // on-modify callback - def onDelete(path: Path) = {} // on-delete callback - def onUnknownEvent(event: WatchEvent[_]) = {} // handle lost/discarded events - def onException(e: Throwable) = {} // handle errors e.g. a read error - def stop(): Unit // stop the monitor -} -``` - -And here is a very basic thread-based implementation: - -```scala -class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor { - setDaemon(true) // daemonize this thread - setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler { - override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception) - }) - - val service = root.getFileSystem.newWatchService() - - override def run() = Iterator.continually(service.take()).foreach(process) - - override def interrupt() = { - service.close() - super.interrupt() - } - - override def start() = { - watch(root) - super.start() - } - - protected[this] def watch(file: Path): Unit = { - file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) - } - - protected[this] def process(key: WatchKey) = { - key.pollEvents() foreach { - case event: WatchEvent[Path] => dispatch(event.kind(), event.context()) - case event => onUnknownEvent(event) - } - key.reset() - } - - def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = { - eventType match { - case ENTRY_CREATE => onCreate(file) - case ENTRY_MODIFY => onModify(file) - case ENTRY_DELETE => onDelete(file) - } - } -} -``` - -The above looks much cleaner! Now we can watch files to our heart's content without poring over the details of JavaDocs by simply implementing the `onCreate(path)`, `onModify(path)`, `onDelete(path)` etc. - -* **Exception handling**: This is already done above. `onException` gets called whenever we encounter an exception and the invoker can decide what to do next by implementing it. - -* **Recursive watching**: The Java API **does not allow recursive watching of directories**. 
We need to modify the `watch(file)` to recursively attach the watcher: - -```scala -def watch(file: Path, recursive: Boolean = true): Unit = { - if (Files.isDirectory(file)) { - file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) - // recursively call watch on children of this file - if (recursive) { - Files.list(file).iterator() foreach {f => watch(f, recursive)} - } - } -} -``` - -* **Watching regular files**: As mentioned before, the Java API **can only watch directories**. One hack we can do to watch single files is to set a watcher on its parent directory and only react if the event trigerred on the file itself. - -```scala -override def start() = { - if (Files.isDirectory(root)) { - watch(root, recursive = true) - } else { - watch(root.getParent, recursive = false) - } - super.start() -} -``` - -And, now in `process(key)`, we make sure we react to either a directory or that file only: - -```scala -def reactTo(target: Path) = Files.isDirectory(root) || (root == target) -``` - -And, we check before `dispatch` now: - -```scala -case event: WatchEvent[Path] => - val target = event.context() - if (reactTo(target)) { - dispatch(event.kind(), target) - } -``` - -* **Auto-watching new items**: The Java API, **does not auto-watch any new sub-files**. We can address this by attaching the watcher ourselves in `process(key)` when an `ENTRY_CREATE` event is fired: - -```scala -if (reactTo(target)) { - if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) { - watch(root.resolve(target)) - } - dispatch(event.kind(), target) -} -``` - -Putting it all together, we have our final [`FileMonitor.scala`][FileMonitor.scala]: - -```scala -class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor { - setDaemon(true) // daemonize this thread - setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler { - override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception) - }) - - val service = root.getFileSystem.newWatchService() - - override def run() = Iterator.continually(service.take()).foreach(process) - - override def interrupt() = { - service.close() - super.interrupt() - } - - override def start() = { - if (Files.isDirectory(root)) { - watch(root, recursive = true) - } else { - watch(root.getParent, recursive = false) - } - super.start() - } - - protected[this] def watch(file: Path, recursive: Boolean = true): Unit = { - if (Files.isDirectory(file)) { - file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) - if (recursive) { - Files.list(file).iterator() foreach {f => watch(f, recursive)} - } - } - } - - private[this] def reactTo(target: Path) = Files.isDirectory(root) || (root == target) - - protected[this] def process(key: WatchKey) = { - key.pollEvents() foreach { - case event: WatchEvent[Path] => - val target = event.context() - if (reactTo(target)) { - if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) { - watch(root.resolve(target)) - } - dispatch(event.kind(), target) - } - case event => onUnknownEvent(event) - } - key.reset() - } - - def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = { - eventType match { - case ENTRY_CREATE => onCreate(file) - case ENTRY_MODIFY => onModify(file) - case ENTRY_DELETE => onDelete(file) - } - } -} -``` - ------ -Now, that we have addressed all the gotchas and distanced ourselves from the intricacies of the WatchService API, we are still tightly coupled to the thread-based API. 
-We will use the above class to expose a different concurrency model, namely the [actor model][actorModel2], to design a reactive, dynamic and resilient file-system watcher using [Akka][akka-docs]. Although the [construction of Akka actors][akka-actors] is beyond the scope of this article, we will present a very simple actor that uses the `ThreadFileMonitor`: - -```scala -import java.nio.file.{Path, WatchEvent} - -import akka.actor._ - -class FileWatcher(file: Path) extends ThreadFileMonitor(file) with Actor { - import FileWatcher._ - - // MultiMap from Events to registered callbacks - protected[this] val callbacks = newMultiMap[Event, Callback] - - // Override the dispatch method from ThreadFileMonitor to inform the actor of a new event - override def dispatch(event: Event, file: Path) = self ! Message.NewEvent(event, file) - - // Override the onException from the ThreadFileMonitor - override def onException(exception: Throwable) = self ! Status.Failure(exception) - - // when actor starts, start the ThreadFileMonitor - override def preStart() = super.start() - - // before actor stops, stop the ThreadFileMonitor - override def postStop() = super.interrupt() - - override def receive = { - case Message.NewEvent(event, target) if callbacks contains event => - callbacks(event) foreach {f => f(event -> target)} - - case Message.RegisterCallback(events, callback) => - events foreach {event => callbacks.addBinding(event, callback)} - - case Message.RemoveCallback(event, callback) => - callbacks.removeBinding(event, callback) - } -} - -object FileWatcher { - type Event = WatchEvent.Kind[Path] - type Callback = PartialFunction[(Event, Path), Unit] - - sealed trait Message - object Message { - case class NewEvent(event: Event, file: Path) extends Message - case class RegisterCallback(events: Seq[Event], callback: Callback) extends Message - case class RemoveCallback(event: Event, callback: Callback) extends Message - } -} -``` - -This allows us to dynamically register and remove callbacks to react to file system events: - -```scala -// initialize the actor instance -val system = ActorSystem("mySystem") -val watcher: ActorRef = system.actorOf(Props(new FileWatcher(Paths.get("/home/pathikrit")))) - -// util to create a RegisterCallback message for the actor -def when(events: Event*)(callback: Callback): Message = { - Message.RegisterCallback(events.distinct, callback) -} - -// send the register callback message for create/modify events -watcher ! 
when(events = ENTRY_CREATE, ENTRY_MODIFY) { - case (ENTRY_CREATE, file) => println(s"$file got created") - case (ENTRY_MODIFY, file) => println(s"$file got modified") -} -``` - -Full source: [`FileWatcher.scala`][FileWatcher.scala] - ------ - -[actorModel]: https://en.wikipedia.org/wiki/Actor_model -[actorModel2]: http://berb.github.io/diploma-thesis/original/054_actors.html -[akka]: http://akka.io -[akka-actors]: http://doc.akka.io/docs/akka/snapshot/scala/actors.html -[akka-docs]: http://doc.akka.io/docs/akka/2.4.1/java.html -[better-files]: https://github.com/pathikrit/better-files -[better-files-watcher]: https://github.com/pathikrit/better-files#akka-file-watcher -[cheatsheet]: http://learnxinyminutes.com/docs/scala/ -[cheatsheet2]: http://techblog.realestate.com.au/java-to-scala-cheatsheet/ -[FileWatcher.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/akka/src/main/scala/better/files/FileWatcher.scala -[FileMonitor.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/core/src/main/scala/better/files/FileMonitor.scala -[FindFirstChangeNotification]: https://msdn.microsoft.com/en-us/library/aa364417(VS.85).aspx -[fsevents-wiki]: https://en.wikipedia.org/wiki/FSEvents -[ionotify-wiki]: https://en.wikipedia.org/wiki/Inotify -[nio2]: https://docs.oracle.com/javase/tutorial/essential/io/fileio.html -[nio2-wiki]: https://en.wikipedia.org/wiki/Non-blocking_I/O_(Java) -[jsr-51]: https://www.jcp.org/en/jsr/detail?id=51 -[javadoc-path]: https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html -[javadoc-watchservice]: https://docs.oracle.com/javase/8/docs/api/java/nio/file/WatchService.html -[so-autowatch]: https://github.com/lloydmeta/schwatcher/issues/44 -[so-down]: http://memecrunch.com/meme/YBHZ/stackoverflow-is-down/image.jpg -[so-recursive-watching]: http://stackoverflow.com/questions/18701242/how-to-watch-a-folder-and-subfolders-for-changes -[so-only-watch-dirs]: http://stackoverflow.com/questions/16251273/can-i-watch-for-single-file-change-with-watchservice-not-the-whole-directory diff --git a/scalalib/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala b/scalalib/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala deleted file mode 100644 index 66594d20..00000000 --- a/scalalib/src/test/resource/better-files/akka/src/main/scala/better/files/FileWatcher.scala +++ /dev/null @@ -1,67 +0,0 @@ -package better.files - -import akka.actor._ - -/** - * An actor that can watch a file or a directory - * Instead of directly calling the constructor of this, call file.newWatcher to create the actor - * - * @param file watch this file (or directory) - * @param maxDepth In case of directories, how much depth should we watch - */ -class FileWatcher(file: File, maxDepth: Int) extends Actor { - import FileWatcher._ - - def this(file: File, recursive: Boolean = true) = this(file, if (recursive) Int.MaxValue else 0) - - protected[this] val callbacks = newMultiMap[Event, Callback] - - protected[this] val monitor: File.Monitor = new FileMonitor(file, maxDepth) { - override def onEvent(event: Event, file: File, count: Int) = self ! Message.NewEvent(event, file, count) - override def onException(exception: Throwable) = self ! 
Status.Failure(exception) - } - - override def preStart() = monitor.start()(executionContext = context.dispatcher) - - override def receive = { - case Message.NewEvent(event, target, count) if callbacks.contains(event) => callbacks(event).foreach(f => repeat(count)(f(event -> target))) - case Message.RegisterCallback(events, callback) => events.foreach(event => callbacks.addBinding(event, callback)) - case Message.RemoveCallback(event, callback) => callbacks.removeBinding(event, callback) - } - - override def postStop() = monitor.stop() -} - -object FileWatcher { - import java.nio.file.{Path, WatchEvent} - - type Event = WatchEvent.Kind[Path] - type Callback = PartialFunction[(Event, File), Unit] - - sealed trait Message - object Message { - case class NewEvent(event: Event, file: File, count: Int) extends Message - case class RegisterCallback(events: Traversable[Event], callback: Callback) extends Message - case class RemoveCallback(event: Event, callback: Callback) extends Message - } - - implicit val disposeActorSystem: Disposable[ActorSystem] = - Disposable(_.terminate()) - - implicit class FileWatcherOps(file: File) { - def watcherProps(recursive: Boolean): Props = - Props(new FileWatcher(file, recursive)) - - def newWatcher(recursive: Boolean = true)(implicit system: ActorSystem): ActorRef = - system.actorOf(watcherProps(recursive)) - } - - def when(events: Event*)(callback: Callback): Message = - Message.RegisterCallback(events, callback) - - def on(event: Event)(callback: File => Unit): Message = - when(event) { case (`event`, file) => callback(file) } - - def stop(event: Event, callback: Callback): Message = - Message.RemoveCallback(event, callback) -} diff --git a/scalalib/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala b/scalalib/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala deleted file mode 100644 index 014373cd..00000000 --- a/scalalib/src/test/resource/better-files/akka/src/test/scala/better/files/FileWatcherSpec.scala +++ /dev/null @@ -1,101 +0,0 @@ -package better.files - -import Dsl._ - -import scala.concurrent.duration._ -import scala.collection.mutable -import scala.language.postfixOps - -class FileWatcherSpec extends CommonSpec { - "file watcher" should "watch directories" in { - assume(isCI) - File.usingTemporaryDirectory() {dir => - (dir / "a" / "b" / "c.txt").createIfNotExists(createParents = true) - - var actualEvents = List.empty[String] - def output(file: File, event: String) = synchronized { - val msg = s"${dir.path relativize file.path} got $event" - println(msg) - actualEvents = msg :: actualEvents - } - /***************************************************************************/ - import java.nio.file.{StandardWatchEventKinds => Events} - import FileWatcher._ - - import akka.actor.{ActorRef, ActorSystem} - implicit val system = ActorSystem() - - val watcher: ActorRef = dir.newWatcher() - - watcher ! when(events = Events.ENTRY_CREATE, Events.ENTRY_MODIFY) { // watch for multiple events - case (Events.ENTRY_CREATE, file) => output(file, "created") - case (Events.ENTRY_MODIFY, file) => output(file, "modified") - } - - watcher ! 
on(Events.ENTRY_DELETE)(file => output(file, "deleted")) // register partial function for single event - /***************************************************************************/ - sleep(5 seconds) - - val expectedEvents = mutable.ListBuffer.empty[String] - - def doIO[U](logs: String*)(f: => U): Unit = { - expectedEvents ++= logs - f - sleep() - } - - doIO("a/b/c.txt got modified") { - (dir / "a" / "b" / "c.txt").writeText("Hello world") - } - doIO("a/b got deleted", "a/b/c.txt got deleted") { - rm(dir / "a" / "b") - } - doIO("d got created") { - mkdir(dir / "d") - } - doIO("d/e.txt got created") { - touch(dir / "d" / "e.txt") - } - doIO("d/f got created") { - mkdirs(dir / "d" / "f" / "g") - } - doIO("d/f/g/e.txt got created") { - touch(dir / "d" / "f" / "g" / "e.txt") - } - - doIO("a/e.txt got created", "d/f/g/e.txt got deleted") { - (dir / "d" / "f" / "g" / "e.txt") moveTo (dir / "a" / "e.txt") - } - - sleep(10 seconds) - - println( - s""" - |Expected=${expectedEvents.sorted} - |Actual=${actualEvents.sorted} - |""".stripMargin) - - expectedEvents.diff(actualEvents) shouldBe empty - - def checkNotWatching[U](msg: String)(f: => U) = { - val before = List(actualEvents : _*) - f - sleep() - val after = List(actualEvents : _*) - assert(before === after, msg) - } - - system.stop(watcher) - sleep() - checkNotWatching("stop watching after actor is stopped") { - mkdirs(dir / "e") - } - - system.terminate() - sleep() - checkNotWatching("stop watching after actor-system is stopped") { - mkdirs(dir / "f") - } - } - } -} diff --git a/scalalib/src/test/resource/better-files/benchmarks/README.md b/scalalib/src/test/resource/better-files/benchmarks/README.md deleted file mode 100644 index ed092ece..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/README.md +++ /dev/null @@ -1,24 +0,0 @@ -Benchmarks -==== -* [Scanner benchmarks](src/main/scala/better/files/Scanners.scala): -``` -> sbt "benchmarks/test" -JavaScanner : 2191 ms -StringBuilderScanner : 1325 ms -CharBufferScanner : 1117 ms -StreamingScanner : 212 ms -IterableScanner : 365 ms -IteratorScanner : 297 ms -BetterFilesScanner : 272 ms -ArrayBufferScanner : 220 ms -FastJavaIOScanner2 : 181 ms -FastJavaIOScanner : 179 ms -``` - ----- - -[![YourKit](https://www.yourkit.com/images/yklogo.png)](https://www.yourkit.com/) - -YourKit supports better-files with its full-featured Java Profiler. -YourKit, LLC is the creator of [YourKit Java Profiler](https://www.yourkit.com/java/profiler/) and [YourKit .NET Profiler](https://www.yourkit.com/.net/profiler/), -innovative and intelligent tools for profiling Java and .NET applications. 
diff --git a/scalalib/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java b/scalalib/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java deleted file mode 100644 index 50550704..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/src/main/java/better/files/ArrayBufferScanner.java +++ /dev/null @@ -1,78 +0,0 @@ -package better.files; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.Arrays; - -/** - * Hand built using a char buffer - */ -public class ArrayBufferScanner extends AbstractScanner { - private char[] buffer = new char[1 << 4]; - private int pos = 1; - - private BufferedReader reader; - - public ArrayBufferScanner(BufferedReader reader) { - super(reader); - this.reader = reader; - } - - @Override - public boolean hasNext() { - return pos > 0; - } - - private void loadBuffer() { - pos = 0; - try { - for (int i; (i = reader.read()) != -1; ) { - char c = (char) i; - if (c != ' ' && c != '\n' && c != '\t' && c != '\r' && c != '\f') { - if (pos == buffer.length) buffer = Arrays.copyOf(buffer, 2 * pos); - buffer[pos++] = c; - } else if (pos != 0) break; - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - public String current() { - return String.copyValueOf(buffer, 0, pos); - } - - @Override - public String next() { - loadBuffer(); - return current(); - } - - @Override - public String nextLine() { - try { - return reader.readLine(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public int nextInt() { - loadBuffer(); - final int radix = 10; - int result = 0; - int i = buffer[0] == '-' || buffer[0] == '+' ? 1 : 0; - for (checkValidNumber(pos > i); i < pos; i++) { - int digit = buffer[i] - '0'; - checkValidNumber(0 <= digit && digit <= 9); - result = result * radix + digit; - } - return buffer[0] == '-' ? 
-result : result; - } - - private void checkValidNumber(boolean condition) { - if (!condition) throw new NumberFormatException(current()); - } -} diff --git a/scalalib/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala b/scalalib/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala deleted file mode 100644 index 791e6039..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/src/main/scala/better/files/Scanners.scala +++ /dev/null @@ -1,158 +0,0 @@ -package better.files - -import java.io.BufferedReader - -/** - * Base interface to test - */ -abstract class AbstractScanner(protected[this] val reader: BufferedReader) { - def hasNext: Boolean - def next(): String - def nextInt() = next().toInt - def nextLine() = reader.readLine() - def close() = reader.close() -} - -/** - * Based on java.util.Scanner - */ -class JavaScanner(reader: BufferedReader) extends AbstractScanner(reader) { - private[this] val scanner = new java.util.Scanner(reader) - override def hasNext = scanner.hasNext - override def next() = scanner.next() - override def nextInt() = scanner.nextInt() - override def nextLine() = { - scanner.nextLine() - scanner.nextLine() - } - override def close() = scanner.close() -} - -/** - * Based on StringTokenizer + resetting the iterator - */ -class IterableScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterable[String] { - override def iterator = for { - line <- Iterator.continually(reader.readLine()).takeWhile(_ != null) - tokenizer = new java.util.StringTokenizer(line) - _ <- Iterator.continually(tokenizer).takeWhile(_.hasMoreTokens) - } yield tokenizer.nextToken() - - private[this] var current = iterator - override def hasNext = current.hasNext - override def next() = current.next() - override def nextLine() = { - current = iterator - super.nextLine() - } -} - -/** - * Based on a mutating var StringTokenizer - */ -class IteratorScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { - import java.util.StringTokenizer - private[this] val tokenizers = Iterator.continually(reader.readLine()).takeWhile(_ != null).map(new StringTokenizer(_)).filter(_.hasMoreTokens) - private[this] var current: Option[StringTokenizer] = None - - @inline private[this] def tokenizer(): Option[StringTokenizer] = current.find(_.hasMoreTokens) orElse { - current = if (tokenizers.hasNext) Some(tokenizers.next()) else None - current - } - override def hasNext = tokenizer().nonEmpty - override def next() = tokenizer().get.nextToken() - override def nextLine() = { - current = None - super.nextLine() - } -} - -/** - * Based on java.io.StreamTokenizer - */ -class StreamingScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { - import java.io.StreamTokenizer - private[this] val in = new StreamTokenizer(reader) - - override def hasNext = in.ttype != StreamTokenizer.TT_EOF - override def next() = { - in.nextToken() - in.sval - } - override def nextInt() = nextDouble().toInt - def nextDouble() = { - in.nextToken() - in.nval - } -} - -/** - * Based on a reusable StringBuilder - */ -class StringBuilderScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { - private[this] val chars = reader.chars - private[this] val buffer = new StringBuilder() - - override def next() = { - buffer.clear() - while (buffer.isEmpty && hasNext) { - chars.takeWhile(c => !c.isWhitespace).foreach(buffer += _) - } - buffer.toString() - } - override def 
hasNext = chars.hasNext -} - -/** - * Scala version of the ArrayBufferScanner - */ -class CharBufferScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { - private[this] val chars = reader.chars - private[this] var buffer = Array.ofDim[Char](1<<4) - - override def next() = { - var pos = 0 - while (pos == 0 && hasNext) { - for { - c <- chars.takeWhile(c => c != ' ' && c != '\n') - } { - if (pos == buffer.length) buffer = java.util.Arrays.copyOf(buffer, 2 * pos) - buffer(pos) = c - pos += 1 - } - } - String.copyValueOf(buffer, 0, pos) - } - override def hasNext = chars.hasNext -} - -/** - * Scanner using https://github.com/williamfiset/FastJavaIO - */ -class FastJavaIOScanner(reader: BufferedReader) extends AbstractScanner(reader) { - protected def is: java.io.InputStream = new org.apache.commons.io.input.ReaderInputStream(reader, defaultCharset) - - private[this] val fastReader = new fastjavaio.InputReader(is) - - override def hasNext = true //TODO: https://github.com/williamfiset/FastJavaIO/issues/3 - override def next() = fastReader.readStr() - override def nextInt() = fastReader.readInt() - override def nextLine() = fastReader.readLine() -} - -/** - * Same as FastJavaIOScanner but uses better-files's Reader => InputStream - */ -class FastJavaIOScanner2(reader: BufferedReader) extends FastJavaIOScanner(reader) { - override def is = reader.toInputStream -} - -/** - * Based on the better-files implementation - */ -class BetterFilesScanner(reader: BufferedReader) extends AbstractScanner(reader) { - private[this] val scanner = Scanner(reader) - override def hasNext = scanner.hasNext - override def next() = scanner.next - override def nextLine() = scanner.nextLine() -} diff --git a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala b/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala deleted file mode 100644 index 68b734e1..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/Benchmark.scala +++ /dev/null @@ -1,10 +0,0 @@ -package better.files - -import org.scalatest.FunSuite - -trait Benchmark extends FunSuite { - def profile[A](f: => A): (A, Long) = { - val t = System.nanoTime() - (f, ((System.nanoTime() - t)/1e6).toLong) - } -} diff --git a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala b/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala deleted file mode 100644 index aa09bc77..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/EncodingBenchmark.scala +++ /dev/null @@ -1,39 +0,0 @@ -package better.files - -import java.nio.charset.Charset - -import scala.util.Random - -class EncodingBenchmark extends Benchmark { - - def testWrite(file: File, charset: Charset) = profile { - for { - writer <- file.bufferedWriter(charset) - content <- Iterator.continually(Random.nextString(10000)).take(1000) - } writer.write(content + "\n") - } - - def testRead(file: File, charset: Charset) = profile { - for { - reader <- file.bufferedReader(charset) - line <- reader.lines().autoClosed - } line - } - - def run(charset: Charset) = { - File.temporaryFile() foreach {file => - val (_, w) = testWrite(file, charset) - info(s"Charset=$charset, write=$w ms") - - val (_, r) = testRead(file, charset) - info(s"Charset=$charset, read=$r ms") - } - } - - test("encoding") { - val utf8 = Charset.forName("UTF-8") - 
run(charset = utf8) - info("-------------") - run(charset = UnicodeCharset(utf8)) - } -} diff --git a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala b/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala deleted file mode 100644 index 83082b9a..00000000 --- a/scalalib/src/test/resource/better-files/benchmarks/src/test/scala/better/files/ScannerBenchmark.scala +++ /dev/null @@ -1,66 +0,0 @@ -package better.files - -import java.io.{BufferedReader, StringReader} - -class ScannerBenchmark extends Benchmark { - val file = File.newTemporaryFile() - val n = 1000 - repeat(n) { - file.appendLine(-n to n mkString " ") - .appendLine("hello " * n) - .appendLine("world " * n) - } - val scanners: Seq[BufferedReader => AbstractScanner] = Seq( - new JavaScanner(_), - new StringBuilderScanner(_), - new CharBufferScanner(_), - new StreamingScanner(_), - new IterableScanner(_), - new IteratorScanner(_), - new BetterFilesScanner(_), - new ArrayBufferScanner(_), - new FastJavaIOScanner2(_), - new FastJavaIOScanner(_) - ) - - def runTest(scanner: AbstractScanner) = { - val (_, time) = profile(run(scanner)) - scanner.close() - info(f"${scanner.getClass.getSimpleName.padTo(25, ' ')}: $time%4d ms") - } - - def run(scanner: AbstractScanner): Unit = repeat(n) { - assert(scanner.hasNext) - val ints = List.fill(2 * n + 1)(scanner.nextInt()) - val line = "" //scanner.nextLine() - val words = IndexedSeq.fill(2 * n)(scanner.next()) - (line, ints, words) - } - - test("scanner") { - info("Warming up ...") - scanners foreach { scannerBuilder => - val canaryData = - """ - |10 -23 - |Hello World - |Hello World - |19 - """.stripMargin - val scanner = scannerBuilder(new BufferedReader(new StringReader(canaryData))) - info(s"Testing ${scanner.getClass.getSimpleName} for correctness") - assert(scanner.hasNext) - assert(scanner.nextInt() == 10) - assert(scanner.nextInt() == -23) - assert(scanner.next() == "Hello") - assert(scanner.next() == "World") - val l = scanner.nextLine() - assert(l == "Hello World", l) - assert(scanner.nextInt() == 19) - //assert(!scanner.hasNext) - } - - info("Running benchmark ...") - scanners foreach { scanner => runTest(scanner(file.newBufferedReader)) } - } -} diff --git a/scalalib/src/test/resource/better-files/build.sbt b/scalalib/src/test/resource/better-files/build.sbt deleted file mode 100644 index a3ae7a81..00000000 --- a/scalalib/src/test/resource/better-files/build.sbt +++ /dev/null @@ -1,163 +0,0 @@ -val username = "pathikrit" -val repo = "better-files" - -lazy val commonSettings = Seq( - organization := s"com.github.$username", - scalaVersion := "2.12.3", - crossScalaVersions := Seq("2.12.3"), - crossVersion := CrossVersion.binary, - javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint"), - scalacOptions ++= Seq( - "-deprecation", // Emit warning and location for usages of deprecated APIs. - "-encoding", "utf-8", // Specify character encoding used by source files. - "-explaintypes", // Explain type errors in more detail. - "-feature", // Emit warning and location for usages of features that should be imported explicitly. 
- "-language:existentials", // Existential types (besides wildcard types) can be written and inferred - "-language:experimental.macros", // Allow macro definition (besides implementation and application) - "-language:higherKinds", // Allow higher-kinded types - "-language:implicitConversions", // Allow definition of implicit functions called views - "-unchecked", // Enable additional warnings where generated code depends on assumptions. - "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access. - "-Xfatal-warnings", // Fail the compilation if there are any warnings. - "-Xfuture", // Turn on future language features. - "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver. - "-Xlint:by-name-right-associative", // By-name parameter of right associative operator. - "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error. - "-Xlint:delayedinit-select", // Selecting member of DelayedInit. - "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element. - "-Xlint:inaccessible", // Warn about inaccessible types in method signatures. - "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`. - "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id. - "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. - "-Xlint:nullary-unit", // Warn when nullary methods return Unit. - "-Xlint:option-implicit", // Option.apply used implicit view. - "-Xlint:package-object-classes", // Class or object defined in package object. - "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds. - "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field. - "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component. - "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope. - "-Xlint:unsound-match", // Pattern match may not be typesafe. - "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver. - "-Ypartial-unification", // Enable partial unification in type constructor inference - "-Ywarn-dead-code", // Warn when dead code is identified. - "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined. - "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. - "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`. - "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. - "-Ywarn-nullary-unit", // Warn when nullary methods return Unit. - "-Ywarn-numeric-widen", // Warn when numerics are widened. - "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused. - "-Ywarn-unused:imports", // Warn if an import selector is not referenced. - "-Ywarn-unused:locals", // Warn if a local definition is unused. - "-Ywarn-unused:params", // Warn if a value parameter is unused. - "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused. - "-Ywarn-unused:privates", // Warn if a private member is unused. - "-Ywarn-value-discard" // Warn when non-Unit expression results are unused. 
- ), - libraryDependencies += Dependencies.scalatest, - updateImpactOpenBrowser := false -) - -lazy val core = (project in file("core")) - .settings(commonSettings: _*) - .settings(publishSettings: _*) - .settings( - name := repo, - description := "Simple, safe and intuitive I/O in Scala" - ) - -lazy val akka = (project in file("akka")) - .settings(commonSettings: _*) - .settings(publishSettings: _*) - .settings( - name := s"$repo-akka", - description := "Reactive file watcher using Akka actors", - libraryDependencies += Dependencies.akka - ) - .dependsOn(core % "test->test;compile->compile") - -lazy val shapelessScanner = (project in file("shapeless")) - .settings(commonSettings: _*) - .settings(noPublishSettings: _*) - .settings( - name := s"shapeless-scanner", - description := "Shapeless Scanner", - libraryDependencies += Dependencies.shapeless - ) - .dependsOn(core % "test->test;compile->compile") - -lazy val benchmarks = (project in file("benchmarks")) - .settings(commonSettings: _*) - .settings(noPublishSettings: _*) - .settings( - name := s"$repo-benchmarks", - libraryDependencies ++= Seq( - Dependencies.commonsio, - Dependencies.fastjavaio - ) - ) - .dependsOn(core % "test->test;compile->compile") - -lazy val root = (project in file(".")) - .settings(commonSettings: _*) - .settings(docSettings: _*) - .settings(noPublishSettings: _*) - .settings(releaseSettings: _*) - .aggregate(core, akka, shapelessScanner, benchmarks) - -import UnidocKeys._ -lazy val docSettings = unidocSettings ++ site.settings ++ ghpages.settings ++ Seq( - autoAPIMappings := true, - unidocProjectFilter in (ScalaUnidoc, unidoc) := inProjects(core, akka), - SiteKeys.siteSourceDirectory := file("site"), - site.addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), "latest/api"), - git.remoteRepo := s"git@github.com:$username/$repo.git" -) - -import ReleaseTransformations._ -lazy val releaseSettings = Seq( - releaseProcess := Seq[ReleaseStep]( - checkSnapshotDependencies, - inquireVersions, - //runClean, - runTest, - setReleaseVersion, - commitReleaseVersion, - tagRelease, - publishArtifacts, - setNextVersion, - commitNextVersion, - releaseStepCommand("sonatypeReleaseAll"), - pushChanges - ) -) - -lazy val noPublishSettings = Seq( - publish := (), - publishLocal := (), - publishArtifact := false -) - -lazy val publishSettings = Seq( - homepage := Some(url(s"https://github.com/$username/$repo")), - licenses += "MIT" -> url(s"https://github.com/$username/$repo/blob/master/LICENSE"), - scmInfo := Some(ScmInfo(url(s"https://github.com/$username/$repo"), s"git@github.com:$username/$repo.git")), - apiURL := Some(url(s"https://$username.github.io/$repo/latest/api/")), - releaseCrossBuild := true, - releasePublishArtifactsAction := PgpKeys.publishSigned.value, - publishMavenStyle := true, - publishArtifact in Test := false, - publishTo := Some(if (isSnapshot.value) Opts.resolver.sonatypeSnapshots else Opts.resolver.sonatypeStaging), - credentials ++= (for { - username <- sys.env.get("SONATYPE_USERNAME") - password <- sys.env.get("SONATYPE_PASSWORD") - } yield Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", username, password)).toSeq, - pomExtra := - - - {username} - Pathikrit Bhowmick - http://github.com/{username} - - -) diff --git a/scalalib/src/test/resource/better-files/circle.yml b/scalalib/src/test/resource/better-files/circle.yml deleted file mode 100644 index 0ca8d9b9..00000000 --- a/scalalib/src/test/resource/better-files/circle.yml +++ /dev/null @@ -1,21 +0,0 @@ -machine: - environment: - 
SBT_GHPAGES_COMMIT_MESSAGE: 'Publishing Scaladoc [ci skip]' - java: - version: oraclejdk8 - -test: - override: - - sbt clean coverage +test - -deployment: - master: - branch: master - owner: pathikrit - commands: - - sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage - - bash <(curl -s https://codecov.io/bash) - - git config --global user.email "pathikritbhowmick@msn.com" - - git config --global user.name "circle-ci" - - git config --global push.default simple - - sbt ghpagesPushSite +publish diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala deleted file mode 100644 index 3bacd91d..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Dsl.scala +++ /dev/null @@ -1,155 +0,0 @@ -package better.files - -import java.nio.charset.Charset -import java.nio.file.attribute.{PosixFileAttributes, PosixFilePermission, PosixFilePermissions} -import java.util.zip.Deflater - -import scala.collection.JavaConverters._ - -/** - * Do file ops using a UNIX command line DSL - */ -object Dsl { - def ~ : File = - File.home - - def pwd: File = - File.currentWorkingDirectory - - def cwd: File = - pwd - - val `..`: File => File = - _.parent - - val `.`: File => File = - identity - - /** - * Adds some symbolic operations to file - * @param file - */ - implicit class SymbolicOperations(val file: File) { - /** - * Allows navigation up e.g. file / .. / .. - * - * @param f - * @return - */ - def /(f: File => File): File = - f(file) - - def <<(line: String)(implicit charset: Charset = defaultCharset): file.type = - file.appendLines(line)(charset) - - def >>:(line: String)(implicit charset: Charset = defaultCharset): file.type = - file.appendLines(line)(charset) - - def <(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type = - file.write(text)(openOptions, charset) - - def `>:`(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): file.type = - file.write(text)(openOptions, charset) - - def `!`(implicit charset: Charset = defaultCharset): String = - file.contentAsString(charset) - - def `===`(that: File): Boolean = - file.isSameContentAs(that) - - def !==(that: File): Boolean = - !(file === that) - } - - def cp(from: File, to: File): File = { - if (to.isDirectory) { - from.copyToDirectory(to) - } else { - from.copyTo(to, overwrite = true) - } - } - - def mv(from: File, to: File): File = { - if (to.isDirectory) { - from.moveToDirectory(to) - } else { - from.moveTo(to, overwrite = true) - } - } - - def rm(file: File): File = - file.delete(swallowIOExceptions = true) - - def del(file: File): File = - rm(file) - - def ln(file1: File, file2: File): File = - file1.linkTo(file2) - - def ln_s(file1: File, file2: File): File = - file1.symbolicLinkTo(file2) - - def cat(files: File*): Seq[Iterator[Byte]] = - files.map(_.bytes) - - def ls(file: File): Files = - file.list - - def dir(file: File): Files = - ls(file) - - def ls_r(file: File): Files = - file.listRecursively - - def touch(file: File): File = - file.touch() - - def mkdir(file: File): File = - file.createDirectory() - - def md5(file: File): String = - file.md5 - - def sha1(file: File): String = - file.sha1 - - def sha256(file: File): String = - file.sha256 - - def sha512(file: File): String = - file.sha512 - - def mkdirs(file: File): File = - 
file.createDirectories() - - def chown(owner: String, file: File): File = - file.setOwner(owner) - - def chgrp(group: String, file: File): File = - file.setGroup(group) - - /** - * Update permission of this file - * - * @param permissions Must be 9 character POSIX permission representation e.g. "rwxr-x---" - * @param file - * @return file - */ - def chmod(permissions: String, file: File): File = - file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet) - - def chmod_+(permission: PosixFilePermission, file: File): File = - file.addPermission(permission) - - def chmod_-(permission: PosixFilePermission, file: File): File = - file.removePermission(permission) - - def stat(file: File): PosixFileAttributes = - file.posixAttributes - - def unzip(zipFile: File)(destination: File)(implicit charset: Charset = defaultCharset): destination.type = - zipFile.unzipTo(destination)(charset) - - def zip(files: File*)(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type = - destination.zipIn(files.iterator, compressionLevel)(charset) -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala deleted file mode 100644 index eb11cd93..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/File.scala +++ /dev/null @@ -1,1257 +0,0 @@ -package better.files - -import java.io.{File => JFile, _} -import java.net.{URI, URL} -import java.nio.charset.Charset -import java.nio.channels._ -import java.nio.file._ -import java.nio.file.attribute._ -import java.security.{DigestInputStream, MessageDigest} -import java.time.Instant -import java.util.regex.Pattern -import java.util.zip._ -import javax.xml.bind.DatatypeConverter - -import scala.collection.JavaConverters._ -import scala.concurrent.ExecutionContext -import scala.util.Properties -import scala.util.matching.Regex - -/** - * Scala wrapper around java.nio.files.Path - */ -class File private(val path: Path)(implicit val fileSystem: FileSystem = path.getFileSystem) { - //TODO: LinkOption? - - def pathAsString: String = - path.toString - - def toJava: JFile = - new JFile(path.toAbsolutePath.toString) - - /** - * Name of file - * Certain files may not have a name e.g. root directory - returns empty string in that case - * - * @return - */ - def name: String = - nameOption.getOrElse("") - - /** - * Certain files may not have a name e.g. root directory - returns None in that case - * - * @return - */ - def nameOption: Option[String] = - Option(path.getFileName).map(_.toString) - - def root: File = - path.getRoot - - def nameWithoutExtension: String = - nameWithoutExtension(includeAll = true) - - /** - * @param includeAll - * For files with multiple extensions e.g. "bundle.tar.gz" - * nameWithoutExtension(includeAll = true) returns "bundle" - * nameWithoutExtension(includeAll = false) returns "bundle.tar" - * @return - */ - def nameWithoutExtension(includeAll: Boolean): String = - if (hasExtension) name.substring(0, indexOfExtension(includeAll)) else name - - /** - * @return extension (including the dot) of this file if it is a regular file and has an extension, else None - */ - def extension: Option[String] = - extension() - - /** - * @param includeDot whether the dot should be included in the extension or not - * @param includeAll whether all extension tokens should be included, or just the last one e.g. 
for bundle.tar.gz should it be .tar.gz or .gz - * @param toLowerCase to lowercase the extension or not e.g. foo.HTML should have .html or .HTML - * @return extension of this file if it is a regular file and has an extension, else None - */ - def extension(includeDot: Boolean = true, includeAll: Boolean = false, toLowerCase: Boolean = true): Option[String] = - when(hasExtension) { - val dot = indexOfExtension(includeAll) - val index = if (includeDot) dot else dot + 1 - val extension = name.substring(index) - if (toLowerCase) extension.toLowerCase else extension - } - - private[this] def indexOfExtension(includeAll: Boolean) = - if (includeAll) name.indexOf(".") else name.lastIndexOf(".") - - /** - * Returns the extension if file is a regular file - * If file is unreadable or does not exist, it is assumed to be not a regular file - * See: https://github.com/pathikrit/better-files/issues/89 - * - * @return - */ - def hasExtension: Boolean = - (isRegularFile || notExists) && name.contains(".") - - /** - * Changes the file-extension by renaming this file; if file does not have an extension, it adds the extension - * Example usage file"foo.java".changeExtensionTo(".scala") - */ - def changeExtensionTo(extension: String): File = - if (isRegularFile) renameTo(s"$nameWithoutExtension$extension") else this - - def contentType: Option[String] = - Option(Files.probeContentType(path)) - - /** - * Return parent of this file - * NOTE: This API returns null if this file is the root; - * please use parentOption if you expect to handle roots - * - * @see parentOption - * @return - */ - def parent: File = - parentOption.orNull - - /** - * - * @return Some(parent) of this file or None if this is the root and thus has no parent - */ - def parentOption: Option[File] = - Option(path.getParent).map(File.apply) - - def /(child: String): File = - path.resolve(child) - - def /(child: Symbol): File = - this / child.name - - def createChild(child: String, asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): File = - (this / child).createIfNotExists(asDirectory, createParents)(attributes, linkOptions) - - /** - * Create this file. 
If it exists, don't do anything - * - * @param asDirectory If you want this file to be created as a directory instead, set this to true (false by default) - * @param createParents If you also want all the parents to be created from root to this file (false by defailt) - * @param attributes - * @param linkOptions - * @return - */ - def createIfNotExists(asDirectory: Boolean = false, createParents: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = { - if (exists(linkOptions)) { - this - } else if (asDirectory) { - createDirectories()(attributes) - } else { - if (createParents) parent.createDirectories()(attributes) - try { - createFile()(attributes) - } catch { - case _: FileAlreadyExistsException if isRegularFile(linkOptions) => // We don't really care if it exists already - } - this - } - } - - /** - * Create this file - * - * @param attributes - * @return - */ - def createFile()(implicit attributes: File.Attributes = File.Attributes.default): this.type = { - Files.createFile(path, attributes: _*) - this - } - - def exists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - Files.exists(path, linkOptions: _*) - - def notExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - Files.notExists(path, linkOptions: _*) - - def sibling(name: String): File = - path.resolveSibling(name) - - def isSiblingOf(sibling: File): Boolean = - sibling.isChildOf(parent) - - def siblings: Files = - parent.list.filterNot(_ == this) - - def isChildOf(parent: File): Boolean = - parent.isParentOf(this) - - /** - * Check if this directory contains this file - * - * @param file - * @return true if this is a directory and it contains this file - */ - def contains(file: File): Boolean = - isDirectory && (file.path startsWith path) - - def isParentOf(child: File): Boolean = - contains(child) - - def bytes: Iterator[Byte] = - newInputStream.buffered.bytes //TODO: ManagedResource here? - - def loadBytes: Array[Byte] = - Files.readAllBytes(path) - - def byteArray: Array[Byte] = - loadBytes - - /** - * Create this directory - * - * @param attributes - * @return - */ - def createDirectory()(implicit attributes: File.Attributes = File.Attributes.default): this.type = { - Files.createDirectory(path, attributes: _*) - this - } - - /** - * Create this directory and all its parents - * Unlike the JDK, this by default sanely handles the JDK-8130464 bug - * If you want default Java behaviour, use File.LinkOptions.noFollow - * - * @param attributes - * @return - */ - def createDirectories()(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = { - try { - Files.createDirectories(path, attributes: _*) - } catch { - case _: FileAlreadyExistsException if isDirectory(linkOptions) => // work around for JDK-8130464 - } - this - } - - def chars(implicit charset: Charset = defaultCharset): Iterator[Char] = - newBufferedReader(charset).chars //TODO: ManagedResource here? 
- - /** - * Load all lines from this file - * Note: Large files may cause an OutOfMemory in which case, use the streaming version @see lineIterator - * - * @param charset - * @return all lines in this file - */ - def lines(implicit charset: Charset = defaultCharset): Traversable[String] = - Files.readAllLines(path, charset).asScala - - /** - * Iterate over lines in a file (auto-close stream on complete) - * NOTE: If the iteration is partial, it may leave a stream open - * If you want partial iteration use @see lines() - * - * @param charset - * @return - */ - def lineIterator(implicit charset: Charset = defaultCharset): Iterator[String] = - Files.lines(path, charset).toAutoClosedIterator - - def tokens(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Iterator[String] = - newBufferedReader(charset).tokens(splitter) - - def contentAsString(implicit charset: Charset = defaultCharset): String = - new String(byteArray, charset) - - def printLines(lines: Iterator[Any])(implicit openOptions: File.OpenOptions = File.OpenOptions.append): this.type = { - for { - pw <- printWriter()(openOptions) - line <- lines - } pw.println(line) - this - } - - /** - * For large number of lines that may not fit in memory, use printLines - * - * @param lines - * @param charset - * @return - */ - def appendLines(lines: String*)(implicit charset: Charset = defaultCharset): this.type = { - Files.write(path, lines.asJava, charset, File.OpenOptions.append: _*) - this - } - - def appendLine(line: String = "")(implicit charset: Charset = defaultCharset): this.type = - appendLines(line)(charset) - - def append(text: String)(implicit charset: Charset = defaultCharset): this.type = - appendByteArray(text.getBytes(charset)) - - def appendText(text: String)(implicit charset: Charset = defaultCharset): this.type = - append(text)(charset) - - def appendByteArray(bytes: Array[Byte]): this.type = { - Files.write(path, bytes, File.OpenOptions.append: _*) - this - } - - def appendBytes(bytes: Iterator[Byte]): this.type = - writeBytes(bytes)(openOptions = File.OpenOptions.append) - - /** - * Write byte array to file. For large contents consider using the writeBytes - * - * @param bytes - * @return this - */ - def writeByteArray(bytes: Array[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = { - Files.write(path, bytes, openOptions: _*) - this - } - - def writeBytes(bytes: Iterator[Byte])(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = { - outputStream(openOptions).foreach(_.buffered write bytes) - this - } - - def write(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type = - writeByteArray(text.getBytes(charset))(openOptions) - - def writeText(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type = - write(text)(openOptions, charset) - - def overwrite(text: String)(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): this.type = - write(text)(openOptions, charset) - - def newRandomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): RandomAccessFile = - new RandomAccessFile(toJava, mode.value) - - def randomAccess(mode: File.RandomAccessMode = File.RandomAccessMode.read): ManagedResource[RandomAccessFile] = - newRandomAccess(mode).autoClosed //TODO: Mode enum? 
- - def newBufferedReader(implicit charset: Charset = defaultCharset): BufferedReader = - Files.newBufferedReader(path, charset) - - def bufferedReader(implicit charset: Charset = defaultCharset): ManagedResource[BufferedReader] = - newBufferedReader(charset).autoClosed - - def newBufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): BufferedWriter = - Files.newBufferedWriter(path, charset, openOptions: _*) - - def bufferedWriter(implicit charset: Charset = defaultCharset, openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[BufferedWriter] = - newBufferedWriter(charset, openOptions).autoClosed - - def newFileReader: FileReader = - new FileReader(toJava) - - def fileReader: ManagedResource[FileReader] = - newFileReader.autoClosed - - def newFileWriter(append: Boolean = false): FileWriter = - new FileWriter(toJava, append) - - def fileWriter(append: Boolean = false): ManagedResource[FileWriter] = - newFileWriter(append).autoClosed - - def newPrintWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): PrintWriter = - new PrintWriter(newOutputStream(openOptions), autoFlush) - - def printWriter(autoFlush: Boolean = false)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[PrintWriter] = - newPrintWriter(autoFlush)(openOptions).autoClosed - - def newInputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): InputStream = - Files.newInputStream(path, openOptions: _*) - - def inputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[InputStream] = - newInputStream(openOptions).autoClosed - - //TODO: Move this to inputstream implicit - def newDigestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): DigestInputStream = - new DigestInputStream(newInputStream(openOptions), digest) - - def digestInputStream(digest: MessageDigest)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[DigestInputStream] = - newDigestInputStream(digest)(openOptions).autoClosed - - def newScanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): Scanner = - Scanner(newBufferedReader(charset), splitter) - - def scanner(splitter: StringSplitter = StringSplitter.default)(implicit charset: Charset = defaultCharset): ManagedResource[Scanner] = - newScanner(splitter)(charset).autoClosed - - def newOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): OutputStream = - Files.newOutputStream(path, openOptions: _*) - - def outputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[OutputStream] = - newOutputStream(openOptions).autoClosed - - def newZipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ZipOutputStream = - new ZipOutputStream(newOutputStream(openOptions), charset) - - def zipInputStream(implicit charset: Charset = defaultCharset): ManagedResource[ZipInputStream] = - newZipInputStream(charset).autoClosed - - def newZipInputStream(implicit charset: Charset = defaultCharset): ZipInputStream = - new ZipInputStream(new FileInputStream(toJava).buffered, charset) - - def zipOutputStream(implicit openOptions: File.OpenOptions = File.OpenOptions.default, charset: Charset = defaultCharset): ManagedResource[ZipOutputStream] = - 
newZipOutputStream(openOptions, charset).autoClosed - - def newFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): FileChannel = - FileChannel.open(path, openOptions.toSet.asJava, attributes: _*) - - def fileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default, attributes: File.Attributes = File.Attributes.default): ManagedResource[FileChannel] = - newFileChannel(openOptions, attributes).autoClosed - - def newAsynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): AsynchronousFileChannel = - AsynchronousFileChannel.open(path, openOptions: _*) - - def asynchronousFileChannel(implicit openOptions: File.OpenOptions = File.OpenOptions.default): ManagedResource[AsynchronousFileChannel] = - newAsynchronousFileChannel(openOptions).autoClosed - - def newWatchService: WatchService = - fileSystem.newWatchService() - - def watchService: ManagedResource[WatchService] = - newWatchService.autoClosed - - /** - * Serialize a object using Java's serializer into this file - * - * @param obj - * @return - */ - def writeSerialized(obj: Serializable)(implicit openOptions: File.OpenOptions = File.OpenOptions.default): this.type = { - createIfNotExists().outputStream(openOptions).foreach(_.asObjectOutputStream().serialize(obj).flush()) - this - } - - /** - * Deserialize a object using Java's default serialization from this file - * - * @return - */ - def readDeserialized[A](implicit openOptions: File.OpenOptions = File.OpenOptions.default): A = - inputStream(openOptions).map(_.asObjectInputStream().deserialize[A]) - - def register(service: WatchService, events: File.Events = File.Events.all): this.type = { - path.register(service, events.toArray) - this - } - - def digest(algorithm: MessageDigest): Array[Byte] = { - listRelativePaths.toSeq.sorted foreach { relativePath => - val file: File = path.resolve(relativePath) - if(file.isDirectory) { - algorithm.update(relativePath.toString.getBytes) - } else { - file.digestInputStream(algorithm).foreach(_.pipeTo(NullOutputStream)) - } - } - algorithm.digest() - } - - /** - * Set a file attribute e.g. 
file("dos:system") = true - * - * @param attribute - * @param value - * @param linkOptions - * @return - */ - def update(attribute: String, value: Any)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = { - Files.setAttribute(path, attribute, value, linkOptions : _*) - this - } - - /** - * @return checksum of this file (or directory) in hex format - */ - def checksum(algorithm: MessageDigest): String = - DatatypeConverter.printHexBinary(digest(algorithm)) - - def md5: String = - checksum("MD5") - - def sha1: String = - checksum("SHA-1") - - def sha256: String = - checksum("SHA-256") - - def sha512: String = - checksum("SHA-512") - - /** - * @return Some(target) if this is a symbolic link (to target) else None - */ - def symbolicLink: Option[File] = - when(isSymbolicLink)(new File(Files.readSymbolicLink(path))) - - /** - * @return true if this file (or the file found by following symlink) is a directory - */ - def isDirectory(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - Files.isDirectory(path, linkOptions: _*) - - /** - * @return true if this file (or the file found by following symlink) is a regular file - */ - def isRegularFile(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - Files.isRegularFile(path, linkOptions: _*) - - def isSymbolicLink: Boolean = - Files.isSymbolicLink(path) - - def isHidden: Boolean = - Files.isHidden(path) - - /** - * Check if a file is locked. - * - * @param mode The random access mode. - * @param position The position at which the locked region is to start; must be non-negative. - * @param size The size of the locked region; must be non-negative, and the sum position + size must be non-negative. - * @param isShared true to request a shared lock, false to request an exclusive lock. - * @return True if the file is locked, false otherwise. - */ - def isLocked(mode: File.RandomAccessMode, position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - try { - usingLock(mode) {channel => - channel.tryLock(position, size, isShared).release() - false - } - } catch { - case _: OverlappingFileLockException | _: NonWritableChannelException | _: NonReadableChannelException => true - - // Windows throws a `FileNotFoundException` if the file is locked (see: https://github.com/pathikrit/better-files/pull/194) - case _: FileNotFoundException if verifiedExists(linkOptions).getOrElse(true) => true - } - - /** - * @see https://docs.oracle.com/javase/tutorial/essential/io/check.html - * @see https://stackoverflow.com/questions/30520179/why-does-file-exists-return-true-even-though-files-exists-in-the-nio-files - * - * @return - * Some(true) if file is guaranteed to exist - * Some(false) if file is guaranteed to not exist - * None if the status is unknown e.g. 
if file is unreadable - */ - def verifiedExists(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Option[Boolean] = { - if (exists(linkOptions)) { - Some(true) - } else if(notExists(linkOptions)) { - Some(false) - } else { - None - } - } - - def usingLock[U](mode: File.RandomAccessMode)(f: FileChannel => U): U = - newRandomAccess(mode).getChannel.autoClosed.map(f) - - def isReadLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) = - isLocked(File.RandomAccessMode.read, position, size, isShared) - - def isWriteLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false) = - isLocked(File.RandomAccessMode.readWrite, position, size, isShared) - - def list: Files = - Files.list(path) - - def children: Files = list - - def entries: Files = list - - def listRecursively(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files = - walk()(visitOptions).filterNot(isSamePathAs) - - /** - * Walk the directory tree recursively upto maxDepth - * - * @param maxDepth - * @return List of children in BFS maxDepth level deep (includes self since self is at depth = 0) - */ - def walk(maxDepth: Int = Int.MaxValue)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files = - Files.walk(path, maxDepth, visitOptions: _*) //TODO: that ignores I/O errors? - - def pathMatcher(syntax: File.PathMatcherSyntax, includePath: Boolean)(pattern: String): PathMatcher = - syntax(this, pattern, includePath) - - /** - * Util to glob from this file's path - * - * - * @param includePath If true, we don't need to set path glob patterns - * e.g. instead of **//*.txt we just use *.txt - * @return Set of files that matched - */ - //TODO: Consider removing `syntax` as implicit. You often want to control this on a per method call basis - def glob(pattern: String, includePath: Boolean = true)(implicit syntax: File.PathMatcherSyntax = File.PathMatcherSyntax.default, visitOptions: File.VisitOptions = File.VisitOptions.default): Files = - pathMatcher(syntax, includePath)(pattern).matches(this)(visitOptions) - - /** - * Util to match from this file's path using Regex - * - * @param includePath If true, we don't need to set path glob patterns - * e.g. instead of **//*.txt we just use *.txt - * @see glob - * @return Set of files that matched - */ - def globRegex(pattern: Regex, includePath: Boolean = true)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files = - glob(pattern.regex, includePath)(syntax = File.PathMatcherSyntax.regex, visitOptions = visitOptions) - - /** - * More Scala friendly way of doing Files.walk - * Note: This is lazy (returns an Iterator) and won't evaluate till we reify the iterator (e.g. 
using .toList) - * - * @param matchFilter - * @return - */ - def collectChildren(matchFilter: File => Boolean)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Files = - walk()(visitOptions).filter(matchFilter) - - def uri: URI = - path.toUri - - def url: URL = - uri.toURL - - /** - * @return file size (for directories, return size of the directory) in bytes - */ - def size(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Long = - walk()(visitOptions).map(f => Files.size(f.path)).sum - - def permissions(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Set[PosixFilePermission] = - Files.getPosixFilePermissions(path, linkOptions: _*).asScala.toSet - - def permissionsAsString(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String = - PosixFilePermissions.toString(permissions(linkOptions).asJava) - - def setPermissions(permissions: Set[PosixFilePermission]): this.type = { - Files.setPosixFilePermissions(path, permissions.asJava) - this - } - - def addPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = - setPermissions(permissions(linkOptions) + permission) - - def removePermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = - setPermissions(permissions(linkOptions) - permission) - - /** - * test if file has this permission - */ - def testPermission(permission: PosixFilePermission)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - permissions(linkOptions)(permission) - - def isOwnerReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OWNER_READ)(linkOptions) - - def isOwnerWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OWNER_WRITE)(linkOptions) - - def isOwnerExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OWNER_EXECUTE)(linkOptions) - - def isGroupReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.GROUP_READ)(linkOptions) - - def isGroupWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.GROUP_WRITE)(linkOptions) - - def isGroupExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.GROUP_EXECUTE)(linkOptions) - - def isOthersReadable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OTHERS_READ)(linkOptions) - - def isOthersWritable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OTHERS_WRITE)(linkOptions) - - def isOthersExecutable(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - testPermission(PosixFilePermission.OTHERS_EXECUTE)(linkOptions) - - /** - * This differs from the above as this checks if the JVM can read this file even though the OS cannot in certain platforms - * - * @see isOwnerReadable - * @return - */ - def isReadable: Boolean = - toJava.canRead - - def isWriteable: Boolean = - toJava.canWrite - - def isExecutable: Boolean = - toJava.canExecute - - def attributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): 
BasicFileAttributes = - Files.readAttributes(path, classOf[BasicFileAttributes], linkOptions: _*) - - def posixAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): PosixFileAttributes = - Files.readAttributes(path, classOf[PosixFileAttributes], linkOptions: _*) - - def dosAttributes(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): DosFileAttributes = - Files.readAttributes(path, classOf[DosFileAttributes], linkOptions: _*) - - def owner(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): UserPrincipal = - Files.getOwner(path, linkOptions: _*) - - def ownerName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String = - owner(linkOptions).getName - - def group(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): GroupPrincipal = - posixAttributes(linkOptions).group() - - def groupName(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): String = - group(linkOptions).getName - - def setOwner(owner: String): this.type = { - Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByName(owner)) - this - } - - def setGroup(group: String): this.type = { - Files.setOwner(path, fileSystem.getUserPrincipalLookupService.lookupPrincipalByGroupName(group)) - this - } - - /** - * Similar to the UNIX command touch - create this file if it does not exist and set its last modification time - */ - def touch(time: Instant = Instant.now())(implicit attributes: File.Attributes = File.Attributes.default, linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = { - Files.setLastModifiedTime(createIfNotExists()(attributes, linkOptions).path, FileTime.from(time)) - this - } - - def lastModifiedTime(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Instant = - Files.getLastModifiedTime(path, linkOptions: _*).toInstant - - /** - * Deletes this file or directory - * - * @param swallowIOExceptions If this is set to true, any exception thrown is swallowed - */ - def delete(swallowIOExceptions: Boolean = false): this.type = { - try { - if (isDirectory) list.foreach(_.delete(swallowIOExceptions)) - Files.delete(path) - } catch { - case _: IOException if swallowIOExceptions => //e.printStackTrace() //swallow - } - this - } - - def renameTo(newName: String): File = - moveTo(path.resolveSibling(newName)) - - /** - * - * @param destination - * @param overwrite - * @return destination - */ - def moveTo(destination: File, overwrite: Boolean = false): destination.type = { - Files.move(path, destination.path, File.CopyOptions(overwrite): _*) - destination - } - - /** - * Moves this file into the given directory - * @param directory - * - * @return the File referencing the new file created under destination - */ - def moveToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): File = { - require(directory.isDirectory(linkOptions), s"$directory must be a directory") - moveTo(directory / this.name) - } - - /** - * - * @param destination - * @param overwrite - * @return destination - */ - def copyTo(destination: File, overwrite: Boolean = false)(implicit copyOptions: File.CopyOptions = File.CopyOptions(overwrite)): destination.type = { - if (isDirectory) {//TODO: maxDepth? 
- Files.walkFileTree(path, new SimpleFileVisitor[Path] { - def newPath(subPath: Path): Path = destination.path.resolve(path.relativize(subPath)) - - override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes) = { - Files.createDirectories(newPath(dir)) - super.preVisitDirectory(dir, attrs) - } - - override def visitFile(file: Path, attrs: BasicFileAttributes) = { - Files.copy(file, newPath(file), copyOptions: _*) - super.visitFile(file, attrs) - } - }) - } else { - Files.copy(path, destination.path, copyOptions: _*) - } - destination - } - - /** - * Copies this file into the given directory - * @param directory - * - * @return the File referencing the new file created under destination - */ - def copyToDirectory(directory: File)(implicit linkOptions: File.LinkOptions = File.LinkOptions.default, copyOptions: File.CopyOptions = File.CopyOptions.default): File = { - require(directory.isDirectory(linkOptions), s"$directory must be a directory") - copyTo(directory / this.name)(copyOptions) - } - - def symbolicLinkTo(destination: File)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = { - Files.createSymbolicLink(path, destination.path, attributes: _*) - destination - } - - def linkTo(destination: File, symbolic: Boolean = false)(implicit attributes: File.Attributes = File.Attributes.default): destination.type = { - if (symbolic) { - symbolicLinkTo(destination)(attributes) - } else { - Files.createLink(destination.path, path) - destination - } - } - - def listRelativePaths(implicit visitOptions: File.VisitOptions = File.VisitOptions.default): Iterator[Path] = - walk()(visitOptions).map(relativize) - - def relativize(destination: File): Path = - path.relativize(destination.path) - - def isSamePathAs(that: File): Boolean = - this.path == that.path - - def isSameFileAs(that: File): Boolean = - Files.isSameFile(this.path, that.path) - - /** - * @return true if this file is exactly same as that file - * For directories, it checks for equivalent directory structure - */ - def isSameContentAs(that: File): Boolean = - isSimilarContentAs(that) - - /** - * Almost same as isSameContentAs but uses faster md5 hashing to compare (and thus small chance of false positive) - * Also works for directories - * - * @param that - * @return - */ - def isSimilarContentAs(that: File): Boolean = - this.md5 == that.md5 - - override def equals(obj: Any) = { - obj match { - case file: File => isSamePathAs(file) - case _ => false - } - } - - /** - * @param linkOptions - * @return true if file is not present or empty directory or 0-bytes file - */ - def isEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = { - if (isDirectory(linkOptions)) { - children.isEmpty - } else if (isRegularFile(linkOptions)) { - toJava.length() == 0 - } else { - notExists(linkOptions) - } - } - - /** - * - * @param linkOptions - * @return for directories, true if it has no children, false otherwise - * for files, true if it is a 0-byte file, false otherwise - * else true if it exists, false otherwise - */ - def nonEmpty(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): Boolean = - !isEmpty(linkOptions) - - /** - * If this is a directory, remove all its children - * If its a file, empty the contents - * - * @return this - */ - def clear()(implicit linkOptions: File.LinkOptions = File.LinkOptions.default): this.type = { - if (isDirectory(linkOptions)) { - children.foreach(_.delete()) - } else { - writeByteArray(Array.emptyByteArray)(File.OpenOptions.default) - 
} - this - } - - def deleteOnExit(): this.type = { - toJava.deleteOnExit() - this - } - - override def hashCode = - path.hashCode() - - override def toString = - pathAsString - - /** - * Zips this file (or directory) - * - * @param destination The destination file; Creates this if it does not exists - * @return The destination zip file - */ - def zipTo(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): destination.type = { - val files = if (isDirectory) children else Iterator(this) - destination.zipIn(files, compressionLevel)(charset) - } - - /** - * zip to a temp directory - * - * @return the target directory - */ - def zip(compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = defaultCharset): File = - zipTo(destination = File.newTemporaryFile(name, ".zip"), compressionLevel)(charset) - - /** - * Unzips this zip file - * - * @param destination destination folder; Creates this if it does not exist - * @param zipFilter An optional param to reject or accept unzipping a file - * @return The destination where contents are unzipped - */ - def unzipTo(destination: File, zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): destination.type = { - for { - zipFile <- new ZipFile(toJava, charset).autoClosed - entry <- zipFile.entries().asScala if zipFilter(entry) - } entry.extractTo(destination, zipFile.getInputStream(entry)) - destination - } - - /** - * Streamed unzipping is slightly slower but supports larger files and more encodings - * @see https://github.com/pathikrit/better-files/issues/152 - * - * @param destinationDirectory destination folder; Creates this if it does not exist - * @return The destination where contents are unzipped - */ - def streamedUnzip(destinationDirectory: File = File.newTemporaryDirectory(name))(implicit charset: Charset = defaultCharset): destinationDirectory.type = { - for { - zipIn <- zipInputStream(charset) - } zipIn.mapEntries(_.extractTo(destinationDirectory, zipIn)).size - destinationDirectory - } - - def unGzipTo(destinationDirectory: File = File.newTemporaryDirectory())(implicit openOptions: File.OpenOptions = File.OpenOptions.default): destinationDirectory.type = { - for { - in <- inputStream(openOptions) - out <- destinationDirectory.outputStream(openOptions) - } in.buffered.pipeTo(out.buffered) - destinationDirectory - } - - /** - * Adds these files into this zip file - * Example usage: File("test.zip").zipIn(Seq(file"hello.txt", file"hello2.txt")) - * - * @param files - * @param compressionLevel - * @param charset - * @return this - */ - def zipIn(files: Files, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(charset: Charset = defaultCharset): this.type = { - for { - output <- newZipOutputStream(File.OpenOptions.default, charset).withCompressionLevel(compressionLevel).autoClosed - input <- files - file <- input.walk() - name = input.parent relativize file - } output.add(file, name.toString) - this - } - - /** - * unzip to a temporary zip file - * - * @return the zip file - */ - def unzip(zipFilter: ZipEntry => Boolean = _ => true)(implicit charset: Charset = defaultCharset): File = - unzipTo(destination = File.newTemporaryDirectory(name), zipFilter)(charset) - - /** - * Java's temporary files/directories are not cleaned up by default. 
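The zip helpers removed above (zipTo, zip, unzipTo, unzip) are easiest to see as a round trip. A minimal sketch, assuming better-files is on the classpath and that a directory named "src" exists; all names below are illustrative only, not part of the removed sources:

import better.files._

object ZipRoundTrip extends App {
  val dir      = File("src")       // directory to archive; assumed to exist
  val archive  = dir.zip()         // zips the directory into a temporary .zip file
  val restored = archive.unzip()   // unzips that archive into a temporary directory
  println(s"zipped $dir to $archive, unzipped into $restored")
}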
- * If we explicitly call `.deleteOnExit()`, it gets added to shutdown handler which is not ideal - * for long running systems with millions of temporary files as: - * a) it would slowdown shutdown and - * b) occupy unnecessary disk-space during app lifetime - * - * This util auto-deletes the resource when done using the ManagedResource facility - * - * Example usage: - * File.temporaryDirectory().foreach(tempDir => doSomething(tempDir) - * - * @return - */ - def toTemporary: ManagedResource[File] = - new ManagedResource(this)(Disposable.fileDisposer) - - //TODO: add features from https://github.com/sbt/io -} - -object File { - /** - * Get a file from a resource - * Note: Use resourceToFile instead as this may not actually always load the file - * See: http://stackoverflow.com/questions/676250/different-ways-of-loading-a-file-as-an-inputstream - * - * @param name - * @return - */ - def resource(name: String): File = - File(currentClassLoader().getResource(name)) - - /** - * Copies a resource into a file - * - * @param name - * @param destination File where resource is copied into, if not specified a temp file is created - * @return - */ - def copyResource(name: String)(destination: File = File.newTemporaryFile(prefix = name)): destination.type = { - for { - in <- resourceAsStream(name).autoClosed - out <- destination.outputStream - } in.pipeTo(out) - destination - } - - def newTemporaryDirectory(prefix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = { - parent match { - case Some(dir) => Files.createTempDirectory(dir.path, prefix, attributes: _*) - case _ => Files.createTempDirectory(prefix, attributes: _*) - } - } - - def temporaryDirectory(prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] = - newTemporaryDirectory(prefix, parent)(attributes).toTemporary - - def usingTemporaryDirectory[U](prefix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit = - temporaryDirectory(prefix, parent, attributes).foreach(f) - - def newTemporaryFile(prefix: String = "", suffix: String = "", parent: Option[File] = None)(implicit attributes: Attributes = Attributes.default): File = { - parent match { - case Some(dir) => Files.createTempFile(dir.path, prefix, suffix, attributes: _*) - case _ => Files.createTempFile(prefix, suffix, attributes: _*) - } - } - - def temporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default): ManagedResource[File] = - newTemporaryFile(prefix, suffix, parent)(attributes).toTemporary - - def usingTemporaryFile[U](prefix: String = "", suffix: String = "", parent: Option[File] = None, attributes: Attributes = Attributes.default)(f: File => U): Unit = - temporaryFile(prefix, suffix, parent, attributes).foreach(f) - - implicit def apply(path: Path): File = - new File(path.toAbsolutePath.normalize()) - - def apply(path: String, fragments: String*): File = - Paths.get(path, fragments: _*) - - /** - * Get File to path with help of reference anchor. - * - * Anchor is used as a reference in case that path is not absolute. - * Anchor could be path to directory or path to file. - * If anchor is file, then file's parent dir is used as an anchor. - * - * If anchor itself is relative, then anchor is used together with current working directory. - * - * NOTE: If anchor is non-existing path on filesystem, then it's always treated as file, - * e.g. 
it's last component is removed when it is used as an anchor. - * - * @param anchor path to be used as anchor - * @param path as string - * @param fragments optional path fragments - * @return absolute, normalize path - */ - def apply(anchor: File, path: String, fragments: String*): File = { - val p = Paths.get(path, fragments: _*) - if (p.isAbsolute) { - p - } else if (anchor.isDirectory) { - anchor / p.toString - } else { - anchor.parent / p.toString - } - } - - def apply(url: URL): File = - apply(url.toURI) - - def apply(uri: URI): File = - Paths.get(uri) - - def roots: Iterable[File] = - FileSystems.getDefault.getRootDirectories.asScala.map(File.apply) - - def root: File = - roots.head - - def home: File = - Properties.userHome.toFile - - def temp: File = - Properties.tmpDir.toFile - - def currentWorkingDirectory: File = - File("") - - type Attributes = Seq[FileAttribute[_]] - object Attributes { - val default : Attributes = Seq.empty - } - - type CopyOptions = Seq[CopyOption] - object CopyOptions { - def apply(overwrite: Boolean) : CopyOptions = (if (overwrite) Seq(StandardCopyOption.REPLACE_EXISTING) else default) ++ LinkOptions.default - val default : CopyOptions = Seq.empty //Seq(StandardCopyOption.COPY_ATTRIBUTES) - } - - type Events = Seq[WatchEvent.Kind[_]] - object Events { - val all : Events = Seq(StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_DELETE) - val default : Events = all - } - - type OpenOptions = Seq[OpenOption] - object OpenOptions { - val append : OpenOptions = Seq(StandardOpenOption.APPEND, StandardOpenOption.CREATE) - val default : OpenOptions = Seq.empty - } - - type LinkOptions = Seq[LinkOption] - object LinkOptions { - val follow : LinkOptions = Seq.empty - val noFollow : LinkOptions = Seq(LinkOption.NOFOLLOW_LINKS) - val default : LinkOptions = follow - } - - type VisitOptions = Seq[FileVisitOption] - object VisitOptions { - val follow : VisitOptions = Seq(FileVisitOption.FOLLOW_LINKS) - val default : VisitOptions = Seq.empty - } - - type Order = Ordering[File] - object Order { - val bySize : Order = Ordering.by(_.size) - val byName : Order = Ordering.by(_.name) - val byDepth : Order = Ordering.by(_.path.getNameCount) - val byModificationTime : Order = Ordering.by(_.lastModifiedTime) - val byDirectoriesLast : Order = Ordering.by(_.isDirectory) - val byDirectoriesFirst : Order = byDirectoriesLast.reverse - val default : Order = byDirectoriesFirst.andThenBy(byName) - } - - abstract class PathMatcherSyntax(name: String) { - - /** - * Return PathMatcher from this file - * - * @param file - * @param pattern - * @param includePath If this is true, no need to include path matchers - * e.g. 
instead of "**//*.txt" we can simply use *.txt - * @return - */ - def apply(file: File, pattern: String, includePath: Boolean): PathMatcher = { - val escapedPath = if (includePath) escapePath(file.path.toString + file.fileSystem.getSeparator) else "" - file.fileSystem.getPathMatcher(s"$name:$escapedPath$pattern") - } - - def escapePath(path: String): String - } - object PathMatcherSyntax { - val glob: PathMatcherSyntax = new PathMatcherSyntax("glob") { - override def escapePath(path: String) = path - .replaceAllLiterally("\\", "\\\\") - .replaceAllLiterally("*", "\\*") - .replaceAllLiterally("?", "\\?") - .replaceAllLiterally("{", "\\{") - .replaceAllLiterally("}", "\\}") - .replaceAllLiterally("[", "\\[") - .replaceAllLiterally("]", "\\]") - } - - val regex: PathMatcherSyntax = new PathMatcherSyntax("regex") { - override def escapePath(path: String) = Pattern.quote(path) - } - - val default: PathMatcherSyntax = glob - } - - class RandomAccessMode private(val value: String) - object RandomAccessMode { - val read = new RandomAccessMode("r") - val readWrite = new RandomAccessMode("rw") - val readWriteMetadataSynchronous = new RandomAccessMode("rws") - val readWriteContentSynchronous = new RandomAccessMode("rwd") - } - - def numberOfOpenFileDescriptors(): Long = { - java.lang.management.ManagementFactory - .getPlatformMBeanServer - .getAttribute(new javax.management.ObjectName("java.lang:type=OperatingSystem"), "OpenFileDescriptorCount") - .asInstanceOf[Long] - } - - /** - * Implement this interface to monitor the root file - */ - trait Monitor extends AutoCloseable { - val root: File - - /** - * Dispatch a StandardWatchEventKind to an appropriate callback - * Override this if you don't want to manually handle onDelete/onCreate/onModify separately - * - * @param eventType - * @param file - */ - def onEvent(eventType: WatchEvent.Kind[Path], file: File, count: Int): Unit = eventType match { - case StandardWatchEventKinds.ENTRY_CREATE => onCreate(file, count) - case StandardWatchEventKinds.ENTRY_MODIFY => onModify(file, count) - case StandardWatchEventKinds.ENTRY_DELETE => onDelete(file, count) - } - - def start()(implicit executionContext: ExecutionContext): Unit - - def onCreate(file: File, count: Int): Unit - - def onModify(file: File, count: Int): Unit - - def onDelete(file: File, count: Int): Unit - - def onUnknownEvent(event: WatchEvent[_], count: Int): Unit - - def onException(exception: Throwable): Unit - - def stop(): Unit = close() - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala deleted file mode 100644 index f6f139f2..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/FileMonitor.scala +++ /dev/null @@ -1,72 +0,0 @@ -package better.files - -import java.nio.file._ - -import scala.concurrent.ExecutionContext -import scala.util.Try -import scala.util.control.NonFatal - -/** - * Implementation of File.Monitor - * - * @param root - * @param maxDepth - */ -abstract class FileMonitor(val root: File, maxDepth: Int) extends File.Monitor { - protected[this] val service = root.newWatchService - - def this(root: File, recursive: Boolean = true) = this(root, if (recursive) Int.MaxValue else 0) - - /** - * If watching non-directory, don't react to siblings - * @param target - * @return - */ - protected[this] def reactTo(target: File) = root.isDirectory || root.isSamePathAs(target) - - protected[this] def 
process(key: WatchKey) = { - val path = key.watchable().asInstanceOf[Path] - - import scala.collection.JavaConverters._ - key.pollEvents().asScala foreach { - case event: WatchEvent[Path] @unchecked => - val target: File = path.resolve(event.context()) - if (reactTo(target)) { - if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE) { - val depth = root.relativize(target).getNameCount - watch(target, (maxDepth - depth) max 0) // auto-watch new files in a directory - } - onEvent(event.kind(), target, event.count()) - } - case event => if (reactTo(path)) onUnknownEvent(event, event.count()) - } - key.reset() - } - - protected[this] def watch(file: File, depth: Int): Unit = { - def toWatch: Files = if (file.isDirectory) { - file.walk(depth).filter(f => f.isDirectory && f.exists) - } else { - when(file.exists)(file.parent).iterator // There is no way to watch a regular file; so watch its parent instead - } - try { - toWatch.foreach(f => Try[Unit](f.register(service)).recover(PartialFunction(onException)).get) - } catch { - case NonFatal(e) => onException(e) - } - } - - override def start()(implicit executionContext: ExecutionContext) = { - watch(root, maxDepth) - executionContext.execute(() => Iterator.continually(service.take()).foreach(process)) - } - - override def close() = service.close() - - // Although this class is abstract, we give provide implementations so user can choose to implement a subset of these - override def onCreate(file: File, count: Int) = {} - override def onModify(file: File, count: Int) = {} - override def onDelete(file: File, count: Int) = {} - override def onUnknownEvent(event: WatchEvent[_], count: Int) = {} - override def onException(exception: Throwable) = {} -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala deleted file mode 100644 index 322b5f40..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Implicits.scala +++ /dev/null @@ -1,324 +0,0 @@ -package better.files - -import java.io.{File => JFile, _} -import java.nio.MappedByteBuffer -import java.nio.channels.FileChannel -import java.nio.charset.Charset -import java.nio.file.{Path, PathMatcher} -import java.security.MessageDigest -import java.util.StringTokenizer -import java.util.stream.{Stream => JStream} -import java.util.zip._ - -import scala.annotation.tailrec -import scala.collection.JavaConverters._ -import scala.util.Try - -/** - * Container for various implicits - */ -trait Implicits { - - //TODO: Rename all Ops to Extensions - - implicit class StringInterpolations(sc: StringContext) { - def file(args: Any*): File = - value(args).toFile - - private[this] def value(args: Seq[Any]) = - sc.s(args: _*) - } - - implicit class StringOps(str: String) { - def toFile: File = - File(str) - - def /(child: String): File = - toFile / child - } - - implicit class FileOps(file: JFile) { - def toScala: File = - File(file.getPath) - } - - implicit class SymbolExtensions(symbol: Symbol) { - def /(child: Symbol): File = - File(symbol.name) / child - } - - implicit class IteratorExtensions[A](it: Iterator[A]) { - def withHasNext(f: => Boolean): Iterator[A] = new Iterator[A] { - override def hasNext = f && it.hasNext - override def next() = it.next() - } - } - - implicit class InputStreamOps(in: InputStream) { - def pipeTo(out: OutputStream, bufferSize: Int = defaultBufferSize): out.type = - pipeTo(out, Array.ofDim[Byte](bufferSize)) - - 
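The InputStreamOps.pipeTo extension just deleted is the workhorse behind copyResource and the zip helpers in this patch. A minimal sketch of calling it directly, assuming better-files is on the classpath; the file names are hypothetical:

import better.files._

object PipeToExample extends App {
  val src = File("in.txt")    // assumed to exist
  val dst = File("out.txt")   // created or truncated on write
  for {
    in  <- src.inputStream    // ManagedResource that closes the stream when done
    out <- dst.outputStream
  } in.pipeTo(out)            // copies src into dst using the default buffer size
}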
/** - * Pipe an input stream to an output stream using a byte buffer - */ - @tailrec final def pipeTo(out: OutputStream, buffer: Array[Byte]): out.type = { - val n = in.read(buffer) - if (n > 0) { - out.write(buffer, 0, n) - pipeTo(out, buffer) - } else { - out - } - } - - def asString(closeStream: Boolean = true, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset): String = { - try { - new ByteArrayOutputStream(bufferSize).autoClosed - .map(pipeTo(_, bufferSize = bufferSize).toString(charset.displayName())) - } finally { - if (closeStream) in.close() - } - } - - def buffered: BufferedInputStream = - new BufferedInputStream(in) - - def buffered(bufferSize: Int): BufferedInputStream = - new BufferedInputStream(in, bufferSize) - - def gzipped: GZIPInputStream = - new GZIPInputStream(in) - - /** - * If bufferSize is set to less than or equal to 0, we don't buffer - * @param bufferSize - * @return - */ - def asObjectInputStream(bufferSize: Int = defaultBufferSize): ObjectInputStream = - new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) - - /** - * @param bufferSize If bufferSize is set to less than or equal to 0, we don't buffer - * Code adapted from: - * https://github.com/apache/commons-io/blob/master/src/main/java/org/apache/commons/io/input/ClassLoaderObjectInputStream.java - * - * @return A special ObjectInputStream that loads a class based on a specified ClassLoader rather than the default - * This is useful in dynamic container environments. - */ - def asObjectInputStreamUsingClassLoader(classLoader: ClassLoader = getClass.getClassLoader, bufferSize: Int = defaultBufferSize): ObjectInputStream = - new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) { - override protected def resolveClass(objectStreamClass: ObjectStreamClass): Class[_] = - try { - Class.forName(objectStreamClass.getName, false, classLoader) - } catch { - case _: ClassNotFoundException ⇒ super.resolveClass(objectStreamClass) - } - - override protected def resolveProxyClass(interfaces: Array[String]): Class[_] = { - try { - java.lang.reflect.Proxy.getProxyClass( - classLoader, - interfaces.map(interface => Class.forName(interface, false, classLoader)) : _* - ) - } catch { - case _: ClassNotFoundException | _: IllegalArgumentException => super.resolveProxyClass(interfaces) - } - } - } - - def reader(implicit charset: Charset = defaultCharset): InputStreamReader = - new InputStreamReader(in, charset) - - def lines(implicit charset: Charset = defaultCharset): Iterator[String] = - reader(charset).buffered.lines().toAutoClosedIterator - - def bytes: Iterator[Byte] = - in.autoClosed.flatMap(res => eofReader(res.read()).map(_.toByte)) - } - - implicit class OutputStreamOps(val out: OutputStream) { - def buffered: BufferedOutputStream = - new BufferedOutputStream(out) - - def buffered(bufferSize: Int): BufferedOutputStream = - new BufferedOutputStream(out, bufferSize) - - def gzipped: GZIPOutputStream = - new GZIPOutputStream(out) - - def writer(implicit charset: Charset = defaultCharset): OutputStreamWriter = - new OutputStreamWriter(out, charset) - - def printWriter(autoFlush: Boolean = false): PrintWriter = - new PrintWriter(out, autoFlush) - - def write(bytes: Iterator[Byte], bufferSize: Int = defaultBufferSize): out.type = { - bytes.grouped(bufferSize).foreach(buffer => out.write(buffer.toArray)) - out.flush() - out - } - - def tee(out2: OutputStream): OutputStream = - new TeeOutputStream(out, out2) - - /** - * If bufferSize is set to less than or equal 
to 0, we don't buffer - * @param bufferSize - * @return - */ - def asObjectOutputStream(bufferSize: Int = defaultBufferSize): ObjectOutputStream = - new ObjectOutputStream(if (bufferSize <= 0) out else buffered(bufferSize)) - } - - implicit class ReaderOps(reader: Reader) { - def buffered: BufferedReader = - new BufferedReader(reader) - - def toInputStream(implicit charset: Charset = defaultCharset): InputStream = - new ReaderInputStream(reader)(charset) - } - - implicit class BufferedReaderOps(reader: BufferedReader) { - def chars: Iterator[Char] = - reader.autoClosed.flatMap(res => eofReader(res.read()).map(_.toChar)) - - def tokens(splitter: StringSplitter = StringSplitter.default): Iterator[String] = - reader.lines().toAutoClosedIterator.flatMap(splitter.split) - } - - implicit class WriterOps(writer: Writer) { - def buffered: BufferedWriter = - new BufferedWriter(writer) - - def outputstream(implicit charset: Charset = defaultCharset): OutputStream = - new WriterOutputStream(writer)(charset) - } - - implicit class FileChannelOps(fc: FileChannel) { - def toMappedByteBuffer: MappedByteBuffer = - fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size()) - } - - implicit class PathMatcherOps(matcher: PathMatcher) { - def matches(file: File)(implicit visitOptions: File.VisitOptions = File.VisitOptions.default) = - file.collectChildren(child => matcher.matches(child.path))(visitOptions) - } - - implicit class ObjectInputStreamOps(ois: ObjectInputStream) { - def deserialize[A]: A = - ois.readObject().asInstanceOf[A] - } - - implicit class ObjectOutputStreamOps(val oos: ObjectOutputStream) { - def serialize(obj: Serializable): oos.type = { - oos.writeObject(obj) - oos - } - } - - implicit class ZipOutputStreamOps(val out: ZipOutputStream) { - - /** - * Correctly set the compression level - * See: http://stackoverflow.com/questions/1206970/creating-zip-using-zip-utility - * - * @param level - * @return - */ - def withCompressionLevel(level: Int): out.type = { - out.setLevel(level) - if (level == Deflater.NO_COMPRESSION) out.setMethod(ZipOutputStream.DEFLATED) - out - } - - def add(file: File, name: String): out.type = { - val relativeName = name.stripSuffix(file.fileSystem.getSeparator) - val entryName = if (file.isDirectory) s"$relativeName/" else relativeName // make sure to end directories in ZipEntry with "/" - out.putNextEntry(new ZipEntry(entryName)) - if (file.isRegularFile) file.inputStream.foreach(_.pipeTo(out)) - out.closeEntry() - out - } - - def +=(file: File): out.type = - add(file, file.name) - } - - implicit class ZipInputStreamOps(val in: ZipInputStream) { - def mapEntries[A](f: ZipEntry => A): Iterator[A] = new Iterator[A] { - private[this] var entry = in.getNextEntry - - override def hasNext = entry != null - - override def next() = { - val result = Try(f(entry)) - Try(in.closeEntry()) - entry = in.getNextEntry - result.get - } - } - } - - implicit class ZipEntryOps(val entry: ZipEntry) { - /** - * Extract this ZipEntry under this rootDir - * - * @param rootDir directory under which this entry is extracted - * @param inputStream use this inputStream when this entry is a file - * @return the extracted file - */ - def extractTo(rootDir: File, inputStream: => InputStream): File = { - val child = rootDir.createChild(entry.getName, asDirectory = entry.isDirectory, createParents = true) - if (!entry.isDirectory) child.outputStream.foreach(inputStream.pipeTo(_)) - child - } - } - - implicit class CloseableOps[A <: AutoCloseable](resource: A) { - /** - * Lightweight automatic resource 
management - * Closes the resource when done e.g. - *
-      * for {
-      * out <- file.newOutputStream.autoClosed
-      * } out.write(bytes)
-      * // out is closed now
-      * 
- * - * @return - */ - def autoClosed: ManagedResource[A] = - new ManagedResource(resource)(Disposable.closableDisposer) - } - - implicit class JStreamOps[A](stream: JStream[A]) { - /** - * Closes this stream when iteration is complete - * It will NOT close the stream if it is not depleted! - * - * @return - */ - def toAutoClosedIterator: Iterator[A] = - stream.autoClosed.flatMap(_.iterator().asScala) - } - - private[files] implicit class OrderingOps[A](order: Ordering[A]) { - def andThenBy(order2: Ordering[A]): Ordering[A] = - Ordering.comparatorToOrdering(order.thenComparing(order2)) - } - - implicit def stringToMessageDigest(algorithmName: String): MessageDigest = - MessageDigest.getInstance(algorithmName) - - implicit def stringToCharset(charsetName: String): Charset = - Charset.forName(charsetName) - - implicit def tokenizerToIterator(s: StringTokenizer): Iterator[String] = - Iterator.continually(s.nextToken()).withHasNext(s.hasMoreTokens) - - //implicit def posixPermissionToFileAttribute(perm: PosixFilePermission) = - // PosixFilePermissions.asFileAttribute(Set(perm)) - - private[files] implicit def pathStreamToFiles(files: JStream[Path]): Files = - files.toAutoClosedIterator.map(File.apply) -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala deleted file mode 100644 index dad5ecb8..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ManagedResource.scala +++ /dev/null @@ -1,91 +0,0 @@ -package better.files - -import java.util.concurrent.atomic.AtomicBoolean - -import scala.util.Try -import scala.util.control.NonFatal - -/** - * A typeclass to denote a disposable resource - * @tparam A - */ -trait Disposable[-A] { - def dispose(resource: A): Unit - - def disposeSilently(resource: A): Unit = { - val _ = Try(dispose(resource)) - } -} - -object Disposable { - def apply[A](disposeMethod: A => Any): Disposable[A] = new Disposable[A] { - override def dispose(resource: A) = { - val _ = disposeMethod(resource) - } - } - - implicit val closableDisposer: Disposable[AutoCloseable] = - Disposable(_.close()) - - val fileDisposer: Disposable[File] = - Disposable(_.delete(swallowIOExceptions = true)) -} - -class ManagedResource[A](resource: A)(implicit disposer: Disposable[A]) { - private[this] val isDisposed = new AtomicBoolean(false) - private[this] def disposeOnce() = if (!isDisposed.getAndSet(true)) disposer.dispose(resource) - - // This is the Scala equivalent of how javac compiles try-with-resources, - // Except that fatal exceptions while disposing take precedence over exceptions thrown previously - private[this] def disposeOnceAndThrow(e1: Throwable) = { - try { - disposeOnce() - } catch { - case NonFatal(e2) => e1.addSuppressed(e2) - case e2: Throwable => - e2.addSuppressed(e1) - throw e2 - } - throw e1 - } - - def foreach[U](f: A => U): Unit = { - val _ = map(f) - } - - def map[B](f: A => B): B = { - try { - f(resource) - } catch { - case e1: Throwable => disposeOnceAndThrow(e1) - } finally { - disposeOnce() - } - } - - def withFilter(f: A => Boolean): this.type = { - if (!f(resource)) disposeOnce() - this - } - - /** - * This handles lazy operations (e.g. 
Iterators) - * for which resource needs to be disposed only after iteration is done - * - * @param f - * @tparam B - * @return - */ - def flatMap[B](f: A => Iterator[B]): Iterator[B] = { - val it = f(resource) - it withHasNext { - try { - val result = it.hasNext - if (!result) disposeOnce() - result - } catch { - case e1: Throwable => disposeOnceAndThrow(e1) - } - } - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala deleted file mode 100644 index f9b792cc..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/ReaderInputStream.scala +++ /dev/null @@ -1,83 +0,0 @@ -package better.files - -import java.io.{InputStream, Reader} -import java.nio.{ByteBuffer, CharBuffer} -import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction} - -import scala.annotation.tailrec - -/** - * Code ported from Java to Scala: - * https://github.com/apache/commons-io/blob/c0eb48f7e83987c5ed112b82f0d651aff5149ae4/src/main/java/org/apache/commons/io/input/ReaderInputStream.java - */ -class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int) extends InputStream { - - def this(reader: Reader, bufferSize: Int = defaultBufferSize)(implicit charset: Charset = defaultCharset) = - this(reader = reader, encoder = charset.newEncoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE), bufferSize = bufferSize) - - /** - * CharBuffer used as input for the decoder. It should be reasonably - * large as we read data from the underlying Reader into this buffer. - */ - private[this] val encoderIn = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer] - - /** - * ByteBuffer used as output for the decoder. This buffer can be small - * as it is only used to transfer data from the decoder to the buffer provided by the caller. 
- */ - private[this] val encoderOut = ByteBuffer.allocate(bufferSize>>4).flip().asInstanceOf[ByteBuffer] - - private[this] var lastCoderResult = CoderResult.UNDERFLOW - private[this] var endOfInput = false - - private[this] def fillBuffer() = { - assert(!endOfInput) - if (lastCoderResult.isUnderflow) { - val position = encoderIn.compact().position - // We don't use Reader#read(CharBuffer) here because it is more efficient to write directly to the underlying char array - // since the default implementation copies data to a temporary char array anyway - reader.read(encoderIn.array, position, encoderIn.remaining) match { - case EOF => endOfInput = true - case c => encoderIn.position(position + c) - } - encoderIn.flip() - } - lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput) - encoderOut.flip() - } - - override def read(b: Array[Byte], off: Int, len: Int) = { - if (len < 0 || off < 0 || (off + len) > b.length) throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len) - if (len == 0) { - 0 // Always return 0 if len == 0 - } else { - var read = 0 - @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) { - if (encoderOut.hasRemaining) { - val c = encoderOut.remaining min len - encoderOut.get(b, off, c) - read += c - loop(off + c, len - c) - } else if (!endOfInput) { - fillBuffer() - loop(off, len) - } - } - loop(off, len) - if (read == 0 && endOfInput) EOF else read - } - } - - @tailrec final override def read() = { - if (encoderOut.hasRemaining) { - encoderOut.get & 0xFF - } else if (endOfInput) { - EOF - } else { - fillBuffer() - read() - } - } - - override def close() = reader.close() -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala deleted file mode 100644 index be6ebb3f..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/Scanner.scala +++ /dev/null @@ -1,183 +0,0 @@ -package better.files - -import java.io.{InputStream, LineNumberReader, Reader, StringReader} -import java.nio.charset.Charset -import java.time.format.DateTimeFormatter -import java.util.StringTokenizer - -trait Scanner extends Iterator[String] with AutoCloseable { - def lineNumber(): Int - - def next[A](implicit scan: Scannable[A]): A = scan(this) - - def nextLine(): String - - def lines: Iterator[String] = Iterator.continually(nextLine()).withHasNext(hasNext) -} - -/** - * Faster, safer and more idiomatic Scala replacement for java.util.Scanner - * See: http://codeforces.com/blog/entry/7018 - */ -object Scanner { - - def apply(str: String): Scanner = - Scanner(str, StringSplitter.default) - - def apply(str: String, splitter: StringSplitter): Scanner = - Scanner(new StringReader(str), splitter) - - def apply(reader: Reader): Scanner = - Scanner(reader, StringSplitter.default) - - def apply(reader: Reader, splitter: StringSplitter): Scanner = - Scanner(new LineNumberReader(reader.buffered), splitter) - - def apply(inputStream: InputStream)(implicit charset: Charset = defaultCharset): Scanner = - Scanner(inputStream, StringSplitter.default)(charset) - - def apply(inputStream: InputStream, splitter: StringSplitter)(implicit charset: Charset): Scanner = - Scanner(inputStream.reader(charset), splitter) - - def apply(reader: LineNumberReader, splitter: StringSplitter): Scanner = new Scanner { - private[this] val tokens = reader.tokens(splitter) - override def lineNumber() = 
reader.getLineNumber - override def nextLine() = reader.readLine() - override def next() = tokens.next() - override def hasNext = tokens.hasNext - override def close() = reader.close() - } - - val stdin: Scanner = Scanner(System.in) - - trait Read[A] { // TODO: Move to own subproject when this is fixed https://github.com/typelevel/cats/issues/932 - def apply(s: String): A - } - - object Read { - def apply[A](f: String => A): Read[A] = new Read[A] { - override def apply(s: String) = f(s) - } - implicit val string : Read[String] = Read(identity) - implicit val boolean : Read[Boolean] = Read(_.toBoolean) - implicit val byte : Read[Byte] = Read(_.toByte) //TODO: https://issues.scala-lang.org/browse/SI-9706 - implicit val short : Read[Short] = Read(_.toShort) - implicit val int : Read[Int] = Read(_.toInt) - implicit val long : Read[Long] = Read(_.toLong) - implicit val bigInt : Read[BigInt] = Read(BigInt(_)) - implicit val float : Read[Float] = Read(_.toFloat) - implicit val double : Read[Double] = Read(_.toDouble) - implicit val bigDecimal : Read[BigDecimal] = Read(BigDecimal(_)) - implicit def option[A: Read] : Read[Option[A]] = Read(s => when(s.nonEmpty)(implicitly[Read[A]].apply(s))) - - // Java's time readers - import java.time._ - import java.sql.{Date => SqlDate, Time => SqlTime, Timestamp => SqlTimestamp} - - implicit val duration : Read[Duration] = Read(Duration.parse(_)) - implicit val instant : Read[Instant] = Read(Instant.parse(_)) - implicit val localDateTime : Read[LocalDateTime] = Read(LocalDateTime.parse(_)) - implicit val localDate : Read[LocalDate] = Read(LocalDate.parse(_)) - implicit val monthDay : Read[MonthDay] = Read(MonthDay.parse(_)) - implicit val offsetDateTime : Read[OffsetDateTime] = Read(OffsetDateTime.parse(_)) - implicit val offsetTime : Read[OffsetTime] = Read(OffsetTime.parse(_)) - implicit val period : Read[Period] = Read(Period.parse(_)) - implicit val year : Read[Year] = Read(Year.parse(_)) - implicit val yearMonth : Read[YearMonth] = Read(YearMonth.parse(_)) - implicit val zonedDateTime : Read[ZonedDateTime] = Read(ZonedDateTime.parse(_)) - implicit val sqlDate : Read[SqlDate] = Read(SqlDate.valueOf) - implicit val sqlTime : Read[SqlTime] = Read(SqlTime.valueOf) - implicit val sqlTimestamp : Read[SqlTimestamp] = Read(SqlTimestamp.valueOf) - - /** - * Use this to create custom readers e.g. to read a LocalDate using some custom format - * val readLocalDate: Read[LocalDate] = Read.temporalQuery(format = myFormat, query = LocalDate.from) - * @param format - * @param query - * @tparam A - * @return - */ - def temporalQuery[A](format: DateTimeFormatter, query: temporal.TemporalQuery[A]): Read[A] = - Read(format.parse(_, query)) - } -} - -/** - * Implement this trait to make thing parsable - * In most cases, use Scanner.Read typeclass when you simply need access to one String token - * Use Scannable typeclass if you need access to the full scanner e.g. to detect encodings etc. 
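The Scanner and its Read typeclass above replace java.util.Scanner with typed token reads. A small sketch, assuming better-files is on the classpath; the input string is made up:

import better.files._

object ScannerExample extends App {
  val scanner = Scanner("Alice 42 true")
  val name    = scanner.next[String]   // resolved via Scannable.fromRead + Read[String]
  val age     = scanner.next[Int]
  val enabled = scanner.next[Boolean]
  println(s"$name is $age (enabled = $enabled)")
}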
- */ -trait Scannable[A] { - def apply(scanner: Scanner): A -} - -object Scannable { - def apply[A](f: Scanner => A): Scannable[A] = new Scannable[A] { - override def apply(scanner: Scanner) = f(scanner) - } - - implicit def fromRead[A](implicit read: Scanner.Read[A]): Scannable[A] = - Scannable(s => read(s.next())) - - implicit def tuple2[T1, T2](implicit t1: Scannable[T1], t2: Scannable[T2]): Scannable[(T1, T2)] = - Scannable(s => t1(s) -> t2(s)) - - implicit def iterator[A](implicit scanner: Scannable[A]): Scannable[Iterator[A]] = - Scannable(s => Iterator.continually(scanner(s)).withHasNext(s.hasNext)) -} - -trait StringSplitter { - def split(s: String): TraversableOnce[String] -} -object StringSplitter { - val default = StringSplitter.anyOf(" \t\t\n\r") - - /** - * Split string on this character - * This will return exactly 1 + n number of items where n is the number of occurence of delimiter in String s - * - * @param delimiter - * @return - */ - def on(delimiter: Char): StringSplitter = new StringSplitter { - override def split(s: String) = new Iterator[String] { - private[this] var i = 0 - private[this] var j = -1 - private[this] val c = delimiter.toInt - _next() - - private[this] def _next() = { - i = j + 1 - val k = s.indexOf(c, i) - j = if (k < 0) s.length else k - } - - override def hasNext = i <= s.length - - override def next() = { - val res = s.substring(i, j) - _next() - res - } - } - } - - /** - * Split this string using ANY of the characters from delimiters - * - * @param delimiters - * @param includeDelimiters - * @return - */ - def anyOf(delimiters: String, includeDelimiters: Boolean = false): StringSplitter = - s => new StringTokenizer(s, delimiters, includeDelimiters) - - /** - * Split string using a regex pattern - * - * @param pattern - * @return - */ - def regex(pattern: String): StringSplitter = - s => s.split(pattern, -1) -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala deleted file mode 100644 index 1da25b09..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/TeeOutputStream.scala +++ /dev/null @@ -1,23 +0,0 @@ -package better.files - -import java.io.OutputStream - -/** - * Write to multiple outputstreams at once - * If error happens on any one while doing an operation, only the last error is reported - * @param outs - */ -class TeeOutputStream(outs: OutputStream*) extends OutputStream { - override def write(b: Int) = tryAll(outs)(_.write(b)) - override def flush() = tryAll(outs)(_.flush()) - override def write(b: Array[Byte]) = tryAll(outs)(_.write(b)) - override def write(b: Array[Byte], off: Int, len: Int) = tryAll(outs)(_.write(b, off, len)) - override def close() = tryAll(outs)(_.close()) -} - -/** - * A sink outputstream similar to /dev/null - just consumes everything - */ -object NullOutputStream extends OutputStream { - override def write(b: Int) = {} -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala deleted file mode 100644 index be81f628..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/UnicodeCharset.scala +++ /dev/null @@ -1,100 +0,0 @@ -package better.files - -import java.nio.charset._ -import java.nio.{BufferOverflowException, ByteBuffer, CharBuffer} - -import 
scala.collection.JavaConverters._ - -/** - * A Unicode charset that handles byte-order markers - * - * @param underlyingCharset Use this charset if no known byte-order marker is detected; use this for encoding too - * @param writeByteOrderMarkers If set, write BOMs while encoding - */ -class UnicodeCharset(underlyingCharset: Charset, writeByteOrderMarkers: Boolean) - extends Charset(underlyingCharset.name(), underlyingCharset.aliases().asScala.toArray) { - override def newDecoder() = new UnicodeDecoder(underlyingCharset) - override def newEncoder() = if (writeByteOrderMarkers) new BomEncoder(underlyingCharset) else underlyingCharset.newEncoder() - override def contains(cs: Charset) = underlyingCharset.contains(cs) -} - -/** - * A Unicode decoder that uses the Unicode byte-order marker (BOM) to auto-detect the encoding - * (if none detected, falls back on the defaultCharset). This also gets around a bug in the JDK - * (http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058) where BOM is not consumed for UTF-8. - * See: https://github.com/pathikrit/better-files/issues/107 - * - * @param defaultCharset Use this charset if no known byte-order marker is detected - */ -class UnicodeDecoder(defaultCharset: Charset) extends CharsetDecoder(defaultCharset, 1, 1) { - import UnicodeCharset.bomTable - - private[this] var inferredCharset: Option[Charset] = None - - @annotation.tailrec - private[this] def decode(in: ByteBuffer, out: CharBuffer, candidates: Set[Charset] = Set.empty): CoderResult = { - if (isCharsetDetected) { - detectedCharset().newDecoder().decode(in, out, true) - } else if (candidates.isEmpty || !in.hasRemaining) { - inferredCharset = Some(defaultCharset) - in.rewind() - decode(in, out) - } else if (candidates.forall(c => bomTable(c).length == in.position())) { - inferredCharset = candidates.headOption.ensuring(candidates.size == 1, "Ambiguous BOMs found") - decode(in, out) - } else { - val idx = in.position() - val byte = in.get() - def isPossible(charset: Charset) = bomTable(charset).lift(idx).contains(byte) - decode(in, out, candidates.filter(isPossible)) - } - } - - override def decodeLoop(in: ByteBuffer, out: CharBuffer) = decode(in = in, out = out, candidates = bomTable.keySet) - - override def isCharsetDetected = inferredCharset.isDefined - - override def isAutoDetecting = true - - override def implReset() = inferredCharset = None - - override def detectedCharset() = inferredCharset.getOrElse(throw new IllegalStateException("Insufficient bytes read to determine charset")) -} - -/** - * Encoder that writes the BOM for this charset - * @param charset - */ -class BomEncoder(charset: Charset) extends CharsetEncoder(charset, 1, 1) { - private[this] val bom = UnicodeCharset.bomTable.getOrElse(charset, throw new IllegalArgumentException(s"$charset does not support BOMs")).toArray - private[this] var isBomWritten = false - - override def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = { - if (!isBomWritten) { - try { - out.put(bom) - } catch { - case _: BufferOverflowException => return CoderResult.OVERFLOW - } finally { - isBomWritten = true - } - } - charset.newEncoder().encode(in, out, true) - } - - override def implReset() = isBomWritten = false -} - -object UnicodeCharset { - private[files] val bomTable: Map[Charset, IndexedSeq[Byte]] = Map( - "UTF-8" -> IndexedSeq(0xEF, 0xBB, 0xBF), - "UTF-16BE" -> IndexedSeq(0xFE, 0xFF), - "UTF-16LE" -> IndexedSeq(0xFF, 0xFE), - "UTF-32BE" -> IndexedSeq(0x00, 0x00, 0xFE, 0xFF), - "UTF-32LE" -> IndexedSeq(0xFF, 0xFE, 0x00, 0x00) - 
).collect{case (charset, bytes) if Charset.isSupported(charset) => Charset.forName(charset) -> bytes.map(_.toByte)} - .ensuring(_.nonEmpty, "No unicode charset detected") - - def apply(charset: Charset, writeByteOrderMarkers: Boolean = false): Charset = - if (bomTable.contains(charset)) new UnicodeCharset(charset, writeByteOrderMarkers) else charset -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala deleted file mode 100644 index 80cd5fc8..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/WriterOutputStream.scala +++ /dev/null @@ -1,74 +0,0 @@ -package better.files - -import java.io.{OutputStream, Writer} -import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction} -import java.nio.{ByteBuffer, CharBuffer} - -import scala.annotation.tailrec - -/** - * Code ported from Java to Scala: - * https://github.com/apache/commons-io/blob/d357d9d563c4a34fa2ab3cdc68221c851a9de4f5/src/main/java/org/apache/commons/io/output/WriterOutputStream.java - */ -class WriterOutputStream(writer: Writer, decoder: CharsetDecoder, bufferSize: Int, flushImmediately: Boolean) extends OutputStream { - - /** - * CharBuffer used as output for the decoder - */ - private[this] val decoderOut = CharBuffer.allocate(bufferSize) - - /** - * ByteBuffer used as output for the decoder. This buffer can be small - * as it is only used to transfer data from the decoder to the buffer provided by the caller. - */ - private[this] val decoderIn = ByteBuffer.allocate(bufferSize>>4) - - def this(writer: Writer, bufferSize: Int = defaultBufferSize, flushImmediately: Boolean = false)(implicit charset: Charset = defaultCharset) = - this(writer = writer, decoder = charset.newDecoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE).replaceWith("?"), bufferSize = bufferSize, flushImmediately = flushImmediately) - - override def write(b: Array[Byte], off: Int, len: Int) = { - @tailrec def loop(off: Int, len: Int): Unit = if (len > 0) { - val c = decoderIn.remaining min len - decoderIn.put(b, off, c) - processInput(endOfInput = false) - loop(off + c, len - c) - } - loop(off, len) - if (flushImmediately) flushOutput() - } - - override def write(b: Int) = write(Array(b.toByte)) - - override def flush() = { - flushOutput() - writer.flush() - } - - override def close() = { - processInput(endOfInput = true) - flushOutput() - writer.close() - } - - private[this] def processInput(endOfInput: Boolean) = { - decoderIn.flip() - @tailrec def loop(): Unit = { - val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput) - if (coderResult.isOverflow) { - flushOutput() - loop() - } else { - assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters") - } - } - loop() - decoderIn.compact() - } - - private[this] def flushOutput(): Unit = { - if (decoderOut.position > 0) { - writer.write(decoderOut.array, 0, decoderOut.position) - val _ = decoderOut.rewind() - } - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala b/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala deleted file mode 100644 index bef8c1ed..00000000 --- a/scalalib/src/test/resource/better-files/core/src/main/scala/better/files/package.scala +++ /dev/null @@ -1,66 +0,0 @@ -package better - -import 
java.io.{InputStream, StreamTokenizer} -import java.nio.charset.Charset - -import scala.collection.mutable -import scala.util.{Failure, Success, Try} - -package object files extends Implicits { - - /** - * Default array buffer size - * Seems like a good value used by JDK: (see: java.io.BufferedInputStream.DEFAULT_BUFFER_SIZE) - */ - val defaultBufferSize = 8192 - - /** - * The default charset used by better-files - * Note: It uses java.net.charset.Charset.defaultCharset() in general but if the default supports byte-order markers, - * it uses a more compliant version than the JDK one (see: https://github.com/pathikrit/better-files/issues/107) - */ - val defaultCharset: Charset = - UnicodeCharset(Charset.defaultCharset()) - - val EOF = StreamTokenizer.TT_EOF - - type Files = Iterator[File] - - /** - * If bufferSize is set to less than or equal to 0, we don't buffer - * @param bufferSize - * @return - */ - def resourceAsStream(name: String, bufferSize: Int = defaultBufferSize): InputStream = - currentClassLoader().getResourceAsStream(name).buffered(bufferSize) - - // Some utils: - private[files] def newMultiMap[A, B]: mutable.MultiMap[A, B] = new mutable.HashMap[A, mutable.Set[B]] with mutable.MultiMap[A, B] - - @inline private[files] def when[A](condition: Boolean)(f: => A): Option[A] = if (condition) Some(f) else None - - @inline private[files] def repeat[U](n: Int)(f: => U): Unit = (1 to n).foreach(_ => f) - - private[files] def currentClassLoader() = Thread.currentThread().getContextClassLoader - - private[files] def eofReader(read: => Int): Iterator[Int] = Iterator.continually(read).takeWhile(_ != EOF) - - /** - * Utility to apply f on all xs skipping over errors - * Throws the last error that happened - * * - * @param xs - * @param f - * @tparam A - */ - private[files] def tryAll[A](xs: Seq[A])(f: A => Unit): Unit = { - val res = xs.foldLeft(Option.empty[Throwable]) { - case (currError, a) => - Try(f(a)) match { - case Success(_) => currError - case Failure(e) => Some(e) - } - } - res.foreach(throwable => throw throwable) - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala deleted file mode 100644 index 769cfbf9..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/CommonSpec.scala +++ /dev/null @@ -1,15 +0,0 @@ -package better.files - -import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers} - -import scala.concurrent.duration._ -import scala.language.postfixOps -import scala.util.Properties.{isLinux, isMac} - -trait CommonSpec extends FlatSpec with BeforeAndAfterEach with Matchers { - val isCI = sys.env.get("CI").exists(_.toBoolean) - - val isUnixOS = isLinux || isMac - - def sleep(t: FiniteDuration = 2 second) = Thread.sleep(t.toMillis) -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala deleted file mode 100644 index 36379eec..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileMonitorSpec.scala +++ /dev/null @@ -1,61 +0,0 @@ -package better.files - -import scala.concurrent.duration._ -import scala.concurrent.ExecutionContext.Implicits.global -import scala.language.postfixOps - -class FileMonitorSpec extends CommonSpec { - "file watcher" should "watch single files" in { - assume(isCI) - val file = 
File.newTemporaryFile(suffix = ".txt").writeText("Hello world") - - var log = List.empty[String] - def output(msg: String) = synchronized { - println(msg) - log = msg :: log - } - /***************************************************************************/ - val watcher = new FileMonitor(file) { - override def onCreate(file: File, count: Int) = output(s"$file got created $count time(s)") - override def onModify(file: File, count: Int) = output(s"$file got modified $count time(s)") - override def onDelete(file: File, count: Int) = output(s"$file got deleted $count time(s)") - } - watcher.start() - /***************************************************************************/ - sleep(5 seconds) - file.writeText("hello world"); sleep() - file.clear(); sleep() - file.writeText("howdy"); sleep() - file.delete(); sleep() - sleep(5 seconds) - val sibling = (file.parent / "t1.txt").createIfNotExists(); sleep() - sibling.writeText("hello world"); sleep() - sleep(20 seconds) - - log.size should be >= 2 - log.exists(_ contains sibling.name) shouldBe false - log.forall(_ contains file.name) shouldBe true - } - - ignore should "watch directories to configurable depth" in { - assume(isCI) - val dir = File.newTemporaryDirectory() - (dir/"a"/"b"/"c"/"d"/"e").createDirectories() - var log = List.empty[String] - def output(msg: String) = synchronized(log = msg :: log) - - val watcher = new FileMonitor(dir, maxDepth = 2) { - override def onCreate(file: File, count: Int) = output(s"Create happened on ${file.name} $count times") - } - watcher.start() - - sleep(5 seconds) - (dir/"a"/"b"/"t1").touch().writeText("hello world"); sleep() - (dir/"a"/"b"/"c"/"d"/"t1").touch().writeText("hello world"); sleep() - sleep(10 seconds) - - withClue(log) { - log.size shouldEqual 1 - } - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala deleted file mode 100644 index f197575a..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/FileSpec.scala +++ /dev/null @@ -1,549 +0,0 @@ -package better.files - -import java.nio.file.{FileAlreadyExistsException, FileSystems, Files => JFiles} - -import better.files.Dsl._ -import better.files.File.{home, root} - -import scala.language.postfixOps -import scala.util.Try - -class FileSpec extends CommonSpec { - - /** try to cope with windows, which will return e.g. c:\ as root */ - val rootStr = FileSystems.getDefault.getRootDirectories.iterator().next().toString - import java.io.File.{separator, separatorChar} - - /** - * Helper for unix -> windows path references (as strings). 
- * - * @param path as unix path - * @return path in native format - */ - def unixToNative(path: String): String = { - if (isUnixOS) { - path - } else { - path - .replaceFirst("^/", rootStr.replaceAllLiterally("\\", "\\\\")) // we must escape '\' in C:\ - .replaceAllLiterally("/", separator) - } - } - - var testRoot: File = _ //TODO: Get rid of mutable test vars - var fa: File = _ - var a1: File = _ - var a2: File = _ - var t1: File = _ - var t2: File = _ - var t3: File = _ - var fb: File = _ - var b1: File = _ - var b2: File = _ - - /** - * Setup the following directory structure under root - * /a - * /a1 - * /a2 - * a21.txt - * a22.txt - * /b - * b1/ --> ../a1 - * b2.txt --> ../a2/a22.txt - */ - - override def beforeEach() = { - testRoot = File.newTemporaryDirectory("better-files") - fa = testRoot/"a" - a1 = testRoot/"a"/"a1" - a2 = testRoot/"a"/"a2" - t1 = testRoot/"a"/"a1"/"t1.txt" - t2 = testRoot/"a"/"a1"/"t2.txt" - t3 = testRoot/"a"/"a1"/"t3.scala.txt" - fb = testRoot/"b" - b1 = testRoot/"b"/"b1" - b2 = testRoot/'b/"b2.txt" - Seq(a1, a2, fb) foreach mkdirs - Seq(t1, t2) foreach touch - } - - override def afterEach() = { - val _ = rm(testRoot) - } - - override def withFixture(test: NoArgTest) = { - //val before = File.numberOfOpenFileDescriptors() - val result = super.withFixture(test) - //val after = File.numberOfOpenFileDescriptors() - //assert(before == after, s"Resource leakage detected in $test") - result - } - - "files" can "be instantiated" in { - import java.io.{File => JFile} - - val f = File("/User/johndoe/Documents") // using constructor - val f1: File = file"/User/johndoe/Documents" // using string interpolator - val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file - val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala - val f4: File = root/"User"/"johndoe"/"Documents" // using root helper to start from root - //val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"` - val f6: File = "/User"/"johndoe"/"Documents" // using file separator DSL - val f7: File = home/"Documents"/"presentations"/`..` // Use `..` to navigate up to parent - val f8: File = root/"User"/"johndoe"/"Documents"/ `.` - val f9: File = File(f.uri) - val f10: File = File("../a") // using a relative path - Seq(f, f1, f2, f3, f4,/* f5,*/ f6, f7, f8, f9, f10) foreach {f => - f.pathAsString should not include ".." 
- } - - root.toString shouldEqual rootStr - home.toString.count(_ == separatorChar) should be > 1 - (root/"usr"/"johndoe"/"docs").toString shouldEqual unixToNative("/usr/johndoe/docs") - Seq(f, f1, f2, f4, /*f5,*/ f6, f8, f9).map(_.toString).toSet shouldBe Set(f.toString) - } - - it can "be instantiated with anchor" in { - // testRoot / a / a1 / t1.txt - val basedir = a1 - File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) - File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) - - File(basedir, "rel/path/to/loc").toString should be (unixToNative(basedir.toString + "/rel/path/to/loc")) - File(basedir, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc")) - File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc")) - - val baseref = t1 - File(baseref, "/abs/path/to/loc").toString should be (unixToNative("/abs/path/to/loc")) - File(baseref, "/abs", "path", "to", "loc").toString should be (unixToNative("/abs/path/to/loc")) - - File(baseref, "rel/path/to/loc").toString should be (unixToNative(a1.toString + "/rel/path/to/loc")) - File(baseref, "../rel/path/to/loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc")) - File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(fa.toString + "/rel/path/to/loc")) - } - - it can "be instantiated with non-existing abs anchor" in { - val anchorStr = "/abs/to/nowhere" - val anchorStr_a = anchorStr + "/a" - val basedir = File(anchorStr_a + "/last") - - File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) - File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) - - File(basedir, "rel/path/to/loc").toString should be (unixToNative(anchorStr_a + "/rel/path/to/loc")) - File(basedir, "../rel/path/to/loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc")) - File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(anchorStr + "/rel/path/to/loc")) - } - - it can "be instantiated with non-existing relative anchor" in { - val relAnchor = File("rel/anc/b/last") - val basedir = relAnchor - - File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) - File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) - - File(basedir, "rel/path/to/loc").toString should be (unixToNative(File("rel/anc/b").toString + "/rel/path/to/loc")) - File(basedir, "../rel/path/to/loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc")) - File(basedir, "../", "rel", "path", "to", "loc").toString should be (unixToNative(File("rel/anc").toString + "/rel/path/to/loc")) - } - - it should "do basic I/O" in { - t1 < "hello" - t1.contentAsString shouldEqual "hello" - t1.appendLine() << "world" - (t1!) 
shouldEqual String.format("hello%nworld%n") - t1.chars.toStream should contain theSameElementsInOrderAs String.format("hello%nworld%n").toSeq - "foo" `>:` t1 - "bar" >>: t1 - t1.contentAsString shouldEqual String.format("foobar%n") - t1.appendLines("hello", "world") - t1.contentAsString shouldEqual String.format("foobar%nhello%nworld%n") - t2.writeText("hello").appendText("world").contentAsString shouldEqual "helloworld" - - (testRoot/"diary") - .createIfNotExists() - .appendLine() - .appendLines("My name is", "Inigo Montoya") - .printLines(Iterator("x", 1)) - .lines.toSeq should contain theSameElementsInOrderAs Seq("", "My name is", "Inigo Montoya", "x", "1") - } - - it should "handle BOM" in { - val lines = Seq("Line 1", "Line 2") - val expectedContent = lines.mkString(start = "", sep = "\n", end = "\n") - File.temporaryFile() foreach {file => - file.appendLines(lines: _*)(charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true)) - file.contentAsString(charset = "UTF-8") should not equal expectedContent - file.contentAsString shouldEqual expectedContent - } - } - -// TODO: Do not depend on self-referential tests -// it should "glob" in { -// assume(isCI) -// a1.glob("*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") -// //a1.glob("*.txt").map(_.name).toSeq shouldEqual Seq("t1.txt", "t2.txt") -// testRoot.glob("**/*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") -// val path = testRoot.path.toString.ensuring(testRoot.path.isAbsolute) -// File(path).glob("**/*.{txt}").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") -// ("benchmarks"/"src").glob("**/*.{scala,java}").map(_.name).toSeq.sorted shouldEqual Seq("ArrayBufferScanner.java", "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") -// ("benchmarks"/"src").glob("**/*.{scala}").map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") -// ("benchmarks"/"src").glob("**/*.scala").map(_.name).toSeq.sorted shouldEqual Seq("Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") -// ("benchmarks"/"src").listRecursively.filter(_.extension.contains(".scala")).map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") -// ls("core"/"src"/"test") should have length 1 -// ("core"/"src"/"test").walk(maxDepth = 1) should have length 2 -// ("core"/"src"/"test").walk(maxDepth = 0) should have length 1 -// ("core"/"src"/"test").walk() should have length (("core"/"src"/"test").listRecursively.length + 1L) -// ls_r("core"/"src"/"test") should have length 8 -// } - - it should "support names/extensions" in { - assume(isCI) - fa.extension shouldBe None - fa.nameWithoutExtension shouldBe fa.name - t1.extension shouldBe Some(".txt") - t1.extension(includeDot = false) shouldBe Some("txt") - t3.extension shouldBe Some(".txt") - t3.extension(includeAll = true) shouldBe Some(".scala.txt") - t3.extension(includeDot = false, includeAll = true) shouldBe Some("scala.txt") - t1.name shouldBe "t1.txt" - t1.nameWithoutExtension shouldBe "t1" - t1.changeExtensionTo(".md").name shouldBe "t1.md" - (t1 < "hello world").changeExtensionTo(".txt").name shouldBe "t1.txt" - t1.contentType shouldBe Some("text/plain") - ("src" / "test").toString should include ("better-files") - (t1 == t1.toString) shouldBe false - (t1.contentAsString == t1.toString) shouldBe false - (t1 == t1.contentAsString) shouldBe false - 
t1.root shouldEqual fa.root - file"/tmp/foo.scala.html".extension shouldBe Some(".html") - file"/tmp/foo.scala.html".nameWithoutExtension shouldBe "foo" - file"/tmp/foo.scala.html".nameWithoutExtension(includeAll = false) shouldBe "foo.scala" - root.name shouldBe "" - } - - it should "hide/unhide" in { - t1.isHidden shouldBe false - } - - it should "support parent/child" in { - fa isChildOf testRoot shouldBe true - testRoot isChildOf root shouldBe true - root isChildOf root shouldBe true - fa isChildOf fa shouldBe true - b2 isChildOf b2 shouldBe false - b2 isChildOf b2.parent shouldBe true - root.parent shouldBe null - } - - it should "support siblings" in { - (file"/tmp/foo.txt" sibling "bar.txt").pathAsString shouldBe unixToNative("/tmp/bar.txt") - fa.siblings.toList.map(_.name) shouldBe List("b") - fb isSiblingOf fa shouldBe true - } - - it should "support sorting" in { - testRoot.list.toSeq.sorted(File.Order.byName) should not be empty - testRoot.list.toSeq.max(File.Order.bySize).isEmpty shouldBe false - Seq(fa, fb).contains(testRoot.list.toSeq.min(File.Order.byDepth)) shouldBe true - sleep() - t2.appendLine("modified!") - a1.list.toSeq.min(File.Order.byModificationTime) shouldBe t1 - testRoot.list.toSeq.sorted(File.Order.byDirectoriesFirst) should not be empty - } - - it must "have .size" in { - fb.isEmpty shouldBe true - t1.size shouldBe 0 - t1.writeText("Hello World") - t1.size should be > 0L - testRoot.size should be > (t1.size + t2.size) - } - - it should "set/unset permissions" in { - assume(isCI) - import java.nio.file.attribute.PosixFilePermission - //an[UnsupportedOperationException] should be thrownBy t1.dosAttributes - t1.permissions()(PosixFilePermission.OWNER_EXECUTE) shouldBe false - - chmod_+(PosixFilePermission.OWNER_EXECUTE, t1) - t1.testPermission(PosixFilePermission.OWNER_EXECUTE) shouldBe true - t1.permissionsAsString shouldBe "rwxrw-r--" - - chmod_-(PosixFilePermission.OWNER_EXECUTE, t1) - t1.isOwnerExecutable shouldBe false - t1.permissionsAsString shouldBe "rw-rw-r--" - } - - it should "support equality" in { - fa shouldEqual (testRoot/"a") - fa shouldNot equal (testRoot/"b") - val c1 = fa.md5 - fa.md5 shouldEqual c1 - t1 < "hello" - t2 < "hello" - (t1 == t2) shouldBe false - (t1 === t2) shouldBe true - t2 < "hello world" - (t1 == t2) shouldBe false - (t1 === t2) shouldBe false - fa.md5 should not equal c1 - } - - it should "create if not exist directory structures" in { - File.usingTemporaryDirectory() {dir => - val file = dir / "a" / "b" / "c.txt" - assert(file.notExists) - assert(file.parent.notExists) - file.createIfNotExists(createParents = true) - assert(file.exists) - assert(file.parent.exists) - file.writeText("Hello world") - assert(file.contentAsString === "Hello world") - } - } - - it should "treat symlinks transparently in convenience methods" in { - File.usingTemporaryDirectory() {dir => - val realDir = dir / "a" - val dirSymlink = dir / "b" - realDir.createDirectory() - JFiles.createSymbolicLink(dirSymlink.path, realDir.path) - dirSymlink.createDirectories() - a[FileAlreadyExistsException] should be thrownBy dirSymlink.createDirectories()(linkOptions = File.LinkOptions.noFollow) - /*a[FileAlreadyExistsException] shouldNot be thrownBy*/ dirSymlink.createDirectories() - } - } - - it should "support chown/chgrp" in { - fa.ownerName should not be empty - fa.groupName should not be empty - a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chown("hitler", fa) - //a[java.nio.file.FileSystemException] should be thrownBy 
chown("root", fa) - a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chgrp("cool", fa) - //a[java.nio.file.FileSystemException] should be thrownBy chown("admin", fa) - //fa.chown("nobody").chgrp("nobody") - stat(t1) shouldBe a[java.nio.file.attribute.PosixFileAttributes] - } - - it should "detect file locks" in { - File.temporaryFile() foreach {file => - def lockInfo() = file.isReadLocked() -> file.isWriteLocked() - // TODO: Why is file.isReadLocked() should be false? - lockInfo() shouldBe (true -> false) - val channel = file.newRandomAccess(File.RandomAccessMode.readWrite).getChannel - val lock = channel.tryLock() - lockInfo() shouldBe (true -> true) - lock.release() - channel.close() - lockInfo() shouldBe (true -> false) - } - } - - it should "support ln/cp/mv" in { - val magicWord = "Hello World" - t1 writeText magicWord - // link - // to relative target - val b0 = b1.sibling("b0") - java.nio.file.Files.createSymbolicLink(b0.path, java.nio.file.Paths.get("b1")) - b0.symbolicLink should not be empty - b0.symbolicLink.get.path.isAbsolute shouldBe false - // to absolute target - b1.linkTo(a1, symbolic = true) - ln_s(b2, t2) - (b1 / "t1.txt").contentAsString shouldEqual magicWord - // copy - b2.contentAsString shouldBe empty - t1.md5 should not equal t2.md5 - a[java.nio.file.FileAlreadyExistsException] should be thrownBy (t1 copyTo t2) - t1.copyTo(t2, overwrite = true) - t1.exists shouldBe true - t1.md5 shouldEqual t2.md5 - b2.contentAsString shouldEqual magicWord - // rename - t2.name shouldBe "t2.txt" - t2.exists shouldBe true - val t3 = t2 renameTo "t3.txt" - t3.name shouldBe "t3.txt" - t2.exists shouldBe false - t3.exists shouldBe true - // move - t3 moveTo t2 - t2.exists shouldBe true - t3.exists shouldBe false - } - - it should "support creating hard links with ln" in { - assume(isUnixOS) - val magicWord = "Hello World" - t1 writeText magicWord - t1.linkTo(t3, symbolic = false) - (a1 / "t3.scala.txt").contentAsString shouldEqual magicWord - } - - it should "support custom charset" in { - import java.nio.charset.Charset - t1.writeText("你好世界")(charset = "UTF8") - t1.contentAsString(charset = "ISO-8859-1") should not equal "你好世界" - t1.contentAsString(charset = "UTF8") shouldEqual "你好世界" - val c1 = md5(t1) - val c2 = t1.overwrite("你好世界")(File.OpenOptions.default, Charset.forName("ISO-8859-1")).md5 - c1 should not equal c2 - c2 shouldEqual t1.checksum("md5") - } - - it should "support hashing algos" in { - implicit val charset = java.nio.charset.StandardCharsets.UTF_8 - t1.writeText("") - md5(t1) shouldEqual "D41D8CD98F00B204E9800998ECF8427E" - sha1(t1) shouldEqual "DA39A3EE5E6B4B0D3255BFEF95601890AFD80709" - sha256(t1) shouldEqual "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" - sha512(t1) shouldEqual "CF83E1357EEFB8BDF1542850D66D8007D620E4050B5715DC83F4A921D36CE9CE47D0D13C5D85F2B0FF8318D2877EEC2F63B931BD47417A81A538327AF927DA3E" - } - - it should "compute correct checksum for non-zero length string" in { - implicit val charset = java.nio.charset.StandardCharsets.UTF_8 - t1.writeText("test") - md5(t1) shouldEqual "098F6BCD4621D373CADE4E832627B4F6" - sha1(t1) shouldEqual "A94A8FE5CCB19BA61C4C0873D391E987982FBBD3" - sha256(t1) shouldEqual "9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08" - sha512(t1) shouldEqual "EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF" - } - - it should "copy" in { - (fb / "t3" / "t4.txt").createIfNotExists(createParents 
= true).writeText("Hello World") - (fb / "t5" / "t5.txt").createIfNotExists(createParents = true).writeText("Scala Awesome") - (fb / "t5" / "t3").notExists shouldBe true - cp(fb / "t3", fb / "t5") - (fb / "t3").exists shouldBe true - (fb / "t5" / "t3").exists shouldBe true - (fb / "t5" / "t5.txt").contentAsString shouldEqual "Scala Awesome" - assert((fb / "t3") === (fb / "t5" / "t3")) - } - - it should "move" in { - (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World") - mv(fb / "t3", fb / "t5") - (fb / "t5" / "t4.txt").contentAsString shouldEqual "Hello World" - (fb / "t3").notExists shouldBe true - } - - it should "delete" in { - fb.exists shouldBe true - fb.delete() - fb.exists shouldBe false - } - - it should "touch" in { - (fb / "z1").exists shouldBe false - (fb / "z1").isEmpty shouldBe true - (fb / "z1").touch() - (fb / "z1").exists shouldBe true - (fb / "z1").isEmpty shouldBe true - Thread.sleep(1000) - (fb / "z1").lastModifiedTime.getEpochSecond should be < (fb / "z1").touch().lastModifiedTime.getEpochSecond - } - - it should "md5" in { - val h1 = t1.hashCode - val actual = (t1 < "hello world").md5 - val h2 = t1.hashCode - h1 shouldEqual h2 - import scala.sys.process._ - val expected = Try(s"md5sum ${t1.path}" !!) getOrElse (s"md5 ${t1.path}" !!) - expected.toUpperCase should include (actual) - actual should not equal h1 - } - - it should "support file in/out" in { - t1 < "hello world" - for { - in <- t1.inputStream - out <- t2.outputStream - } in.pipeTo(out) - t2.contentAsString shouldEqual "hello world" - t2.newInputStream.asString() shouldEqual "hello world" - } - - it should "zip/unzip directories" in { - t1.writeText("hello world") - val zipFile = testRoot.zip() - zipFile.size should be > 100L - zipFile.name should endWith (".zip") - - def test(output: File) = { - (output/"a"/"a1"/"t1.txt").contentAsString shouldEqual "hello world" - output === testRoot shouldBe true - (output/"a"/"a1"/"t1.txt").overwrite("hello") - (output !== testRoot) shouldBe true - } - - test(zipFile.unzip()) - test(zipFile.streamedUnzip()) - } - - it should "zip/unzip single files" in { - t1.writeText("hello world") - val zipFile = t1.zip() - zipFile.size should be > 100L - zipFile.name should endWith (".zip") - val destination = unzip(zipFile)(File.newTemporaryDirectory()) - (destination/"t1.txt").contentAsString shouldEqual "hello world" - } - - it should "gzip" in { - for { - writer <- (testRoot / "test.gz").newOutputStream.buffered.gzipped.writer.buffered.autoClosed - } writer.write("Hello world") - - (testRoot / "test.gz").inputStream.map(_.buffered.gzipped.buffered.lines.toSeq) shouldEqual Seq("Hello world") - } - - it should "read bytebuffers" in { - t1.writeText("hello world") - for { - fileChannel <- t1.newFileChannel.autoClosed - } fileChannel.toMappedByteBuffer.remaining() shouldEqual t1.bytes.length - - (t2 writeBytes t1.bytes).contentAsString shouldEqual t1.contentAsString - } - - it should "convert readers to inputstreams and writers to outputstreams" in { - File.temporaryFile() foreach {f => - val text = List.fill(10000)("hello world") - for { - writer <- f.bufferedWriter - out <- writer.outputstream.autoClosed - } out.write(text.mkString("\n").getBytes) - val t = f.bufferedReader.flatMap(_.toInputStream.lines) - t.toList shouldEqual text - } - } - - it should "serialize/deserialize" in { - class Person(val name: String, val age: Int) extends Serializable - val p1 = new Person("Chris", 34) - - File.temporaryFile() foreach {f => //serialization round-trip 
test - assert(f.isEmpty) - f.writeSerialized(p1) - assert(f.nonEmpty) - val p2: Person = f.readDeserialized[Person] - assert(p1.name === p2.name) - assert(p1.age === p2.age) - - val p3 = f.inputStream.map(_.asObjectInputStreamUsingClassLoader().deserialize[Person]) - assert(p3.name === p2.name) - assert(p3.age === p2.age) - } - } - - it should "count number of open file descriptors" in { - val expected = java.lang.management.ManagementFactory.getOperatingSystemMXBean - .asInstanceOf[com.sun.management.UnixOperatingSystemMXBean] - .getOpenFileDescriptorCount - assert((File.numberOfOpenFileDescriptors() - expected).abs <= 10) - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala deleted file mode 100644 index 1acf7d7b..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/GlobSpec.scala +++ /dev/null @@ -1,360 +0,0 @@ -package better.files - -import better.files.Dsl._ - -import java.io.File.separator - -import org.scalatest.BeforeAndAfterAll - -class GlobSpec extends CommonSpec with BeforeAndAfterAll { - var testDir: File = _ - var globTree: File = _ - var specialTree: File = _ - - var regexWildcardPath: File = _ - var globWildcardPath: File = _ - // - // Test target for glob - // - // tests/ - // ├── globtree - // │   ├── a - // │   │   ├── a2 - // │   │   │   ├── a2.txt - // │   │   │   └── x.txt - // │   │   ├── a.not - // │   │   ├── a.txt - // │   │   └── x.txt - // │   ├── b - // │   │   ├── a - // │   │   │   └── ba.txt - // │   │   └── b.txt - // │   ├── c - // │   │   ├── c.txt - // │   │   └── x.txt - // │   ├── empty - // │   ├── link_to_a -> a - // │   ├── one.txt - // │   ├── readme.md - // │   ├── three.txt - // │   └── two.txt - // └── special - // ├── .* - // │   └── a - // │ └── a.txt - // └── ** - // └── a - // └── a.txt - // - override def beforeAll() = { - testDir = File.newTemporaryDirectory("glob-tests") - globTree = testDir / "globtree" - - mkdir(globTree) - val a = mkdir(globTree / "a" ) - mkdir(globTree / "a" / "a2") - touch(globTree / "a" / "a2" / "a2.txt") - touch(globTree / "a" / "a2" / "x.txt") - touch(globTree / "a" / "a.not") - touch(globTree / "a" / "a.txt") - touch(globTree / "a" / "x.txt") - - mkdir(globTree / "b" ) - mkdir(globTree / "b" / "a") - touch(globTree / "b" / "a" / "ba.txt") - touch(globTree / "b" / "b.txt") - - mkdir(globTree / "c" ) - touch(globTree / "c" / "c.txt") - touch(globTree / "c" / "x.txt") - - mkdir(globTree / "empty" ) - - if (isUnixOS) { - ln_s(globTree / "link_to_a", a) - } - - touch(globTree / "one.txt") - touch(globTree / "two.txt") - touch(globTree / "three.txt") - touch(globTree / "readme.md") - - // Special target with path name components as wildcards - specialTree = testDir / "special" - - // Windows does not support '*' in file names - if (isUnixOS) { - // regex - mkdir(specialTree) - regexWildcardPath = mkdir(specialTree / ".*") - mkdir(specialTree / ".*" / "a") - touch(specialTree / ".*" / "a" / "a.txt") - - // glob - globWildcardPath = mkdir(specialTree / "**") - mkdir(specialTree / "**" / "a") - touch(specialTree / "**" / "a" / "a.txt") - } - - () - } - - override def afterAll() = { - val _ = rm(testDir) - } - - /** - * Helper in case something goes wrong... 
- */ - private def debugPaths(files: Seq[File]): String = { - files - .sortBy(_.path) - .map(files => s"PATH: ${files.toString}") - .mkString(s"SIZE: ${files.size}\n", "\n", "\n") - } - - /** - * Verify that the candidates are equal to the references. - * Does not accept empty sets, use assert(paths.isEmpty) for that. - * - * @param pathsIt candidates - * @param refPaths references - * @param baseDir basedir for creating the full paths of the references - */ - private def verify(pathsIt: Files, refPaths: Seq[String], baseDir: File) = { - val paths = pathsIt.toSeq - val refs = refPaths - .map(refPath => baseDir/refPath) - .sortBy(_.path) - - withClue("Result: " + debugPaths(paths) + "Reference: " + debugPaths(refs)) { - assert(paths.length === refPaths.length) - assert(paths.nonEmpty) - paths.sortBy(_.path).zip(refs).foreach({case (path, refPath) => assert(path === refPath)}) - } - } - - "glob" should "match plain file (e.g. 'file.ext')" in { - val refPaths = Seq( - "one.txt" - ) - val paths = globTree.glob("one.txt") - verify(paths, refPaths, globTree) - } - it should "match path without glob (e.g. 'sub/dir/file.ext')" in { - val refPaths = Seq( - "a/a.txt" - ) - val paths = globTree.glob("a/a.txt") - verify(paths, refPaths, globTree) - } - - it should "match file-glob (e.g. '*.ext')" in { - val refPaths = Seq( - "one.txt", - "two.txt", - "three.txt" - ) - val paths = globTree.glob("*.txt") - verify(paths, refPaths, globTree) - assert(globTree.glob("*.txt", includePath = false)(File.PathMatcherSyntax.glob).isEmpty) - } - - it should "match fixed sub dir and file-glob (e.g. '**/subdir/*.ext')" in { - // TODO: DOC: why top level 'a' is not matched - val refPaths = List( - "b/a/ba.txt" - ) - val paths = globTree.glob("**/a/*.txt") - verify(paths, refPaths, globTree) - } - - it should "use parent dir for matching (e.g. plain 'subdir/*.ext')" in { - // e.g. check that b nor c are matched, nor b/a - val refPaths = Seq( - "a/a.txt", - "a/x.txt" - ) - val paths = globTree.glob("a/*.txt") - verify(paths, refPaths, globTree) - } - - it should "match sub-directory glob with plain file (e.g. 'subdir/*/file.ext')" in { - val refPaths = Seq( - "a/x.txt", - "c/x.txt" - ) - val paths = testDir.glob("globtree/*/x.txt") - verify(paths, refPaths, globTree) - } - - it should "match sub-directory glob with file-glob (e.g. 'subdir/*/*.ext')" in { - val refPaths = Seq( - "a/a.txt", - "a/x.txt", - "c/c.txt", - "c/x.txt", - "b/b.txt" - ) - val paths = testDir.glob("globtree/*/*.txt") - verify(paths, refPaths, globTree) - } - - it should "match deep sub-directory glob with plain file (e.g. 'subdir/**/file.ext')" in { - val refPaths = Seq( - "a/a2/x.txt", - "a/x.txt", - "c/x.txt" - ) - val p1s = globTree.glob("**/x.txt") - verify(p1s, refPaths, globTree) - - val p2s = testDir.glob("globtree/**/x.txt") - verify(p2s, refPaths, globTree) - } - - it should "match deep sub-directory glob with file-glob (e.g. 'subdir/**/*.ext')" in { - val refPaths = Seq( - "a/a.txt", - "a/x.txt", - "a/a2/x.txt", - "a/a2/a2.txt", - "c/x.txt", - "c/c.txt", - "b/b.txt", - "b/a/ba.txt" - ) - val p1s = globTree.glob("**/*.txt") - verify(p1s, refPaths, globTree) - - val p2s = testDir.glob("globtree/**/*.txt") - verify(p2s, refPaths, globTree) - } - - it should "match deep file-glob (e.g.
'subdir/**.ext')" in { - val refPaths = Seq( - "one.txt", - "two.txt", - "three.txt", - "a/a.txt", - "a/x.txt", - "a/a2/x.txt", - "a/a2/a2.txt", - "b/a/ba.txt", - "b/b.txt", - "c/x.txt", - "c/c.txt" - ) - val p1s = globTree.glob("**.txt") - verify(p1s, refPaths, globTree) - - val p2s = testDir.glob("globtree/**.txt") - verify(p2s, refPaths, globTree) - } - - it should "match everything (e.g. 'subdir/**')" in { - val refPaths = List( - "a", - "a/a.not", - "a/a.txt", - "a/a2", - "a/a2/a2.txt", - "a/a2/x.txt", - "a/x.txt", - "b", - "b/a", - "b/a/ba.txt", - "b/b.txt", - "c", - "c/c.txt", - "c/x.txt", - "empty", - "one.txt", - "readme.md", - "three.txt", - "two.txt") ++ - when(isUnixOS)("link_to_a") - - val paths = testDir.glob("globtree/**") - verify(paths, refPaths, globTree) - } - - it should "work with links (e.g. 'link_to_a/**.txt')" in { - assume(isUnixOS) - val refPaths = Seq( - "a/a.txt", - "a/x.txt", - "a/a2/x.txt", - "a/a2/a2.txt" - ) - - // TODO: DOC: File behaviour, links are resolved (abs + normalized path) - - val p1s = globTree.glob("link_to_a/**.txt")(visitOptions = File.VisitOptions.follow) - verify(p1s, refPaths, globTree) - - val p2s = globTree.glob("link_to_a/**.txt").toSeq - assert(p2s.isEmpty) - - val p3s = testDir.glob("globtree/link_to_a/**.txt")(visitOptions = File.VisitOptions.follow) - verify(p3s, refPaths, globTree) - - val p4s = testDir.glob("globtree/link_to_a/**.txt") - assert(p4s.isEmpty) - } - - it should "not use dir name as wildcard (e.g. dirname is **)" in { - assume(isUnixOS) - val d = globWildcardPath // "path" / "with" / "**" - val paths = d.glob("*.txt") - - assert(paths.isEmpty) - } - - "Regex" should "match all txt-files under sub-directory (e.g. '.*/.*\\\\.txt')" in { - val refPaths = Seq( - "a/a.txt", - "a/x.txt", - "a/a2/x.txt", - "a/a2/a2.txt", - "c/x.txt", - "c/c.txt", - "b/b.txt", - "b/a/ba.txt" - ) - val paths = globTree.glob(".*" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex) - - verify(paths, refPaths, globTree) - } - - it should "match the same if `Regex` is used" in { - val pattern = (".*" + separator + ".*\\.txt").r - - val pathsGlob = globTree.glob(pattern.regex)(File.PathMatcherSyntax.regex) - val pathsRegex = globTree.globRegex(pattern) - - verify(pathsRegex, pathsGlob.toSeq.map(_.toString), globTree) - - } - - it should "use parent dir for matching (e.g. plain 'subdir/*.ext' instead of '**/subdir/*.ext)" in { - // e.g. check that b nor c are matched, nor b/a - val refPaths = Seq( - "a/a.txt", - "a/x.txt", - "a/a2/a2.txt", - "a/a2/x.txt" - ) - val paths = globTree.glob("a" + separator + ".*\\.txt")(File.PathMatcherSyntax.regex) - - verify(paths, refPaths, globTree) - assert(globTree.glob("a/.*\\.txt", includePath = false)(File.PathMatcherSyntax.regex).isEmpty) - } - - it should "not use dir name as wildcard (e.g. 
dirname is .*)" in { - assume(isUnixOS) - val d = regexWildcardPath // "path" / "with" / ".*" - val paths = d.glob("a\\.txt")(File.PathMatcherSyntax.regex) - assert(paths.isEmpty) - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala deleted file mode 100644 index 554f5358..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ManagedResourceSpec.scala +++ /dev/null @@ -1,250 +0,0 @@ -package better.files - -import org.scalatest.matchers.{MatchResult, Matcher} - -import scala.reflect.ClassTag -import scala.util.control.ControlThrowable - -class ManagedResourceSpec extends CommonSpec { - // Test classes - - private class TestDisposable extends AutoCloseable { - var closeCount = 0 - - override def close(): Unit = - closeCount += 1 - } - - private class TestDisposableThatThrows extends TestDisposable { - override def close(): Unit = { - super.close() - throw new TestDisposeException - } - } - - private class TestDisposableThatThrowsFatal extends TestDisposable { - override def close(): Unit = { - super.close() - throw new TestDisposeFatalException - } - } - - private class TestEvalException extends Exception - private class TestDisposeException extends Exception - private class TestDisposeFatalException extends Exception with ControlThrowable - - // Custom matchers - - private class HaveSuppressedMatcher(classes: Class[_ <: Throwable]*) extends Matcher[Throwable] { - override def apply(left: Throwable): MatchResult = { - MatchResult( - (classes corresponds left.getSuppressed) { - (clazz, suppressed) => clazz isInstance suppressed - }, - s"had suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}", - s"had not suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}" - ) - } - } - - private def haveSuppressed[E <: Throwable](implicit ct: ClassTag[E]) = - new HaveSuppressedMatcher(ct.runtimeClass.asInstanceOf[Class[_ <: Throwable]]) - - // Test body - - behavior of "managed resources" - - it should "map correctly" in { - val t = new TestDisposable - - val result = for { - tc <- t.autoClosed - } yield { - t.closeCount shouldBe 0 - "hello" - } - - result shouldBe "hello" - t.closeCount shouldBe 1 - } - - it should "flatMap correctly" in { - val t = new TestDisposable - - val result = (for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } yield { - t.closeCount shouldBe 0 - v - }).toSeq - - result should contain inOrder ("one", "two", "three") - t.closeCount shouldBe 1 - } - - it should "handle exceptions correctly" in { - val t = new TestDisposable - - a [TestEvalException] should be thrownBy { - for { - tc <- t.autoClosed - } { - t.closeCount shouldBe 0 - throw new TestEvalException - } - } - t.closeCount shouldBe 1 - - var lastSeen = "" - a [TestEvalException] should be thrownBy { - for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } { - t.closeCount shouldBe 1 - lastSeen = v - if (v == "two") throw new TestEvalException - } - } - t.closeCount shouldBe 2 - lastSeen shouldBe "two" - } - - it should "handle disposal exceptions correctly" in { - // For some mysterious reason, thrownBy doesn't work here, in this specific test case. No clue why, despite spending an entire day trying to figure it out, - // including repeatedly stepping through the innards of ScalaTest in a debugger. 
Catching the exception manually does work, though. - val messageNoException = "no exception was thrown" - def messageWrongException(e: Throwable): String = - s"an exception was thrown, but not a TestDisposeException; instead it's a ${e.getClass.getName}" - - val t = new TestDisposableThatThrows - - val e1 = - try { - for { - tc <- t.autoClosed - } { - t.closeCount shouldBe 0 - } - None - } - catch { - case e: TestDisposeException => - Some(e) - } - assert(e1.nonEmpty, messageNoException) - e1 foreach { e1c => assert(e1c.isInstanceOf[TestDisposeException], messageWrongException(e1c)) } - t.closeCount shouldBe 1 - - var lastSeen = "" - val e2 = - try { - val i = for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } yield { - t.closeCount shouldBe 1 - lastSeen = v - v - } - while (i.hasNext) i.next() - None - } - catch { - case e: TestDisposeException => - Some(e) - } - lastSeen shouldBe "three" - assert(e2.nonEmpty, messageNoException) - e2 foreach { e2c => assert(e2c.isInstanceOf[TestDisposeException], messageWrongException(e2c)) } - t.closeCount shouldBe 2 - } - - it should "handle non-local returns correctly" in { - val t = new TestDisposable - - def doTheThing(): String = { - throw the [ControlThrowable] thrownBy { - for { - tc <- t.autoClosed - } { - t.closeCount shouldBe 0 - return "hello" - } - } - } - doTheThing() shouldBe "hello" - t.closeCount shouldBe 1 - - def doTheThings(): String = { - throw the [ControlThrowable] thrownBy { - for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } { - t.closeCount shouldBe 1 - if (v == "two") return v - } - } - } - doTheThings() shouldBe "two" - t.closeCount shouldBe 2 - } - - it should "handle multiple exceptions correctly" in { - val t = new TestDisposableThatThrows - - the [TestEvalException] thrownBy { - for { - tc <- t.autoClosed - } { - t.closeCount shouldBe 0 - throw new TestEvalException - } - } should haveSuppressed [TestDisposeException] - t.closeCount shouldBe 1 - - var lastSeen = "" - the [TestEvalException] thrownBy { - for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } { - t.closeCount shouldBe 1 - lastSeen = v - if (v == "two") throw new TestEvalException - } - } should haveSuppressed [TestDisposeException] - lastSeen shouldBe "two" - t.closeCount shouldBe 2 - } - - it should "give fatal exceptions precedence" in { - val t = new TestDisposableThatThrowsFatal - - the [TestDisposeFatalException] thrownBy { - for { - tc <- t.autoClosed - } { - t.closeCount shouldBe 0 - throw new TestEvalException - } - } should haveSuppressed [TestEvalException] - t.closeCount shouldBe 1 - - var lastSeen = "" - the [TestDisposeFatalException] thrownBy { - for { - tc <- t.autoClosed - v <- Iterator("one", "two", "three") - } { - t.closeCount shouldBe 1 - lastSeen = v - if (v == "two") throw new TestEvalException - } - } should haveSuppressed [TestEvalException] - t.closeCount shouldBe 2 - lastSeen shouldBe "two" - } -} diff --git a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala b/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala deleted file mode 100644 index 54f0a117..00000000 --- a/scalalib/src/test/resource/better-files/core/src/test/scala/better/files/ScannerSpec.scala +++ /dev/null @@ -1,79 +0,0 @@ -package better.files - -import Dsl._ - -import scala.language.existentials - -class ScannerSpec extends CommonSpec { - def t1 = File.newTemporaryFile() - - - "splitter" should "split" in { - val csvSplitter = 
StringSplitter.on(',') - def split(s: String) = csvSplitter.split(s).toList - - assert(split(",") === List("", "")) - assert(split("") === List("")) - assert(split("Hello World") === List("Hello World")) - assert(split("Hello,World") === List("Hello", "World")) - - assert(split(",,") === List("", "", "")) - assert(split(",Hello,World,") === List("", "Hello", "World", "")) - assert(split(",Hello,World") === List("", "Hello", "World")) - assert(split("Hello,World,") === List("Hello", "World", "")) - } - - "scanner" should "parse files" in { - val data = t1 << s""" - | Hello World - | 1 2 3 - | Ok 23 football - """.stripMargin - data.scanner() foreach {scanner => - assert(scanner.lineNumber() == 0) - assert(scanner.next[String] == "Hello") - assert(scanner.lineNumber() == 2) - assert(scanner.next[String] == "World") - assert(scanner.next[Int] == 1) - assert(scanner.next[Int] == 2) - assert(scanner.lineNumber() == 3) - assert(scanner.next[Int] == 3) - assert(scanner.nextLine() == " Ok 23 football") - assert(!scanner.hasNext) - a[NoSuchElementException] should be thrownBy scanner.next() - assert(!scanner.hasNext) - } - data.tokens().toSeq shouldEqual data.newScanner().toSeq - } - - it should "parse longs/booleans" in { - val data = for { - scanner <- Scanner("10 false").autoClosed - } yield scanner.next[(Long, Boolean)] - data shouldBe ((10L, false)) - } - - it should "parse custom parsers" in { - val file = t1 < """ - |Garfield - |Woofer - """.stripMargin - - sealed trait Animal - case class Dog(name: String) extends Animal - case class Cat(name: String) extends Animal - - implicit val animalParser: Scannable[Animal] = Scannable {scanner => - val name = scanner.next[String] - if (name == "Garfield") Cat(name) else Dog(name) - } - file.scanner() foreach {scanner => - Seq.fill(2)(scanner.next[Animal]) should contain theSameElementsInOrderAs Seq(Cat("Garfield"), Dog("Woofer")) - } - } - - it should "parse empty tokens" in { - val scanner = Scanner("hello||world", StringSplitter.on('|')) - List.fill(3)(scanner.next[Option[String]]) shouldEqual List(Some("hello"), None, Some("world")) - } -} diff --git a/scalalib/src/test/resource/better-files/project/Dependencies.scala b/scalalib/src/test/resource/better-files/project/Dependencies.scala deleted file mode 100644 index 5e0a037d..00000000 --- a/scalalib/src/test/resource/better-files/project/Dependencies.scala +++ /dev/null @@ -1,15 +0,0 @@ -import sbt._ - -object Dependencies { - val scalatest = "org.scalatest" %% "scalatest" % "3.0.4" % Test - - // Used in Akka file watcher - val akka = "com.typesafe.akka" %% "akka-actor" % "2.5.6" - - // For shapeless based Reader/Scanner - val shapeless = "com.chuusai" %% "shapeless" % "2.3.2" - - // Used in Benchmarks only - val commonsio = "commons-io" % "commons-io" % "2.5" - val fastjavaio = "fastjavaio" % "fastjavaio" % "1.0" from "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar" -} diff --git a/scalalib/src/test/resource/better-files/project/build.properties b/scalalib/src/test/resource/better-files/project/build.properties deleted file mode 100644 index 74e2336b..00000000 --- a/scalalib/src/test/resource/better-files/project/build.properties +++ /dev/null @@ -1,2 +0,0 @@ -# This can only be upto version supported by CircleCI. 
See: https://circleci.com/docs/1.0/language-scala/ -sbt.version=0.13.9 diff --git a/scalalib/src/test/resource/better-files/project/plugins.sbt b/scalalib/src/test/resource/better-files/project/plugins.sbt deleted file mode 100644 index dba2a1da..00000000 --- a/scalalib/src/test/resource/better-files/project/plugins.sbt +++ /dev/null @@ -1,9 +0,0 @@ -addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.2") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0") -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.1.9") -addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.4") -addSbtPlugin("com.updateimpact" % "updateimpact-sbt-plugin" % "2.1.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "0.5.1") -addSbtPlugin("com.codacy" % "sbt-codacy-coverage" % "1.3.8") diff --git a/scalalib/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala b/scalalib/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala deleted file mode 100644 index d60487d9..00000000 --- a/scalalib/src/test/resource/better-files/shapeless/src/main/scala/better/files/ShapelessScanner.scala +++ /dev/null @@ -1,24 +0,0 @@ -package better.files - -import better.files.Scanner.Read - -import shapeless._ - -import scala.util.Try - -object ShapelessScanner { - implicit val hNilScannable: Scannable[HNil] = - Scannable(_ => HNil) - - implicit def hListScannable[H, T <: HList](implicit h: Lazy[Scannable[H]], t: Scannable[T]): Scannable[H :: T] = - Scannable(s => h.value(s) :: t(s)) - - implicit def genericScannable[A, R](implicit gen: Generic.Aux[A, R], reprScannable: Lazy[Scannable[R]]): Scannable[A] = - Scannable(s => gen.from(reprScannable.value(s))) - - implicit val cnilReader: Read[CNil] = - Read(s => throw new RuntimeException(s"Could not read $s into this coproduct")) - - implicit def coproductReader[H, T <: Coproduct](implicit h: Read[H], t: Read[T]): Read[H :+: T] = - Read(s => Try(Inl(h(s))).getOrElse(Inr(t(s)))) -} diff --git a/scalalib/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala b/scalalib/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala deleted file mode 100644 index 34557261..00000000 --- a/scalalib/src/test/resource/better-files/shapeless/src/test/scala/better/files/ShapelessScannerSpec.scala +++ /dev/null @@ -1,32 +0,0 @@ -package better.files - -import shapeless._ - -class ShapelessScannerSpec extends CommonSpec { - import ShapelessScanner._ - - val text = """ - 12 Bob True - 13 Mary False - 26 Rick True - """ - - "Shapeless Scanner" should "parse HList" in { - val in = Scanner(text) - - type Row = Int :: String :: Boolean :: HNil - val out = Seq.fill(3)(in.next[Row]) - assert(out == Seq( - 12 :: "Bob" :: true :: HNil, - 13 :: "Mary" :: false :: HNil, - 26 :: "Rick" :: true :: HNil - )) - } - - "Shapeless Scanner" should "parse case class" in { - val in = Scanner(text) - - case class Person(id: Int, name: String, isMale: Boolean) - assert(in.next[Iterator[Person]].map(_.id).sum == 51) - } -} diff --git a/scalalib/src/test/resource/better-files/site/index.html b/scalalib/src/test/resource/better-files/site/index.html deleted file mode 100644 index 29931115..00000000 --- a/scalalib/src/test/resource/better-files/site/index.html +++ /dev/null @@ -1,16 +0,0 @@ - - - - - Better Files - - - - ScalaDoc - - diff --git 
a/scalalib/src/test/resource/better-files/site/tech_talk_preview.png b/scalalib/src/test/resource/better-files/site/tech_talk_preview.png deleted file mode 100644 index 9f9f7599..00000000 Binary files a/scalalib/src/test/resource/better-files/site/tech_talk_preview.png and /dev/null differ diff --git a/scalalib/src/test/resource/better-files/version.sbt b/scalalib/src/test/resource/better-files/version.sbt deleted file mode 100644 index 0750fecd..00000000 --- a/scalalib/src/test/resource/better-files/version.sbt +++ /dev/null @@ -1 +0,0 @@ -version in ThisBuild := "3.2.1-SNAPSHOT" \ No newline at end of file diff --git a/scalalib/src/test/resource/hello-world/src/main/scala/Main.scala b/scalalib/src/test/resource/hello-world/src/main/scala/Main.scala index e288a17a..1e686f11 100644 --- a/scalalib/src/test/resource/hello-world/src/main/scala/Main.scala +++ b/scalalib/src/test/resource/hello-world/src/main/scala/Main.scala @@ -5,6 +5,6 @@ object Main extends App { val person = Person.fromString("rockjam:25") val greeting = s"hello ${person.name}, your age is: ${person.age}" println(greeting) - val resultPath = Paths.get("target", "workspace", "hello-world", "hello-mill") + val resultPath = Paths.get("hello-mill") Files.write(resultPath, greeting.getBytes) } diff --git a/scalalib/src/test/resource/jawn/.gitignore b/scalalib/src/test/resource/jawn/.gitignore deleted file mode 100644 index 2db3b8c0..00000000 --- a/scalalib/src/test/resource/jawn/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -lib_managed -project7/boot -project7/plugins/src_managed -project7/plugins/project -project/boot -project/build/target -project/plugins/lib_managed -project/plugins/project -project/plugins/src_managed -project/plugins/target -target -.ensime -.ensime_lucene -TAGS -\#*# -*~ -.#* -.lib -.ensime_cache -.idea diff --git a/scalalib/src/test/resource/jawn/.travis.yml b/scalalib/src/test/resource/jawn/.travis.yml deleted file mode 100644 index 5f9f5fe4..00000000 --- a/scalalib/src/test/resource/jawn/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: scala -sudo: false -jdk: - - oraclejdk8 -script: - - sbt "so test" diff --git a/scalalib/src/test/resource/jawn/README.md b/scalalib/src/test/resource/jawn/README.md deleted file mode 100644 index 6ea33b92..00000000 --- a/scalalib/src/test/resource/jawn/README.md +++ /dev/null @@ -1,427 +0,0 @@ -## Jawn - -"Jawn is for parsing jay-sawn." - -### Origin - -The term "jawn" comes from the Philadelphia area. It conveys about as -much information as "thing" does. I chose the name because I had moved -to Montreal so I was remembering Philly fondly. Also, there isn't a -better way to describe objects encoded in JSON than "things". Finally, -we get a catchy slogan. - -Jawn was designed to parse JSON into an AST as quickly as possible. - -[![Build Status](https://api.travis-ci.org/non/jawn.svg)](https://travis-ci.org/non/jawn) -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/non/jawn?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Latest version](https://index.scala-lang.org/non/jawn/jawn-parser/latest.svg?color=orange)](https://index.scala-lang.org/non/jawn/jawn-parser) - -### Overview - -Jawn consists of four parts: - -1. A fast, generic JSON parser (`jawn-parser`) -2. A small, somewhat anemic AST (`jawn-ast`) -3. Support packages which parse to third-party ASTs -4. 
A few helpful utilities (`jawn-util`) - -Currently Jawn is competitive with the fastest Java JSON libraries -(GSON and Jackson) and in the author's benchmarks it often wins. It -seems to be faster than any other Scala parser that exists (as of July -2014). - -Given the plethora of really nice JSON libraries for Scala, the -expectation is that you're probably here for `jawn-parser` or a -support package. - -### Quick Start - -Jawn supports Scala 2.10, 2.11, and 2.12. - -Here's a `build.sbt` snippet that shows you how to depend on Jawn in -your own SBT project: - -```scala -resolvers += Resolver.sonatypeRepo("releases") - -// use this if you just want jawn's parser, and will implement your own facade -libraryDependencies += "org.spire-math" %% "jawn-parser" % "0.11.0" - -// use this if you want jawn's parser and also jawn's ast -libraryDependencies += "org.spire-math" %% "jawn-ast" % "0.11.0" -``` - -If you want to use Jawn's parser with another project's AST, see the -"Supporting external ASTs with Jawn" section. For example, with Spray -you would say: - -```scala -libraryDependencies += "org.spire-math" %% "jawn-spray" % "0.11.0" -``` - -There are a few reasons you might want to do this: - - * The library's built-in parser is significantly slower than Jawn's. - * Jawn supports more input types (`ByteBuffer`, `File`, etc.). - * You need asynchronous JSON parsing. - -(NOTE: previous to version 0.8.3 the support libraries would have been -named `"spray-support"` instead of `"jawn-spray"`.) - -### Dependencies - -*jawn-parser* has no dependencies other than Scala. - -*jawn-ast* depends on *jawn-parser* but nothing else. - -The various support projects (e.g. *jawn-argonaut*) depend on -the library they are supporting. - -### Parsing - -Jawn's parser is both fast and relatively featureful. Assuming you -want to get back an AST of type `J` and you have a `Facade[J]` -defined, you can use the following `parse` signatures: - -```scala -Parser.parseUnsafe[J](String) → J -Parser.parseFromString[J](String) → Try[J] -Parser.parsefromPath[J](String) → Try[J] -Parser.parseFromFile[J](File) → Try[J] -Parser.parseFromChannel[J](ReadableByteChannel) → Try[J] -Parser.parseFromByteBuffer[J](ByteBuffer) → Try[J] -``` - -Jawn also supports asynchronous parsing, which allows users to feed -the parser with data as it is available. There are three modes: - -* `SingleValue` waits to return a single `J` value once parsing is done. -* `UnwrapArray` if the top-level element is an array, return values as they become available. -* `ValueStream` parse one-or-more json values separated by whitespace. - -Here's an example: - -```scala -import jawn.ast -import jawn.AsyncParser -import jawn.ParseException - -val p = ast.JParser.async(mode = AsyncParser.UnwrapArray) - -def chunks: Stream[String] = ??? -def sink(j: ast.JValue): Unit = ??? - -def loop(st: Stream[String]): Either[ParseException, Unit] = - st match { - case s #:: tail => - p.absorb(s) match { - case Right(js) => - js.foreach(sink) - loop(tail) - case Left(e) => - Left(e) - } - case _ => - p.finish().right.map(_.foreach(sink)) - } - -loop(chunks) -``` - -You can also call `jawn.Parser.async[J]` to use async parsing with an -arbitrary data type (provided you also have an implicit `Facade[J]`). 
- -### Supporting external ASTs with Jawn - -Jawn currently supports six external ASTs directly: - -| AST | 2.10 | 2.11 | 2.12 | -|-----------|--------|--------|-------| -| Argonaut | 6.2 | 6.2 | 6.2 | -| Json4s | 3.5.2 | 3.5.2 | 3.5.2 | -| Play-json | 2.4.11 | 2.5.15 | 2.6.0 | -| Rojoma | 2.4.3 | 2.4.3 | 2.4.3 | -| Rojoma-v3 | 3.7.2 | 3.7.2 | 3.7.2 | -| Spray | 1.3.3 | 1.3.3 | 1.3.3 | - -Each of these subprojects provides a `Parser` object (an instance of -`SupportParser[J]`) that is parameterized on the given project's -AST (`J`). The following methods are available: - -```scala -Parser.parseUnsafe(String) → J -Parser.parseFromString(String) → Try[J] -Parser.parsefromPath(String) → Try[J] -Parser.parseFromFile(File) → Try[J] -Parser.parseFromChannel(ReadableByteChannel) → Try[J] -Parser.parseFromByteBuffer(ByteBuffer) → Try[J] -``` - -These methods parallel those provided by `jawn.Parser`. - -For the following snippets, `XYZ` is one of (`argonaut`, `json4s`, -`play`, `rojoma`, `rojoma-v3` or `spray`): - -This is how you would include the subproject in build.sbt: - -```scala -resolvers += Resolver.sonatypeRepo("releases") - -libraryDependencies += "org.spire-math" %% jawn-"XYZ" % "0.11.0" -``` - -This is an example of how you might use the parser into your code: - -```scala -import jawn.support.XYZ.Parser - -val myResult = Parser.parseFromString(myString) -``` - -### Do-It-Yourself Parsing - -Jawn supports building any JSON AST you need via type classes. You -benefit from Jawn's fast parser while still using your favorite Scala -JSON library. This mechanism is also what allows Jawn to provide -"support" for other libraries' ASTs. - -To include Jawn's parser in your project, add the following -snippet to your `build.sbt` file: - -```scala -resolvers += Resolver.sonatypeRepo("releases") - -libraryDependencies += "org.spire-math" %% "jawn-parser" % "0.11.0" -``` - -To support your AST of choice, you'll want to define a `Facade[J]` -instance, where the `J` type parameter represents the base of your JSON -AST. For example, here's a facade that supports Spray: - -```scala -import spray.json._ -object Spray extends SimpleFacade[JsValue] { - def jnull() = JsNull - def jfalse() = JsFalse - def jtrue() = JsTrue - def jnum(s: String) = JsNumber(s) - def jint(s: String) = JsNumber(s) - def jstring(s: String) = JsString(s) - def jarray(vs: List[JsValue]) = JsArray(vs) - def jobject(vs: Map[String, JsValue]) = JsObject(vs) -} -``` - -Most ASTs will be easy to define using the `SimpleFacade` or -`MutableFacade` traits. However, if an ASTs object or array instances -do more than just wrap a Scala collection, it may be necessary to -extend `Facade` directly. - -You can also look at the facades used by the support projects to help -you create your own. This could also be useful if you wanted to -use an older version of a supported library. - -### Using the AST - -#### Access - -For accessing atomic values, `JValue` supports two sets of -methods: *get-style* methods and *as-style* methods. - -The *get-style* methods return `Some(_)` when called on a compatible -JSON value (e.g. 
strings can return `Some[String]`, numbers can return -`Some[Double]`, etc.), and `None` otherwise: - -```scala -getBoolean → Option[Boolean] -getString → Option[String] -getLong → Option[Long] -getDouble → Option[Double] -getBigInt → Option[BigInt] -getBigDecimal → Option[BigDecimal] -``` - -In contrast, the *as-style* methods will either return an unwrapped -value (instead of returning `Some(_)`) or throw an exception (instead -of returning `None`): - -```scala -asBoolean → Boolean // or exception -asString → String // or exception -asLong → Long // or exception -asDouble → Double // or exception -asBigInt → BigInt // or exception -asBigDecimal → BigDecimal // or exception -``` - -To access elements of an array, call `get` with an `Int` position: - -```scala -get(i: Int) → JValue // returns JNull if index is illegal -``` - -To access elements of an object, call `get` with a `String` key: - -```scala -get(k: String) → JValue // returns JNull if key is not found -``` - -Both of these methods also return `JNull` if the value is not the -appropriate container. This allows the caller to chain lookups without -having to check that each level is correct: - -```scala -val v: JValue = ??? - -// returns JNull if a problem is encountered in structure of 'v'. -val t: JValue = v.get("novels").get(0).get("title") - -// if 'v' had the right structure and 't' is JString(s), then Some(s). -// otherwise, None. -val titleOrNone: Option[String] = t.getString - -// equivalent to titleOrNone.getOrElse(throw ...) -val titleOrDie: String = t.asString -``` - -#### Updating - -The atomic values (`JNull`, `JBoolean`, `JNum`, and `JString`) are -immutable. - -Objects are fully mutable and can have items added, removed, or -changed: - -```scala -set(k: String, v: JValue) → Unit -remove(k: String) → Option[JValue] -``` - -If `set` is called on a non-object, an exception will be thrown. -If `remove` is called on a non-object, `None` will be returned. - -Arrays are semi-mutable. Their values can be changed, but their size -is fixed: - -```scala -set(i: Int, v: JValue) → Unit -``` - -If `set` is called on a non-array, or called with an illegal index, an -exception will be thrown. - -(A future version of Jawn may provide an array whose length can be -changed.) - -### Profiling - -Jawn uses [JMH](http://openjdk.java.net/projects/code-tools/jmh/) -along with the [sbt-jmh](https://github.com/ktoso/sbt-jmh) plugin. - -#### Running Benchmarks - -The benchmarks are located in the `benchmark` project. You can run the -benchmarks by typing `benchmark/run` from SBT. There are many -supported arguments, so here are a few examples: - -Run all benchmarks, with 10 warmups, 10 iterations, using 3 threads: - -`benchmark/run -wi 10 -i 10 -f1 -t3` - -Run just the `CountriesBench` test (5 warmups, 5 iterations, 1 thread): - -`benchmark/run -wi 5 -i 5 -f1 -t1 .*CountriesBench` - -#### Benchmark Issues - -Currently, the benchmarks are a bit fiddly. The most obvious symptom -is that if you compile the benchmarks, make changes, and compile -again, you may see errors like: - -``` -[error] (benchmark/jmh:generateJavaSources) java.lang.NoClassDefFoundError: jawn/benchmark/Bla25Bench -``` - -The fix here is to run `benchmark/clean` and try again. - -You will also see intermittent problems like: - -``` -[error] (benchmark/jmh:compile) java.lang.reflect.MalformedParameterizedTypeException -``` - -The solution here is easier (though frustrating): just try it -again. If you continue to have problems, consider cleaning the project -and trying again.
- -(In the future I hope to make the benchmarking here a bit more -resilient. Suggestions and pull requests gladly welcome!) - -#### Files - -The benchmarks use files located in `benchmark/src/main/resources`. If -you want to test your own files (e.g. `mydata.json`), you would: - - * Copy the file to `benchmark/src/main/resources/mydata.json`. - * Add the following code to `JmhBenchmarks.scala`: - -```scala -class MyDataBench extends JmhBenchmarks("mydata.json") -``` - -Jawn has been tested with much larger files, e.g. 100M - 1G, but these -are obviously too large to ship with the project. - -With large files, it's usually easier to comment out most of the -benchmarking methods and only test one (or a few) methods. Some of the -slower JSON parsers get *much* slower for large files. - -#### Interpreting the results - -Remember that the benchmarking results you see will vary based on: - - * Hardware - * Java version - * JSON file size - * JSON file structure - * JSON data values - -I have tried to use each library in the most idiomatic and fastest way -possible (to parse the JSON into a simple AST). Pull requests to -update library versions and improve usage are very welcome. - -### Future Work - -More support libraries could be added. - -It's likely that some of Jawn's I/O could be optimized a bit more, and -also made more configurable. The heuristics around all-at-once loading -versus input chunking could definitely be improved. - -In cases where the user doesn't need fast lookups into JSON objects, -an even lighter AST could be used to improve parsing and rendering -speeds. - -Strategies to cache/intern field names of objects could pay big -dividends in some cases (this might require AST changes). - -If you have ideas for any of these (or other ideas) please feel free -to open an issue or pull request so we can talk about it. - -### Disclaimers - -Jawn only supports UTF-8 when parsing bytes. This might change in the -future, but for now that's the target case. You can always decode your -data to a string, and handle the character set decoding using Java's -standard tools. - -Jawn's AST is intended to be very lightweight and simple. It supports -simple access, and limited mutable updates. It intentionally lacks the -power and sophistication of many other JSON libraries. - -### Copyright and License - -All code is available to you under the MIT license, available at -http://opensource.org/licenses/mit-license.php. - -Copyright Erik Osheim, 2012-2017. 
diff --git a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JParser.scala b/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JParser.scala deleted file mode 100644 index 704557cc..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JParser.scala +++ /dev/null @@ -1,35 +0,0 @@ -package jawn -package ast - -import java.io.File -import java.nio.ByteBuffer -import java.nio.channels.ReadableByteChannel -import scala.util.Try - -object JParser { - implicit val facade = JawnFacade - - def parseUnsafe(s: String): JValue = - new StringParser(s).parse() - - def parseFromString(s: String): Try[JValue] = - Try(new StringParser[JValue](s).parse) - - def parseFromCharSequence(cs: CharSequence): Try[JValue] = - Try(new CharSequenceParser[JValue](cs).parse) - - def parseFromPath(path: String): Try[JValue] = - parseFromFile(new File(path)) - - def parseFromFile(file: File): Try[JValue] = - Try(ChannelParser.fromFile[JValue](file).parse) - - def parseFromChannel(ch: ReadableByteChannel): Try[JValue] = - Try(ChannelParser.fromChannel(ch).parse) - - def parseFromByteBuffer(buf: ByteBuffer): Try[JValue] = - Try(new ByteBufferParser[JValue](buf).parse) - - def async(mode: AsyncParser.Mode): AsyncParser[JValue] = - AsyncParser(mode) -} diff --git a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JValue.scala b/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JValue.scala deleted file mode 100644 index d09347bc..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JValue.scala +++ /dev/null @@ -1,314 +0,0 @@ -package jawn -package ast - -import java.lang.Double.{isNaN, isInfinite} -import scala.collection.mutable -import scala.util.hashing.MurmurHash3 - -class WrongValueException(e: String, g: String) extends Exception(s"expected $e, got $g") - -class InvalidNumException(s: String) extends Exception(s"invalid number: $s") - -sealed abstract class JValue { - - def valueType: String - - def getBoolean: Option[Boolean] = None - def getString: Option[String] = None - def getInt: Option[Int] = None - def getLong: Option[Long] = None - def getDouble: Option[Double] = None - def getBigInt: Option[BigInt] = None - def getBigDecimal: Option[BigDecimal] = None - - def asBoolean: Boolean = throw new WrongValueException("boolean", valueType) - def asString: String = throw new WrongValueException("string", valueType) - def asInt: Int = throw new WrongValueException("number", valueType) - def asLong: Long = throw new WrongValueException("number", valueType) - def asDouble: Double = throw new WrongValueException("number", valueType) - def asBigInt: BigInt = throw new WrongValueException("number", valueType) - def asBigDecimal: BigDecimal = throw new WrongValueException("number", valueType) - - def get(i: Int): JValue = JNull - def set(i: Int, v: JValue): Unit = throw new WrongValueException("array", valueType) - - def get(s: String): JValue = JNull - def set(s: String, v: JValue): Unit = throw new WrongValueException("object", valueType) - def remove(s: String): Option[JValue] = None - - final def atomic: Option[JAtom] = - this match { - case v: JAtom => Some(v) - case _ => None - } - - final def isNull: Boolean = - this == JNull - - final def nonNull: Boolean = - this != JNull - - final def render(): String = - CanonicalRenderer.render(this) - - final def render(r: Renderer): String = - r.render(this) - - override def toString: String = - CanonicalRenderer.render(this) -} - -object JValue { - implicit val facade: Facade[JValue] = 
JawnFacade -} - -sealed abstract class JAtom extends JValue { - def fold[A](f1: String => A, f2: Double => A, f3: Boolean => A, f4: => A): A = - this match { - case JString(s) => f1(s) - case v: JNum => f2(v.asDouble) - case JTrue => f3(true) - case JFalse => f3(false) - case JNull => f4 - } -} - -case object JNull extends JAtom { - final def valueType: String = "null" -} - -sealed abstract class JBool extends JAtom { - final def valueType: String = "boolean" - final override def getBoolean: Option[Boolean] = Some(this == JTrue) - final override def asBoolean: Boolean = this == JTrue -} - -object JBool { - final val True: JBool = JTrue - final val False: JBool = JFalse - - final def apply(b: Boolean): JBool = if (b) JTrue else JFalse -} - -case object JTrue extends JBool -case object JFalse extends JBool - -case class JString(s: String) extends JAtom { - final def valueType: String = "string" - final override def getString: Option[String] = Some(s) - final override def asString: String = s -} - -object JString { - final val empty = JString("") -} - -sealed abstract class JNum extends JAtom { - final def valueType: String = "number" -} - -object JNum { self => - - /** - * Create a JNum from a Long. - * - * This is identical to calling the LongNum(_) constructor. - */ - final def apply(n: Long): JNum = - LongNum(n) - - /** - * Create a JNum from a Double. - * - * This factory constructor performs some error-checking (ensures - * that the given value is a finite Double). If you have already - * done this error-checking, you can use the DoubleNum(_) or - * DeferNum(_) constructors directly. - */ - final def apply(n: Double): JNum = - if (isNaN(n) || isInfinite(n)) throw new InvalidNumException(n.toString) - else DoubleNum(n) - - /** - * Create a JNum from a String. - * - * This factory constructor validates the string (essentially, - * parsing it as a JSON value). If you are already sure this string - * is a valid JSON number, you can use the DeferLong(_) or - * DeferNum(_) constructors directly. 
- */ - final def apply(s: String): JNum = - JParser.parseUnsafe(s) match { - case jnum: JNum => jnum - case _ => throw new InvalidNumException(s) - } - - final def hybridEq(x: Long, y: Double): Boolean = { - val z = x.toDouble - y == z && z.toLong == x - } - - final val zero: JNum = LongNum(0) - final val one: JNum = LongNum(1) -} - -case class LongNum(n: Long) extends JNum { - - final override def getInt: Option[Int] = Some(n.toInt) - final override def getLong: Option[Long] = Some(n) - final override def getDouble: Option[Double] = Some(n.toDouble) - final override def getBigInt: Option[BigInt] = Some(BigInt(n)) - final override def getBigDecimal: Option[BigDecimal] = Some(BigDecimal(n)) - - final override def asInt: Int = n.toInt - final override def asLong: Long = n - final override def asDouble: Double = n.toDouble - final override def asBigInt: BigInt = BigInt(n) - final override def asBigDecimal: BigDecimal = BigDecimal(n) - - final override def hashCode: Int = n.## - - final override def equals(that: Any): Boolean = - that match { - case LongNum(n2) => n == n2 - case DoubleNum(n2) => JNum.hybridEq(n, n2) - case jn: JNum => jn == this - case _ => false - } -} - -case class DoubleNum(n: Double) extends JNum { - - final override def getInt: Option[Int] = Some(n.toInt) - final override def getLong: Option[Long] = Some(n.toLong) - final override def getDouble: Option[Double] = Some(n) - final override def getBigInt: Option[BigInt] = Some(BigDecimal(n).toBigInt) - final override def getBigDecimal: Option[BigDecimal] = Some(BigDecimal(n)) - - final override def asInt: Int = n.toInt - final override def asLong: Long = n.toLong - final override def asDouble: Double = n - final override def asBigInt: BigInt = BigDecimal(n).toBigInt - final override def asBigDecimal: BigDecimal = BigDecimal(n) - - final override def hashCode: Int = n.## - - final override def equals(that: Any): Boolean = - that match { - case LongNum(n2) => JNum.hybridEq(n2, n) - case DoubleNum(n2) => n == n2 - case jn: JNum => jn == this - case _ => false - } -} - -case class DeferLong(s: String) extends JNum { - - lazy val n: Long = util.parseLongUnsafe(s) - - final override def getInt: Option[Int] = Some(n.toInt) - final override def getLong: Option[Long] = Some(n) - final override def getDouble: Option[Double] = Some(n.toDouble) - final override def getBigInt: Option[BigInt] = Some(BigInt(s)) - final override def getBigDecimal: Option[BigDecimal] = Some(BigDecimal(s)) - - final override def asInt: Int = n.toInt - final override def asLong: Long = n - final override def asDouble: Double = n.toDouble - final override def asBigInt: BigInt = BigInt(s) - final override def asBigDecimal: BigDecimal = BigDecimal(s) - - final override def hashCode: Int = n.## - - final override def equals(that: Any): Boolean = - that match { - case LongNum(n2) => n == n2 - case DoubleNum(n2) => JNum.hybridEq(n, n2) - case jn: DeferLong => n == jn.asLong - case jn: DeferNum => JNum.hybridEq(n, jn.asDouble) - case _ => false - } -} - -case class DeferNum(s: String) extends JNum { - - lazy val n: Double = java.lang.Double.parseDouble(s) - - final override def getInt: Option[Int] = Some(n.toInt) - final override def getLong: Option[Long] = Some(util.parseLongUnsafe(s)) - final override def getDouble: Option[Double] = Some(n) - final override def getBigInt: Option[BigInt] = Some(BigDecimal(s).toBigInt) - final override def getBigDecimal: Option[BigDecimal] = Some(BigDecimal(s)) - - final override def asInt: Int = n.toInt - final override def asLong: 
Long = util.parseLongUnsafe(s) - final override def asDouble: Double = n - final override def asBigInt: BigInt = BigDecimal(s).toBigInt - final override def asBigDecimal: BigDecimal = BigDecimal(s) - - final override def hashCode: Int = n.## - - final override def equals(that: Any): Boolean = - that match { - case LongNum(n2) => JNum.hybridEq(n2, n) - case DoubleNum(n2) => n == n2 - case jn: DeferLong => JNum.hybridEq(jn.asLong, n) - case jn: DeferNum => n == jn.asDouble - case _ => false - } -} - -case class JArray(vs: Array[JValue]) extends JValue { - final def valueType: String = "array" - - final override def get(i: Int): JValue = - if (0 <= i && i < vs.length) vs(i) else JNull - - final override def set(i: Int, v: JValue): Unit = - vs(i) = v - - final override def hashCode: Int = MurmurHash3.arrayHash(vs) - - final override def equals(that: Any): Boolean = - that match { - case JArray(vs2) => - if (vs.length != vs2.length) return false - var i = 0 - while (i < vs.length) { - if (vs(i) != vs2(i)) return false - i += 1 - } - true - case _ => - false - } -} - -object JArray { self => - final def empty: JArray = - JArray(new Array[JValue](0)) - - final def fromSeq(js: Seq[JValue]): JArray = - JArray(js.toArray) -} - -case class JObject(vs: mutable.Map[String, JValue]) extends JValue { - final def valueType: String = "object" - - final override def get(k: String): JValue = - vs.getOrElse(k, JNull) - - final override def set(k: String, v: JValue): Unit = - vs.put(k, v) - - final override def remove(k: String): Option[JValue] = - vs.remove(k) -} - -object JObject { self => - final def empty: JObject = - JObject(mutable.Map.empty) - - final def fromSeq(js: Seq[(String, JValue)]): JObject = - JObject(mutable.Map(js: _*)) -} diff --git a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JawnFacade.scala b/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JawnFacade.scala deleted file mode 100644 index a2d2d711..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/JawnFacade.scala +++ /dev/null @@ -1,51 +0,0 @@ -package jawn -package ast - -import scala.collection.mutable - -object JawnFacade extends Facade[JValue] { - - final val jnull = JNull - final val jfalse = JFalse - final val jtrue = JTrue - - final def jnum(s: CharSequence, decIndex: Int, expIndex: Int): JValue = - if (decIndex == -1 && expIndex == -1) { - DeferLong(s.toString) - } else { - DeferNum(s.toString) - } - - final def jstring(s: CharSequence): JValue = - JString(s.toString) - - final def singleContext(): FContext[JValue] = - new FContext[JValue] { - var value: JValue = _ - def add(s: CharSequence) { value = JString(s.toString) } - def add(v: JValue) { value = v } - def finish: JValue = value - def isObj: Boolean = false - } - - final def arrayContext(): FContext[JValue] = - new FContext[JValue] { - val vs = mutable.ArrayBuffer.empty[JValue] - def add(s: CharSequence) { vs.append(JString(s.toString)) } - def add(v: JValue) { vs.append(v) } - def finish: JValue = JArray(vs.toArray) - def isObj: Boolean = false - } - - final def objectContext(): FContext[JValue] = - new FContext[JValue] { - var key: String = null - val vs = mutable.Map.empty[String, JValue] - def add(s: CharSequence): Unit = - if (key == null) { key = s.toString } else { vs(key.toString) = JString(s.toString); key = null } - def add(v: JValue): Unit = - { vs(key) = v; key = null } - def finish = JObject(vs) - def isObj = true - } -} diff --git a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/Renderer.scala 
b/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/Renderer.scala deleted file mode 100644 index 3b2d9103..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/main/scala/jawn/ast/Renderer.scala +++ /dev/null @@ -1,101 +0,0 @@ -package jawn -package ast - -import scala.annotation.switch -import scala.collection.mutable -import scala.util.Sorting - -sealed trait Renderer { - final def render(jv: JValue): String = { - val sb = new StringBuilder - render(sb, 0, jv) - sb.toString - } - - final def render(sb: StringBuilder, depth: Int, jv: JValue): Unit = - jv match { - case JNull => sb.append("null") - case JTrue => sb.append("true") - case JFalse => sb.append("false") - case LongNum(n) => sb.append(n.toString) - case DoubleNum(n) => sb.append(n.toString) - case DeferNum(s) => sb.append(s) - case DeferLong(s) => sb.append(s) - case JString(s) => renderString(sb, s) - case JArray(vs) => renderArray(sb, depth, vs) - case JObject(vs) => renderObject(sb, depth, canonicalizeObject(vs)) - } - - def canonicalizeObject(vs: mutable.Map[String, JValue]): Iterator[(String, JValue)] - - def renderString(sb: StringBuilder, s: String): Unit - - final def renderArray(sb: StringBuilder, depth: Int, vs: Array[JValue]): Unit = { - if (vs.isEmpty) return { sb.append("[]"); () } - sb.append("[") - render(sb, depth + 1, vs(0)) - var i = 1 - while (i < vs.length) { - sb.append(",") - render(sb, depth + 1, vs(i)) - i += 1 - } - sb.append("]") - } - - final def renderObject(sb: StringBuilder, depth: Int, it: Iterator[(String, JValue)]): Unit = { - if (!it.hasNext) return { sb.append("{}"); () } - val (k0, v0) = it.next - sb.append("{") - renderString(sb, k0) - sb.append(":") - render(sb, depth + 1, v0) - while (it.hasNext) { - val (k, v) = it.next - sb.append(",") - renderString(sb, k) - sb.append(":") - render(sb, depth + 1, v) - } - sb.append("}") - } - - final def escape(sb: StringBuilder, s: String, unicode: Boolean): Unit = { - sb.append('"') - var i = 0 - val len = s.length - while (i < len) { - (s.charAt(i): @switch) match { - case '"' => sb.append("\\\"") - case '\\' => sb.append("\\\\") - case '\b' => sb.append("\\b") - case '\f' => sb.append("\\f") - case '\n' => sb.append("\\n") - case '\r' => sb.append("\\r") - case '\t' => sb.append("\\t") - case c => - if (c < ' ' || (c > '~' && unicode)) sb.append("\\u%04x" format c.toInt) - else sb.append(c) - } - i += 1 - } - sb.append('"') - } -} - -object CanonicalRenderer extends Renderer { - def canonicalizeObject(vs: mutable.Map[String, JValue]): Iterator[(String, JValue)] = { - val keys = vs.keys.toArray - Sorting.quickSort(keys) - keys.iterator.map(k => (k, vs(k))) - } - def renderString(sb: StringBuilder, s: String): Unit = - escape(sb, s, true) -} - -object FastRenderer extends Renderer { - def canonicalizeObject(vs: mutable.Map[String, JValue]): Iterator[(String, JValue)] = - vs.iterator - def renderString(sb: StringBuilder, s: String): Unit = - escape(sb, s, false) -} diff --git a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ArbitraryUtil.scala b/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ArbitraryUtil.scala deleted file mode 100644 index 6fdb8fbe..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ArbitraryUtil.scala +++ /dev/null @@ -1,49 +0,0 @@ -package jawn -package ast - -import org.scalacheck._ -import Gen._ -import Arbitrary.arbitrary - -object ArbitraryUtil { - - // JSON doesn't allow NaN, PositiveInfinity, or NegativeInfinity - def isFinite(n: Double): Boolean = - !java.lang.Double.isNaN(n) 
&& !java.lang.Double.isInfinite(n) - - val jnull = Gen.const(JNull) - val jboolean = Gen.oneOf(JTrue :: JFalse :: Nil) - val jlong = arbitrary[Long].map(LongNum(_)) - val jdouble = arbitrary[Double].filter(isFinite).map(DoubleNum(_)) - val jstring = arbitrary[String].map(JString(_)) - - // Totally unscientific atom frequencies. - val jatom: Gen[JAtom] = - Gen.frequency( - (1, jnull), - (8, jboolean), - (8, jlong), - (8, jdouble), - (16, jstring)) - - // Use lvl to limit the depth of our jvalues. - // Otherwise we will end up with SOE real fast. - - val MaxLevel: Int = 3 - - def jarray(lvl: Int): Gen[JArray] = - Gen.containerOf[Array, JValue](jvalue(lvl + 1)).map(JArray(_)) - - def jitem(lvl: Int): Gen[(String, JValue)] = - for { s <- arbitrary[String]; j <- jvalue(lvl) } yield (s, j) - - def jobject(lvl: Int): Gen[JObject] = - Gen.containerOf[Vector, (String, JValue)](jitem(lvl + 1)).map(JObject.fromSeq) - - def jvalue(lvl: Int = 0): Gen[JValue] = - if (lvl >= MaxLevel) jatom - else Gen.frequency((16, jatom), (1, jarray(lvl)), (2, jobject(lvl))) - - implicit lazy val arbitraryJValue: Arbitrary[JValue] = - Arbitrary(jvalue()) -} diff --git a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/AstTest.scala b/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/AstTest.scala deleted file mode 100644 index 3ec7373e..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/AstTest.scala +++ /dev/null @@ -1,79 +0,0 @@ -package jawn -package ast - -import org.scalatest._ -import org.scalatest.prop._ - -import scala.collection.mutable -import scala.util.{Try, Success} - -import ArbitraryUtil._ - -class AstTest extends PropSpec with Matchers with PropertyChecks { - - property("calling .get never crashes") { - forAll { (v: JValue, s: String, i: Int) => - Try(v.get(i).get(s)).isSuccess shouldBe true - Try(v.get(s).get(i)).isSuccess shouldBe true - Try(v.get(i).get(i)).isSuccess shouldBe true - Try(v.get(s).get(s)).isSuccess shouldBe true - } - } - - property(".getX and .asX agree") { - forAll { (v: JValue) => - v.getBoolean shouldBe Try(v.asBoolean).toOption - v.getString shouldBe Try(v.asString).toOption - v.getInt shouldBe Try(v.asInt).toOption - v.getLong shouldBe Try(v.asLong).toOption - v.getDouble shouldBe Try(v.asDouble).toOption - v.getBigInt shouldBe Try(v.asBigInt).toOption - v.getBigDecimal shouldBe Try(v.asBigDecimal).toOption - } - } - - property(".getBoolean") { - forAll((b: Boolean) => JBool(b).getBoolean shouldBe Some(b)) - } - - property(".getString") { - forAll((s: String) => JString(s).getString shouldBe Some(s)) - } - - property(".getInt") { - forAll { (n: Int) => - JNum(n).getInt shouldBe Some(n) - JParser.parseUnsafe(n.toString).getInt shouldBe Some(n) - } - } - - property(".getLong") { - forAll { (n: Long) => - JNum(n).getLong shouldBe Some(n) - JParser.parseUnsafe(n.toString).getLong shouldBe Some(n) - } - } - - property(".getDouble") { - forAll { (n: Double) => - JNum(n).getDouble shouldBe Some(n) - JParser.parseUnsafe(n.toString).getDouble shouldBe Some(n) - } - } - - property(".getBigInt") { - forAll { (n: BigInt) => - JNum(n.toString).getBigInt shouldBe Some(n) - JParser.parseUnsafe(n.toString).getBigInt shouldBe Some(n) - } - } - - property(".getBigDecimal") { - forAll { (n: BigDecimal) => - if (Try(BigDecimal(n.toString)) == Success(n)) { - JNum(n.toString).getBigDecimal shouldBe Some(n) - JParser.parseUnsafe(n.toString).getBigDecimal shouldBe Some(n) - } - } - } -} diff --git 
a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ParseCheck.scala b/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ParseCheck.scala deleted file mode 100644 index a5a5aa18..00000000 --- a/scalalib/src/test/resource/jawn/ast/src/test/scala/jawn/ParseCheck.scala +++ /dev/null @@ -1,169 +0,0 @@ -package jawn -package ast - -import org.scalatest._ -import org.scalatest.prop._ -import org.scalacheck.Arbitrary._ -import org.scalacheck._ -import Gen._ -import Arbitrary.arbitrary - -import scala.collection.mutable -import scala.util.{Try, Success} - -import jawn.parser.TestUtil - -import ArbitraryUtil._ - -class AstCheck extends PropSpec with Matchers with PropertyChecks { - - // so it's only one property, but it exercises: - // - // * parsing from strings - // * rendering jvalues to string - // * jvalue equality - // - // not bad. - property("idempotent parsing/rendering") { - forAll { value1: JValue => - val json1 = CanonicalRenderer.render(value1) - val value2 = JParser.parseFromString(json1).get - val json2 = CanonicalRenderer.render(value2) - json2 shouldBe json1 - json2.## shouldBe json1.## - - value1 shouldBe value2 - value1.## shouldBe value2.## - - TestUtil.withTemp(json1) { t => - JParser.parseFromFile(t).get shouldBe value2 - } - } - } - - property("string encoding/decoding") { - forAll { s: String => - val jstr1 = JString(s) - val json1 = CanonicalRenderer.render(jstr1) - val jstr2 = JParser.parseFromString(json1).get - val json2 = CanonicalRenderer.render(jstr2) - jstr2 shouldBe jstr1 - json2 shouldBe json1 - json2.## shouldBe json1.## - } - } - - property("string/charSequence parsing") { - forAll { value: JValue => - val s = CanonicalRenderer.render(value) - val j1 = JParser.parseFromString(s) - val cs = java.nio.CharBuffer.wrap(s.toCharArray) - val j2 = JParser.parseFromCharSequence(cs) - j1 shouldBe j2 - j1.## shouldBe j2.## - } - } - - implicit val facade = JawnFacade - - val percs = List(0.0, 0.2, 0.4, 0.8, 1.0) - - def checkRight(r: Either[ParseException, Seq[JValue]]): Seq[JValue] = { - r.isRight shouldBe true - val Right(vs) = r - vs - } - - def splitIntoSegments(json: String): List[String] = - if (json.length >= 8) { - val offsets = percs.map(n => (json.length * n).toInt) - val pairs = offsets zip offsets.drop(1) - pairs.map { case (i, j) => json.substring(i, j) } - } else { - json :: Nil - } - - def parseSegments(p: AsyncParser[JValue], segments: List[String]): Seq[JValue] = - segments.foldLeft(List.empty[JValue]) { (rs, s) => - rs ++ checkRight(p.absorb(s)) - } ++ checkRight(p.finish()) - - import AsyncParser.{UnwrapArray, ValueStream, SingleValue} - - property("async multi") { - val data = "[1,2,3][4,5,6]" - val p = AsyncParser[JValue](ValueStream) - val res0 = p.absorb(data) - val res1 = p.finish - //println((res0, res1)) - true - } - - property("async parsing") { - forAll { (v: JValue) => - val json = CanonicalRenderer.render(v) - val segments = splitIntoSegments(json) - val parsed = parseSegments(AsyncParser[JValue](SingleValue), segments) - parsed shouldBe List(v) - } - } - - property("async unwrapping") { - forAll { (vs0: List[Int]) => - val vs = vs0.map(LongNum(_)) - val arr = JArray(vs.toArray) - val json = CanonicalRenderer.render(arr) - val segments = splitIntoSegments(json) - parseSegments(AsyncParser[JValue](UnwrapArray), segments) shouldBe vs - } - } - - property("unicode string round-trip") { - forAll { (s: String) => - JParser.parseFromString(JString(s).render(FastRenderer)) shouldBe Success(JString(s)) - } - } - - property("if x == y, then 
x.## == y.##") { - forAll { (x: JValue, y: JValue) => - if (x == y) x.## shouldBe y.## - } - } - - property("ignore trailing zeros") { - forAll { (n: Int) => - val s = n.toString - val n1 = LongNum(n) - val n2 = DoubleNum(n) - - def check(j: JValue) { - j shouldBe n1; n1 shouldBe j - j shouldBe n2; n2 shouldBe j - } - - check(DeferNum(s)) - check(DeferNum(s + ".0")) - check(DeferNum(s + ".00")) - check(DeferNum(s + ".000")) - check(DeferNum(s + "e0")) - check(DeferNum(s + ".0e0")) - } - } - - property("large strings") { - val M = 1000000 - val q = "\"" - - val s0 = ("x" * (40 * M)) - val e0 = q + s0 + q - TestUtil.withTemp(e0) { t => - JParser.parseFromFile(t).filter(_ == JString(s0)).isSuccess shouldBe true - } - - val s1 = "\\" * (20 * M) - val e1 = q + s1 + s1 + q - TestUtil.withTemp(e1) { t => - JParser.parseFromFile(t).filter(_ == JString(s1)).isSuccess shouldBe true - } - } -} diff --git a/scalalib/src/test/resource/jawn/benchmark/build.sbt b/scalalib/src/test/resource/jawn/benchmark/build.sbt deleted file mode 100644 index 7cb15b12..00000000 --- a/scalalib/src/test/resource/jawn/benchmark/build.sbt +++ /dev/null @@ -1,21 +0,0 @@ -name := "jawn-benchmarks" - -javaOptions in run += "-Xmx6G" - -libraryDependencies ++= Seq( - "io.argonaut" %% "argonaut" % "6.2", - "org.json4s" %% "json4s-native" % "3.5.2", - "org.json4s" %% "json4s-jackson" % "3.5.2", - "com.typesafe.play" %% "play-json" % "2.5.15", - "com.rojoma" %% "rojoma-json" % "2.4.3", - "com.rojoma" %% "rojoma-json-v3" % "3.7.2", - "io.spray" %% "spray-json" % "1.3.3", - "org.parboiled" %% "parboiled" % "2.1.4", - "com.fasterxml.jackson.core" % "jackson-annotations" % "2.8.4", - "com.fasterxml.jackson.core" % "jackson-core" % "2.8.4", - "com.fasterxml.jackson.core" % "jackson-databind" % "2.8.4", - "com.google.code.gson" % "gson" % "2.8.1" -) - -// enable forking in run -fork in run := true diff --git a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/JmhBenchmarks.scala b/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/JmhBenchmarks.scala deleted file mode 100644 index bc56f9f6..00000000 --- a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/JmhBenchmarks.scala +++ /dev/null @@ -1,120 +0,0 @@ -package jawn -package benchmark - -import java.io.{BufferedReader, File, FileInputStream, FileReader} -import java.util.concurrent.TimeUnit -import org.openjdk.jmh.annotations._ -import scala.collection.mutable - -@State(Scope.Benchmark) -@BenchmarkMode(Array(Mode.AverageTime)) -@OutputTimeUnit(TimeUnit.MILLISECONDS) -abstract class JmhBenchmarks(name: String) { - val path: String = s"src/main/resources/$name" - - def load(path: String): String = { - val file = new File(path) - val bytes = new Array[Byte](file.length.toInt) - val fis = new FileInputStream(file) - fis.read(bytes) - new String(bytes, "UTF-8") - } - - def reader(path: String): FileReader = - new FileReader(new File(path)) - - def buffered(path: String): BufferedReader = - new BufferedReader(new FileReader(new File(path))) - - @Benchmark - def jawnCheckSyntax() = - jawn.Syntax.checkString(load(path)) - - @Benchmark - def jawnParse() = - jawn.ast.JParser.parseFromFile(new File(path)).get - - @Benchmark - def jawnStringParse() = - jawn.ast.JParser.parseFromString(load(path)).get -} - -trait OtherBenchmarks { self: JmhBenchmarks => - @Benchmark - def json4sJacksonParse() = { - import org.json4s._ - import org.json4s.jackson.JsonMethods._ - parse(load(path)) - } - - @Benchmark - def playParse() = - play.api.libs.json.Json.parse(load(path)) - 
- @Benchmark - def rojomaV3Parse() = - com.rojoma.json.v3.io.JsonReader.fromReader(reader(path), blockSize = 100000) - - @Benchmark - def argonautParse() = - argonaut.Parse.parse(load(path)) - - @Benchmark - def sprayParse() = - spray.json.JsonParser(load(path)) - - @Benchmark - def parboiledJsonParse() = - new ParboiledParser(load(path)).Json.run().get - - @Benchmark - def jacksonParse() = { - import com.fasterxml.jackson.databind.ObjectMapper - import com.fasterxml.jackson.databind.JsonNode - new ObjectMapper().readValue(new File(path), classOf[JsonNode]) - } - - @Benchmark - def gsonParse() = - new com.google.gson.JsonParser().parse(buffered(path)) - - // don't bother benchmarking jawn + external asts by default - - // @Benchmark - // def json4sJawnParse() = - // jawn.support.json4s.Parser.parseFromFile(new File(path)).get - // - // @Benchmark - // def rojomaV3JawnParse() = - // jawn.support.rojoma.v3.Parser.parseFromFile(new File(path)).get - // - // @Benchmark - // def argonautJawnParse() = - // jawn.support.argonaut.Parser.parseFromFile(new File(path)).get - // - // @Benchmark - // def sprayJawnParse() = - // jawn.support.spray.Parser.parseFromFile(new File(path)).get - - // native json4s parser is really, really slow, so it's disabled by default. - - // @Benchmark - // def json4sNativeParse() = { - // import org.json4s._ - // import org.json4s.native.JsonMethods._ - // parse(load(path)) - // } -} - -class Qux2Bench extends JmhBenchmarks("qux2.json") with OtherBenchmarks -class Bla25Bench extends JmhBenchmarks("bla25.json") with OtherBenchmarks -class CountriesBench extends JmhBenchmarks("countries.geo.json") with OtherBenchmarks -class Ugh10kBench extends JmhBenchmarks("ugh10k.json") with OtherBenchmarks - -class JawnOnlyQux2Bench extends JmhBenchmarks("qux2.json") -class JawnOnlyBla25Bench extends JmhBenchmarks("bla25.json") -class JawnOnlyCountriesBench extends JmhBenchmarks("countries.geo.json") -class JawnOnlyUgh10kBench extends JmhBenchmarks("ugh10k.json") - -// // from https://github.com/zemirco/sf-city-lots-json -// class CityLotsBench extends JmhBenchmarks("citylots.json") diff --git a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/Parboiled.scala b/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/Parboiled.scala deleted file mode 100644 index bd5fed18..00000000 --- a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/Parboiled.scala +++ /dev/null @@ -1,105 +0,0 @@ -package jawn.benchmark - -/* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import scala.annotation.switch -import org.parboiled2._ -import spray.json.{ParserInput => _, _} - -/** - * This is a feature-complete JSON parser implementation that almost directly - * models the JSON grammar presented at http://www.json.org as a parboiled2 PEG parser. 
- */ -class ParboiledParser(val input: ParserInput) extends Parser with StringBuilding { - import CharPredicate.{Digit, Digit19, HexDigit} - import ParboiledParser._ - - // the root rule - def Json = rule { WhiteSpace ~ Value ~ EOI } - - def JsonObject: Rule1[JsObject] = rule { - ws('{') ~ zeroOrMore(Pair).separatedBy(ws(',')) ~ ws('}') ~> ((fields: Seq[JsField]) => JsObject(fields :_*)) - } - - def Pair = rule { JsonStringUnwrapped ~ ws(':') ~ Value ~> ((_, _)) } - - def Value: Rule1[JsValue] = rule { - // as an optimization of the equivalent rule: - // JsonString | JsonNumber | JsonObject | JsonArray | JsonTrue | JsonFalse | JsonNull - // we make use of the fact that one-char lookahead is enough to discriminate the cases - run { - (cursorChar: @switch) match { - case '"' => JsonString - case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '-' => JsonNumber - case '{' => JsonObject - case '[' => JsonArray - case 't' => JsonTrue - case 'f' => JsonFalse - case 'n' => JsonNull - case _ => MISMATCH - } - } - } - - def JsonString = rule { JsonStringUnwrapped ~> (JsString(_)) } - - def JsonStringUnwrapped = rule { '"' ~ clearSB() ~ Characters ~ ws('"') ~ push(sb.toString) } - - def JsonNumber = rule { capture(Integer ~ optional(Frac) ~ optional(Exp)) ~> (JsNumber(_)) ~ WhiteSpace } - - def JsonArray = rule { ws('[') ~ zeroOrMore(Value).separatedBy(ws(',')) ~ ws(']') ~> (JsArray(_ :_*)) } - - def Characters = rule { zeroOrMore(NormalChar | '\\' ~ EscapedChar) } - - def NormalChar = rule { !QuoteBackslash ~ ANY ~ appendSB() } - - def EscapedChar = rule ( - QuoteSlashBackSlash ~ appendSB() - | 'b' ~ appendSB('\b') - | 'f' ~ appendSB('\f') - | 'n' ~ appendSB('\n') - | 'r' ~ appendSB('\r') - | 't' ~ appendSB('\t') - | Unicode ~> { code => sb.append(code.asInstanceOf[Char]); () } - ) - - def Unicode = rule { 'u' ~ capture(HexDigit ~ HexDigit ~ HexDigit ~ HexDigit) ~> (java.lang.Integer.parseInt(_, 16)) } - - def Integer = rule { optional('-') ~ (Digit19 ~ Digits | Digit) } - - def Digits = rule { oneOrMore(Digit) } - - def Frac = rule { "." 
~ Digits } - - def Exp = rule { ignoreCase('e') ~ optional(anyOf("+-")) ~ Digits } - - def JsonTrue = rule { "true" ~ WhiteSpace ~ push(JsTrue) } - - def JsonFalse = rule { "false" ~ WhiteSpace ~ push(JsFalse) } - - def JsonNull = rule { "null" ~ WhiteSpace ~ push(JsNull) } - - def WhiteSpace = rule { zeroOrMore(WhiteSpaceChar) } - - def ws(c: Char) = rule { c ~ WhiteSpace } -} - -object ParboiledParser { - val WhiteSpaceChar = CharPredicate(" \n\r\t\f") - val QuoteBackslash = CharPredicate("\"\\") - val QuoteSlashBackSlash = QuoteBackslash ++ "/" -} diff --git a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/ParseLongBench.scala b/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/ParseLongBench.scala deleted file mode 100644 index 97e8e6a8..00000000 --- a/scalalib/src/test/resource/jawn/benchmark/src/main/scala/jawn/ParseLongBench.scala +++ /dev/null @@ -1,133 +0,0 @@ -package jawn -package benchmark - -import java.io.{BufferedReader, File, FileInputStream, FileReader} -import java.util.concurrent.TimeUnit -import org.openjdk.jmh.annotations._ -import scala.collection.mutable - -case class Slice(s: String, begin: Int, limit: Int) extends CharSequence { - val length: Int = limit - begin - def charAt(i: Int): Char = s.charAt(begin + i) - def subSequence(start: Int, end: Int): Slice = - Slice(s, begin + start, Math.min(end + begin, limit)) - override def toString: String = - s.substring(begin, limit) -} - -@State(Scope.Benchmark) -@OutputTimeUnit(TimeUnit.MILLISECONDS) -class ParseLongBench { - - val longs: Array[Long] = Array( - -1346837161442476189L, -4666345991836441070L, 4868830844043235709L, - 2992690405064579158L, -2017521011608078634L, -3039682866169364757L, - 8997687047891586260L, 5932727796276454607L, 4062739618560250554L, - 8668950167358198490L, -8565613821858118870L, 8049785848575684314L, - -580831266940599830L, -3593199367295538945L, 8374322595267797482L, - 3088261552516619129L, -6879203747403593851L, -1842900848925949857L, - 4484592876047641351L, 5182973278356955602L, -6840392853855436945L, - -4176340556015032222L, -536379174926548619L, 6343722878919863216L, - 1557757008211571405L, -334093799456298669L, 619602023052756397L, - 6904874397154297343L, -4332034907782234995L, -8767842695446545180L, - -6127250063205613011L, 6902212562850963795L, 4778607575334665692L, - 7674074815344809639L, -3834944692798167050L, 7406081418831471202L, - -9126886315356724563L, 8093878176633322645L, 2471547025788214028L, - -5018828829942988155L, -6676531171364391367L, 8189793226936659851L, - 7150026713387306746L, -6065566098373722052L, 3281133763697608570L, - 957103694526079944L, -3009447279791131829L, -1995600795755716697L, - 2361055030313262510L, -4312828282749171343L, 8836216125516165138L, - 5548785979447786253L, 8567551485822958810L, 5931896003625723150L, - 3472058092439106147L, 4363240277904515929L, -2999484068697753019L, - -8285358702782547958L, -2407429647076308777L, 4411565001760018584L, - 792384115860070648L, 3328145302561962294L, -2377559446421434356L, - -7837698939558960516L, -565806101451282875L, -4792610084643070650L, - 2713520205731589923L, -6521104721472605988L, 5037187811345411645L, - 3866939564433764178L, -3851229228204678079L, -8171137274242372558L, - -14713951794749384L, 2061783257002637655L, -7375571393873059570L, - 7402007407273053723L, -5104318069025846447L, -8956415532448219980L, - 4904595193891993401L, 5396360181536889307L, -8043917553767343384L, - -3666269817017255250L, -6535587792359353103L, -4553034734642385706L, - -7544140164897268962L, 
2468330113904053484L, 5790319365381968237L, - -2734383156062609640L, -4831208471935595172L, 4502079643250626043L, - 4778622151522470246L, 7233054223498326990L, 5833883346008509644L, - -8013495378054295093L, 2944606201054530456L, -8608231828651976245L, - -6957117814546267426L, -4744827311133020624L, 2640030216500286789L, - 8343959867315747844L) - - val strs: Array[CharSequence] = - longs.map(_.toString) - - val seqs: Array[CharSequence] = - longs.map { n => - val prefix = "x" * (n & 63).toInt - val suffix = "y" * ((n * 7) & 63).toInt - val i = prefix.length - val s = n.toString - Slice(prefix + s + suffix, i, s.length + i) - } - - val str: CharSequence = "23948271429443" - - val seq: CharSequence = Slice("weigjewigjwi23948271429443jgewigjweiwjegiwgjiewjgeiwjg", 12, 26) - - def sumJava(css: Array[CharSequence]): Long = { - var sum: Long = 0 - var i = 0 - while (i < css.length) { - sum += java.lang.Long.parseLong(css(i).toString) - i += 1 - } - sum - } - - def sumStd(css: Array[CharSequence]): Long = { - var sum: Long = 0 - var i = 0 - while (i < css.length) { - sum += css(i).toString.toLong - i += 1 - } - sum - } - - def sumSafe(css: Array[CharSequence]): Long = { - var sum: Long = 0 - var i = 0 - while (i < css.length) { - sum += Util.parseLong(css(i)) - i += 1 - } - sum - } - - def sumUnsafe(css: Array[CharSequence]): Long = { - var sum: Long = 0 - var i = 0 - while (i < css.length) { - sum += Util.parseLongUnsafe(css(i)) - i += 1 - } - sum - } - - @Benchmark def stringArrayJava(): Long = sumJava(strs) - @Benchmark def seqArrayJava(): Long = sumJava(seqs) - @Benchmark def stringValueJava(): Long = java.lang.Long.parseLong(str.toString) - @Benchmark def seqValueJava(): Long = java.lang.Long.parseLong(seq.toString) - - @Benchmark def stringArrayStd(): Long = sumStd(strs) - @Benchmark def seqArrayStd(): Long = sumStd(seqs) - @Benchmark def stringValueStd(): Long = str.toString.toLong - @Benchmark def seqValueStd(): Long = seq.toString.toLong - - @Benchmark def stringArraySafe(): Long = sumSafe(strs) - @Benchmark def seqArraySafe(): Long = sumSafe(seqs) - @Benchmark def stringValueSafe(): Long = Util.parseLong(str) - @Benchmark def seqValueSafe(): Long = Util.parseLong(seq) - - @Benchmark def stringArrayUnsafe(): Long = sumUnsafe(strs) - @Benchmark def seqArrayUnsafe(): Long = sumUnsafe(seqs) - @Benchmark def stringValueUnsafe(): Long = Util.parseLongUnsafe(str) - @Benchmark def seqValueUnsafe(): Long = Util.parseLongUnsafe(seq) -} diff --git a/scalalib/src/test/resource/jawn/build.sbt b/scalalib/src/test/resource/jawn/build.sbt deleted file mode 100644 index c32403ed..00000000 --- a/scalalib/src/test/resource/jawn/build.sbt +++ /dev/null @@ -1,162 +0,0 @@ -import ReleaseTransformations._ - -lazy val previousJawnVersion = "0.10.4" - -lazy val stableCrossVersions = - Seq("2.10.6", "2.11.11", "2.12.2") - -// we'll support 2.13.0-M1 soon but not yet -lazy val allCrossVersions = - stableCrossVersions - -lazy val benchmarkVersion = - "2.12.2" - -lazy val jawnSettings = Seq( - organization := "org.spire-math", - scalaVersion := "2.12.2", - crossScalaVersions := allCrossVersions, - - mimaPreviousArtifacts := Set(organization.value %% moduleName.value % previousJawnVersion), - - resolvers += Resolver.sonatypeRepo("releases"), - - libraryDependencies ++= - "org.scalatest" %% "scalatest" % "3.0.3" % Test :: - "org.scalacheck" %% "scalacheck" % "1.13.5" % Test :: - Nil, - - scalacOptions ++= - "-deprecation" :: - "-optimize" :: - "-unchecked" :: - Nil, - - licenses += ("MIT", 
url("http://opensource.org/licenses/MIT")), - homepage := Some(url("http://github.com/non/jawn")), - - // release stuff - releaseCrossBuild := true, - publishMavenStyle := true, - publishArtifact in Test := false, - pomIncludeRepository := Function.const(false), - - publishTo := { - val nexus = "https://oss.sonatype.org/" - if (isSnapshot.value) { - Some("Snapshots" at nexus + "content/repositories/snapshots") - } else { - Some("Releases" at nexus + "service/local/staging/deploy/maven2") - } - }, - - scmInfo := Some(ScmInfo( - browseUrl = url("https://github.com/non/jawn"), - connection = "scm:git:git@github.com:non/jawn.git" - )), - - developers += Developer( - name = "Erik Osheim", - email = "erik@plastic-idolatry.com", - id = "d_m", - url = url("http://github.com/non/") - ), - - releaseProcess := Seq[ReleaseStep]( - checkSnapshotDependencies, - inquireVersions, - runClean, - ReleaseHelper.runCommandAndRemaining("+test"), // formerly runTest - setReleaseVersion, - commitReleaseVersion, - tagRelease, - ReleaseHelper.runCommandAndRemaining("+publishSigned"), - setNextVersion, - commitNextVersion, - ReleaseStep(action = Command.process("sonatypeReleaseAll", _)), - pushChanges)) - -lazy val noPublish = Seq( - publish := {}, - publishLocal := {}, - publishArtifact := false, - mimaPreviousArtifacts := Set()) - -lazy val root = project.in(file(".")) - .aggregate(all.map(Project.projectToRef): _*) - .enablePlugins(CrossPerProjectPlugin) - .disablePlugins(JmhPlugin) - .settings(name := "jawn") - .settings(jawnSettings: _*) - .settings(noPublish: _*) - -lazy val parser = project.in(file("parser")) - .settings(name := "parser") - .settings(moduleName := "jawn-parser") - .settings(jawnSettings: _*) - .disablePlugins(JmhPlugin) - -lazy val util = project.in(file("util")) - .dependsOn(parser % "compile->compile;test->test") - .settings(name := "util") - .settings(moduleName := "jawn-util") - .settings(jawnSettings: _*) - .disablePlugins(JmhPlugin) - -lazy val ast = project.in(file("ast")) - .dependsOn(parser % "compile->compile;test->test") - .dependsOn(util % "compile->compile;test->test") - .settings(name := "ast") - .settings(moduleName := "jawn-ast") - .settings(jawnSettings: _*) - .disablePlugins(JmhPlugin) - -def support(s: String) = - Project(id = s, base = file(s"support/$s")) - .settings(name := (s + "-support")) - .settings(moduleName := "jawn-" + s) - .dependsOn(parser) - .settings(jawnSettings: _*) - .disablePlugins(JmhPlugin) - -lazy val supportArgonaut = support("argonaut") - .settings(crossScalaVersions := stableCrossVersions) - .settings(libraryDependencies += "io.argonaut" %% "argonaut" % "6.2") - -lazy val supportJson4s = support("json4s") - .dependsOn(util) - .settings(crossScalaVersions := stableCrossVersions) - .settings(libraryDependencies += "org.json4s" %% "json4s-ast" % "3.5.2") - -lazy val supportPlay = support("play") - .settings(crossScalaVersions := stableCrossVersions) - .settings(libraryDependencies += (scalaBinaryVersion.value match { - case "2.10" => "com.typesafe.play" %% "play-json" % "2.4.11" - case "2.11" => "com.typesafe.play" %% "play-json" % "2.5.15" - case _ => "com.typesafe.play" %% "play-json" % "2.6.0" - })) - -lazy val supportRojoma = support("rojoma") - .settings(crossScalaVersions := stableCrossVersions) - .settings(libraryDependencies += "com.rojoma" %% "rojoma-json" % "2.4.3") - -lazy val supportRojomaV3 = support("rojoma-v3") - .settings(crossScalaVersions := stableCrossVersions) - .settings(libraryDependencies += "com.rojoma" %% "rojoma-json-v3" % 
"3.7.2") - -lazy val supportSpray = support("spray") - .settings(crossScalaVersions := stableCrossVersions) - .settings(resolvers += "spray" at "http://repo.spray.io/") - .settings(libraryDependencies += "io.spray" %% "spray-json" % "1.3.3") - -lazy val benchmark = project.in(file("benchmark")) - .dependsOn(all.map(Project.classpathDependency[Project]): _*) - .settings(name := "jawn-benchmark") - .settings(jawnSettings: _*) - .settings(scalaVersion := benchmarkVersion) - .settings(crossScalaVersions := Seq(benchmarkVersion)) - .settings(noPublish: _*) - .enablePlugins(JmhPlugin) - -lazy val all = - Seq(parser, util, ast, supportArgonaut, supportJson4s, supportPlay, supportRojoma, supportRojomaV3, supportSpray) diff --git a/scalalib/src/test/resource/jawn/parser/src/main/resources/utf8.json b/scalalib/src/test/resource/jawn/parser/src/main/resources/utf8.json deleted file mode 100644 index 6549eaa0..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/resources/utf8.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "copyright": "©", - "accent-e": "é", - "combined-e": "é", - "devenagari": "क्तु", - "math": "𝔊" -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/AsyncParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/AsyncParser.scala deleted file mode 100644 index acf770d7..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/AsyncParser.scala +++ /dev/null @@ -1,319 +0,0 @@ -package jawn - -import scala.annotation.{switch, tailrec} -import scala.math.max -import scala.collection.mutable -import scala.util.control -import java.nio.ByteBuffer - -object AsyncParser { - - sealed abstract class Mode(val start: Int, val value: Int) - case object UnwrapArray extends Mode(-5, 1) - case object ValueStream extends Mode(-1, 0) - case object SingleValue extends Mode(-1, -1) - - def apply[J](mode: Mode = SingleValue): AsyncParser[J] = - new AsyncParser(state = mode.start, curr = 0, stack = Nil, - data = new Array[Byte](131072), len = 0, allocated = 131072, - offset = 0, done = false, streamMode = mode.value) -} - -/** - * AsyncParser is able to parse chunks of data (encoded as - * Option[ByteBuffer] instances) and parse asynchronously. You can - * use the factory methods in the companion object to instantiate an - * async parser. - * - * The async parser's fields are described below: - * - * The (state, curr, stack) triple is used to save and restore parser - * state between async calls. State also helps encode extra - * information when streaming or unwrapping an array. - * - * The (data, len, allocated) triple is used to manage the underlying - * data the parser is keeping track of. As new data comes in, data may - * be expanded if not enough space is available. - * - * The offset parameter is used to drive the outer async parsing. It - * stores similar information to curr but is kept separate to avoid - * "corrupting" our snapshot. - * - * The done parameter is used internally to help figure out when the - * atEof() parser method should return true. This will be set when - * apply(None) is called. - * - * The streamMode parameter controls how the asynchronous parser will - * be handling multiple values. There are three states: - * - * 1: An array is being unwrapped. Normal JSON array rules apply - * (Note that if the outer value observed is not an array, this - * mode will toggle to the -1 mode). - * - * 0: A stream of individual JSON elements separated by whitespace - * are being parsed. 
We can return each complete element as we - * parse it. - * - * -1: No streaming is occuring. Only a single JSON value is - * allowed. - */ -final class AsyncParser[J] protected[jawn] ( - protected[jawn] var state: Int, - protected[jawn] var curr: Int, - protected[jawn] var stack: List[FContext[J]], - protected[jawn] var data: Array[Byte], - protected[jawn] var len: Int, - protected[jawn] var allocated: Int, - protected[jawn] var offset: Int, - protected[jawn] var done: Boolean, - protected[jawn] var streamMode: Int -) extends ByteBasedParser[J] { - - protected[this] var line = 0 - protected[this] var pos = 0 - protected[this] final def newline(i: Int) { line += 1; pos = i + 1 } - protected[this] final def column(i: Int) = i - pos - - final def copy() = - new AsyncParser(state, curr, stack, data.clone, len, allocated, offset, done, streamMode) - - final def absorb(buf: ByteBuffer)(implicit facade: Facade[J]): Either[ParseException, Seq[J]] = { - done = false - val buflen = buf.limit - buf.position - val need = len + buflen - resizeIfNecessary(need) - buf.get(data, len, buflen) - len = need - churn() - } - - final def absorb(bytes: Array[Byte])(implicit facade: Facade[J]): Either[ParseException, Seq[J]] = - absorb(ByteBuffer.wrap(bytes)) - - final def absorb(s: String)(implicit facade: Facade[J]): Either[ParseException, Seq[J]] = - absorb(ByteBuffer.wrap(s.getBytes(utf8))) - - final def finish()(implicit facade: Facade[J]): Either[ParseException, Seq[J]] = { - done = true - churn() - } - - protected[this] final def resizeIfNecessary(need: Int): Unit = { - // if we don't have enough free space available we'll need to grow our - // data array. we never shrink the data array, assuming users will call - // feed with similarly-sized buffers. - if (need > allocated) { - val doubled = if (allocated < 0x40000000) allocated * 2 else Int.MaxValue - val newsize = max(need, doubled) - val newdata = new Array[Byte](newsize) - System.arraycopy(data, 0, newdata, 0, len) - data = newdata - allocated = newsize - } - } - - /** - * Explanation of the new synthetic states. The parser machinery - * uses positive integers for states while parsing json values. We - * use these negative states to keep track of the async parser's - * status between json values. - * - * ASYNC_PRESTART: We haven't seen any non-whitespace yet. We - * could be parsing an array, or not. We are waiting for valid - * JSON. - * - * ASYNC_START: We've seen an array and have begun unwrapping - * it. We could see a ] if the array is empty, or valid JSON. - * - * ASYNC_END: We've parsed an array and seen the final ]. At this - * point we should only see whitespace or an EOF. - * - * ASYNC_POSTVAL: We just parsed a value from inside the array. We - * expect to see whitespace, a comma, or a ]. - * - * ASYNC_PREVAL: We are in an array and we just saw a comma. We - * expect to see whitespace or a JSON value. 
- */ - @inline private[this] final def ASYNC_PRESTART = -5 - @inline private[this] final def ASYNC_START = -4 - @inline private[this] final def ASYNC_END = -3 - @inline private[this] final def ASYNC_POSTVAL = -2 - @inline private[this] final def ASYNC_PREVAL = -1 - - protected[jawn] def churn()(implicit facade: Facade[J]): Either[ParseException, Seq[J]] = { - - // accumulates json values - val results = mutable.ArrayBuffer.empty[J] - - // we rely on exceptions to tell us when we run out of data - try { - while (true) { - if (state < 0) { - (at(offset): @switch) match { - case '\n' => - newline(offset) - offset += 1 - - case ' ' | '\t' | '\r' => - offset += 1 - - case '[' => - if (state == ASYNC_PRESTART) { - offset += 1 - state = ASYNC_START - } else if (state == ASYNC_END) { - die(offset, "expected eof") - } else if (state == ASYNC_POSTVAL) { - die(offset, "expected , or ]") - } else { - state = 0 - } - - case ',' => - if (state == ASYNC_POSTVAL) { - offset += 1 - state = ASYNC_PREVAL - } else if (state == ASYNC_END) { - die(offset, "expected eof") - } else { - die(offset, "expected json value") - } - - case ']' => - if (state == ASYNC_POSTVAL || state == ASYNC_START) { - if (streamMode > 0) { - offset += 1 - state = ASYNC_END - } else { - die(offset, "expected json value or eof") - } - } else if (state == ASYNC_END) { - die(offset, "expected eof") - } else { - die(offset, "expected json value") - } - - case c => - if (state == ASYNC_END) { - die(offset, "expected eof") - } else if (state == ASYNC_POSTVAL) { - die(offset, "expected ] or ,") - } else { - if (state == ASYNC_PRESTART && streamMode > 0) streamMode = -1 - state = 0 - } - } - - } else { - // jump straight back into rparse - offset = reset(offset) - val (value, j) = if (state <= 0) { - parse(offset) - } else { - rparse(state, curr, stack) - } - if (streamMode > 0) { - state = ASYNC_POSTVAL - } else if (streamMode == 0) { - state = ASYNC_PREVAL - } else { - state = ASYNC_END - } - curr = j - offset = j - stack = Nil - results.append(value) - } - } - Right(results) - } catch { - case e: AsyncException => - if (done) { - // if we are done, make sure we ended at a good stopping point - if (state == ASYNC_PREVAL || state == ASYNC_END) Right(results) - else Left(ParseException("exhausted input", -1, -1, -1)) - } else { - // we ran out of data, so return what we have so far - Right(results) - } - - case e: ParseException => - // we hit a parser error, so return that error and results so far - Left(e) - } - } - - // every 1M we shift our array back by 1M. - protected[this] final def reset(i: Int): Int = { - if (offset >= 1048576) { - len -= 1048576 - offset -= 1048576 - pos -= 1048576 - System.arraycopy(data, 1048576, data, 0, len) - i - 1048576 - } else { - i - } - } - - /** - * We use this to keep track of the last recoverable place we've - * seen. If we hit an AsyncException, we can later resume from this - * point. - * - * This method is called during every loop of rparse, and the - * arguments are the exact arguments we can pass to rparse to - * continue where we left off. - */ - protected[this] final def checkpoint(state: Int, i: Int, stack: List[FContext[J]]) { - this.state = state - this.curr = i - this.stack = stack - } - - /** - * This is a specialized accessor for the case where our underlying data are - * bytes not chars. 
- */ - protected[this] final def byte(i: Int): Byte = - if (i >= len) throw new AsyncException else data(i) - - // we need to signal if we got out-of-bounds - protected[this] final def at(i: Int): Char = - if (i >= len) throw new AsyncException else data(i).toChar - - /** - * Access a byte range as a string. - * - * Since the underlying data are UTF-8 encoded, i and k must occur on unicode - * boundaries. Also, the resulting String is not guaranteed to have length - * (k - i). - */ - protected[this] final def at(i: Int, k: Int): CharSequence = { - if (k > len) throw new AsyncException - val size = k - i - val arr = new Array[Byte](size) - System.arraycopy(data, i, arr, 0, size) - new String(arr, utf8) - } - - // the basic idea is that we don't signal EOF until done is true, which means - // the client explicitly send us an EOF. - protected[this] final def atEof(i: Int): Boolean = - if (done) i >= len else false - - // we don't have to do anything special on close. - protected[this] final def close(): Unit = () -} - -/** - * This class is used internally by AsyncParser to signal that we've - * reached the end of the particular input we were given. - */ -private[jawn] class AsyncException extends Exception with control.NoStackTrace - -/** - * This is a more prosaic exception which indicates that we've hit a - * parsing error. - */ -private[jawn] class FailureException extends Exception diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBasedParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBasedParser.scala deleted file mode 100644 index 9fc5234a..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBasedParser.scala +++ /dev/null @@ -1,104 +0,0 @@ -package jawn - -import scala.annotation.{switch, tailrec} - -/** - * Trait used when the data to be parsed is in UTF-8. - * - * This parser has to translate input bytes to Chars and Strings. It - * provides a byte() method to access individual bytes, and also - * parser strings from bytes. - * - * Its parseString() implementation has two cases. In the first case - * (the hot path) the string has no escape sequences and we can just - * UTF-8 decode the entire set of bytes. In the second case, it goes - * to some trouble to be sure to de-escape correctly given that the - * input data is UTF-8. - */ -trait ByteBasedParser[J] extends Parser[J] { - protected[this] def byte(i: Int): Byte - - /** - * See if the string has any escape sequences. If not, return the end of the - * string. If so, bail out and return -1. - * - * This method expects the data to be in UTF-8 and accesses it as bytes. Thus - * we can just ignore any bytes with the highest bit set. - */ - protected[this] final def parseStringSimple(i: Int, ctxt: FContext[J]): Int = { - var j = i - var c: Int = byte(j) & 0xff - while (c != 34) { - if (c < 32) return die(j, s"control char ($c) in string") - if (c == 92) return -1 - j += 1 - c = byte(j) & 0xff - } - j + 1 - } - - /** - * Parse the string according to JSON rules, and add to the given context. - * - * This method expects the data to be in UTF-8 and accesses it as bytes. - */ - protected[this] final def parseString(i: Int, ctxt: FContext[J]): Int = { - val k = parseStringSimple(i + 1, ctxt) - if (k != -1) { - ctxt.add(at(i + 1, k - 1)) - return k - } - - // TODO: we might be able to do better by identifying where - // escapes occur, and then translating the intermediate strings in - // one go. 
- - var j = i + 1 - val sb = new CharBuilder - - var c: Int = byte(j) & 0xff - while (c != 34) { // " - if (c == 92) { // \ - (byte(j + 1): @switch) match { - case 98 => { sb.append('\b'); j += 2 } - case 102 => { sb.append('\f'); j += 2 } - case 110 => { sb.append('\n'); j += 2 } - case 114 => { sb.append('\r'); j += 2 } - case 116 => { sb.append('\t'); j += 2 } - - case 34 => { sb.append('"'); j += 2 } - case 47 => { sb.append('/'); j += 2 } - case 92 => { sb.append('\\'); j += 2 } - - // if there's a problem then descape will explode - case 117 => { sb.append(descape(at(j + 2, j + 6))); j += 6 } - - case c => die(j, s"invalid escape sequence (\\${c.toChar})") - } - } else if (c < 32) { - die(j, s"control char ($c) in string") - } else if (c < 128) { - // 1-byte UTF-8 sequence - sb.append(c.toChar) - j += 1 - } else if ((c & 224) == 192) { - // 2-byte UTF-8 sequence - sb.extend(at(j, j + 2)) - j += 2 - } else if ((c & 240) == 224) { - // 3-byte UTF-8 sequence - sb.extend(at(j, j + 3)) - j += 3 - } else if ((c & 248) == 240) { - // 4-byte UTF-8 sequence - sb.extend(at(j, j + 4)) - j += 4 - } else { - die(j, "invalid UTF-8 encoding") - } - c = byte(j) & 0xff - } - ctxt.add(sb.makeString) - j + 1 - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBufferParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBufferParser.scala deleted file mode 100644 index 1902b8d2..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ByteBufferParser.scala +++ /dev/null @@ -1,42 +0,0 @@ -package jawn - -import scala.annotation.{switch, tailrec} -import java.nio.ByteBuffer - -/** - * Basic ByteBuffer parser. - * - * This assumes that the provided ByteBuffer is ready to be read. The - * user is responsible for any necessary flipping/resetting of the - * ByteBuffer before parsing. - * - * The parser makes absolute calls to the ByteBuffer, which will not - * update its own mutable position fields. 
- */ -final class ByteBufferParser[J](src: ByteBuffer) extends SyncParser[J] with ByteBasedParser[J] { - private[this] final val start = src.position - private[this] final val limit = src.limit - start - - private[this] var lineState = 0 - protected[this] def line(): Int = lineState - - protected[this] final def newline(i: Int) { lineState += 1 } - protected[this] final def column(i: Int) = i - - protected[this] final def close() { src.position(src.limit) } - protected[this] final def reset(i: Int): Int = i - protected[this] final def checkpoint(state: Int, i: Int, stack: List[FContext[J]]) {} - protected[this] final def byte(i: Int): Byte = src.get(i + start) - protected[this] final def at(i: Int): Char = src.get(i + start).toChar - - protected[this] final def at(i: Int, k: Int): CharSequence = { - val len = k - i - val arr = new Array[Byte](len) - src.position(i + start) - src.get(arr, 0, len) - src.position(start) - new String(arr, utf8) - } - - protected[this] final def atEof(i: Int) = i >= limit -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ChannelParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ChannelParser.scala deleted file mode 100644 index 3c93e741..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/ChannelParser.scala +++ /dev/null @@ -1,164 +0,0 @@ -package jawn - -import java.lang.Integer.{ bitCount, highestOneBit } -import java.io.{File, FileInputStream} -import java.nio.ByteBuffer -import java.nio.channels.ReadableByteChannel - -object ChannelParser { - - final val DefaultBufferSize = 1048576 - - final val ParseAsStringThreshold = 20 * 1048576 - - def fromFile[J](f: File, bufferSize: Int = DefaultBufferSize): SyncParser[J] = - if (f.length < ParseAsStringThreshold) { - val bytes = new Array[Byte](f.length.toInt) - val fis = new FileInputStream(f) - fis.read(bytes) - new StringParser[J](new String(bytes, "UTF-8")) - } else { - new ChannelParser[J](new FileInputStream(f).getChannel, bufferSize) - } - - def fromChannel[J](ch: ReadableByteChannel, bufferSize: Int = DefaultBufferSize): ChannelParser[J] = - new ChannelParser[J](ch, bufferSize) - - /** - * Given a desired buffer size, find the closest positive - * power-of-two larger than that size. - * - * This method throws an exception if the given values are negative - * or too large to have a valid power of two. - */ - def computeBufferSize(x: Int): Int = - if (x < 0) { - throw new IllegalArgumentException("negative bufferSize ($x)") - } else if (x > 0x40000000) { - throw new IllegalArgumentException("bufferSize too large ($x)") - } else if (bitCount(x) == 1) { - x - } else { - highestOneBit(x) << 1 - } -} - -/** - * Basic file parser. - * - * Given a file name this parser opens it, chunks the data, and parses - * it. 
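As a rough usage sketch of the ChannelParser entry points above (the file path is hypothetical, and NullFacade is the facade defined later in this patch):

    import java.io.File
    import jawn._

    object ChannelParserSketch {
      def main(args: Array[String]): Unit = {
        // computeBufferSize rounds a requested size up to the next power of two.
        println(ChannelParser.computeBufferSize(100000)) // 131072

        // Small files are slurped and parsed as a String; larger ones are
        // streamed through a ChannelParser in power-of-two sized chunks.
        val parser = ChannelParser.fromFile[Unit](new File("data.json"), bufferSize = 65536)
        println(parser.parse()(NullFacade))
      }
    }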
- */ -final class ChannelParser[J](ch: ReadableByteChannel, bufferSize: Int) extends SyncParser[J] with ByteBasedParser[J] { - - var Bufsize: Int = ChannelParser.computeBufferSize(bufferSize) - var Mask: Int = Bufsize - 1 - var Allsize: Int = Bufsize * 2 - - // these are the actual byte arrays we'll use - private var curr = new Array[Byte](Bufsize) - private var next = new Array[Byte](Bufsize) - - // these are the bytecounts for each array - private var ncurr = ch.read(ByteBuffer.wrap(curr)) - private var nnext = ch.read(ByteBuffer.wrap(next)) - - var line = 0 - private var pos = 0 - protected[this] final def newline(i: Int): Unit = { line += 1; pos = i } - protected[this] final def column(i: Int): Int = i - pos - - protected[this] final def close(): Unit = ch.close() - - /** - * Swap the curr and next arrays/buffers/counts. - * - * We'll call this in response to certain reset() calls. Specifically, when - * the index provided to reset is no longer in the 'curr' buffer, we want to - * clear that data and swap the buffers. - */ - protected[this] final def swap(): Unit = { - var tmp = curr; curr = next; next = tmp - var ntmp = ncurr; ncurr = nnext; nnext = ntmp - } - - protected[this] final def grow(): Unit = { - val cc = new Array[Byte](Allsize) - System.arraycopy(curr, 0, cc, 0, Bufsize) - System.arraycopy(next, 0, cc, Bufsize, Bufsize) - - curr = cc - ncurr = ncurr + nnext - next = new Array[Byte](Allsize) - nnext = ch.read(ByteBuffer.wrap(next)) - - Bufsize = Allsize - Mask = Allsize - 1 - Allsize *= 2 - } - - /** - * If the cursor 'i' is past the 'curr' buffer, we want to clear the - * current byte buffer, do a swap, load some more data, and - * continue. - */ - protected[this] final def reset(i: Int): Int = - if (i >= Bufsize) { - swap() - nnext = ch.read(ByteBuffer.wrap(next)) - pos -= Bufsize - i - Bufsize - } else { - i - } - - protected[this] final def checkpoint(state: Int, i: Int, stack: List[FContext[J]]): Unit = () - - /** - * This is a specialized accessor for the case where our underlying - * data are bytes not chars. - */ - protected[this] final def byte(i: Int): Byte = - if (i < Bufsize) curr(i) - else if (i < Allsize) next(i & Mask) - else { grow(); byte(i) } - - /** - * Reads a byte as a single Char. The byte must be valid ASCII (this - * method is used to parse JSON values like numbers, constants, or - * delimiters, which are known to be within ASCII). - */ - protected[this] final def at(i: Int): Char = - if (i < Bufsize) curr(i).toChar - else if (i < Allsize) next(i & Mask).toChar - else { grow(); at(i) } - - /** - * Access a byte range as a string. - * - * Since the underlying data are UTF-8 encoded, i and k must occur - * on unicode boundaries. Also, the resulting String is not - * guaranteed to have length (k - i). 
- */ - protected[this] final def at(i: Int, k: Int): CharSequence = { - val len = k - i - if (k > Allsize) { - grow() - at(i, k) - } else if (k <= Bufsize) { - new String(curr, i, len, utf8) - } else if (i >= Bufsize) { - new String(next, i - Bufsize, len, utf8) - } else { - val arr = new Array[Byte](len) - val mid = Bufsize - i - System.arraycopy(curr, i, arr, 0, mid) - System.arraycopy(next, 0, arr, mid, k - Bufsize) - new String(arr, utf8) - } - } - - protected[this] final def atEof(i: Int) = - if (i < Bufsize) i >= ncurr - else i >= (nnext + Bufsize) -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBasedParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBasedParser.scala deleted file mode 100644 index a054e5dc..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBasedParser.scala +++ /dev/null @@ -1,98 +0,0 @@ -package jawn - -import scala.annotation.{switch, tailrec} - -/** - * Trait used when the data to be parsed is in UTF-16. - * - * This parser provides parseString(). Like ByteBasedParser it has - * fast/slow paths for string parsing depending on whether any escapes - * are present. - * - * It is simpler than ByteBasedParser. - */ -trait CharBasedParser[J] extends Parser[J] { - - private[this] final val charBuilder = new CharBuilder() - - /** - * See if the string has any escape sequences. If not, return the - * end of the string. If so, bail out and return -1. - * - * This method expects the data to be in UTF-16 and accesses it as - * chars. - */ - protected[this] final def parseStringSimple(i: Int, ctxt: FContext[J]): Int = { - var j = i - var c = at(j) - while (c != '"') { - if (c < ' ') return die(j, s"control char (${c.toInt}) in string") - if (c == '\\') return -1 - j += 1 - c = at(j) - } - j + 1 - } - - /** - * Parse a string that is known to have escape sequences. - */ - protected[this] final def parseStringComplex(i: Int, ctxt: FContext[J]): Int = { - var j = i + 1 - val sb = charBuilder.reset() - - var c = at(j) - while (c != '"') { - if (c < ' ') { - die(j, s"control char (${c.toInt}) in string") - } else if (c == '\\') { - (at(j + 1): @switch) match { - case 'b' => { sb.append('\b'); j += 2 } - case 'f' => { sb.append('\f'); j += 2 } - case 'n' => { sb.append('\n'); j += 2 } - case 'r' => { sb.append('\r'); j += 2 } - case 't' => { sb.append('\t'); j += 2 } - - case '"' => { sb.append('"'); j += 2 } - case '/' => { sb.append('/'); j += 2 } - case '\\' => { sb.append('\\'); j += 2 } - - // if there's a problem then descape will explode - case 'u' => { sb.append(descape(at(j + 2, j + 6))); j += 6 } - - case c => die(j, s"illegal escape sequence (\\$c)") - } - } else { - // this case is for "normal" code points that are just one Char. - // - // we don't have to worry about surrogate pairs, since those - // will all be in the ranges D800–DBFF (high surrogates) or - // DC00–DFFF (low surrogates). - sb.append(c) - j += 1 - } - j = reset(j) - c = at(j) - } - ctxt.add(sb.makeString) - j + 1 - } - - /** - * Parse the string according to JSON rules, and add to the given - * context. - * - * This method expects the data to be in UTF-16, and access it as - * Char. It performs the correct checks to make sure that we don't - * interpret a multi-char code point incorrectly. 
- */ - protected[this] final def parseString(i: Int, ctxt: FContext[J]): Int = { - val k = parseStringSimple(i + 1, ctxt) - if (k != -1) { - ctxt.add(at(i + 1, k - 1)) - k - } else { - parseStringComplex(i, ctxt) - } - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBuilder.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBuilder.scala deleted file mode 100644 index 589437bf..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharBuilder.scala +++ /dev/null @@ -1,56 +0,0 @@ -package jawn - -/** - * CharBuilder is a specialized way to build Strings. - * - * It wraps a (growable) array of characters, and can accept - * additional String or Char data to be added to its buffer. - */ -private[jawn] final class CharBuilder { - @inline final def INITIALSIZE = 32 - - private var cs = new Array[Char](INITIALSIZE) - private var capacity = INITIALSIZE - private var len = 0 - - def reset(): CharBuilder = { - len = 0 - this - } - - def makeString: String = new String(cs, 0, len) - - def resizeIfNecessary(goal: Int): Unit = { - if (goal <= capacity) return () - var cap = capacity - while (goal > cap && cap > 0) cap *= 2 - if (cap > capacity) { - val ncs = new Array[Char](cap) - System.arraycopy(cs, 0, ncs, 0, capacity) - cs = ncs - capacity = cap - } else if (cap < capacity) { - sys.error("maximum string size exceeded") - } - } - - def extend(s: CharSequence): Unit = { - val tlen = len + s.length - resizeIfNecessary(tlen) - var i = 0 - var j = len - len = tlen - while (i < s.length) { - cs(j) = s.charAt(i) - i += 1 - j += 1 - } - } - - def append(c: Char): Unit = { - val tlen = len + 1 - resizeIfNecessary(tlen) - cs(len) = c - len = tlen - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharSequenceParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharSequenceParser.scala deleted file mode 100644 index c592326e..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/CharSequenceParser.scala +++ /dev/null @@ -1,18 +0,0 @@ -package jawn - -/** - * Lazy character sequence parsing. - * - * This is similar to StringParser, but acts on character sequences. - */ -private[jawn] final class CharSequenceParser[J](cs: CharSequence) extends SyncParser[J] with CharBasedParser[J] { - var line = 0 - final def column(i: Int) = i - final def newline(i: Int) { line += 1 } - final def reset(i: Int): Int = i - final def checkpoint(state: Int, i: Int, stack: List[FContext[J]]): Unit = () - final def at(i: Int): Char = cs.charAt(i) - final def at(i: Int, j: Int): CharSequence = cs.subSequence(i, j) - final def atEof(i: Int) = i == cs.length - final def close() = () -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Facade.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Facade.scala deleted file mode 100644 index 203b68e9..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Facade.scala +++ /dev/null @@ -1,34 +0,0 @@ -package jawn - -/** - * Facade is a type class that describes how Jawn should construct - * JSON AST elements of type J. - * - * Facade[J] also uses FContext[J] instances, so implementors will - * usually want to define both. 
- */ -trait Facade[J] { - def singleContext(): FContext[J] - def arrayContext(): FContext[J] - def objectContext(): FContext[J] - - def jnull(): J - def jfalse(): J - def jtrue(): J - def jnum(s: CharSequence, decIndex: Int, expIndex: Int): J - def jstring(s: CharSequence): J -} - -/** - * FContext is used to construct nested JSON values. - * - * The most common cases are to build objects and arrays. However, - * this type is also used to build a single top-level JSON element, in - * cases where the entire JSON document consists of "333.33". - */ -trait FContext[J] { - def add(s: CharSequence): Unit - def add(v: J): Unit - def finish: J - def isObj: Boolean -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/MutableFacade.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/MutableFacade.scala deleted file mode 100644 index 8fe5716b..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/MutableFacade.scala +++ /dev/null @@ -1,35 +0,0 @@ -package jawn - -import scala.collection.mutable - -trait MutableFacade[J] extends Facade[J] { - def jarray(vs: mutable.ArrayBuffer[J]): J - def jobject(vs: mutable.Map[String, J]): J - - def singleContext() = new FContext[J] { - var value: J = _ - def add(s: CharSequence) { value = jstring(s) } - def add(v: J) { value = v } - def finish: J = value - def isObj: Boolean = false - } - - def arrayContext() = new FContext[J] { - val vs = mutable.ArrayBuffer.empty[J] - def add(s: CharSequence) { vs.append(jstring(s)) } - def add(v: J) { vs.append(v) } - def finish: J = jarray(vs) - def isObj: Boolean = false - } - - def objectContext() = new FContext[J] { - var key: String = null - val vs = mutable.Map.empty[String, J] - def add(s: CharSequence): Unit = - if (key == null) { key = s.toString } else { vs(key) = jstring(s); key = null } - def add(v: J): Unit = - { vs(key) = v; key = null } - def finish = jobject(vs) - def isObj = true - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/NullFacade.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/NullFacade.scala deleted file mode 100644 index 39d55884..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/NullFacade.scala +++ /dev/null @@ -1,30 +0,0 @@ -package jawn - -/** - * NullFacade discards all JSON AST information. - * - * This is the simplest possible facade. It could be useful for - * checking JSON for correctness (via parsing) without worrying about - * saving the data. - * - * It will always return () on any successful parse, no matter the - * content. 
- */ -object NullFacade extends Facade[Unit] { - - case class NullContext(isObj: Boolean) extends FContext[Unit] { - def add(s: CharSequence): Unit = () - def add(v: Unit): Unit = () - def finish: Unit = () - } - - val singleContext: FContext[Unit] = NullContext(false) - val arrayContext: FContext[Unit] = NullContext(false) - val objectContext: FContext[Unit] = NullContext(true) - - def jnull(): Unit = () - def jfalse(): Unit = () - def jtrue(): Unit = () - def jnum(s: CharSequence, decIndex: Int, expIndex: Int): Unit = () - def jstring(s: CharSequence): Unit = () -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Parser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Parser.scala deleted file mode 100644 index 1177e91f..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Parser.scala +++ /dev/null @@ -1,507 +0,0 @@ -package jawn - -import java.io.File -import java.nio.ByteBuffer -import java.nio.channels.ReadableByteChannel -import java.nio.charset.Charset -import scala.annotation.{switch, tailrec} -import scala.util.Try - -case class ParseException(msg: String, index: Int, line: Int, col: Int) extends Exception(msg) - -case class IncompleteParseException(msg: String) extends Exception(msg) - -/** - * Parser implements a state machine for correctly parsing JSON data. - * - * The trait relies on a small number of methods which are left - * abstract, and which generalize parsing based on whether the input - * is in Bytes or Chars, coming from Strings, files, or other input. - * All methods provided here are protected, so different parsers can - * choose which functionality to expose. - * - * Parser is parameterized on J, which is the type of the JSON AST it - * will return. Jawn can produce any AST for which a Facade[J] is - * available. - * - * The parser trait does not hold any state itself, but particular - * implementations will usually hold state. Parser instances should - * not be reused between parsing runs. - * - * For now the parser requires input to be in UTF-8. This requirement - * may eventually be relaxed. - */ -abstract class Parser[J] { - - protected[this] final val utf8 = Charset.forName("UTF-8") - - /** - * Read the byte/char at 'i' as a Char. - * - * Note that this should not be used on potential multi-byte - * sequences. - */ - protected[this] def at(i: Int): Char - - /** - * Read the bytes/chars from 'i' until 'j' as a String. - */ - protected[this] def at(i: Int, j: Int): CharSequence - - /** - * Return true iff 'i' is at or beyond the end of the input (EOF). - */ - protected[this] def atEof(i: Int): Boolean - - /** - * The reset() method is used to signal that we're working from the - * given position, and any previous data can be released. Some - * parsers (e.g. StringParser) will ignore release, while others - * (e.g. PathParser) will need to use this information to release - * and allocate different areas. - */ - protected[this] def reset(i: Int): Int - - /** - * The checkpoint() method is used to allow some parsers to store - * their progress. - */ - protected[this] def checkpoint(state: Int, i: Int, stack: List[FContext[J]]): Unit - - /** - * Should be called when parsing is finished. - */ - protected[this] def close(): Unit - - /** - * Valid parser states. 
- */ - @inline protected[this] final val ARRBEG = 6 - @inline protected[this] final val OBJBEG = 7 - @inline protected[this] final val DATA = 1 - @inline protected[this] final val KEY = 2 - @inline protected[this] final val SEP = 3 - @inline protected[this] final val ARREND = 4 - @inline protected[this] final val OBJEND = 5 - - protected[this] def newline(i: Int): Unit - protected[this] def line(): Int - protected[this] def column(i: Int): Int - - protected[this] final val HexChars: Array[Int] = { - val arr = new Array[Int](128) - var i = 0 - while (i < 10) { arr(i + '0') = i; i += 1 } - i = 0 - while (i < 16) { arr(i + 'a') = 10 + i; arr(i + 'A') = 10 + i; i += 1 } - arr - } - - /** - * Used to generate error messages with character info and offsets. - */ - protected[this] def die(i: Int, msg: String): Nothing = { - val y = line() + 1 - val x = column(i) + 1 - val s = "%s got %s (line %d, column %d)" format (msg, at(i), y, x) - throw ParseException(s, i, y, x) - } - - /** - * Used to generate messages for internal errors. - * - * This should only be used in situations where a possible bug in - * the parser was detected. For errors in user-provided JSON, use - * die(). - */ - protected[this] def error(msg: String) = - sys.error(msg) - - /** - * Parse the given number, and add it to the given context. - * - * We don't actually instantiate a number here, but rather pass the - * string of for future use. Facades can choose to be lazy and just - * store the string. This ends up being way faster and has the nice - * side-effect that we know exactly how the user represented the - * number. - */ - protected[this] final def parseNum(i: Int, ctxt: FContext[J])(implicit facade: Facade[J]): Int = { - var j = i - var c = at(j) - var decIndex = -1 - var expIndex = -1 - - if (c == '-') { - j += 1 - c = at(j) - } - if (c == '0') { - j += 1 - c = at(j) - } else if ('1' <= c && c <= '9') { - while ('0' <= c && c <= '9') { j += 1; c = at(j) } - } else { - die(i, "expected digit") - } - - if (c == '.') { - decIndex = j - i - j += 1 - c = at(j) - if ('0' <= c && c <= '9') { - while ('0' <= c && c <= '9') { j += 1; c = at(j) } - } else { - die(i, "expected digit") - } - } - - if (c == 'e' || c == 'E') { - expIndex = j - i - j += 1 - c = at(j) - if (c == '+' || c == '-') { - j += 1 - c = at(j) - } - if ('0' <= c && c <= '9') { - while ('0' <= c && c <= '9') { j += 1; c = at(j) } - } else { - die(i, "expected digit") - } - } - - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - j - } - - /** - * Parse the given number, and add it to the given context. - * - * This method is a bit slower than parseNum() because it has to be - * sure it doesn't run off the end of the input. - * - * Normally (when operating in rparse in the context of an outer - * array or object) we don't need to worry about this and can just - * grab characters, because if we run out of characters that would - * indicate bad input. This is for cases where the number could - * possibly be followed by a valid EOF. - * - * This method has all the same caveats as the previous method. 
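To make the decIndex/expIndex contract above concrete, here is a small sketch. PrintNumFacade is a hypothetical helper, not part of jawn, built on the MutableFacade trait from this patch:

    import jawn._
    import scala.collection.mutable

    // Prints what jnum receives; the other methods just echo their input.
    object PrintNumFacade extends MutableFacade[String] {
      def jarray(vs: mutable.ArrayBuffer[String]): String = vs.mkString("[", ",", "]")
      def jobject(vs: mutable.Map[String, String]): String = vs.toString
      def jnull(): String = "null"
      def jfalse(): String = "false"
      def jtrue(): String = "true"
      def jstring(s: CharSequence): String = s.toString
      def jnum(s: CharSequence, decIndex: Int, expIndex: Int): String = {
        // For "-1.25e3" this prints decIndex=2 (the '.') and expIndex=5 (the 'e');
        // -1 means the corresponding character is absent from the literal.
        println(s"jnum($s, decIndex=$decIndex, expIndex=$expIndex)")
        s.toString
      }
    }

    object NumIndexSketch {
      def main(args: Array[String]): Unit =
        Parser.parseFromString("-1.25e3")(PrintNumFacade)
    }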
- */ - protected[this] final def parseNumSlow(i: Int, ctxt: FContext[J])(implicit facade: Facade[J]): Int = { - var j = i - var c = at(j) - var decIndex = -1 - var expIndex = -1 - - if (c == '-') { - // any valid input will require at least one digit after - - j += 1 - c = at(j) - } - if (c == '0') { - j += 1 - if (atEof(j)) { - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - return j - } - c = at(j) - } else if ('1' <= c && c <= '9') { - while ('0' <= c && c <= '9') { - j += 1 - if (atEof(j)) { - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - return j - } - c = at(j) - } - } else { - die(i, "expected digit") - } - - if (c == '.') { - // any valid input will require at least one digit after . - decIndex = j - i - j += 1 - c = at(j) - if ('0' <= c && c <= '9') { - while ('0' <= c && c <= '9') { - j += 1 - if (atEof(j)) { - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - return j - } - c = at(j) - } - } else { - die(i, "expected digit") - } - } - - if (c == 'e' || c == 'E') { - // any valid input will require at least one digit after e, e+, etc - expIndex = j - i - j += 1 - c = at(j) - if (c == '+' || c == '-') { - j += 1 - c = at(j) - } - if ('0' <= c && c <= '9') { - while ('0' <= c && c <= '9') { - j += 1 - if (atEof(j)) { - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - return j - } - c = at(j) - } - } else { - die(i, "expected digit") - } - } - - ctxt.add(facade.jnum(at(i, j), decIndex, expIndex)) - j - } - - /** - * Generate a Char from the hex digits of "\u1234" (i.e. "1234"). - * - * NOTE: This is only capable of generating characters from the basic plane. - * This is why it can only return Char instead of Int. - */ - protected[this] final def descape(s: CharSequence): Char = { - val hc = HexChars - var i = 0 - var x = 0 - while (i < 4) { - x = (x << 4) | hc(s.charAt(i).toInt) - i += 1 - } - x.toChar - } - - /** - * Parse the JSON string starting at 'i' and save it into 'ctxt'. - */ - protected[this] def parseString(i: Int, ctxt: FContext[J]): Int - - /** - * Parse the JSON constant "true". - * - * Note that this method assumes that the first character has already been checked. - */ - protected[this] final def parseTrue(i: Int)(implicit facade: Facade[J]): J = - if (at(i + 1) == 'r' && at(i + 2) == 'u' && at(i + 3) == 'e') { - facade.jtrue - } else { - die(i, "expected true") - } - - /** - * Parse the JSON constant "false". - * - * Note that this method assumes that the first character has already been checked. - */ - protected[this] final def parseFalse(i: Int)(implicit facade: Facade[J]): J = - if (at(i + 1) == 'a' && at(i + 2) == 'l' && at(i + 3) == 's' && at(i + 4) == 'e') { - facade.jfalse - } else { - die(i, "expected false") - } - - /** - * Parse the JSON constant "null". - * - * Note that this method assumes that the first character has already been checked. - */ - protected[this] final def parseNull(i: Int)(implicit facade: Facade[J]): J = - if (at(i + 1) == 'u' && at(i + 2) == 'l' && at(i + 3) == 'l') { - facade.jnull - } else { - die(i, "expected null") - } - - /** - * Parse and return the next JSON value and the position beyond it. - */ - protected[this] final def parse(i: Int)(implicit facade: Facade[J]): (J, Int) = try { - (at(i): @switch) match { - // ignore whitespace - case ' ' => parse(i + 1) - case '\t' => parse(i + 1) - case '\r' => parse(i + 1) - case '\n' => newline(i); parse(i + 1) - - // if we have a recursive top-level structure, we'll delegate the parsing - // duties to our good friend rparse(). 
- case '[' => rparse(ARRBEG, i + 1, facade.arrayContext() :: Nil) - case '{' => rparse(OBJBEG, i + 1, facade.objectContext() :: Nil) - - // we have a single top-level number - case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => - val ctxt = facade.singleContext() - val j = parseNumSlow(i, ctxt) - (ctxt.finish, j) - - // we have a single top-level string - case '"' => - val ctxt = facade.singleContext() - val j = parseString(i, ctxt) - (ctxt.finish, j) - - // we have a single top-level constant - case 't' => (parseTrue(i), i + 4) - case 'f' => (parseFalse(i), i + 5) - case 'n' => (parseNull(i), i + 4) - - // invalid - case _ => die(i, "expected json value") - } - } catch { - case _: IndexOutOfBoundsException => - throw IncompleteParseException("exhausted input") - } - - /** - * Tail-recursive parsing method to do the bulk of JSON parsing. - * - * This single method manages parser states, data, etc. Except for - * parsing non-recursive values (like strings, numbers, and - * constants) all important work happens in this loop (or in methods - * it calls, like reset()). - * - * Currently the code is optimized to make use of switch - * statements. Future work should consider whether this is better or - * worse than manually constructed if/else statements or something - * else. Also, it may be possible to reorder some cases for speed - * improvements. - */ - @tailrec - protected[this] final def rparse(state: Int, j: Int, stack: List[FContext[J]])(implicit facade: Facade[J]): (J, Int) = { - val i = reset(j) - checkpoint(state, i, stack) - - val c = at(i) - - if (c == '\n') { - newline(i) - rparse(state, i + 1, stack) - } else if (c == ' ' || c == '\t' || c == '\r') { - rparse(state, i + 1, stack) - } else if (state == DATA) { - // we are inside an object or array expecting to see data - if (c == '[') { - rparse(ARRBEG, i + 1, facade.arrayContext() :: stack) - } else if (c == '{') { - rparse(OBJBEG, i + 1, facade.objectContext() :: stack) - } else { - val ctxt = stack.head - - if ((c >= '0' && c <= '9') || c == '-') { - val j = parseNum(i, ctxt) - rparse(if (ctxt.isObj) OBJEND else ARREND, j, stack) - } else if (c == '"') { - val j = parseString(i, ctxt) - rparse(if (ctxt.isObj) OBJEND else ARREND, j, stack) - } else if (c == 't') { - ctxt.add(parseTrue(i)) - rparse(if (ctxt.isObj) OBJEND else ARREND, i + 4, stack) - } else if (c == 'f') { - ctxt.add(parseFalse(i)) - rparse(if (ctxt.isObj) OBJEND else ARREND, i + 5, stack) - } else if (c == 'n') { - ctxt.add(parseNull(i)) - rparse(if (ctxt.isObj) OBJEND else ARREND, i + 4, stack) - } else { - die(i, "expected json value") - } - } - } else if ( - (c == ']' && (state == ARREND || state == ARRBEG)) || - (c == '}' && (state == OBJEND || state == OBJBEG)) - ) { - // we are inside an array or object and have seen a key or a closing - // brace, respectively. - if (stack.isEmpty) { - error("invalid stack") - } else { - val ctxt1 = stack.head - val tail = stack.tail - - if (tail.isEmpty) { - (ctxt1.finish, i + 1) - } else { - val ctxt2 = tail.head - ctxt2.add(ctxt1.finish) - rparse(if (ctxt2.isObj) OBJEND else ARREND, i + 1, tail) - } - } - } else if (state == KEY) { - // we are in an object expecting to see a key. - if (c == '"') { - val j = parseString(i, stack.head) - rparse(SEP, j, stack) - } else { - die(i, "expected \"") - } - } else if (state == SEP) { - // we are in an object just after a key, expecting to see a colon. 
- if (c == ':') { - rparse(DATA, i + 1, stack) - } else { - die(i, "expected :") - } - } else if (state == ARREND) { - // we are in an array, expecting to see a comma (before more data). - if (c == ',') { - rparse(DATA, i + 1, stack) - } else { - die(i, "expected ] or ,") - } - } else if (state == OBJEND) { - // we are in an object, expecting to see a comma (before more data). - if (c == ',') { - rparse(KEY, i + 1, stack) - } else { - die(i, "expected } or ,") - } - } else if (state == ARRBEG) { - // we are starting an array, expecting to see data or a closing bracket. - rparse(DATA, i, stack) - } else { - // we are starting an object, expecting to see a key or a closing brace. - rparse(KEY, i, stack) - } - } -} - - -object Parser { - - def parseUnsafe[J](s: String)(implicit facade: Facade[J]): J = - new StringParser(s).parse() - - def parseFromString[J](s: String)(implicit facade: Facade[J]): Try[J] = - Try(new StringParser[J](s).parse) - - def parseFromCharSequence[J](cs: CharSequence)(implicit facade: Facade[J]): Try[J] = - Try(new CharSequenceParser[J](cs).parse) - - def parseFromPath[J](path: String)(implicit facade: Facade[J]): Try[J] = - Try(ChannelParser.fromFile[J](new File(path)).parse) - - def parseFromFile[J](file: File)(implicit facade: Facade[J]): Try[J] = - Try(ChannelParser.fromFile[J](file).parse) - - def parseFromChannel[J](ch: ReadableByteChannel)(implicit facade: Facade[J]): Try[J] = - Try(ChannelParser.fromChannel[J](ch).parse) - - def parseFromByteBuffer[J](buf: ByteBuffer)(implicit facade: Facade[J]): Try[J] = - Try(new ByteBufferParser[J](buf).parse) - - def async[J](mode: AsyncParser.Mode)(implicit facade: Facade[J]): AsyncParser[J] = - AsyncParser[J](mode) -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SimpleFacade.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SimpleFacade.scala deleted file mode 100644 index dabec016..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SimpleFacade.scala +++ /dev/null @@ -1,42 +0,0 @@ -package jawn - -import scala.collection.mutable - -/** - * Facade is a type class that describes how Jawn should construct - * JSON AST elements of type J. - * - * Facade[J] also uses FContext[J] instances, so implementors will - * usually want to define both. 
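The Parser companion above, together with the SimpleFacade trait that follows, can be filled in roughly like this. TinyJson and its case classes are made up purely for illustration:

    import jawn._

    sealed trait TinyJson
    case class TinyAtom(repr: String) extends TinyJson
    case class TinyArr(values: List[TinyJson]) extends TinyJson
    case class TinyObj(fields: Map[String, TinyJson]) extends TinyJson

    // SimpleFacade supplies the FContext plumbing; we only describe the leaves
    // and how finished arrays/objects are assembled.
    object TinyFacade extends SimpleFacade[TinyJson] {
      def jarray(vs: List[TinyJson]): TinyJson = TinyArr(vs)
      def jobject(vs: Map[String, TinyJson]): TinyJson = TinyObj(vs)
      def jnull(): TinyJson = TinyAtom("null")
      def jfalse(): TinyJson = TinyAtom("false")
      def jtrue(): TinyJson = TinyAtom("true")
      def jnum(s: CharSequence, decIndex: Int, expIndex: Int): TinyJson = TinyAtom(s.toString)
      def jstring(s: CharSequence): TinyJson = TinyAtom(s.toString)
    }

    object TinyFacadeSketch {
      def main(args: Array[String]): Unit =
        // roughly: Success(TinyObj(Map(xs -> TinyArr(List(TinyAtom(1), TinyAtom(2), TinyAtom(3))))))
        println(Parser.parseFromString("""{"xs": [1, 2, 3]}""")(TinyFacade))
    }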
- */ -trait SimpleFacade[J] extends Facade[J] { - def jarray(vs: List[J]): J - def jobject(vs: Map[String, J]): J - - def singleContext() = new FContext[J] { - var value: J = _ - def add(s: CharSequence) { value = jstring(s) } - def add(v: J) { value = v } - def finish: J = value - def isObj: Boolean = false - } - - def arrayContext() = new FContext[J] { - val vs = mutable.ListBuffer.empty[J] - def add(s: CharSequence) { vs += jstring(s) } - def add(v: J) { vs += v } - def finish: J = jarray(vs.toList) - def isObj: Boolean = false - } - - def objectContext() = new FContext[J] { - var key: String = null - var vs = Map.empty[String, J] - def add(s: CharSequence): Unit = - if (key == null) { key = s.toString } else { vs = vs.updated(key, jstring(s)); key = null } - def add(v: J): Unit = - { vs = vs.updated(key, v); key = null } - def finish = jobject(vs) - def isObj = true - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/StringParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/StringParser.scala deleted file mode 100644 index 91662fc0..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/StringParser.scala +++ /dev/null @@ -1,25 +0,0 @@ -package jawn - -/** - * Basic in-memory string parsing. - * - * This is probably the simplest Parser implementation, since there is - * no UTF-8 decoding, and the data is already fully available. - * - * This parser is limited to the maximum string size (~2G). Obviously - * for large JSON documents it's better to avoid using this parser and - * go straight from disk, to avoid having to load the whole thing into - * memory at once. So this limit will probably not be a problem in - * practice. - */ -private[jawn] final class StringParser[J](s: String) extends SyncParser[J] with CharBasedParser[J] { - var line = 0 - final def column(i: Int) = i - final def newline(i: Int) { line += 1 } - final def reset(i: Int): Int = i - final def checkpoint(state: Int, i: Int, stack: List[FContext[J]]): Unit = () - final def at(i: Int): Char = s.charAt(i) - final def at(i: Int, j: Int): CharSequence = s.substring(i, j) - final def atEof(i: Int) = i == s.length - final def close() = () -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SupportParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SupportParser.scala deleted file mode 100644 index 2304a8dd..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SupportParser.scala +++ /dev/null @@ -1,31 +0,0 @@ -package jawn - -import java.io.File -import java.nio.ByteBuffer -import java.nio.channels.ReadableByteChannel -import scala.util.Try - -trait SupportParser[J] { - implicit def facade: Facade[J] - - def parseUnsafe(s: String): J = - new StringParser(s).parse() - - def parseFromString(s: String): Try[J] = - Try(new StringParser[J](s).parse) - - def parseFromPath(path: String): Try[J] = - Try(ChannelParser.fromFile[J](new File(path)).parse) - - def parseFromFile(file: File): Try[J] = - Try(ChannelParser.fromFile[J](file).parse) - - def parseFromChannel(ch: ReadableByteChannel): Try[J] = - Try(ChannelParser.fromChannel[J](ch).parse) - - def parseFromByteBuffer(buf: ByteBuffer): Try[J] = - Try(new ByteBufferParser[J](buf).parse) - - def async(mode: AsyncParser.Mode): AsyncParser[J] = - AsyncParser[J](mode) -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SyncParser.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SyncParser.scala deleted file mode 
100644 index 988a8ca9..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/SyncParser.scala +++ /dev/null @@ -1,37 +0,0 @@ -package jawn - -import scala.annotation.{switch, tailrec} -import scala.collection.mutable - -/** - * SyncParser extends Parser to do all parsing synchronously. - * - * Most traditional JSON parser are synchronous, and expect to receive - * all their input before returning. SyncParser[J] still leaves - * Parser[J]'s methods abstract, but adds a public methods for users - * to call to actually parse JSON. - */ -abstract class SyncParser[J] extends Parser[J] { - - /** - * Parse the JSON document into a single JSON value. - * - * The parser considers documents like '333', 'true', and '"foo"' to be - * valid, as well as more traditional documents like [1,2,3,4,5]. However, - * multiple top-level objects are not allowed. - */ - final def parse()(implicit facade: Facade[J]): J = { - val (value, i) = parse(0) - var j = i - while (!atEof(j)) { - (at(j): @switch) match { - case '\n' => newline(j); j += 1 - case ' ' | '\t' | '\r' => j += 1 - case _ => die(j, "expected whitespace or eof") - } - } - if (!atEof(j)) die(j, "expected eof") - close() - value - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Syntax.scala b/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Syntax.scala deleted file mode 100644 index 119b5783..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/main/scala/jawn/Syntax.scala +++ /dev/null @@ -1,27 +0,0 @@ -package jawn - -import java.io.File -import java.nio.ByteBuffer -import java.nio.channels.ReadableByteChannel -import java.nio.charset.Charset -import scala.annotation.{switch, tailrec} -import scala.util.Try - -object Syntax { - implicit def unitFacade: Facade[Unit] = NullFacade - - def checkString(s: String): Boolean = - Try(new StringParser(s).parse).isSuccess - - def checkPath(path: String): Boolean = - Try(ChannelParser.fromFile(new File(path)).parse).isSuccess - - def checkFile(file: File): Boolean = - Try(ChannelParser.fromFile(file).parse).isSuccess - - def checkChannel(ch: ReadableByteChannel): Boolean = - Try(ChannelParser.fromChannel(ch).parse).isSuccess - - def checkByteBuffer(buf: ByteBuffer): Boolean = - Try(new ByteBufferParser(buf).parse).isSuccess -} diff --git a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/ChannelSpec.scala b/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/ChannelSpec.scala deleted file mode 100644 index 6d5d33a9..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/ChannelSpec.scala +++ /dev/null @@ -1,25 +0,0 @@ -package jawn -package parser - -import org.scalatest._ - -import java.nio.channels.ByteChannel -import scala.util.Success - -class ChannelSpec extends PropSpec with Matchers { - - property("large strings in files are ok") { - val M = 1000000 - val q = "\"" - val big = q + ("x" * (40 * M)) + q - val bigEscaped = q + ("\\\\" * (20 * M)) + q - - TestUtil.withTemp(big) { t => - Parser.parseFromFile(t)(NullFacade).isSuccess shouldBe true - } - - TestUtil.withTemp(bigEscaped) { t => - Parser.parseFromFile(t)(NullFacade).isSuccess shouldBe true - } - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/CharBuilderSpec.scala b/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/CharBuilderSpec.scala deleted file mode 100644 index b25e67fe..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/CharBuilderSpec.scala +++ /dev/null @@ -1,23 +0,0 @@ -package 
jawn - -import org.scalatest._ -import org.scalatest.prop._ - -class CharBuilderSpec extends PropSpec with Matchers with PropertyChecks { - - property("append") { - forAll { xs: List[Char] => - val builder = new CharBuilder - xs.foreach(builder.append) - builder.makeString shouldBe xs.mkString - } - } - - property("extend") { - forAll { xs: List[String] => - val builder = new CharBuilder - xs.foreach(builder.extend) - builder.makeString shouldBe xs.mkString - } - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/JNumIndexCheck.scala b/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/JNumIndexCheck.scala deleted file mode 100644 index b0b6568d..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/JNumIndexCheck.scala +++ /dev/null @@ -1,81 +0,0 @@ -package jawn -package parser - -import java.nio.ByteBuffer -import org.scalatest.{Matchers, PropSpec} -import org.scalatest.prop.PropertyChecks -import scala.util.Success - -class JNumIndexCheck extends PropSpec with Matchers with PropertyChecks { - object JNumIndexCheckFacade extends Facade[Boolean] { - class JNumIndexCheckContext(val isObj: Boolean) extends FContext[Boolean] { - var failed = false - def add(s: CharSequence): Unit = () - def add(v: Boolean): Unit = { - if (!v) failed = true - } - def finish: Boolean = !failed - } - - val singleContext: FContext[Boolean] = new JNumIndexCheckContext(false) - val arrayContext: FContext[Boolean] = new JNumIndexCheckContext(false) - val objectContext: FContext[Boolean] = new JNumIndexCheckContext(true) - - def jnull(): Boolean = true - def jfalse(): Boolean = true - def jtrue(): Boolean = true - def jnum(s: CharSequence, decIndex: Int, expIndex: Int): Boolean = { - val input = s.toString - val inputDecIndex = input.indexOf('.') - val inputExpIndex = if (input.indexOf('e') == -1) input.indexOf("E") else input.indexOf('e') - - decIndex == inputDecIndex && expIndex == inputExpIndex - } - def jstring(s: CharSequence): Boolean = true - } - - property("jnum provides the correct indices with parseFromString") { - forAll { (value: BigDecimal) => - val json = s"""{ "num": ${value.toString} }""" - Parser.parseFromString(json)(JNumIndexCheckFacade) shouldBe Success(true) - } - } - - property("jnum provides the correct indices with parseFromByteBuffer") { - forAll { (value: BigDecimal) => - val json = s"""{ "num": ${value.toString} }""" - val bb = ByteBuffer.wrap(json.getBytes("UTF-8")) - Parser.parseFromByteBuffer(bb)(JNumIndexCheckFacade) shouldBe Success(true) - } - } - - property("jnum provides the correct indices with parseFromFile") { - forAll { (value: BigDecimal) => - val json = s"""{ "num": ${value.toString} }""" - TestUtil.withTemp(json) { t => - Parser.parseFromFile(t)(JNumIndexCheckFacade) shouldBe Success(true) - } - } - } - - property("jnum provides the correct indices at the top level with parseFromString") { - forAll { (value: BigDecimal) => - Parser.parseFromString(value.toString)(JNumIndexCheckFacade) shouldBe Success(true) - } - } - - property("jnum provides the correct indices at the top level with parseFromByteBuffer") { - forAll { (value: BigDecimal) => - val bb = ByteBuffer.wrap(value.toString.getBytes("UTF-8")) - Parser.parseFromByteBuffer(bb)(JNumIndexCheckFacade) shouldBe Success(true) - } - } - - property("jnum provides the correct indices at the top level with parseFromFile") { - forAll { (value: BigDecimal) => - TestUtil.withTemp(value.toString) { t => - Parser.parseFromFile(t)(JNumIndexCheckFacade) shouldBe Success(true) - } 
- } - } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/SyntaxCheck.scala b/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/SyntaxCheck.scala deleted file mode 100644 index fd00c260..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/SyntaxCheck.scala +++ /dev/null @@ -1,131 +0,0 @@ -package jawn -package parser - -import org.scalatest._ -import prop._ -import org.scalacheck.Arbitrary._ -import org.scalacheck._ -import Gen._ -import Arbitrary.arbitrary - -import scala.util.{Try, Success, Failure} - -class SyntaxCheck extends PropSpec with Matchers with PropertyChecks { - - sealed trait J { - def build: String = this match { - case JAtom(s) => s - case JArray(js) => js.map(_.build).mkString("[", ",", "]") - case JObject(js) => js.map { case (k, v) => - val kk = "\"" + k + "\"" - val vv = v.build - s"$kk: $vv" - }.mkString("{", ",", "}") - } - } - - case class JAtom(s: String) extends J - case class JArray(js: List[J]) extends J - case class JObject(js: Map[String, J]) extends J - - val jatom: Gen[JAtom] = - Gen.oneOf( - "null", "true", "false", "1234", "-99", "16.0", "2e9", - "-4.44E-10", "11e+14", "\"foo\"", "\"\"", "\"bar\"", - "\"qux\"", "\"duh\"", "\"abc\"", "\"xyz\"", "\"zzzzzz\"", - "\"\\u1234\"").map(JAtom(_)) - - def jarray(lvl: Int): Gen[JArray] = - Gen.containerOf[List, J](jvalue(lvl + 1)).map(JArray(_)) - - val keys = Gen.oneOf("foo", "bar", "qux", "abc", "def", "xyz") - def jitem(lvl: Int): Gen[(String, J)] = - for { s <- keys; j <- jvalue(lvl) } yield (s, j) - - def jobject(lvl: Int): Gen[JObject] = - Gen.containerOf[List, (String, J)](jitem(lvl + 1)).map(ts => JObject(ts.toMap)) - - def jvalue(lvl: Int): Gen[J] = - if (lvl < 3) { - Gen.frequency((16, 'ato), (1, 'arr), (2, 'obj)).flatMap { - case 'ato => jatom - case 'arr => jarray(lvl) - case 'obj => jobject(lvl) - } - } else { - jatom - } - - implicit lazy val arbJValue: Arbitrary[J] = - Arbitrary(jvalue(0)) - - import java.nio.ByteBuffer - - def isValidSyntax(s: String): Boolean = { - val cs = java.nio.CharBuffer.wrap(s.toCharArray) - val r0 = Parser.parseFromCharSequence(cs)(NullFacade).isSuccess - val r1 = Parser.parseFromString(s)(NullFacade).isSuccess - val bb = ByteBuffer.wrap(s.getBytes("UTF-8")) - val r2 = Parser.parseFromByteBuffer(bb)(NullFacade).isSuccess - if (r0 == r1) r1 else sys.error(s"CharSequence/String parsing disagree($r0, $r1): $s") - if (r1 == r2) r1 else sys.error(s"String/ByteBuffer parsing disagree($r1, $r2): $s") - - TestUtil.withTemp(s) { t => - Parser.parseFromFile(t)(NullFacade).isSuccess - } - - val async = AsyncParser[Unit](AsyncParser.SingleValue) - val r3 = async.absorb(s)(NullFacade).isRight && async.finish()(NullFacade).isRight - if (r1 == r3) r1 else sys.error(s"Sync/Async parsing disagree($r1, $r3): $s") - } - - property("syntax-checking") { - forAll { (j: J) => isValidSyntax(j.build) shouldBe true } - } - - def qs(s: String): String = "\"" + s + "\"" - - property("unicode is ok") { - isValidSyntax(qs("ö")) shouldBe true - isValidSyntax(qs("ö\\\\")) shouldBe true - isValidSyntax(qs("\\\\ö")) shouldBe true - } - - property("literal TAB is invalid") { isValidSyntax(qs("\t")) shouldBe false } - property("literal NL is invalid") { isValidSyntax(qs("\n")) shouldBe false } - property("literal CR is invalid") { isValidSyntax(qs("\r")) shouldBe false } - property("literal NUL is invalid") { isValidSyntax(qs("\u0000")) shouldBe false } - property("literal BS TAB is invalid") { isValidSyntax(qs("\\\t")) shouldBe false } - property("literal 
BS NL is invalid") { isValidSyntax(qs("\\\n")) shouldBe false } - property("literal BS CR is invalid") { isValidSyntax(qs("\\\r")) shouldBe false } - property("literal BS NUL is invalid") { isValidSyntax(qs("\\\u0000")) shouldBe false } - property("literal BS ZERO is invalid") { isValidSyntax(qs("\\0")) shouldBe false } - property("literal BS X is invalid") { isValidSyntax(qs("\\x")) shouldBe false } - - property("0 is ok") { isValidSyntax("0") shouldBe true } - property("0e is invalid") { isValidSyntax("0e") shouldBe false } - property("123e is invalid") { isValidSyntax("123e") shouldBe false } - property(".999 is invalid") { isValidSyntax(".999") shouldBe false } - property("0.999 is ok") { isValidSyntax("0.999") shouldBe true } - property("-.999 is invalid") { isValidSyntax("-.999") shouldBe false } - property("-0.999 is ok") { isValidSyntax("-0.999") shouldBe true } - property("+0.999 is invalid") { isValidSyntax("+0.999") shouldBe false } - property("--0.999 is invalid") { isValidSyntax("--0.999") shouldBe false } - property("01 is invalid") { isValidSyntax("01") shouldBe false } - property("1e is invalid") { isValidSyntax("1e") shouldBe false } - property("1e- is invalid") { isValidSyntax("1e+") shouldBe false } - property("1e+ is invalid") { isValidSyntax("1e-") shouldBe false } - property("1. is invalid") { isValidSyntax("1.") shouldBe false } - property("1.e is invalid") { isValidSyntax("1.e") shouldBe false } - property("1.e9 is invalid") { isValidSyntax("1.e9") shouldBe false } - property("1.e- is invalid") { isValidSyntax("1.e+") shouldBe false } - property("1.e+ is invalid") { isValidSyntax("1.e-") shouldBe false } - property("1.1e is invalid") { isValidSyntax("1.1e") shouldBe false } - property("1.1e- is invalid") { isValidSyntax("1.1e-") shouldBe false } - property("1.1e+ is invalid") { isValidSyntax("1.1e+") shouldBe false } - property("1.1e1 is ok") { isValidSyntax("1.1e1") shouldBe true } - property("1.1e-1 is ok") { isValidSyntax("1.1e-1") shouldBe true } - property("1.1e+1 is ok") { isValidSyntax("1.1e+1") shouldBe true } - property("1+ is invalid") { isValidSyntax("1+") shouldBe false } - property("1- is invalid") { isValidSyntax("1-") shouldBe false } -} diff --git a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/TestUtil.scala b/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/TestUtil.scala deleted file mode 100644 index 64b8dd59..00000000 --- a/scalalib/src/test/resource/jawn/parser/src/test/scala/jawn/TestUtil.scala +++ /dev/null @@ -1,18 +0,0 @@ -package jawn -package parser - -import java.io._ - -object TestUtil { - def withTemp[A](s: String)(f: File => A): A = { - val t = File.createTempFile("jawn-syntax", ".json") - val pw = new PrintWriter(t) - pw.println(s) - pw.close() - try { - f(t) - } finally { - t.delete() - } - } -} diff --git a/scalalib/src/test/resource/jawn/project/ReleaseHelper.scala b/scalalib/src/test/resource/jawn/project/ReleaseHelper.scala deleted file mode 100644 index 354d6506..00000000 --- a/scalalib/src/test/resource/jawn/project/ReleaseHelper.scala +++ /dev/null @@ -1,34 +0,0 @@ -import sbt._ -import sbt.Keys._ -import sbt.complete.Parser - -object ReleaseHelper { - - /** Invoke a command and carry out remaining commands until completion. - * - * This is necessary because sbt-release's releaseStepCommand does not - * execute remaining commands, which sbt-doge relies on. 
- * - * Based on https://github.com/playframework/playframework/blob/master/framework/project/Release.scala - * - * NOTE: This can be removed in favor of https://github.com/sbt/sbt-release/pull/171 if/when merged upstream - */ - def runCommandAndRemaining(command: String): State => State = { originalState => - val originalRemaining = originalState.remainingCommands - - @annotation.tailrec - def runCommand(command: String, state: State): State = { - val newState = Parser.parse(command, state.combinedParser) match { - case Right(cmd) => cmd() - case Left(msg) => throw sys.error(s"Invalid programmatic input:\n$msg") - } - if (newState.remainingCommands.isEmpty) { - newState - } else { - runCommand(newState.remainingCommands.head, newState.copy(remainingCommands = newState.remainingCommands.tail)) - } - } - - runCommand(command, originalState.copy(remainingCommands = Nil)).copy(remainingCommands = originalRemaining) - } -} diff --git a/scalalib/src/test/resource/jawn/project/build.properties b/scalalib/src/test/resource/jawn/project/build.properties deleted file mode 100644 index 64317fda..00000000 --- a/scalalib/src/test/resource/jawn/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=0.13.15 diff --git a/scalalib/src/test/resource/jawn/project/plugins.sbt b/scalalib/src/test/resource/jawn/project/plugins.sbt deleted file mode 100644 index 618876a9..00000000 --- a/scalalib/src/test/resource/jawn/project/plugins.sbt +++ /dev/null @@ -1,6 +0,0 @@ -addSbtPlugin("com.eed3si9n" % "sbt-doge" % "0.1.5") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.25") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.14") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.1") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.5") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1") diff --git a/scalalib/src/test/resource/jawn/randjson.py b/scalalib/src/test/resource/jawn/randjson.py deleted file mode 100644 index 1783eba5..00000000 --- a/scalalib/src/test/resource/jawn/randjson.py +++ /dev/null @@ -1,78 +0,0 @@ -import json -import os -from random import * -import string -import sys - -constants = [True, False, None] - -def mkconstant(): - return choice(constants) - -def mkinteger(): - return randint(-1e3, 1e3) * (10 ** normalvariate(0, 4)) + randint(-1e3, 1e3) - -def mkdouble(): - return random() * (10 ** normalvariate(0, 30)) - -def mknum(): - if randint(0, 1): - return mkdouble() - else: - return mkinteger() - -def mkstring(): - n = int(min(abs(normalvariate(40, 20)), abs(normalvariate(30, 10)))) - return ''.join([choice(string.ascii_letters) for i in range(0, n)]) - -values = [mkconstant, mknum, mknum, mknum, mkstring] - -def mkvalue(): - return choice(values)() - -def mkarray(n, t, threshold): - a = [] - t2 = t + random() - if (t > threshold): - for i in range(0, 2 * n): - a.append(mkvalue()) - else: - #print "mkarray(%s, %s, %s)" % (n, t, threshold) - for i in range(0, n / 5): - a.append(mkcontainer(t2, threshold)) - return a - -def mkobject(n, t, threshold): - d = {} - t2 = t + random() - if (t > threshold): - for i in range(0, n): - k = mkstring() - v = mkvalue() - d[k] = v - else: - #print "mkobject(%s, %s, %s)" % (n, t, threshold) - for i in range(0, n / 10): - k = mkstring() - v = mkcontainer(t2, threshold) - d[k] = v - return d - -containers = [mkarray, mkobject, mkobject] - -def mkcontainer(t, threshold): - n = int(abs(normalvariate(10, 30))) - return choice(containers)(n, t, threshold) - -if __name__ == "__main__": - args = sys.argv[1:] - try: - weight = 
float(args[0]) - path = args[1] - print "generating random JSON with weight %s into %s" % (weight, path) - f = open(path, 'w') - c = mkcontainer(0.0, weight) - f.write(json.dumps(c)) - f.close() - except: - print "usage: %s WEIGHT (0.0 < w < ~4.0) FILE" % sys.argv[0] diff --git a/scalalib/src/test/resource/jawn/randjson2.py b/scalalib/src/test/resource/jawn/randjson2.py deleted file mode 100644 index eb6b9a3a..00000000 --- a/scalalib/src/test/resource/jawn/randjson2.py +++ /dev/null @@ -1,53 +0,0 @@ -import json -import os -from random import * -import string -import sys - -constants = [True, False, None] - -def mkconstant(): - return choice(constants) - -def mkinteger(): - return randint(-1e3, 1e3) * (10 ** normalvariate(0, 4)) + randint(-1e3, 1e3) - -def mkdouble(): - return random() * (10 ** normalvariate(0, 30)) - -def mknum(): - if randint(0, 1): - return mkdouble() - else: - return mkinteger() - -def mkstring(): - n = int(min(abs(normalvariate(40, 20)), abs(normalvariate(30, 10)))) - return ''.join([choice(string.ascii_letters) for i in range(0, n)]) - -values = [mkconstant, mknum, mknum, mknum, mkstring] - -def mkvalue(): - return choice(values)() - -if __name__ == "__main__": - args = sys.argv[1:] - try: - num = int(args[0]) - path = args[1] - print "writing json (%d rows) into %s" % (num, path) - f = open(path, 'w') - f.write("[") - for i in range(0, num): - if i > 0: f.write(", ") - c = {"foo": mkstring(), - "bar": mknum(), - "qux": mkvalue(), - "duh": {"a": mknum(), "b": mknum(), "c": mknum()}, - "xyz": {"yy": mkstring(), "zz": mkvalue()}, - "abc": [mkvalue() for i in range(0, 4)]} - f.write(json.dumps(c)) - f.write("]") - f.close() - except Exception, e: - print "usage: %s NUM PATH" % sys.argv[0] diff --git a/scalalib/src/test/resource/jawn/support/argonaut/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/argonaut/src/main/scala/Parser.scala deleted file mode 100644 index 0c57e4d7..00000000 --- a/scalalib/src/test/resource/jawn/support/argonaut/src/main/scala/Parser.scala +++ /dev/null @@ -1,45 +0,0 @@ -package jawn -package support.argonaut - -import scala.collection.mutable -import argonaut._ - -object Parser extends SupportParser[Json] { - implicit val facade: Facade[Json] = - new Facade[Json] { - def jnull() = Json.jNull - def jfalse() = Json.jFalse - def jtrue() = Json.jTrue - - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = - Json.jNumber(JsonNumber.unsafeDecimal(s.toString)) - def jstring(s: CharSequence) = Json.jString(s.toString) - - def singleContext() = new FContext[Json] { - var value: Json = null - def add(s: CharSequence) { value = jstring(s) } - def add(v: Json) { value = v } - def finish: Json = value - def isObj: Boolean = false - } - - def arrayContext() = new FContext[Json] { - val vs = mutable.ListBuffer.empty[Json] - def add(s: CharSequence) { vs += jstring(s) } - def add(v: Json) { vs += v } - def finish: Json = Json.jArray(vs.toList) - def isObj: Boolean = false - } - - def objectContext() = new FContext[Json] { - var key: String = null - var vs = JsonObject.empty - def add(s: CharSequence): Unit = - if (key == null) { key = s.toString } else { vs = vs + (key, jstring(s)); key = null } - def add(v: Json): Unit = - { vs = vs + (key, v); key = null } - def finish = Json.jObject(vs) - def isObj = true - } - } -} diff --git a/scalalib/src/test/resource/jawn/support/argonaut/src/test/scala/ParserSpec.scala b/scalalib/src/test/resource/jawn/support/argonaut/src/test/scala/ParserSpec.scala deleted file mode 100644 index 
bb6a8566..00000000 --- a/scalalib/src/test/resource/jawn/support/argonaut/src/test/scala/ParserSpec.scala +++ /dev/null @@ -1,41 +0,0 @@ -package jawn -package support.argonaut - -import argonaut._ -import Argonaut._ -import org.scalacheck.Arbitrary -import org.scalacheck.Arbitrary.arbitrary -import org.scalatest.prop.Checkers -import org.scalatest.{Matchers, FlatSpec} -import scala.util.Try - -object ParserSpec { - case class Example(a: Int, b: Long, c: Double) - - val exampleCodecJson: CodecJson[Example] = - casecodec3(Example.apply, Example.unapply)("a", "b", "c") - - implicit val exampleCaseClassArbitrary: Arbitrary[Example] = Arbitrary( - for { - a <- arbitrary[Int] - b <- arbitrary[Long] - c <- arbitrary[Double] - } yield Example(a, b, c) - ) -} - -class ParserSpec extends FlatSpec with Matchers with Checkers { - import ParserSpec._ - import jawn.support.argonaut.Parser.facade - - "The Argonaut support Parser" should "correctly marshal case classes with Long values" in { - check { (e: Example) => - val jsonString: String = exampleCodecJson.encode(e).nospaces - val json: Try[Json] = jawn.Parser.parseFromString(jsonString) - exampleCodecJson.decodeJson(json.get).toOption match { - case None => fail() - case Some(example) => example == e - } - } - } -} diff --git a/scalalib/src/test/resource/jawn/support/json4s/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/json4s/src/main/scala/Parser.scala deleted file mode 100644 index e552621c..00000000 --- a/scalalib/src/test/resource/jawn/support/json4s/src/main/scala/Parser.scala +++ /dev/null @@ -1,59 +0,0 @@ -package jawn -package support.json4s - -import scala.collection.mutable -import org.json4s.JsonAST._ - -object Parser extends Parser(false, false) - -class Parser(useBigDecimalForDouble: Boolean, useBigIntForLong: Boolean) extends SupportParser[JValue] { - - implicit val facade: Facade[JValue] = - new Facade[JValue] { - def jnull() = JNull - def jfalse() = JBool(false) - def jtrue() = JBool(true) - - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = - if (decIndex == -1 && expIndex == -1) { - if (useBigIntForLong) JInt(BigInt(s.toString)) - else JLong(util.parseLongUnsafe(s)) - } else { - if (useBigDecimalForDouble) JDecimal(BigDecimal(s.toString)) - else JDouble(s.toString.toDouble) - } - - def jstring(s: CharSequence) = JString(s.toString) - - def singleContext() = - new FContext[JValue] { - var value: JValue = null - def add(s: CharSequence) { value = jstring(s) } - def add(v: JValue) { value = v } - def finish: JValue = value - def isObj: Boolean = false - } - - def arrayContext() = - new FContext[JValue] { - val vs = mutable.ListBuffer.empty[JValue] - def add(s: CharSequence) { vs += jstring(s) } - def add(v: JValue) { vs += v } - def finish: JValue = JArray(vs.toList) - def isObj: Boolean = false - } - - def objectContext() = - new FContext[JValue] { - var key: String = null - val vs = mutable.ListBuffer.empty[JField] - def add(s: CharSequence): Unit = - if (key == null) key = s.toString - else { vs += JField(key, jstring(s)); key = null } - def add(v: JValue): Unit = - { vs += JField(key, v); key = null } - def finish: JValue = JObject(vs.toList) - def isObj: Boolean = true - } - } -} diff --git a/scalalib/src/test/resource/jawn/support/play/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/play/src/main/scala/Parser.scala deleted file mode 100644 index 1bca206a..00000000 --- a/scalalib/src/test/resource/jawn/support/play/src/main/scala/Parser.scala +++ /dev/null @@ -1,20 +0,0 @@ 
-package jawn -package support.play - -import play.api.libs.json._ - -object Parser extends SupportParser[JsValue] { - - implicit val facade: Facade[JsValue] = - new SimpleFacade[JsValue] { - def jnull() = JsNull - def jfalse() = JsBoolean(false) - def jtrue() = JsBoolean(true) - - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = JsNumber(BigDecimal(s.toString)) - def jstring(s: CharSequence) = JsString(s.toString) - - def jarray(vs: List[JsValue]) = JsArray(vs) - def jobject(vs: Map[String, JsValue]) = JsObject(vs) - } -} diff --git a/scalalib/src/test/resource/jawn/support/rojoma-v3/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/rojoma-v3/src/main/scala/Parser.scala deleted file mode 100644 index c031e71f..00000000 --- a/scalalib/src/test/resource/jawn/support/rojoma-v3/src/main/scala/Parser.scala +++ /dev/null @@ -1,18 +0,0 @@ -package jawn -package support.rojoma.v3 - -import scala.collection.mutable -import com.rojoma.json.v3.ast._ - -object Parser extends SupportParser[JValue] { - implicit val facade: Facade[JValue] = - new MutableFacade[JValue] { - def jnull() = JNull - def jfalse() = JBoolean.canonicalFalse - def jtrue() = JBoolean.canonicalTrue - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = JNumber.unsafeFromString(s.toString) - def jstring(s: CharSequence) = JString(s.toString) - def jarray(vs: mutable.ArrayBuffer[JValue]) = JArray(vs) - def jobject(vs: mutable.Map[String, JValue]) = JObject(vs) - } -} diff --git a/scalalib/src/test/resource/jawn/support/rojoma/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/rojoma/src/main/scala/Parser.scala deleted file mode 100644 index c0725ea3..00000000 --- a/scalalib/src/test/resource/jawn/support/rojoma/src/main/scala/Parser.scala +++ /dev/null @@ -1,18 +0,0 @@ -package jawn -package support.rojoma - -import scala.collection.mutable -import com.rojoma.json.ast._ - -object Parser extends SupportParser[JValue] { - implicit val facade: Facade[JValue] = - new MutableFacade[JValue] { - def jnull() = JNull - def jfalse() = JBoolean.canonicalFalse - def jtrue() = JBoolean.canonicalTrue - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = JNumber(BigDecimal(s.toString)) - def jstring(s: CharSequence) = JString(s.toString) - def jarray(vs: mutable.ArrayBuffer[JValue]) = JArray(vs) - def jobject(vs: mutable.Map[String, JValue]) = JObject(vs) - } -} diff --git a/scalalib/src/test/resource/jawn/support/spray/src/main/scala/Parser.scala b/scalalib/src/test/resource/jawn/support/spray/src/main/scala/Parser.scala deleted file mode 100644 index 2e589666..00000000 --- a/scalalib/src/test/resource/jawn/support/spray/src/main/scala/Parser.scala +++ /dev/null @@ -1,17 +0,0 @@ -package jawn -package support.spray - -import spray.json._ - -object Parser extends SupportParser[JsValue] { - implicit val facade: Facade[JsValue] = - new SimpleFacade[JsValue] { - def jnull() = JsNull - def jfalse() = JsFalse - def jtrue() = JsTrue - def jnum(s: CharSequence, decIndex: Int, expIndex: Int) = JsNumber(s.toString) - def jstring(s: CharSequence) = JsString(s.toString) - def jarray(vs: List[JsValue]) = JsArray(vs: _*) - def jobject(vs: Map[String, JsValue]) = JsObject(vs) - } -} diff --git a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/InvalidLong.scala b/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/InvalidLong.scala deleted file mode 100644 index adffb979..00000000 --- a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/InvalidLong.scala +++ /dev/null @@ 
-1,7 +0,0 @@ -package jawn.util - -class InvalidLong(s: String) extends NumberFormatException(s"For input string '$s'") - -object InvalidLong { - def apply(s: String): InvalidLong = new InvalidLong(s) -} diff --git a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/Slice.scala b/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/Slice.scala deleted file mode 100644 index 93a8159b..00000000 --- a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/Slice.scala +++ /dev/null @@ -1,95 +0,0 @@ -package jawn.util - -/** - * Character sequence representing a lazily-calculated substring. - * - * This class has three constructors: - * - * - Slice(s) wraps a string, ensuring that future operations - * (e.g. subSequence) will construct slices instead of strings. - * - * - Slice(s, start, limit) is the default, and ensures that: - * - * 1. start >= 0 - * 2. limit >= start - * 3. limit <= s.length - * - * - Slice.unsafe(s, start, limit) is for situations where the above - * bounds-checking has already occurred. Only use this if you are - * absolutely sure your arguments satisfy the above invariants. - * - * Slice's subSequence returns another slice. This means that when - * wrapping a very large string, garbage collection on the underlying - * string will not occur until all slices are freed. - * - * Slice's universal equality is only defined with regard to other - * slices. This means comparing a Slice with other CharSequence values - * (including String) will always return false. - * - * Slices are serializable. However! They use the default Java - * serialization layout, which is not that efficient, and could be a - * disaster in cases where a large shared string might be serialized - * many times in different slices. - */ -@SerialVersionUID(1L) -final class Slice private[jawn] (s: String, start: Int, limit: Int) extends CharSequence with Serializable { - - final val length: Int = - limit - start - - def charAt(i: Int): Char = - if (i < 0 || length <= i) throw new StringIndexOutOfBoundsException(s"index out of range: $i") - else s.charAt(start + i) - - def subSequence(i: Int, j: Int): Slice = - Slice(s, start + i, start + j) - - override def toString: String = - s.substring(start, limit) - - override def equals(that: Any): Boolean = - that match { - case t: AnyRef if this eq t => - true - case slice: Slice => - if (length != slice.length) return false - var i: Int = 0 - while (i < length) { - if (charAt(i) != slice.charAt(i)) return false - i += 1 - } - true - case _ => - false - } - - override def hashCode: Int = { - var hash: Int = 0x90decade - var i: Int = start - while (i < limit) { - hash = s.charAt(i) + (hash * 103696301) // prime - i += 1 - } - hash - } -} - -object Slice { - - val Empty: Slice = Slice("", 0, 0) - - def empty: Slice = Empty - - def apply(s: String): Slice = - new Slice(s, 0, s.length) - - def apply(s: String, start: Int, limit: Int): Slice = - if (start < 0 || limit < start || s.length < limit) { - throw new IndexOutOfBoundsException(s"invalid slice: start=$start, limit=$limit, length=${s.length}") - } else { - new Slice(s, start, limit) - } - - def unsafe(s: String, start: Int, limit: Int): Slice = - new Slice(s, start, limit) -} diff --git a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/package.scala b/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/package.scala deleted file mode 100644 index 08f7ae3e..00000000 --- a/scalalib/src/test/resource/jawn/util/src/main/scala/jawn/util/package.scala +++ /dev/null @@ 
-1,96 +0,0 @@ -package jawn - -package object util { - - /** - * Parse the given character sequence as a single Long value (64-bit - * signed integer) in decimal (base-10). - * - * Other than "0", leading zeros are not allowed, nor are leading - * plusses. At most one leading minus is allowed. The value "-0" is - * allowed, and is interpreted as 0. - * - * Stated more precisely, accepted values: - * - * - conform to the pattern: -?(0|([1-9][0-9]*)) - * - are within [-9223372036854775808, 9223372036854775807] - * - * This method will throw an `InvalidLong` exception on invalid - * input. - */ - def parseLong(cs: CharSequence): Long = { - - // we store the inverse of the positive sum, to ensure we don't - // incorrectly overflow on Long.MinValue. for positive numbers - // this inverse sum will be inverted before being returned. - var inverseSum: Long = 0L - var inverseSign: Long = -1L - var i: Int = 0 - - if (cs.charAt(0) == '-') { - inverseSign = 1L - i = 1 - } - - val len = cs.length - val size = len - i - if (i >= len) throw InvalidLong(cs.toString) - if (size > 19) throw InvalidLong(cs.toString) - if (cs.charAt(i) == '0' && size > 1) throw InvalidLong(cs.toString) - - while (i < len) { - val digit = cs.charAt(i).toInt - 48 - if (digit < 0 || 9 < digit) throw InvalidLong(cs.toString) - inverseSum = inverseSum * 10L - digit - i += 1 - } - - // detect and throw on overflow - if (size == 19 && (inverseSum >= 0 || (inverseSum == Long.MinValue && inverseSign < 0))) { - throw InvalidLong(cs.toString) - } - - inverseSum * inverseSign - } - - /** - * Parse the given character sequence as a single Long value (64-bit - * signed integer) in decimal (base-10). - * - * For valid inputs, this method produces the same values as - * `parseLong`. However, by avoiding input validation it is up to - * 50% faster. - * - * For inputs which `parseLong` throws an error on, - * `parseLongUnsafe` may (or may not) throw an error, or return a - * bogus value. This method makes no guarantees about how it handles - * invalid input. - * - * This method should only be used on sequences which have already - * been parsed (e.g. by a Jawn parser). When in doubt, use - * `parseLong(cs)`, which is still significantly faster than - * `java.lang.Long.parseLong(cs.toString)`. - */ - def parseLongUnsafe(cs: CharSequence): Long = { - - // we store the inverse of the positive sum, to ensure we don't - // incorrectly overflow on Long.MinValue. for positive numbers - // this inverse sum will be inverted before being returned. 
- var inverseSum: Long = 0L - var inverseSign: Long = -1L - var i: Int = 0 - - if (cs.charAt(0) == '-') { - inverseSign = 1L - i = 1 - } - - val len = cs.length - while (i < len) { - inverseSum = inverseSum * 10L - (cs.charAt(i).toInt - 48) - i += 1 - } - - inverseSum * inverseSign - } -} diff --git a/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/ParseLongCheck.scala b/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/ParseLongCheck.scala deleted file mode 100644 index 69c4a0e2..00000000 --- a/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/ParseLongCheck.scala +++ /dev/null @@ -1,72 +0,0 @@ -package jawn -package util - -import org.scalatest._ -import prop._ -import org.scalacheck._ - -import scala.util._ - -class ParseLongCheck extends PropSpec with Matchers with PropertyChecks { - - case class UniformLong(value: Long) - - object UniformLong { - implicit val arbitraryUniformLong: Arbitrary[UniformLong] = - Arbitrary(Gen.choose(Long.MinValue, Long.MaxValue).map(UniformLong(_))) - } - - property("both parsers accept on valid input") { - forAll { (n0: UniformLong, prefix: String, suffix: String) => - val n = n0.value - val payload = n.toString - val s = prefix + payload + suffix - val i = prefix.length - val cs = s.subSequence(i, payload.length + i) - cs.toString shouldBe payload - parseLong(cs) shouldBe n - parseLongUnsafe(cs) shouldBe n - } - - forAll { (s: String) => - Try(parseLong(s)) match { - case Success(n) => parseLongUnsafe(s) shouldBe n - case Failure(_) => succeed - } - } - } - - property("safe parser fails on invalid input") { - forAll { (n: Long, m: Long, suffix: String) => - val s1 = n.toString + suffix - Try(parseLong(s1)) match { - case Success(n) => n shouldBe s1.toLong - case Failure(_) => Try(s1.toLong).isFailure - } - - val s2 = n.toString + (m & 0x7fffffffffffffffL).toString - Try(parseLong(s2)) match { - case Success(n) => n shouldBe s2.toLong - case Failure(_) => Try(s2.toLong).isFailure - } - } - - Try(parseLong("9223372036854775807")) shouldBe Try(Long.MaxValue) - Try(parseLong("-9223372036854775808")) shouldBe Try(Long.MinValue) - Try(parseLong("-0")) shouldBe Try(0L) - - assert(Try(parseLong("")).isFailure) - assert(Try(parseLong("+0")).isFailure) - assert(Try(parseLong("00")).isFailure) - assert(Try(parseLong("01")).isFailure) - assert(Try(parseLong("+1")).isFailure) - assert(Try(parseLong("-")).isFailure) - assert(Try(parseLong("--1")).isFailure) - assert(Try(parseLong("9223372036854775808")).isFailure) - assert(Try(parseLong("-9223372036854775809")).isFailure) - } - - // NOTE: parseLongUnsafe is not guaranteed to crash, or do anything - // predictable, on invalid input, so we don't test this direction. - // Its "unsafe" suffix is there for a reason. 
-} diff --git a/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/SliceCheck.scala b/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/SliceCheck.scala deleted file mode 100644 index b56e105e..00000000 --- a/scalalib/src/test/resource/jawn/util/src/test/scala/jawn/util/SliceCheck.scala +++ /dev/null @@ -1,131 +0,0 @@ -package jawn -package util - -import org.scalatest._ -import prop._ -import org.scalacheck._ - -import Arbitrary.arbitrary - -import scala.util._ - -class SliceCheck extends PropSpec with Matchers with PropertyChecks { - - val genSlice: Gen[Slice] = { - val g = arbitrary[String] - def c(start: Int, end: Int): Gen[Int] = - if (end <= start) Gen.const(start) - else Gen.choose(start, end) - Gen.oneOf( - g.map(Slice(_)), - for { s <- g; n = s.length; i <- c(0, n) } yield Slice(s, i, n), - for { s <- g; n = s.length; j <- c(0, n) } yield Slice(s, 0, j), - for { s <- g; n = s.length; i <- c(0, n); j <- c(i, n) } yield Slice(s, i, j)) - } - - implicit val arbitrarySlice: Arbitrary[Slice] = - Arbitrary(genSlice) - - def tryEqual[A](got0: => A, expected0: => A): Unit = { - val got = Try(got0) - val expected = Try(expected0) - got match { - case Success(_) => got shouldBe expected - case Failure(_) => assert(expected.isFailure) - } - } - - property("Slice(s, i, j) ~ s.substring(i, j)") { - forAll { (s: String, i: Int, j: Int) => - tryEqual( - Slice(s, i, j).toString, - s.substring(i, j)) - } - } - - property("Slice(s, i, j).charAt(k) ~ s.substring(i, j).charAt(k)") { - forAll { (s: String, i: Int, j: Int, k: Int) => - tryEqual( - Slice(s, i, j).charAt(k), - s.substring(i, j).charAt(k)) - } - } - - property("slice.length >= 0") { - forAll { (cs: Slice) => - cs.length should be >= 0 - } - } - - property("slice.charAt(i) ~ slice.toString.charAt(i)") { - forAll { (cs: Slice, i: Int) => - tryEqual( - cs.charAt(i), - cs.toString.charAt(i)) - } - } - - property("Slice(s, i, j).subSequence(k, l) ~ s.substring(i, j).substring(k, l)") { - forAll { (s: String, i: Int, j: Int, k: Int, l: Int) => - tryEqual( - Slice(s, i, j).subSequence(k, l).toString, - s.substring(i, j).substring(k, l)) - } - } - - property("Slice(s) ~ Slice(s, 0, s.length)") { - forAll { (s: String) => - tryEqual( - Slice(s).toString, - Slice(s, 0, s.length).toString) - } - } - - property("Slice(s, i, j) => Slice.unsafe(s, i, j)") { - forAll { (s: String, i: Int, j: Int) => - Try(Slice(s, i, j).toString) match { - case Success(r) => r shouldBe Slice.unsafe(s, i, j).toString - case Failure(_) => succeed - } - } - } - - property("x == x") { - forAll { (x: Slice) => x shouldBe x } - } - - property("(x == y) = (x.toString == y.toString)") { - forAll { (x: Slice, y: Slice) => - (x == y) shouldBe (x.toString == y.toString) - } - } - - property("(x == y) -> (x.## == y.##)") { - forAll { (x: Slice, y: Slice) => - if (x == y) x.## shouldBe y.## - else (x.## == y.##) shouldBe false - } - } - - property("x == Slice(x.toString)") { - forAll { (x: Slice) => - Slice(x.toString) shouldBe x - } - } - - property("slice is serializable") { - import java.io._ - - forAll { (x: Slice) => - val baos = new ByteArrayOutputStream() - val oos = new ObjectOutputStream(baos) - oos.writeObject(x) - oos.close() - val bytes = baos.toByteArray - val bais = new ByteArrayInputStream(bytes) - val ois = new ObjectInputStream(bais) - Try(ois.readObject()) shouldBe Try(x) - ois.close() - } - } -} diff --git a/scalalib/src/test/resource/jawn/version.sbt b/scalalib/src/test/resource/jawn/version.sbt deleted file mode 100644 index 
1b9f6b1b..00000000 --- a/scalalib/src/test/resource/jawn/version.sbt +++ /dev/null @@ -1 +0,0 @@ -version in ThisBuild := "0.11.1-SNAPSHOT" diff --git a/scalalib/src/test/resource/resolve-deps/src/main/scala/Main.scala b/scalalib/src/test/resource/resolve-deps/src/main/scala/Main.scala deleted file mode 100644 index 5dcbe39a..00000000 --- a/scalalib/src/test/resource/resolve-deps/src/main/scala/Main.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Main { - println("ResolveDeps Main: hello world!") -} diff --git a/scalalib/src/test/scala/mill/scalalib/AcyclicTests.scala b/scalalib/src/test/scala/mill/scalalib/AcyclicTests.scala deleted file mode 100644 index 44a43702..00000000 --- a/scalalib/src/test/scala/mill/scalalib/AcyclicTests.scala +++ /dev/null @@ -1,78 +0,0 @@ -package mill.scalalib - -import ammonite.ops.ImplicitWd._ -import ammonite.ops._ -import mill.define.Cross -import mill.discover.Discovered -import mill.scalalib.publish._ -import utest._ -import mill.util.JsonFormatters._ -import mill.util.TestEvaluator -object AcyclicBuild{ - val acyclic = - for(crossVersion <- Cross("2.10.6", "2.11.8", "2.12.3", "2.12.4")) - yield new SbtModule with PublishModule {outer => - def basePath = AcyclicTests.workspacePath - def artifactName = "acyclic" - def publishVersion = "0.1.7" - - def pomSettings = PomSettings( - description = artifactName(), - organization = "com.lihaoyi", - url = "https://github.com/lihaoyi/acyclic", - licenses = Seq( - License("MIT license", "http://www.opensource.org/licenses/mit-license.php") - ), - scm = SCM( - "git://github.com/lihaoyi/acyclic.git", - "scm:git://github.com/lihaoyi/acyclic.git" - ), - developers = Seq( - Developer("lihaoyi", "Li Haoyi","https://github.com/lihaoyi") - ) - ) - - def scalaVersion = crossVersion - def ivyDeps = Seq( - Dep.Java("org.scala-lang", "scala-compiler", scalaVersion()) - ) - object test extends this.Tests{ - def forkWorkingDir = pwd/'scalalib/'src/'test/'resource/'acyclic - def ivyDeps = Seq( - Dep("com.lihaoyi", "utest", "0.6.0") - ) - def testFramework = "utest.runner.Framework" - } - } -} -object AcyclicTests extends TestSuite{ - val workspacePath = pwd / 'target / 'workspace / 'acyclic - val srcPath = pwd / 'scalalib / 'src / 'test / 'resource / 'acyclic - val tests = Tests{ - rm(workspacePath) - mkdir(workspacePath/up) - cp(srcPath, workspacePath) - val mapping = Discovered.mapping(AcyclicBuild) - val eval = new TestEvaluator(mapping, workspacePath, srcPath) - - def check(scalaVersion: String) = { - // We can compile - val Right((pathRef, evalCount)) = eval(AcyclicBuild.acyclic(scalaVersion).compile) - val outputPath = pathRef.classes.path - val outputFiles = ls.rec(outputPath) - assert( - evalCount > 0, - outputFiles.contains(outputPath/'acyclic/'plugin/"GraphAnalysis.class"), - outputFiles.contains(outputPath/'acyclic/'plugin/"PluginPhase.class") - ) - - // Compilation is cached - val Right((_, evalCount2)) = eval(AcyclicBuild.acyclic(scalaVersion).compile) - assert(evalCount2 == 0) - } - - 'scala211 - check("2.11.8") - 'scala2123 - check("2.12.3") - - } -} diff --git a/scalalib/src/test/scala/mill/scalalib/BetterFilesTests.scala b/scalalib/src/test/scala/mill/scalalib/BetterFilesTests.scala deleted file mode 100644 index 1f0a3d70..00000000 --- a/scalalib/src/test/scala/mill/scalalib/BetterFilesTests.scala +++ /dev/null @@ -1,111 +0,0 @@ -package mill.scalalib - -import ammonite.ops.ImplicitWd._ -import ammonite.ops._ -import mill.discover.Discovered -import utest._ -import mill.util.JsonFormatters._ -import 
mill.util.TestEvaluator - -object BetterFilesBuild{ - trait BetterFilesModule extends SbtModule{ outer => - def scalaVersion = "2.12.4" - def scalacOptions = Seq( - "-deprecation", // Emit warning and location for usages of deprecated APIs. - "-encoding", "utf-8", // Specify character encoding used by source files. - "-explaintypes", // Explain type errors in more detail. - "-feature", // Emit warning and location for usages of features that should be imported explicitly. - "-language:existentials", // Existential types (besides wildcard types) can be written and inferred - "-language:experimental.macros", // Allow macro definition (besides implementation and application) - "-language:higherKinds", // Allow higher-kinded types - "-language:implicitConversions", // Allow definition of implicit functions called views - "-unchecked", // Enable additional warnings where generated code depends on assumptions. - "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access. - "-Xfatal-warnings", // Fail the compilation if there are any warnings. - "-Xfuture", // Turn on future language features. - "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver. - "-Xlint:by-name-right-associative", // By-name parameter of right associative operator. - "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error. - "-Xlint:delayedinit-select", // Selecting member of DelayedInit. - "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element. - "-Xlint:inaccessible", // Warn about inaccessible types in method signatures. - "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`. - "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id. - "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. - "-Xlint:nullary-unit", // Warn when nullary methods return Unit. - "-Xlint:option-implicit", // Option.apply used implicit view. - "-Xlint:package-object-classes", // Class or object defined in package object. - "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds. - "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field. - "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component. - "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope. - "-Xlint:unsound-match", // Pattern match may not be typesafe. - "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver. - "-Ypartial-unification", // Enable partial unification in type constructor inference - "-Ywarn-dead-code", // Warn when dead code is identified. - "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined. - "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. - "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`. - "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'. - "-Ywarn-nullary-unit", // Warn when nullary methods return Unit. - "-Ywarn-numeric-widen", // Warn when numerics are widened. - "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused. - "-Ywarn-unused:imports", // Warn if an import selector is not referenced. - "-Ywarn-unused:locals", // Warn if a local definition is unused. 
- "-Ywarn-unused:params", // Warn if a value parameter is unused. - "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused. - "-Ywarn-unused:privates", // Warn if a private member is unused. - "-Ywarn-value-discard" // Warn when non-Unit expression results are unused. - ) - override def javacOptions = Seq("-source", "1.8", "-target", "1.8", "-Xlint") - object test extends this.Tests{ - def projectDeps = - if (this == Core.test) Seq(Core) - else Seq(outer, Core.test) - def ivyDeps = Seq(Dep("org.scalatest", "scalatest", "3.0.4")) - def testFramework = "org.scalatest.tools.Framework" - } - } - object Core extends BetterFilesModule{ - def basePath = BetterFilesTests.srcPath/"core" - } - object Akka extends BetterFilesModule{ - def projectDeps = Seq(Core) - def basePath = BetterFilesTests.srcPath/"akka" - def ivyDeps = Seq(Dep("com.typesafe.akka", "akka-actor", "2.5.6")) - } - object ShapelessScanner extends BetterFilesModule{ - def projectDeps = Seq(Core) - def basePath = BetterFilesTests.srcPath/"shapeless" - def ivyDeps = Seq(Dep("com.chuusai", "shapeless", "2.3.2")) - } - object Benchmarks extends BetterFilesModule{ - def projectDeps = Seq(Core) - def basePath = BetterFilesTests.srcPath/"benchmarks" - def ivyDeps = Seq( - Dep.Java("commons-io", "commons-io", "2.5") - // "fastjavaio" % "fastjavaio" % "1.0" from "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar" - ) - } -} -object BetterFilesTests extends TestSuite{ - val workspacePath = pwd / 'target / 'workspace / "better-files" - val srcPath = pwd / 'scalalib / 'src / 'test / 'resource / "better-files" - val tests = Tests{ - rm(workspacePath) - mkdir(workspacePath/up) - cp(srcPath, workspacePath) - val mapping = Discovered.mapping(BetterFilesBuild) - val eval = new TestEvaluator(mapping, workspacePath, srcPath) - - 'test - { - - val Right(_) = eval(BetterFilesBuild.Core.test.test()) - val Right(_) = eval(BetterFilesBuild.Akka.test.compile) - val Right(_) = eval(BetterFilesBuild.ShapelessScanner.test.compile) - // Doesn't work yet, need to support curling down a jar and caching it - val Left(_) = eval(BetterFilesBuild.Benchmarks.test.test()) - } - - } -} diff --git a/scalalib/src/test/scala/mill/scalalib/HelloWorldTests.scala b/scalalib/src/test/scala/mill/scalalib/HelloWorldTests.scala index 0b6ac62b..ee24e70a 100644 --- a/scalalib/src/test/scala/mill/scalalib/HelloWorldTests.scala +++ b/scalalib/src/test/scala/mill/scalalib/HelloWorldTests.scala @@ -201,7 +201,7 @@ object HelloWorldTests extends TestSuite { assert(evalCount > 0) - val runResult = basePath / "hello-mill" + val runResult = basePath / 'out / 'runMain / 'dest / "hello-mill" assert( exists(runResult), read(runResult) == "hello rockjam, your age is: 25" @@ -216,7 +216,7 @@ object HelloWorldTests extends TestSuite { assert(evalCount > 0) - val runResult = basePath / "hello-mill" + val runResult = basePath / 'out / 'cross / v / 'runMain / 'dest / "hello-mill" assert( exists(runResult), read(runResult) == "hello rockjam, your age is: 25" @@ -252,7 +252,7 @@ object HelloWorldTests extends TestSuite { assert(evalCount > 0) - val runResult = basePath / "hello-mill" + val runResult = basePath / 'out / 'run / 'dest / "hello-mill" assert( exists(runResult), read(runResult) == "hello rockjam, your age is: 25" @@ -295,7 +295,7 @@ object HelloWorldTests extends TestSuite { evalCount > 0 ) - %("scala", result.path) + %("scala", result.path)(wd = basePath) val runResult = basePath / "hello-mill" assert( diff --git 
a/scalalib/src/test/scala/mill/scalalib/JawnTests.scala b/scalalib/src/test/scala/mill/scalalib/JawnTests.scala deleted file mode 100644 index 8449cc02..00000000 --- a/scalalib/src/test/scala/mill/scalalib/JawnTests.scala +++ /dev/null @@ -1,92 +0,0 @@ -package mill.scalalib - -import ammonite.ops.ImplicitWd._ -import ammonite.ops._ -import mill.{Module, scalalib} -import mill.define.{Cross, Task} -import mill.discover.Discovered -import mill.eval.Result -import utest._ -import mill.util.JsonFormatters._ -import mill.util.TestEvaluator - -object JawnBuild{ - val Jawn = Cross("2.10.6", "2.11.11", "2.12.3").map(new Jawn(_)) - class Jawn(crossVersion: String) extends mill.Module{ - trait JawnModule extends scalalib.SbtModule{ outer => - def scalaVersion = crossVersion - def scalacOptions = Seq( - "-deprecation", - "-optimize", - "-unchecked" - ) - def testProjectDeps: Seq[TestModule] = Nil - object test extends this.Tests{ - def projectDeps = super.projectDeps ++ testProjectDeps - def ivyDeps = Seq( - Dep("org.scalatest", "scalatest", "3.0.3"), - Dep("org.scalacheck", "scalacheck", "1.13.5") - ) - def testFramework = "org.scalatest.tools.Framework" - } - } - object Parser extends JawnModule{ - def basePath = JawnTests.srcPath/"parser" - } - object Util extends JawnModule{ - def projectDeps = Seq(Parser) - def testProjectDeps = Seq(Parser.test) - def basePath = JawnTests.srcPath/"util" - } - object Ast extends JawnModule{ - def projectDeps = Seq(Parser, Util) - def testProjectDeps = Seq(Parser.test, Util.test) - def basePath = JawnTests.srcPath/"ast" - } - class Support(name: String, ivyDeps0: Dep*) extends JawnModule{ - def projectDeps = Seq[Module](Parser) - def basePath = JawnTests.srcPath/"support"/"argonaut" - def ivyDeps = ivyDeps0 - } - object Argonaut extends Support("argonaut", Dep("io.argonaut", "argonaut", "6.2")) - object Json4s extends Support("json4s", Dep("org.json4s", "json4s-ast", "3.5.2")) - - object Play extends Support("play"){ - def ivyDeps = mill.T{ - scalaBinaryVersion() match{ - case "2.10" => Seq(Dep("com.typesafe.play", "play-json", "2.4.11")) - case "2.11" => Seq(Dep("com.typesafe.play", "play-json", "2.5.15")) - case _ => Seq(Dep("com.typesafe.play", "play-json", "2.6.0")) - } - } - } - - object Rojoma extends Support("rojoma", Dep("com.rojoma", "rojoma-json", "2.4.3")) - object RojomaV3 extends Support("rojoma-v3", Dep("com.rojoma", "rojoma-json-v3", "3.7.2")) - object Spray extends Support("spray", Dep("io.spray", "spray-json", "1.3.3")) - } - -} -object JawnTests extends TestSuite{ - val workspacePath = pwd / 'target / 'workspace / "jawn" - val srcPath = pwd / 'scalalib / 'src / 'test / 'resource / "jawn" - val tests = Tests{ - rm(workspacePath) - mkdir(workspacePath/up) - cp(srcPath, workspacePath) - val mapping = Discovered.mapping(JawnBuild) - val eval = new TestEvaluator(mapping, workspacePath, srcPath) - - 'test - { - def compileOutput = workspacePath / 'jawn / "2.12.3" / 'Parser / 'compile - def testCompileOutput = workspacePath / 'jawn / "2.12.3" / 'Parser / 'test / 'compile - assert(!exists(compileOutput), !exists(testCompileOutput)) - val Right(_) = eval(JawnBuild.Jawn("2.12.3").Parser.test.test()) - assert( - ls.rec(compileOutput).exists(_.last == "AsyncParser.class"), - ls.rec(testCompileOutput).exists(_.last == "CharBuilderSpec.class") - ) - } - - } -} -- cgit v1.2.3
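
Editor's note (not part of the patch): among the removed vendored sources above, jawn's `util` package documents a decimal `Long` parser that accumulates the *negative* magnitude so that `Long.MinValue` round-trips without overflow. The sketch below is a minimal, standalone illustration of that accumulation trick only; it is not the removed jawn code, and it deliberately omits jawn's extra validation (leading-zero rejection, 19-digit length/overflow checks). `ParseLongSketch` and its method names are hypothetical.

```scala
object ParseLongSketch {
  // Sketch of the negative-accumulation idea: keep the running value <= 0 so
  // that the most negative Long can be built without intermediate overflow.
  // NOTE: omits the leading-zero and 19-digit overflow checks a full
  // implementation would need for out-of-range positive inputs.
  def parseLong(cs: CharSequence): Long = {
    require(cs.length > 0, "empty input")
    var i = 0
    val negative = cs.charAt(0) == '-'
    if (negative) i = 1
    require(i < cs.length, "sign with no digits")
    var inverseSum = 0L // always <= 0 while accumulating
    while (i < cs.length) {
      val d = cs.charAt(i) - '0'
      require(d >= 0 && d <= 9, s"non-digit at index $i")
      inverseSum = inverseSum * 10L - d
      i += 1
    }
    if (negative) inverseSum else -inverseSum
  }

  def main(args: Array[String]): Unit = {
    println(parseLong("-9223372036854775808")) // Long.MinValue parses without overflow
    println(parseLong("42"))                   // 42
  }
}
```

The design point is that `-Long.MinValue` is not representable, so summing positive digits and negating at the end fails exactly on the most negative value; summing negatively and negating only for positive inputs sidesteps that edge case.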