-rw-r--r--  .gitignore  22
-rw-r--r--  .travis.yml  9
-rw-r--r--  LICENSE  202
-rw-r--r--  README.md  1
-rw-r--r--  build.sbt  29
-rw-r--r--  project/.sbtserver  3
-rw-r--r--  project/.sbtserver.lock  0
-rw-r--r--  project/build.properties  4
-rw-r--r--  project/plugins.sbt  4
-rw-r--r--  src/main/scala/xyz/driver/common/Config.scala  22
-rw-r--r--  src/main/scala/xyz/driver/common/TimeLogger.scala  15
-rw-r--r--  src/main/scala/xyz/driver/common/acl/ACL.scala  202
-rw-r--r--  src/main/scala/xyz/driver/common/auth/AnonymousRequestContext.scala  12
-rw-r--r--  src/main/scala/xyz/driver/common/auth/AuthenticatedRequestContext.scala  32
-rw-r--r--  src/main/scala/xyz/driver/common/auth/RequestId.scala  15
-rw-r--r--  src/main/scala/xyz/driver/common/compat/EitherOps.scala  12
-rw-r--r--  src/main/scala/xyz/driver/common/compat/Implicits.scala  7
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueue.scala  88
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapter.scala  136
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/Cron.scala  97
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/InMemoryBridgeUploadQueue.scala  38
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/MdcExecutionContext.scala  35
-rw-r--r--  src/main/scala/xyz/driver/common/concurrent/MdcThreadFactory.scala  33
-rw-r--r--  src/main/scala/xyz/driver/common/db/DbCommand.scala  15
-rw-r--r--  src/main/scala/xyz/driver/common/db/DbCommandFactory.scala  14
-rw-r--r--  src/main/scala/xyz/driver/common/db/EntityExtractorDerivation.scala  71
-rw-r--r--  src/main/scala/xyz/driver/common/db/EntityNotFoundException.scala  10
-rw-r--r--  src/main/scala/xyz/driver/common/db/MysqlQueryBuilder.scala  90
-rw-r--r--  src/main/scala/xyz/driver/common/db/Pagination.scala  20
-rw-r--r--  src/main/scala/xyz/driver/common/db/QueryBuilder.scala  344
-rw-r--r--  src/main/scala/xyz/driver/common/db/SearchFilterExpr.scala  210
-rw-r--r--  src/main/scala/xyz/driver/common/db/Sorting.scala  62
-rw-r--r--  src/main/scala/xyz/driver/common/db/SqlContext.scala  184
-rw-r--r--  src/main/scala/xyz/driver/common/db/Transactions.scala  23
-rw-r--r--  src/main/scala/xyz/driver/common/db/repositories/BridgeUploadQueueRepository.scala  24
-rw-r--r--  src/main/scala/xyz/driver/common/db/repositories/Repository.scala  4
-rw-r--r--  src/main/scala/xyz/driver/common/db/repositories/RepositoryLogging.scala  62
-rw-r--r--  src/main/scala/xyz/driver/common/domain/CaseId.scala  10
-rw-r--r--  src/main/scala/xyz/driver/common/domain/Category.scala  21
-rw-r--r--  src/main/scala/xyz/driver/common/domain/Email.scala  3
-rw-r--r--  src/main/scala/xyz/driver/common/domain/FuzzyValue.scala  17
-rw-r--r--  src/main/scala/xyz/driver/common/domain/Id.scala  51
-rw-r--r--  src/main/scala/xyz/driver/common/domain/Label.scala  15
-rw-r--r--  src/main/scala/xyz/driver/common/domain/PasswordHash.scala  42
-rw-r--r--  src/main/scala/xyz/driver/common/domain/RecordRequestId.scala  16
-rw-r--r--  src/main/scala/xyz/driver/common/domain/TextJson.scala  14
-rw-r--r--  src/main/scala/xyz/driver/common/domain/User.scala  74
-rw-r--r--  src/main/scala/xyz/driver/common/error/DomainError.scala  31
-rw-r--r--  src/main/scala/xyz/driver/common/error/ExceptionFormatter.scala  19
-rw-r--r--  src/main/scala/xyz/driver/common/error/FailedValidationException.scala  5
-rw-r--r--  src/main/scala/xyz/driver/common/error/IncorrectIdException.scala  3
-rw-r--r--  src/main/scala/xyz/driver/common/http/AsyncHttpClientFetcher.scala  90
-rw-r--r--  src/main/scala/xyz/driver/common/http/AsyncHttpClientUploader.scala  116
-rw-r--r--  src/main/scala/xyz/driver/common/http/package.scala  9
-rw-r--r--  src/main/scala/xyz/driver/common/logging/DefaultPhiLogger.scala  17
-rw-r--r--  src/main/scala/xyz/driver/common/logging/Implicits.scala  62
-rw-r--r--  src/main/scala/xyz/driver/common/logging/PhiLogger.scala  15
-rw-r--r--  src/main/scala/xyz/driver/common/logging/PhiLogging.scala  20
-rw-r--r--  src/main/scala/xyz/driver/common/logging/PhiString.scala  6
-rw-r--r--  src/main/scala/xyz/driver/common/logging/PhiStringContext.scala  8
-rw-r--r--  src/main/scala/xyz/driver/common/logging/Unsafe.scala  6
-rw-r--r--  src/main/scala/xyz/driver/common/logging/package.scala  3
-rw-r--r--  src/main/scala/xyz/driver/common/pdf/PdfRenderer.scala  13
-rw-r--r--  src/main/scala/xyz/driver/common/pdf/WkHtmlToPdfRenderer.scala  106
-rw-r--r--  src/main/scala/xyz/driver/common/resources/ResourcesStorage.scala  39
-rw-r--r--  src/main/scala/xyz/driver/common/utils/Computation.scala  110
-rw-r--r--  src/main/scala/xyz/driver/common/utils/FutureUtils.scala  19
-rw-r--r--  src/main/scala/xyz/driver/common/utils/Implicits.scala  22
-rw-r--r--  src/main/scala/xyz/driver/common/utils/JsonSerializer.scala  27
-rw-r--r--  src/main/scala/xyz/driver/common/utils/MapOps.scala  10
-rw-r--r--  src/main/scala/xyz/driver/common/utils/RandomUtils.scala  20
-rw-r--r--  src/main/scala/xyz/driver/common/utils/ServiceUtils.scala  32
-rw-r--r--  src/main/scala/xyz/driver/common/utils/Utils.scala  23
-rw-r--r--  src/main/scala/xyz/driver/common/validation/ValidationError.scala  3
-rw-r--r--  src/main/scala/xyz/driver/common/validation/Validators.scala  41
-rw-r--r--  src/test/scala/xyz/driver/common/BaseSuite.scala  51
-rw-r--r--  src/test/scala/xyz/driver/common/Mocks.scala  89
-rw-r--r--  src/test/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala  221
-rw-r--r--  src/test/scala/xyz/driver/common/db/QueryBuilderParametersSuite.scala  249
-rw-r--r--  src/test/scala/xyz/driver/common/db/SearchFilterExprSuite.scala  32
-rw-r--r--  src/test/scala/xyz/driver/common/error/UnexpectedFilterException.scala  3
-rw-r--r--  src/test/scala/xyz/driver/common/logging/PhiStringContextSuite.scala  32
-rw-r--r--  src/test/scala/xyz/driver/common/pdf/MockPdfRenderer.scala  25
-rw-r--r--  src/test/scala/xyz/driver/common/utils/DiffUtils.scala  52
84 files changed, 4043 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1ccf102
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,22 @@
+*.class
+*.log
+
+# sbt specific
+.cache
+.history
+.lib/
+dist/*
+target/
+lib_managed/
+src_managed/
+project/boot/
+project/plugins/project/
+.DS_Store
+# Scala-IDE specific
+.scala_dependencies
+.worksheet
+.idea/
+.idea
+scalafmt
+.scalafmt.conf
+scalastyle-config.xml
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..6155e77
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,9 @@
+language: scala
+
+jdk:
+ - oraclejdk8
+
+scala:
+ - 2.11.8
+
+script: "sbt clean test"
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..8f71f43
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..538ce94
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# PDS UI Common Library
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000..8b0efa4
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,29 @@
+import sbt._
+import Keys._
+
+lazy val akkaHttpV = "10.0.5"
+
+lazy val core = (project in file("."))
+ .driverLibrary("pds-ui-common")
+ .settings(scalastyleSettings ++ /* wartRemoverSettings ++ */ formatSettings)
+ .settings(libraryDependencies ++= Seq(
+ "com.typesafe.akka" %% "akka-http-core" % akkaHttpV,
+ "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV,
+ "com.typesafe.akka" %% "akka-http-testkit" % akkaHttpV,
+ "org.asynchttpclient" % "async-http-client" % "2.0.24",
+ "io.github.cloudify" %% "spdf" % "1.4.0",
+ "com.github.pureconfig" %% "pureconfig" % "0.7.2",
+ "de.svenkubiak" % "jBCrypt" % "0.4.1",
+ "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.8.4",
+ "org.scalatest" % "scalatest_2.11" % "2.2.6" % "test",
+ "org.scalacheck" %% "scalacheck" % "1.12.5" % "test",
+ "org.mockito" % "mockito-core" % "1.9.5" % "test",
+ "ai.x" %% "diff" % "1.2.0-get-simple-name-fix" % "test",
+ "com.github.swagger-akka-http" %% "swagger-akka-http" % "0.9.1",
+ "com.google.cloud" % "google-cloud-storage" % "0.9.4-beta",
+ "io.getquill" %% "quill-jdbc" % "1.2.1",
+ "com.typesafe.slick" %% "slick" % "3.1.1",
+ "com.typesafe" % "config" % "1.2.1",
+ "com.typesafe.scala-logging" %% "scala-logging" % "3.4.0",
+ "ch.qos.logback" % "logback-classic" % "1.1.3"
+ ))
diff --git a/project/.sbtserver b/project/.sbtserver
new file mode 100644
index 0000000..8dcde7c
--- /dev/null
+++ b/project/.sbtserver
@@ -0,0 +1,3 @@
+#Server Startup at 2016-07-06T23:09+0000
+#Wed Jul 06 16:09:57 PDT 2016
+server.uri=http\://0.0.0.0\:58105
diff --git a/project/.sbtserver.lock b/project/.sbtserver.lock
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/project/.sbtserver.lock
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000..4c003f6
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1,4 @@
+#Activator-generated Properties
+#Wed Jul 06 16:08:49 PDT 2016
+template.uuid=a675a7df-bee3-48df-9eaa-688d99e5814e
+sbt.version=0.13.8
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000..a5ce3ec
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,4 @@
+resolvers += "releases" at "https://drivergrp.jfrog.io/drivergrp/releases"
+credentials += Credentials("Artifactory Realm", "drivergrp.jfrog.io", "sbt-publisher", "ANC-d8X-Whm-USS")
+
+addSbtPlugin("xyz.driver" % "sbt-settings" % "0.7.34")
diff --git a/src/main/scala/xyz/driver/common/Config.scala b/src/main/scala/xyz/driver/common/Config.scala
new file mode 100644
index 0000000..d37a20a
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/Config.scala
@@ -0,0 +1,22 @@
+package xyz.driver.common
+
+import pureconfig._
+
+import scala.util.{Failure, Success, Try}
+
+object Config {
+
+ implicit def productHint[T]: ProductHint[T] = ProductHint(ConfigFieldMapping(CamelCase, CamelCase))
+
+ def loadConfig[Config](implicit reader: ConfigReader[Config]): Try[Config] = pureconfig.loadConfig match {
+ case Right(x) => Success(x)
+ case Left(e) => Failure(new RuntimeException(e.toString))
+ }
+
+ def loadConfig[Config](namespace: String)
+ (implicit reader: ConfigReader[Config]): Try[Config] = pureconfig.loadConfig(namespace) match {
+ case Right(x) => Success(x)
+ case Left(e) => Failure(new RuntimeException(e.toString))
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/TimeLogger.scala b/src/main/scala/xyz/driver/common/TimeLogger.scala
new file mode 100644
index 0000000..154847c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/TimeLogger.scala
@@ -0,0 +1,15 @@
+package xyz.driver.common
+
+import java.time.{LocalDateTime, ZoneId}
+
+import xyz.driver.common.domain.{LongId, User}
+import xyz.driver.common.logging._
+
+object TimeLogger extends PhiLogging {
+
+ def logTime(userId: LongId[User], label: String, obj: String): Unit = {
+ val now = LocalDateTime.now(ZoneId.of("Z"))
+ logger.info(phi"User id=$userId performed an action at ${Unsafe(label)}=$now with a ${Unsafe(obj)} ")
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/acl/ACL.scala b/src/main/scala/xyz/driver/common/acl/ACL.scala
new file mode 100644
index 0000000..35c2661
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/acl/ACL.scala
@@ -0,0 +1,202 @@
+package xyz.driver.common.acl
+
+import xyz.driver.common.logging._
+import xyz.driver.common.auth.AuthenticatedRequestContext
+
+/**
+ * @see https://driverinc.atlassian.net/wiki/display/RA/User+permissions#UserPermissions-AccessControlList
+ */
+object ACL extends PhiLogging {
+
+ import xyz.driver.common.domain.User.Role
+ import Role._
+
+ type AclCheck = Role => Boolean
+
+ val Forbid: AclCheck = _ => false
+
+ val Allow: AclCheck = _ => true
+
+ // Common
+
+ object User extends BaseACL(
+ label = "user",
+ create = Set(RecordAdmin, TrialAdmin, TreatmentMatchingAdmin),
+ read = Allow,
+ update = Allow,
+ delete = Set(RecordAdmin, TrialAdmin, TreatmentMatchingAdmin)
+ )
+
+ object Label extends BaseACL(
+ label = "label",
+ read = RepRoles ++ TcRoles ++ TreatmentMatchingRoles
+ )
+
+ // REP
+
+ object MedicalRecord extends BaseACL(
+ label = "medical record",
+ read = RepRoles + RoutesCurator + TreatmentMatchingAdmin,
+ update = RepRoles - DocumentExtractor
+ )
+
+ object Document extends BaseACL(
+ label = "document",
+ create = Set(RecordOrganizer, RecordAdmin),
+ read = RepRoles - RecordCleaner + RoutesCurator + TreatmentMatchingAdmin,
+ update = RepRoles - RecordCleaner,
+ delete = Set(RecordOrganizer, RecordAdmin)
+ )
+
+ object ExtractedData extends BaseACL(
+ label = "extracted data",
+ create = Set(DocumentExtractor, RecordAdmin),
+ read = Set(DocumentExtractor, RecordAdmin, RoutesCurator, TreatmentMatchingAdmin),
+ update = Set(DocumentExtractor, RecordAdmin),
+ delete = Set(DocumentExtractor, RecordAdmin)
+ )
+
+ object Keyword extends BaseACL(
+ label = "keyword",
+ read = Set(DocumentExtractor, RecordAdmin)
+ )
+
+ object ProviderType extends BaseACL(
+ label = "provider type",
+ read = RepRoles + RoutesCurator + TreatmentMatchingAdmin
+ )
+
+ object DocumentType extends BaseACL(
+ label = "document type",
+ read = RepRoles + RoutesCurator + TreatmentMatchingAdmin
+ )
+
+ object Message extends BaseACL(
+ label = "message",
+ create = RepRoles ++ TreatmentMatchingRoles ++ TcRoles,
+ read = RepRoles ++ TreatmentMatchingRoles ++ TcRoles,
+ update = RepRoles ++ TreatmentMatchingRoles ++ TcRoles,
+ delete = RepRoles ++ TreatmentMatchingRoles ++ TcRoles
+ )
+
+ // TC
+
+ object Trial extends BaseACL(
+ label = "trial",
+ read = TcRoles + RoutesCurator + TreatmentMatchingAdmin,
+ update = TcRoles
+ )
+
+ object StudyDesign extends BaseACL(
+ label = "study design",
+ read = Set(TrialSummarizer, TrialAdmin)
+ )
+
+ object Hypothesis extends BaseACL(
+ label = "hypothesis",
+ read = Set(TrialSummarizer, TrialAdmin) ++ TreatmentMatchingRoles
+ )
+
+ object Criterion extends BaseACL(
+ label = "criterion",
+ create = Set(CriteriaCurator, TrialAdmin),
+ read = Set(CriteriaCurator, TrialAdmin, RoutesCurator, TreatmentMatchingAdmin),
+ update = Set(CriteriaCurator, TrialAdmin),
+ delete = Set(CriteriaCurator, TrialAdmin)
+ )
+
+ object Arm extends BaseACL(
+ label = "arm",
+ create = Set(TrialSummarizer, TrialAdmin),
+ read = TcRoles,
+ update = Set(TrialSummarizer, TrialAdmin),
+ delete = Set(TrialSummarizer, TrialAdmin)
+ )
+
+ object Category extends BaseACL(
+ label = "category",
+ read = Set(DocumentExtractor, RecordAdmin, CriteriaCurator, TrialAdmin)
+ )
+
+ object Intervention extends BaseACL(
+ label = "intervention",
+ read = Set(TrialSummarizer, TrialAdmin),
+ update = Set(TrialSummarizer, TrialAdmin)
+ )
+
+ object InterventionType extends BaseACL(
+ label = "intervention type",
+ read = Set(TrialSummarizer, TrialAdmin)
+ )
+
+ // EV
+
+ object Patient extends BaseACL(
+ label = "patient",
+ read = TreatmentMatchingRoles,
+ update = TreatmentMatchingRoles
+ )
+
+ object PatientLabel extends BaseACL(
+ label = "patient label",
+ read = TreatmentMatchingRoles,
+ update = TreatmentMatchingRoles
+ )
+
+ object PatientCriterion extends BaseACL(
+ label = "patient criterion",
+ read = TreatmentMatchingRoles,
+ update = TreatmentMatchingRoles
+ )
+
+ object PatientLabelEvidence extends BaseACL(
+ label = "patient label evidence",
+ read = TreatmentMatchingRoles
+ )
+
+ object EligibleTrial extends BaseACL(
+ label = "eligible trial",
+ read = Set(RoutesCurator, TreatmentMatchingAdmin),
+ update = Set(RoutesCurator, TreatmentMatchingAdmin)
+ )
+
+ object PatientHypothesis extends BaseACL(
+ label = "patient hypothesis",
+ read = Set(RoutesCurator, TreatmentMatchingAdmin),
+ update = Set(RoutesCurator, TreatmentMatchingAdmin)
+ )
+
+ // Utility code
+
+ abstract class BaseACL(label: String,
+ create: AclCheck = Forbid,
+ read: AclCheck = Forbid,
+ update: AclCheck = Forbid,
+ delete: AclCheck = Forbid) {
+
+ def isCreateAllow()(implicit requestContext: AuthenticatedRequestContext): Boolean = {
+ check("create", create)(requestContext.executor.role)
+ }
+
+ def isReadAllow()(implicit requestContext: AuthenticatedRequestContext): Boolean = {
+ check("read", read)(requestContext.executor.role)
+ }
+
+ def isUpdateAllow()(implicit requestContext: AuthenticatedRequestContext): Boolean = {
+ check("update", update)(requestContext.executor.role)
+ }
+
+ def isDeleteAllow()(implicit requestContext: AuthenticatedRequestContext): Boolean = {
+ check("delete", delete)(requestContext.executor.role)
+ }
+
+ private def check(action: String, isAllowed: AclCheck)(executorRole: Role): Boolean = {
+ loggedError(
+ isAllowed(executorRole),
+ phi"$executorRole has no access to ${Unsafe(action)} a ${Unsafe(label)}"
+ )
+ }
+
+ }
+
+}
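
Note: the AclCheck pattern above works because a Set of roles is itself a function from Role to Boolean. A minimal, self-contained sketch of that pattern (hypothetical roles, not part of this commit):

    object AclSketch extends App {
      sealed trait Role
      case object RecordAdmin extends Role
      case object TrialAdmin  extends Role

      // An access rule is a predicate over the caller's role.
      type AclCheck = Role => Boolean
      val Forbid: AclCheck = _ => false
      val Allow: AclCheck  = _ => true

      // A Set[Role] extends Role => Boolean, so role sets can be passed
      // directly as create/read/update/delete rules, as in BaseACL above.
      val delete: AclCheck = Set[Role](RecordAdmin, TrialAdmin)

      println(delete(RecordAdmin)) // true
      println(Forbid(TrialAdmin))  // false
    }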
diff --git a/src/main/scala/xyz/driver/common/auth/AnonymousRequestContext.scala b/src/main/scala/xyz/driver/common/auth/AnonymousRequestContext.scala
new file mode 100644
index 0000000..2e4b55c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/auth/AnonymousRequestContext.scala
@@ -0,0 +1,12 @@
+package xyz.driver.common.auth
+
+class AnonymousRequestContext(val requestId: RequestId) {
+
+ override def equals(that: Any): Boolean = {
+ that.getClass == classOf[AnonymousRequestContext] &&
+ that.asInstanceOf[AnonymousRequestContext].requestId == requestId
+ }
+
+ override def hashCode(): Int = requestId.hashCode()
+
+}
diff --git a/src/main/scala/xyz/driver/common/auth/AuthenticatedRequestContext.scala b/src/main/scala/xyz/driver/common/auth/AuthenticatedRequestContext.scala
new file mode 100644
index 0000000..b211e12
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/auth/AuthenticatedRequestContext.scala
@@ -0,0 +1,32 @@
+package xyz.driver.common.auth
+
+import xyz.driver.common.logging._
+import xyz.driver.common.domain.User
+
+class AuthenticatedRequestContext(val executor: User,
+ override val requestId: RequestId) extends AnonymousRequestContext(requestId) {
+
+ override def equals(that: Any): Boolean = {
+ that.getClass == this.getClass && {
+ val another = that.asInstanceOf[AuthenticatedRequestContext]
+ another.executor == executor && another.requestId == requestId
+ }
+ }
+
+ override def hashCode(): Int = {
+ val initial = 37
+ val first = initial * 17 + executor.hashCode()
+ first * 17 + requestId.hashCode()
+ }
+
+}
+
+object AuthenticatedRequestContext {
+
+ def apply(executor: User) = new AuthenticatedRequestContext(executor, RequestId())
+
+ implicit def toPhiString(x: AuthenticatedRequestContext): PhiString = {
+ phi"AuthenticatedRequestContext(executor=${x.executor}, requestId=${x.requestId})"
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/auth/RequestId.scala b/src/main/scala/xyz/driver/common/auth/RequestId.scala
new file mode 100644
index 0000000..771145c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/auth/RequestId.scala
@@ -0,0 +1,15 @@
+package xyz.driver.common.auth
+
+import xyz.driver.common.logging._
+import xyz.driver.common.auth.RequestId._
+import xyz.driver.common.utils.RandomUtils
+
+final case class RequestId(value: String = RandomUtils.randomString(IdLength))
+
+object RequestId {
+
+ private val IdLength = 20
+
+ implicit def toPhiString(x: RequestId): PhiString = phi"RequestId(${Unsafe(x.value)})"
+
+}
diff --git a/src/main/scala/xyz/driver/common/compat/EitherOps.scala b/src/main/scala/xyz/driver/common/compat/EitherOps.scala
new file mode 100644
index 0000000..b3b45e6
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/compat/EitherOps.scala
@@ -0,0 +1,12 @@
+package xyz.driver.common.compat
+
+final class EitherOps[A, B](val self: Either[A, B]) extends AnyVal {
+
+ def map[B2](f: B => B2): Either[A, B2] = flatMap { x => Right(f(x)) }
+
+ def flatMap[B2](f: B => Either[A, B2]): Either[A, B2] = self match {
+ case Left(x) => Left(x)
+ case Right(x) => f(x)
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/compat/Implicits.scala b/src/main/scala/xyz/driver/common/compat/Implicits.scala
new file mode 100644
index 0000000..860989b
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/compat/Implicits.scala
@@ -0,0 +1,7 @@
+package xyz.driver.common.compat
+
+object Implicits {
+
+ implicit def toEitherOps[A, B](self: Either[A, B]): EitherOps[A, B] = new EitherOps(self)
+
+}
diff --git a/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueue.scala b/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueue.scala
new file mode 100644
index 0000000..6ecb299
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueue.scala
@@ -0,0 +1,88 @@
+package xyz.driver.common.concurrent
+
+import java.time.LocalDateTime
+
+import xyz.driver.common.concurrent.BridgeUploadQueue.Item
+import xyz.driver.common.domain.LongId
+import xyz.driver.common.logging._
+
+import scala.concurrent.Future
+
+object BridgeUploadQueue {
+
+ /**
+   * @param kind The kind of the uploaded item, for example "documents"
+   * @param tag For example, a patient's id: 1
+   * @param attempts The number of attempts made so far
+ * @param created When the task was created
+ * @param nextAttempt Time of the next attempt
+ */
+ final case class Item(id: LongId[Item],
+ kind: String,
+ tag: String,
+ created: LocalDateTime,
+ attempts: Int,
+ nextAttempt: LocalDateTime,
+ completed: Boolean,
+ dependencyKind: Option[String],
+ dependencyTag: Option[String]) {
+
+ def dependency: Option[Dependency] = {
+ dependencyKind.zip(dependencyTag)
+ .headOption
+ .map(Function.tupled(Dependency.apply))
+ }
+
+ }
+
+ object Item {
+
+ implicit def toPhiString(x: Item): PhiString = {
+ import x._
+ phi"BridgeUploadQueue.Item(id=$id, kind=${Unsafe(kind)}, tag=${Unsafe(tag)}, " +
+ phi"attempts=${Unsafe(attempts)}, start=$created, nextAttempt=$nextAttempt, completed=$completed, " +
+ phi"dependency=$dependency)"
+ }
+
+ def apply(kind: String, tag: String, dependency: Option[Dependency] = None): Item = {
+ val now = LocalDateTime.now()
+
+ Item(
+ id = LongId(0),
+ kind = kind,
+ tag = tag,
+ created = now,
+ attempts = 0,
+ nextAttempt = now,
+ completed = false,
+ dependencyKind = dependency.map(_.kind),
+ dependencyTag = dependency.map(_.tag)
+ )
+ }
+
+ }
+
+ final case class Dependency(kind: String, tag: String)
+
+ object Dependency {
+
+ implicit def toPhiString(x: Dependency): PhiString = {
+ import x._
+ phi"Dependency(kind=${Unsafe(kind)}, tag=${Unsafe(tag)})"
+ }
+
+ }
+
+}
+
+trait BridgeUploadQueue {
+
+ def add(item: Item): Future[Unit]
+
+ def get(kind: String): Future[Option[Item]]
+
+ def remove(item: LongId[Item]): Future[Unit]
+
+ def tryRetry(item: Item): Future[Option[Item]]
+
+}
diff --git a/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapter.scala b/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapter.scala
new file mode 100644
index 0000000..c6a2144
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapter.scala
@@ -0,0 +1,136 @@
+package xyz.driver.common.concurrent
+
+import java.time.LocalDateTime
+import java.time.temporal.ChronoUnit
+
+import xyz.driver.common.concurrent.BridgeUploadQueue.Item
+import xyz.driver.common.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy
+import xyz.driver.common.db.Transactions
+import xyz.driver.common.db.repositories.BridgeUploadQueueRepository
+import xyz.driver.common.domain.LongId
+import xyz.driver.common.logging._
+
+import scala.concurrent.duration.{Duration, FiniteDuration}
+import scala.concurrent.{ExecutionContext, Future}
+
+object BridgeUploadQueueRepositoryAdapter {
+
+ sealed trait Strategy {
+
+ def onComplete: Strategy.OnComplete
+
+ def on(attempt: Int): Strategy.OnAttempt
+
+ }
+
+ object Strategy {
+
+ /**
+     * Retries forever, but caps the interval between attempts.
+ */
+ final case class LimitExponential(startInterval: FiniteDuration,
+ intervalFactor: Double,
+ maxInterval: FiniteDuration,
+ onComplete: OnComplete) extends Strategy {
+
+ override def on(attempt: Int): OnAttempt = {
+ OnAttempt.Continue(intervalFor(attempt).min(maxInterval))
+ }
+
+ private def intervalFor(attempt: Int): Duration = {
+ startInterval * Math.pow(intervalFactor, attempt.toDouble)
+ }
+ }
+
+ /**
+ * Used only in tests.
+ */
+ case object Ignore extends Strategy {
+
+ override val onComplete = OnComplete.Delete
+
+ override def on(attempt: Int) = OnAttempt.Complete
+
+ }
+
+ /**
+ * Used only in tests.
+ */
+ final case class Constant(interval: FiniteDuration) extends Strategy {
+
+ override val onComplete = OnComplete.Delete
+
+ override def on(attempt: Int) = OnAttempt.Continue(interval)
+
+ }
+
+ sealed trait OnComplete
+ object OnComplete {
+ case object Delete extends OnComplete
+ case object Mark extends OnComplete
+
+    implicit def toPhiString(x: OnComplete): PhiString = Unsafe(x.toString)
+ }
+
+ sealed trait OnAttempt
+ object OnAttempt {
+ case object Complete extends OnAttempt
+ case class Continue(interval: Duration) extends OnAttempt
+
+ implicit def toPhiString(x: OnAttempt): PhiString = Unsafe(x.toString)
+ }
+ }
+}
+
+class BridgeUploadQueueRepositoryAdapter(strategy: Strategy,
+ repository: BridgeUploadQueueRepository,
+ transactions: Transactions)
+ (implicit executionContext: ExecutionContext)
+ extends BridgeUploadQueue with PhiLogging {
+
+ override def add(item: Item): Future[Unit] = transactions.run { _ =>
+ repository.add(item)
+ }
+
+ override def get(kind: String): Future[Option[Item]] = {
+ repository.getOne(kind)
+ }
+
+ override def remove(item: LongId[Item]): Future[Unit] = transactions.run { _ =>
+ import Strategy.OnComplete._
+
+ strategy.onComplete match {
+ case Delete => repository.delete(item)
+ case Mark =>
+ repository.getById(item) match {
+ case Some(x) => repository.update(x.copy(completed = true))
+ case None => throw new RuntimeException(s"Can not find the $item task")
+ }
+ }
+ }
+
+ override def tryRetry(item: Item): Future[Option[Item]] = transactions.run { _ =>
+ import Strategy.OnAttempt._
+
+ logger.trace(phi"tryRetry($item)")
+
+ val newAttempts = item.attempts + 1
+ val action = strategy.on(newAttempts)
+ logger.debug(phi"Action for ${Unsafe(newAttempts)}: $action")
+
+ action match {
+ case Continue(newInterval) =>
+ val draftItem = item.copy(
+ attempts = newAttempts,
+ nextAttempt = LocalDateTime.now().plus(newInterval.toMillis, ChronoUnit.MILLIS)
+ )
+
+ logger.debug(draftItem)
+ Some(repository.update(draftItem))
+
+ case Complete =>
+ repository.delete(item.id)
+ None
+ }
+ }
+}
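
The LimitExponential strategy above grows the retry interval geometrically and caps it. A small standalone sketch of that arithmetic, with hypothetical parameters (5-second start, factor 2, 1-hour cap; the commit does not prescribe defaults):

    import scala.concurrent.duration._

    object BackoffSketch extends App {
      val startInterval: FiniteDuration = 5.seconds
      val intervalFactor                = 2.0
      val maxInterval: FiniteDuration   = 1.hour

      // Mirrors LimitExponential: startInterval * factor^attempt, capped at maxInterval.
      def intervalFor(attempt: Int): Duration =
        (startInterval * math.pow(intervalFactor, attempt.toDouble)).min(maxInterval)

      (1 to 12).foreach(n => println(s"attempt $n -> ${intervalFor(n)}"))
    }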
diff --git a/src/main/scala/xyz/driver/common/concurrent/Cron.scala b/src/main/scala/xyz/driver/common/concurrent/Cron.scala
new file mode 100644
index 0000000..9dd3155
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/Cron.scala
@@ -0,0 +1,97 @@
+package xyz.driver.common.concurrent
+
+import java.io.Closeable
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicBoolean
+import java.util.{Timer, TimerTask}
+
+import com.typesafe.scalalogging.StrictLogging
+import org.slf4j.MDC
+import xyz.driver.common.error.ExceptionFormatter
+import xyz.driver.common.utils.RandomUtils
+
+import scala.concurrent.duration.FiniteDuration
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success, Try}
+
+class Cron(settings: Cron.Settings) extends Closeable with StrictLogging {
+
+ import Cron._
+
+ private val timer = new Timer("cronTimer", true)
+
+ private val jobs = ConcurrentHashMap.newKeySet[String]()
+
+ def register(name: String)(job: () => Future[Unit])(implicit ec: ExecutionContext): Unit = {
+ logger.trace("register({})", name)
+ val disableList = settings.disable.split(",").map(_.trim).toList
+ if (disableList.contains(name)) logger.info("The task '{}' is disabled", name)
+ else {
+ settings.intervals.get(name) match {
+ case None =>
+ logger.error("Can not find an interval for task '{}', check the settings", name)
+ throw new IllegalArgumentException(s"Can not find an interval for task '$name', check the settings")
+
+ case Some(period) =>
+ logger.info("register a new task '{}' with a period of {}ms", name, period.toMillis.asInstanceOf[AnyRef])
+ timer.schedule(new SingletonTask(name, job), 0, period.toMillis)
+ }
+ }
+
+ jobs.add(name)
+ }
+
+ /**
+   * Warns about jobs that are configured but never registered
+ */
+ def verify(): Unit = {
+ import scala.collection.JavaConversions.asScalaSet
+
+ val unusedJobs = settings.intervals.keySet -- jobs.toSet
+ unusedJobs.foreach { job =>
+ logger.warn(s"The job '$job' is listed, but not registered or ignored")
+ }
+ }
+
+ override def close(): Unit = {
+ timer.cancel()
+ }
+
+}
+
+object Cron {
+
+ case class Settings(disable: String, intervals: Map[String, FiniteDuration])
+
+ private class SingletonTask(taskName: String,
+ job: () => Future[Unit])
+ (implicit ec: ExecutionContext)
+ extends TimerTask with StrictLogging {
+
+ private val isWorking = new AtomicBoolean(false)
+
+ override def run(): Unit = {
+ if (isWorking.compareAndSet(false, true)) {
+ MDC.put("userId", "cron")
+ MDC.put("requestId", RandomUtils.randomString(15))
+
+ logger.info("Start '{}'", taskName)
+ Try {
+ job()
+ .andThen {
+ case Success(_) => logger.info("'{}' is completed", taskName)
+ case Failure(e) => logger.error(s"Job '{}' is failed: ${ExceptionFormatter.format(e)}", taskName)
+ }
+ .onComplete(_ => isWorking.set(false))
+ } match {
+ case Success(_) =>
+ case Failure(e) =>
+ logger.error("Can't start '{}'", taskName, e)
+ }
+ } else {
+ logger.debug("The previous job '{}' is in progress", taskName)
+ }
+ }
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/concurrent/InMemoryBridgeUploadQueue.scala b/src/main/scala/xyz/driver/common/concurrent/InMemoryBridgeUploadQueue.scala
new file mode 100644
index 0000000..b19be42
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/InMemoryBridgeUploadQueue.scala
@@ -0,0 +1,38 @@
+package xyz.driver.common.concurrent
+
+import java.util.concurrent.LinkedBlockingQueue
+
+import xyz.driver.common.concurrent.BridgeUploadQueue.Item
+import xyz.driver.common.domain.LongId
+import xyz.driver.common.logging.PhiLogging
+
+import scala.collection.JavaConverters._
+import scala.concurrent.Future
+
+/**
+ * Use it only for tests
+ */
+class InMemoryBridgeUploadQueue extends BridgeUploadQueue with PhiLogging {
+
+ private val queue = new LinkedBlockingQueue[Item]()
+
+ override def add(item: Item): Future[Unit] = {
+ queue.add(item)
+ done
+ }
+
+ override def tryRetry(item: Item): Future[Option[Item]] = Future.successful(Some(item))
+
+ override def get(kind: String): Future[Option[Item]] = {
+ val r = queue.iterator().asScala.find(_.kind == kind)
+ Future.successful(r)
+ }
+
+ override def remove(item: LongId[Item]): Future[Unit] = {
+    queue.iterator().asScala.find(_.id == item).foreach(queue.remove)
+ done
+ }
+
+ private val done = Future.successful(())
+
+}
diff --git a/src/main/scala/xyz/driver/common/concurrent/MdcExecutionContext.scala b/src/main/scala/xyz/driver/common/concurrent/MdcExecutionContext.scala
new file mode 100644
index 0000000..cd2b394
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/MdcExecutionContext.scala
@@ -0,0 +1,35 @@
+package xyz.driver.common.concurrent
+
+import org.slf4j.MDC
+
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
+
+object MdcExecutionContext {
+ def from(orig: ExecutionContext): ExecutionContext = new MdcExecutionContext(orig)
+}
+
+class MdcExecutionContext(orig: ExecutionContext) extends ExecutionContextExecutor {
+
+ def execute(runnable: Runnable): Unit = {
+ val parentMdcContext = MDC.getCopyOfContextMap
+
+ orig.execute(new Runnable {
+ def run(): Unit = {
+ val saveMdcContext = MDC.getCopyOfContextMap
+ setContextMap(parentMdcContext)
+
+ try {
+ runnable.run()
+ } finally {
+ setContextMap(saveMdcContext)
+ }
+ }
+ })
+ }
+
+ private[this] def setContextMap(context: java.util.Map[String, String]): Unit =
+ Option(context).fold(MDC.clear())(MDC.setContextMap)
+
+ def reportFailure(t: Throwable): Unit = orig.reportFailure(t)
+
+}
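
A hypothetical usage sketch of the wrapper above (not part of this commit): MDC values set before a Future is scheduled stay visible inside it, because the submitting thread's context map is copied onto the executing thread.

    import org.slf4j.MDC
    import xyz.driver.common.concurrent.MdcExecutionContext

    import scala.concurrent.{ExecutionContext, Future}

    object MdcUsageSketch extends App {
      implicit val ec: ExecutionContext = MdcExecutionContext.from(ExecutionContext.global)

      MDC.put("requestId", "req-42")
      Future {
        // Runs on a pooled thread, but sees requestId=req-42 via the copied MDC.
        println(MDC.get("requestId"))
      }
      Thread.sleep(500) // crude wait so the demo prints before the JVM exits
    }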
diff --git a/src/main/scala/xyz/driver/common/concurrent/MdcThreadFactory.scala b/src/main/scala/xyz/driver/common/concurrent/MdcThreadFactory.scala
new file mode 100644
index 0000000..9e59a64
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/concurrent/MdcThreadFactory.scala
@@ -0,0 +1,33 @@
+package xyz.driver.common.concurrent
+
+import java.util.concurrent.ThreadFactory
+
+import org.slf4j.MDC
+
+object MdcThreadFactory {
+ def from(orig: ThreadFactory): ThreadFactory = new MdcThreadFactory(orig)
+}
+
+class MdcThreadFactory(orig: ThreadFactory) extends ThreadFactory {
+
+ override def newThread(runnable: Runnable): Thread = {
+ val parentMdcContext = MDC.getCopyOfContextMap
+
+ orig.newThread(new Runnable {
+ def run(): Unit = {
+ val saveMdcContext = MDC.getCopyOfContextMap
+ setContextMap(parentMdcContext)
+
+ try {
+ runnable.run()
+ } finally {
+ setContextMap(saveMdcContext)
+ }
+ }
+ })
+ }
+
+ private[this] def setContextMap(context: java.util.Map[String, String]): Unit =
+ Option(context).fold(MDC.clear())(MDC.setContextMap)
+
+}
diff --git a/src/main/scala/xyz/driver/common/db/DbCommand.scala b/src/main/scala/xyz/driver/common/db/DbCommand.scala
new file mode 100644
index 0000000..fec8b9f
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/DbCommand.scala
@@ -0,0 +1,15 @@
+package xyz.driver.common.db
+
+import scala.concurrent.Future
+
+trait DbCommand {
+ def runSync(): Unit
+ def runAsync(transactions: Transactions): Future[Unit]
+}
+
+object DbCommand {
+ val Empty: DbCommand = new DbCommand {
+ override def runSync(): Unit = {}
+ override def runAsync(transactions: Transactions): Future[Unit] = Future.successful(())
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/db/DbCommandFactory.scala b/src/main/scala/xyz/driver/common/db/DbCommandFactory.scala
new file mode 100644
index 0000000..84c1383
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/DbCommandFactory.scala
@@ -0,0 +1,14 @@
+package xyz.driver.common.db
+
+import scala.concurrent.{ExecutionContext, Future}
+
+trait DbCommandFactory[T] {
+ def createCommand(orig: T)(implicit ec: ExecutionContext): Future[DbCommand]
+}
+
+object DbCommandFactory {
+ def empty[T]: DbCommandFactory[T] = new DbCommandFactory[T] {
+ override def createCommand(orig: T)(implicit ec: ExecutionContext): Future[DbCommand] = Future.successful(DbCommand.Empty)
+ }
+}
+
diff --git a/src/main/scala/xyz/driver/common/db/EntityExtractorDerivation.scala b/src/main/scala/xyz/driver/common/db/EntityExtractorDerivation.scala
new file mode 100644
index 0000000..0396ea5
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/EntityExtractorDerivation.scala
@@ -0,0 +1,71 @@
+package xyz.driver.common.db
+
+import java.sql.ResultSet
+
+import io.getquill.NamingStrategy
+import io.getquill.dsl.EncodingDsl
+
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox
+
+trait EntityExtractorDerivation[Naming <: NamingStrategy] {
+ this: EncodingDsl =>
+
+ /**
+   * Simple Quill extractor derivation for [[T]].
+   * Only case classes are supported; type parameters are not.
+ *
+ * @tparam T
+ * @return
+ */
+ def entityExtractor[T]: (ResultSet => T) = macro EntityExtractorDerivation.impl[T]
+}
+
+object EntityExtractorDerivation {
+ def impl[T: c.WeakTypeTag](c: blackbox.Context): c.Tree = {
+ import c.universe._
+ val namingStrategy = c.prefix.actualType
+ .baseType(c.weakTypeOf[EntityExtractorDerivation[NamingStrategy]].typeSymbol)
+ .typeArgs
+ .head
+ .typeSymbol
+ .companion
+ val functionBody = {
+ val tpe = weakTypeOf[T]
+ val resultOpt = tpe.decls.collectFirst {
+ // Find first constructor of T
+ case cons: MethodSymbol if cons.isConstructor =>
+ // Create param list for constructor
+ val params = cons.paramLists.flatten.map { param =>
+ val t = param.typeSignature
+ val paramName = param.name.toString
+ val col = q"$namingStrategy.column($paramName)"
+ // Resolve implicit decoders (from SqlContext) and apply ResultSet for each
+ val d = q"implicitly[${c.prefix}.Decoder[$t]]"
+ // Minus 1 cause Quill JDBC decoders make plus one.
+ // ¯\_(ツ)_/¯
+ val i = q"row.findColumn($col) - 1"
+ val decoderName = TermName(paramName + "Decoder")
+ val valueName = TermName(paramName + "Value")
+ (
+ q"val $decoderName = $d",
+ q"val $valueName = $decoderName($i, row)",
+ valueName
+ )
+ }
+ // Call constructor with param list
+ q"""
+ ..${params.map(_._1)}
+ ..${params.map(_._2)}
+ new $tpe(..${params.map(_._3)})
+ """
+ }
+ resultOpt match {
+ case Some(result) => result
+ case None => c.abort(c.enclosingPosition,
+ s"Can not derive extractor for $tpe. Constructor not found.")
+ }
+ }
+ q"(row: java.sql.ResultSet) => $functionBody"
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/db/EntityNotFoundException.scala b/src/main/scala/xyz/driver/common/db/EntityNotFoundException.scala
new file mode 100644
index 0000000..d4c11ac
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/EntityNotFoundException.scala
@@ -0,0 +1,10 @@
+package xyz.driver.common.db
+
+import xyz.driver.common.domain.Id
+
+class EntityNotFoundException private(id: String, tableName: String)
+ extends RuntimeException(s"Entity with id $id is not found in $tableName table") {
+
+ def this(id: Id[_], tableName: String) = this(id.toString, tableName)
+ def this(id: Long, tableName: String) = this(id.toString, tableName)
+}
diff --git a/src/main/scala/xyz/driver/common/db/MysqlQueryBuilder.scala b/src/main/scala/xyz/driver/common/db/MysqlQueryBuilder.scala
new file mode 100644
index 0000000..d6b53d9
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/MysqlQueryBuilder.scala
@@ -0,0 +1,90 @@
+package xyz.driver.common.db
+
+import java.sql.ResultSet
+
+import io.getquill.{MySQLDialect, MysqlEscape}
+
+import scala.collection.breakOut
+import scala.concurrent.{ExecutionContext, Future}
+
+object MysqlQueryBuilder {
+ import xyz.driver.common.db.QueryBuilder._
+
+ def apply[T](tableName: String,
+ lastUpdateFieldName: Option[String],
+ nullableFields: Set[String],
+ links: Set[TableLink],
+ runner: Runner[T],
+ countRunner: CountRunner)
+ (implicit ec: ExecutionContext): MysqlQueryBuilder[T] = {
+ val parameters = MysqlQueryBuilderParameters(
+ tableData = TableData(tableName, lastUpdateFieldName, nullableFields),
+ links = links.map(x => x.foreignTableName -> x)(breakOut)
+ )
+ new MysqlQueryBuilder[T](parameters)(runner, countRunner, ec)
+ }
+
+ def apply[T](tableName: String,
+ lastUpdateFieldName: Option[String],
+ nullableFields: Set[String],
+ links: Set[TableLink],
+ extractor: (ResultSet) => T)
+ (implicit sqlContext: SqlContext): MysqlQueryBuilder[T] = {
+
+ val runner = (parameters: QueryBuilderParameters) => {
+ Future {
+ val (sql, binder) = parameters.toSql(namingStrategy = MysqlEscape)
+ sqlContext.executeQuery[T](sql, binder, { resultSet =>
+ extractor(resultSet)
+ })
+ }(sqlContext.executionContext)
+ }
+
+ val countRunner = (parameters: QueryBuilderParameters) => {
+ Future {
+ val (sql, binder) = parameters.toSql(countQuery = true, namingStrategy = MysqlEscape)
+ sqlContext.executeQuery[CountResult](sql, binder, { resultSet =>
+ val count = resultSet.getInt(1)
+ val lastUpdate = if (parameters.tableData.lastUpdateFieldName.isDefined) {
+ Option(sqlContext.localDateTimeDecoder.decoder(2, resultSet))
+ } else None
+
+ (count, lastUpdate)
+ }).head
+ }(sqlContext.executionContext)
+ }
+
+ apply[T](
+ tableName = tableName,
+ lastUpdateFieldName = lastUpdateFieldName,
+ nullableFields = nullableFields,
+ links = links,
+ runner = runner,
+ countRunner = countRunner
+ )(sqlContext.executionContext)
+ }
+}
+
+class MysqlQueryBuilder[T](parameters: MysqlQueryBuilderParameters)
+ (implicit runner: QueryBuilder.Runner[T],
+ countRunner: QueryBuilder.CountRunner,
+ ec: ExecutionContext)
+ extends QueryBuilder[T, MySQLDialect, MysqlEscape](parameters) {
+
+ def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
+ new MysqlQueryBuilder[T](parameters.copy(filter = newFilter))
+ }
+
+ def withSorting(newSorting: Sorting): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
+ new MysqlQueryBuilder[T](parameters.copy(sorting = newSorting))
+ }
+
+ def withPagination(newPagination: Pagination): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
+ new MysqlQueryBuilder[T](parameters.copy(pagination = Some(newPagination)))
+ }
+
+ def resetPagination: QueryBuilder[T, MySQLDialect, MysqlEscape] = {
+ new MysqlQueryBuilder[T](parameters.copy(pagination = None))
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/db/Pagination.scala b/src/main/scala/xyz/driver/common/db/Pagination.scala
new file mode 100644
index 0000000..d4a96d3
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/Pagination.scala
@@ -0,0 +1,20 @@
+package xyz.driver.common.db
+
+import xyz.driver.common.logging._
+
+/**
+ * @param pageNumber Starts with 1
+ */
+case class Pagination(pageSize: Int, pageNumber: Int)
+
+object Pagination {
+
+ // @see https://driverinc.atlassian.net/wiki/display/RA/REST+API+Specification#RESTAPISpecification-CommonRequestQueryParametersForWebServices
+ val Default = Pagination(pageSize = 100, pageNumber = 1)
+
+ implicit def toPhiString(x: Pagination): PhiString = {
+ import x._
+ phi"Pagination(pageSize=${Unsafe(pageSize)}, pageNumber=${Unsafe(pageNumber)})"
+ }
+
+}
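
For reference, the page-to-offset arithmetic that the query builders in this commit derive from these two fields (pageNumber starts at 1); a hypothetical example:

    val page   = Pagination(pageSize = 100, pageNumber = 3)
    val offset = (page.pageNumber - 1) * page.pageSize // 200 rows are skipped
    // rendered later as: limit 100 OFFSET 200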
diff --git a/src/main/scala/xyz/driver/common/db/QueryBuilder.scala b/src/main/scala/xyz/driver/common/db/QueryBuilder.scala
new file mode 100644
index 0000000..f0beca6
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/QueryBuilder.scala
@@ -0,0 +1,344 @@
+package xyz.driver.common.db
+
+import java.sql.PreparedStatement
+import java.time.LocalDateTime
+
+import io.getquill.NamingStrategy
+import io.getquill.context.sql.idiom.SqlIdiom
+import xyz.driver.common.db.Sorting.{Dimension, Sequential}
+import xyz.driver.common.db.SortingOrder.{Ascending, Descending}
+
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.{ExecutionContext, Future}
+
+object QueryBuilder {
+
+ type Runner[T] = (QueryBuilderParameters) => Future[Seq[T]]
+
+ type CountResult = (Int, Option[LocalDateTime])
+
+ type CountRunner = (QueryBuilderParameters) => Future[CountResult]
+
+ /**
+ * Binder for PreparedStatement
+ */
+ type Binder = PreparedStatement => PreparedStatement
+
+ case class TableData(tableName: String,
+ lastUpdateFieldName: Option[String] = None,
+ nullableFields: Set[String] = Set.empty)
+
+ val AllFields = Set("*")
+
+}
+
+case class TableLink(keyColumnName: String,
+ foreignTableName: String,
+ foreignKeyColumnName: String)
+
+object QueryBuilderParameters {
+ val AllFields = Set("*")
+}
+
+sealed trait QueryBuilderParameters {
+
+ def tableData: QueryBuilder.TableData
+ def links: Map[String, TableLink]
+ def filter: SearchFilterExpr
+ def sorting: Sorting
+ def pagination: Option[Pagination]
+
+ def findLink(tableName: String): TableLink = links.get(tableName) match {
+ case None => throw new IllegalArgumentException(s"Cannot find a link for `$tableName`")
+ case Some(link) => link
+ }
+
+ def toSql(countQuery: Boolean = false, namingStrategy: NamingStrategy): (String, QueryBuilder.Binder) = {
+ toSql(countQuery, QueryBuilderParameters.AllFields, namingStrategy)
+ }
+
+ def toSql(countQuery: Boolean,
+ fields: Set[String],
+ namingStrategy: NamingStrategy): (String, QueryBuilder.Binder) = {
+ val escapedTableName = namingStrategy.table(tableData.tableName)
+ val fieldsSql: String = if (countQuery) {
+ "count(*)" + (tableData.lastUpdateFieldName match {
+ case Some(lastUpdateField) => s", max($escapedTableName.${namingStrategy.column(lastUpdateField)})"
+ case None => ""
+ })
+ } else {
+ if (fields == QueryBuilderParameters.AllFields) {
+ s"$escapedTableName.*"
+ } else {
+ fields
+ .map { field =>
+ s"$escapedTableName.${namingStrategy.column(field)}"
+ }
+ .mkString(", ")
+ }
+ }
+ val (where, bindings) = filterToSql(escapedTableName, filter, namingStrategy)
+ val orderBy = sortingToSql(escapedTableName, sorting, namingStrategy)
+
+ val limitSql = limitToSql()
+
+ val sql = new StringBuilder()
+ sql.append("select ")
+ sql.append(fieldsSql)
+ sql.append("\nfrom ")
+ sql.append(escapedTableName)
+
+ val filtersTableLinks: Seq[TableLink] = {
+ import SearchFilterExpr._
+ def aux(expr: SearchFilterExpr): Seq[TableLink] = expr match {
+ case Atom.TableName(tableName) => List(findLink(tableName))
+ case Intersection(xs) => xs.flatMap(aux)
+ case Union(xs) => xs.flatMap(aux)
+ case _ => Nil
+ }
+ aux(filter)
+ }
+
+ val sortingTableLinks: Seq[TableLink] = Sorting.collect(sorting) {
+ case Dimension(Some(foreignTableName), _, _) => findLink(foreignTableName)
+ }
+
+ // Combine links from sorting and filter without duplicates
+ val foreignTableLinks = (filtersTableLinks ++ sortingTableLinks).distinct
+
+ foreignTableLinks.foreach {
+ case TableLink(keyColumnName, foreignTableName, foreignKeyColumnName) =>
+ val escapedForeignTableName = namingStrategy.table(foreignTableName)
+
+ sql.append("\ninner join ")
+ sql.append(escapedForeignTableName)
+ sql.append(" on ")
+
+ sql.append(escapedTableName)
+ sql.append('.')
+ sql.append(namingStrategy.column(keyColumnName))
+
+ sql.append(" = ")
+
+ sql.append(escapedForeignTableName)
+ sql.append('.')
+ sql.append(namingStrategy.column(foreignKeyColumnName))
+ }
+
+ if (where.nonEmpty) {
+ sql.append("\nwhere ")
+ sql.append(where)
+ }
+
+ if (orderBy.nonEmpty && !countQuery) {
+ sql.append("\norder by ")
+ sql.append(orderBy)
+ }
+
+ if (limitSql.nonEmpty && !countQuery) {
+ sql.append("\n")
+ sql.append(limitSql)
+ }
+
+ (sql.toString, binder(bindings))
+ }
+
+ /**
+   * Converts a filter expression to a SQL expression.
+   *
+   * @return The SQL string and the list of values to bind in the prepared statement.
+ */
+ protected def filterToSql(escapedTableName: String,
+ filter: SearchFilterExpr,
+ namingStrategy: NamingStrategy): (String, List[AnyRef]) = {
+ import SearchFilterBinaryOperation._
+ import SearchFilterExpr._
+
+ def isNull(string: AnyRef) = Option(string).isEmpty || string.toString.toLowerCase == "null"
+
+ def placeholder(field: String) = "?"
+
+ def escapeDimension(dimension: SearchFilterExpr.Dimension) = {
+ val tableName = dimension.tableName.fold(escapedTableName)(namingStrategy.table)
+ s"$tableName.${namingStrategy.column(dimension.name)}"
+ }
+
+ def filterToSqlMultiple(operands: Seq[SearchFilterExpr]) = operands.collect {
+ case x if !SearchFilterExpr.isEmpty(x) => filterToSql(escapedTableName, x, namingStrategy)
+ }
+
+ filter match {
+ case x if isEmpty(x) =>
+ ("", List.empty)
+
+ case AllowAll =>
+ ("1", List.empty)
+
+ case DenyAll =>
+ ("0", List.empty)
+
+ case Atom.Binary(dimension, Eq, value) if isNull(value) =>
+ (s"${escapeDimension(dimension)} is NULL", List.empty)
+
+ case Atom.Binary(dimension, NotEq, value) if isNull(value) =>
+ (s"${escapeDimension(dimension)} is not NULL", List.empty)
+
+ case Atom.Binary(dimension, NotEq, value) if tableData.nullableFields.contains(dimension.name) =>
+ // In MySQL, NULL <> anything evaluates to NULL,
+ // so to handle NotEq for nullable fields we need a more complex SQL expression.
+ // http://dev.mysql.com/doc/refman/5.7/en/working-with-null.html
+ val escapedColumn = escapeDimension(dimension)
+ val sql = s"($escapedColumn is null or $escapedColumn != ${placeholder(dimension.name)})"
+ (sql, List(value))
+
+ case Atom.Binary(dimension, op, value) =>
+ val operator = op match {
+ case Eq => "="
+ case NotEq => "!="
+ case Like => "like"
+ case Gt => ">"
+ case GtEq => ">="
+ case Lt => "<"
+ case LtEq => "<="
+ }
+ (s"${escapeDimension(dimension)} $operator ${placeholder(dimension.name)}", List(value))
+
+ case Atom.NAry(dimension, op, values) =>
+ val sqlOp = op match {
+ case SearchFilterNAryOperation.In => "in"
+ case SearchFilterNAryOperation.NotIn => "not in"
+ }
+
+ val bindings = ListBuffer[AnyRef]()
+ val sqlPlaceholder = placeholder(dimension.name)
+ val formattedValues = values.map { value =>
+ bindings += value
+ sqlPlaceholder
+ }.mkString(", ")
+ (s"${escapeDimension(dimension)} $sqlOp ($formattedValues)", bindings.toList)
+
+ case Intersection(operands) =>
+ val (sql, bindings) = filterToSqlMultiple(operands).unzip
+ (sql.mkString("(", " and ", ")"), bindings.flatten.toList)
+
+ case Union(operands) =>
+ val (sql, bindings) = filterToSqlMultiple(operands).unzip
+ (sql.mkString("(", " or ", ")"), bindings.flatten.toList)
+ }
+ }
+
+ protected def limitToSql(): String
+
+ /**
+ * @param escapedMainTableName The main table name, already escaped by the naming strategy
+ */
+ protected def sortingToSql(escapedMainTableName: String,
+ sorting: Sorting,
+ namingStrategy: NamingStrategy): String = {
+ sorting match {
+ case Dimension(optSortingTableName, field, order) =>
+ val sortingTableName = optSortingTableName.map(namingStrategy.table).getOrElse(escapedMainTableName)
+ val fullName = s"$sortingTableName.${namingStrategy.column(field)}"
+
+ s"$fullName ${orderToSql(order)}"
+
+ case Sequential(xs) =>
+ xs.map(sortingToSql(escapedMainTableName, _, namingStrategy)).mkString(", ")
+ }
+ }
+
+ protected def orderToSql(x: SortingOrder): String = x match {
+ case Ascending => "asc"
+ case Descending => "desc"
+ }
+
+ protected def binder(bindings: List[AnyRef])
+ (bind: PreparedStatement): PreparedStatement = {
+ bindings.zipWithIndex.foreach { case (binding, index) =>
+ bind.setObject(index + 1, binding)
+ }
+
+ bind
+ }
+
+}
+
+case class PostgresQueryBuilderParameters(tableData: QueryBuilder.TableData,
+ links: Map[String, TableLink] = Map.empty,
+ filter: SearchFilterExpr = SearchFilterExpr.Empty,
+ sorting: Sorting = Sorting.Empty,
+ pagination: Option[Pagination] = None) extends QueryBuilderParameters {
+
+ def limitToSql(): String = {
+ pagination.map { pagination =>
+ val startFrom = (pagination.pageNumber - 1) * pagination.pageSize
+ s"limit ${pagination.pageSize} OFFSET $startFrom"
+ } getOrElse ""
+ }
+
+}
+
+/**
+ * @param links Links to other tables, keyed by foreignTableName
+ */
+case class MysqlQueryBuilderParameters(tableData: QueryBuilder.TableData,
+ links: Map[String, TableLink] = Map.empty,
+ filter: SearchFilterExpr = SearchFilterExpr.Empty,
+ sorting: Sorting = Sorting.Empty,
+ pagination: Option[Pagination] = None) extends QueryBuilderParameters {
+
+ def limitToSql(): String = pagination.map { pagination =>
+ val startFrom = (pagination.pageNumber - 1) * pagination.pageSize
+ s"limit $startFrom, ${pagination.pageSize}"
+ }.getOrElse("")
+
+}
+
+abstract class QueryBuilder[T, D <: SqlIdiom, N <: NamingStrategy](val parameters: QueryBuilderParameters)
+ (implicit runner: QueryBuilder.Runner[T],
+ countRunner: QueryBuilder.CountRunner,
+ ec: ExecutionContext) {
+
+ def run: Future[Seq[T]] = runner(parameters)
+
+ def runCount: Future[QueryBuilder.CountResult] = countRunner(parameters)
+
+ /**
+ * Runs the query and also returns the total number of matching rows, ignoring pagination.
+ */
+ def runWithCount: Future[(Seq[T], Int, Option[LocalDateTime])] = {
+ val countFuture = runCount
+ val selectAllFuture = run
+ for {
+ (total, lastUpdate) <- countFuture
+ all <- selectAllFuture
+ } yield (all, total, lastUpdate)
+ }
+
+ def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, D, N]
+
+ def withFilter(filter: Option[SearchFilterExpr]): QueryBuilder[T, D, N] = {
+ filter.fold(this)(withFilter)
+ }
+
+ def resetFilter: QueryBuilder[T, D, N] = withFilter(SearchFilterExpr.Empty)
+
+
+ def withSorting(newSorting: Sorting): QueryBuilder[T, D, N]
+
+ def withSorting(sorting: Option[Sorting]): QueryBuilder[T, D, N] = {
+ sorting.fold(this)(withSorting)
+ }
+
+ def resetSorting: QueryBuilder[T, D, N] = withSorting(Sorting.Empty)
+
+
+ def withPagination(newPagination: Pagination): QueryBuilder[T, D, N]
+
+ def withPagination(pagination: Option[Pagination]): QueryBuilder[T, D, N] = {
+ pagination.fold(this)(withPagination)
+ }
+
+ def resetPagination: QueryBuilder[T, D, N]
+
+}
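
A minimal usage sketch of the builder API above (not part of this commit): `labelQueryBuilder` stands for any concrete QueryBuilder[Label, _, _] obtained elsewhere, the column names are illustrative, and the Pagination constructor is assumed to take pageSize and pageNumber.

    import java.time.LocalDateTime
    import scala.concurrent.Future
    import xyz.driver.common.db._
    import xyz.driver.common.domain.Label

    // Page 3 of labels in category 1, sorted by name, plus the total row count.
    val found: Future[(Seq[Label], Int, Option[LocalDateTime])] =
      labelQueryBuilder
        .withFilter(SearchFilterExpr.Atom.Binary("categoryId", SearchFilterBinaryOperation.Eq, Long.box(1L)))
        .withSorting(Sorting.Sequential(Seq(Sorting.Dimension(None, "name", SortingOrder.Ascending))))
        .withPagination(Pagination(pageSize = 20, pageNumber = 3))
        .runWithCount

    // For that pagination the two parameter classes above differ only in the LIMIT clause:
    //   startFrom = (3 - 1) * 20 = 40
    //   Postgres: "limit 20 OFFSET 40"      MySQL: "limit 40, 20"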
diff --git a/src/main/scala/xyz/driver/common/db/SearchFilterExpr.scala b/src/main/scala/xyz/driver/common/db/SearchFilterExpr.scala
new file mode 100644
index 0000000..06b21cd
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/SearchFilterExpr.scala
@@ -0,0 +1,210 @@
+package xyz.driver.common.db
+
+import xyz.driver.common.logging._
+
+sealed trait SearchFilterExpr {
+ def find(p: SearchFilterExpr => Boolean): Option[SearchFilterExpr]
+ def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr
+}
+
+object SearchFilterExpr {
+
+ val Empty = Intersection.Empty
+ val Forbid = Atom.Binary(
+ dimension = Dimension(None, "true"),
+ op = SearchFilterBinaryOperation.Eq,
+ value = "false"
+ )
+
+ case class Dimension(tableName: Option[String], name: String) {
+ def isForeign: Boolean = tableName.isDefined
+ }
+
+ sealed trait Atom extends SearchFilterExpr {
+ override def find(p: SearchFilterExpr => Boolean): Option[SearchFilterExpr] = {
+ if (p(this)) Some(this)
+ else None
+ }
+
+ override def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr = {
+ if (f.isDefinedAt(this)) f(this)
+ else this
+ }
+ }
+
+ object Atom {
+ case class Binary(dimension: Dimension, op: SearchFilterBinaryOperation, value: AnyRef) extends Atom
+ object Binary {
+ def apply(field: String, op: SearchFilterBinaryOperation, value: AnyRef): Binary =
+ Binary(Dimension(None, field), op, value)
+ }
+
+ case class NAry(dimension: Dimension, op: SearchFilterNAryOperation, values: Seq[AnyRef]) extends Atom
+ object NAry {
+ def apply(field: String, op: SearchFilterNAryOperation, values: Seq[AnyRef]): NAry =
+ NAry(Dimension(None, field), op, values)
+ }
+
+ /** dimension.tableName extractor */
+ object TableName {
+ def unapply(value: Atom): Option[String] = value match {
+ case Binary(Dimension(tableNameOpt, _), _, _) => tableNameOpt
+ case NAry(Dimension(tableNameOpt, _), _, _) => tableNameOpt
+ }
+ }
+ }
+
+ case class Intersection private(operands: Seq[SearchFilterExpr])
+ extends SearchFilterExpr with SearchFilterExprSeqOps {
+
+ override def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr = {
+ if (f.isDefinedAt(this)) f(this)
+ else {
+ this.copy(operands.map(_.replace(f)))
+ }
+ }
+
+ }
+
+ object Intersection {
+
+ val Empty = Intersection(Seq())
+
+ def create(operands: SearchFilterExpr*): SearchFilterExpr = {
+ val filtered = operands.filterNot(SearchFilterExpr.isEmpty)
+ filtered.size match {
+ case 0 => Empty
+ case 1 => filtered.head
+ case _ => Intersection(filtered)
+ }
+ }
+ }
+
+
+ case class Union private(operands: Seq[SearchFilterExpr]) extends SearchFilterExpr with SearchFilterExprSeqOps {
+
+ override def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr = {
+ if (f.isDefinedAt(this)) f(this)
+ else {
+ this.copy(operands.map(_.replace(f)))
+ }
+ }
+
+ }
+
+ object Union {
+
+ val Empty = Union(Seq())
+
+ def create(operands: SearchFilterExpr*): SearchFilterExpr = {
+ val filtered = operands.filterNot(SearchFilterExpr.isEmpty)
+ filtered.size match {
+ case 0 => Empty
+ case 1 => filtered.head
+ case _ => Union(filtered)
+ }
+ }
+
+ def create(dimension: Dimension, values: String*): SearchFilterExpr = values.size match {
+ case 0 => SearchFilterExpr.Empty
+ case 1 => SearchFilterExpr.Atom.Binary(dimension, SearchFilterBinaryOperation.Eq, values.head)
+ case _ =>
+ val filters = values.map { value =>
+ SearchFilterExpr.Atom.Binary(dimension, SearchFilterBinaryOperation.Eq, value)
+ }
+
+ create(filters: _*)
+ }
+
+ def create(dimension: Dimension, values: Set[String]): SearchFilterExpr =
+ create(dimension, values.toSeq: _*)
+
+ // Backwards compatible API
+
+ /** Create SearchFilterExpr with empty tableName */
+ def create(field: String, values: String*): SearchFilterExpr =
+ create(Dimension(None, field), values:_*)
+
+ /** Create SearchFilterExpr with empty tableName */
+ def create(field: String, values: Set[String]): SearchFilterExpr =
+ create(Dimension(None, field), values)
+ }
+
+
+ case object AllowAll extends SearchFilterExpr {
+ override def find(p: SearchFilterExpr => Boolean): Option[SearchFilterExpr] = {
+ if (p(this)) Some(this)
+ else None
+ }
+
+ override def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr = {
+ if (f.isDefinedAt(this)) f(this)
+ else this
+ }
+ }
+
+ case object DenyAll extends SearchFilterExpr {
+ override def find(p: SearchFilterExpr => Boolean): Option[SearchFilterExpr] = {
+ if (p(this)) Some(this)
+ else None
+ }
+
+ override def replace(f: PartialFunction[SearchFilterExpr, SearchFilterExpr]): SearchFilterExpr = {
+ if (f.isDefinedAt(this)) f(this)
+ else this
+ }
+ }
+
+ def isEmpty(expr: SearchFilterExpr): Boolean = {
+ expr == Intersection.Empty || expr == Union.Empty
+ }
+
+ sealed trait SearchFilterExprSeqOps {
+ this: SearchFilterExpr =>
+
+ val operands: Seq[SearchFilterExpr]
+
+ override def find(p: SearchFilterExpr => Boolean): Option[SearchFilterExpr] = {
+ if (p(this)) Some(this)
+ else {
+ // Find the first operand that satisfies p.
+ // It's ok to use foldLeft; if it ever causes performance issues, replace it with a recursive loop.
+ operands.foldLeft(Option.empty[SearchFilterExpr]) {
+ case (None, expr) => expr.find(p)
+ case (x, _) => x
+ }
+ }
+ }
+
+ }
+
+ // There is currently no case in which this is unsafe.
+ implicit def toPhiString(x: SearchFilterExpr): PhiString = {
+ if (isEmpty(x)) Unsafe("SearchFilterExpr.Empty")
+ else Unsafe(x.toString)
+ }
+
+}
+
+sealed trait SearchFilterBinaryOperation
+
+object SearchFilterBinaryOperation {
+
+ case object Eq extends SearchFilterBinaryOperation
+ case object NotEq extends SearchFilterBinaryOperation
+ case object Like extends SearchFilterBinaryOperation
+ case object Gt extends SearchFilterBinaryOperation
+ case object GtEq extends SearchFilterBinaryOperation
+ case object Lt extends SearchFilterBinaryOperation
+ case object LtEq extends SearchFilterBinaryOperation
+
+}
+
+sealed trait SearchFilterNAryOperation
+
+object SearchFilterNAryOperation {
+
+ case object In extends SearchFilterNAryOperation
+ case object NotIn extends SearchFilterNAryOperation
+
+}
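
A short sketch of how these combinators compose (the column names are illustrative, not taken from this commit):

    import xyz.driver.common.db.SearchFilterExpr._
    import xyz.driver.common.db.{SearchFilterBinaryOperation => Op}

    // (status = 'New' or status = 'Reviewed') and disease = 'Breast'
    val filter = Intersection.create(
      Union.create("status", "New", "Reviewed"),
      Atom.Binary("disease", Op.Eq, "Breast")
    )

    // create() drops empty operands, so a single remaining operand is returned as-is
    Intersection.create(Empty, filter) == filter   // true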
diff --git a/src/main/scala/xyz/driver/common/db/Sorting.scala b/src/main/scala/xyz/driver/common/db/Sorting.scala
new file mode 100644
index 0000000..70c25f2
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/Sorting.scala
@@ -0,0 +1,62 @@
+package xyz.driver.common.db
+
+import xyz.driver.common.logging._
+
+import scala.collection.generic.CanBuildFrom
+
+sealed trait SortingOrder
+object SortingOrder {
+
+ case object Ascending extends SortingOrder
+ case object Descending extends SortingOrder
+
+}
+
+sealed trait Sorting
+
+object Sorting {
+
+ val Empty = Sequential(Seq.empty)
+
+ /**
+ * @param tableName None if the dimension refers to the main (default) table
+ * @param name Dimension (column) name
+ * @param order Sorting order
+ */
+ case class Dimension(tableName: Option[String], name: String, order: SortingOrder) extends Sorting {
+ def isForeign: Boolean = tableName.isDefined
+ }
+
+ case class Sequential(sorting: Seq[Dimension]) extends Sorting {
+ override def toString: String = if (isEmpty(this)) "Empty" else super.toString
+ }
+
+ def isEmpty(input: Sorting): Boolean = {
+ input match {
+ case Sequential(Seq()) => true
+ case _ => false
+ }
+ }
+
+ def filter(sorting: Sorting, p: Dimension => Boolean): Seq[Dimension] = sorting match {
+ case x: Dimension if p(x) => Seq(x)
+ case x: Dimension => Seq.empty
+ case Sequential(xs) => xs.filter(p)
+ }
+
+ def collect[B, That](sorting: Sorting)
+ (f: PartialFunction[Dimension, B])
+ (implicit bf: CanBuildFrom[Seq[Dimension], B, That]): That = sorting match {
+ case x: Dimension if f.isDefinedAt(x) =>
+ val r = bf.apply()
+ r += f(x)
+ r.result()
+
+ case x: Dimension => bf.apply().result()
+ case Sequential(xs) => xs.collect(f)
+ }
+
+ // Contains dimensions and ordering only, thus it is safe.
+ implicit def toPhiString(x: Sorting): PhiString = Unsafe(x.toString)
+
+}
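
A small sketch of the sorting model (table and column names are illustrative):

    import xyz.driver.common.db.{Sorting, SortingOrder}
    import Sorting._
    import SortingOrder._

    // Sort by lastUpdate on the main table, then by name on a joined Category table
    val sorting = Sequential(Seq(
      Dimension(None, "lastUpdate", Descending),
      Dimension(Some("Category"), "name", Ascending)
    ))

    // Only the second dimension refers to a foreign table
    Sorting.filter(sorting, _.isForeign).map(_.name)   // Seq("name")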
diff --git a/src/main/scala/xyz/driver/common/db/SqlContext.scala b/src/main/scala/xyz/driver/common/db/SqlContext.scala
new file mode 100644
index 0000000..4b9d676
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/SqlContext.scala
@@ -0,0 +1,184 @@
+package xyz.driver.common.db
+
+import java.io.Closeable
+import java.net.URI
+import java.time._
+import java.util.UUID
+import java.util.concurrent.Executors
+import javax.sql.DataSource
+
+import xyz.driver.common.logging.{PhiLogging, Unsafe}
+import xyz.driver.common.concurrent.MdcExecutionContext
+import xyz.driver.common.db.SqlContext.Settings
+import xyz.driver.common.domain._
+import xyz.driver.common.error.IncorrectIdException
+import xyz.driver.common.utils.JsonSerializer
+import com.typesafe.config.Config
+import io.getquill._
+
+import scala.concurrent.ExecutionContext
+import scala.util.control.NonFatal
+import scala.util.{Failure, Success, Try}
+
+object SqlContext extends PhiLogging {
+
+ case class DbCredentials(user: String,
+ password: String,
+ host: String,
+ port: Int,
+ dbName: String,
+ dbCreateFlag: Boolean,
+ dbContext: String,
+ connectionParams: String,
+ url: String)
+
+ case class Settings(credentials: DbCredentials,
+ connection: Config,
+ connectionAttemptsOnStartup: Int,
+ threadPoolSize: Int)
+
+ def apply(settings: Settings): SqlContext = {
+ // Prevent the credentials from leaking into the logs
+ Try(JdbcContextConfig(settings.connection).dataSource) match {
+ case Success(dataSource) => new SqlContext(dataSource, settings)
+ case Failure(NonFatal(e)) =>
+ logger.error(phi"Can not load dataSource, error: ${Unsafe(e.getClass.getName)}")
+ throw new IllegalArgumentException("Can not load dataSource from config. Check your database and config")
+ }
+ }
+
+}
+
+class SqlContext(dataSource: DataSource with Closeable, settings: Settings)
+ extends MysqlJdbcContext[MysqlEscape](dataSource)
+ with EntityExtractorDerivation[Literal] {
+
+ private val tpe = Executors.newFixedThreadPool(settings.threadPoolSize)
+
+ implicit val executionContext: ExecutionContext = {
+ val orig = ExecutionContext.fromExecutor(tpe)
+ MdcExecutionContext.from(orig)
+ }
+
+ override def close(): Unit = {
+ super.close()
+ tpe.shutdownNow()
+ }
+
+ // ///////// Encodes/Decoders ///////////
+
+ /**
+ * Overridden because Quill's JDBC optionDecoder passes null into the underlying decoders.
+ * If a custom decoder has no special null handling, it will fail.
+ *
+ * @see https://github.com/getquill/quill/issues/535
+ */
+ implicit override def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
+ decoder(
+ sqlType = d.sqlType,
+ row => index => {
+ try {
+ val res = d(index - 1, row)
+ if (row.wasNull) {
+ None
+ }
+ else {
+ Some(res)
+ }
+ } catch {
+ case _: NullPointerException => None
+ case _: IncorrectIdException => None
+ }
+ }
+ )
+
+ implicit def encodeStringId[T] = MappedEncoding[StringId[T], String](_.id)
+ implicit def decodeStringId[T] = MappedEncoding[String, StringId[T]] {
+ case "" => throw IncorrectIdException("'' is an invalid Id value")
+ case x => StringId(x)
+ }
+
+ def decodeOptStringId[T] = MappedEncoding[Option[String], Option[StringId[T]]] {
+ case None | Some("") => None
+ case Some(x) => Some(StringId(x))
+ }
+
+ implicit def encodeLongId[T] = MappedEncoding[LongId[T], Long](_.id)
+ implicit def decodeLongId[T] = MappedEncoding[Long, LongId[T]] {
+ case 0 => throw IncorrectIdException("0 is an invalid Id value")
+ case x => LongId(x)
+ }
+
+ // TODO Dirty hack, see REP-475
+ def decodeOptLongId[T] = MappedEncoding[Option[Long], Option[LongId[T]]] {
+ case None | Some(0) => None
+ case Some(x) => Some(LongId(x))
+ }
+
+ implicit def encodeUuidId[T] = MappedEncoding[UuidId[T], String](_.toString)
+ implicit def decodeUuidId[T] = MappedEncoding[String, UuidId[T]] {
+ case "" => throw IncorrectIdException("'' is an invalid Id value")
+ case x => UuidId(x)
+ }
+
+ def decodeOptUuidId[T] = MappedEncoding[Option[String], Option[UuidId[T]]] {
+ case None | Some("") => None
+ case Some(x) => Some(UuidId(x))
+ }
+
+ implicit def encodeTextJson[T: Manifest] = MappedEncoding[TextJson[T], String](x => JsonSerializer.serialize(x.content))
+ implicit def decodeTextJson[T: Manifest] = MappedEncoding[String, TextJson[T]] { x =>
+ TextJson(JsonSerializer.deserialize[T](x))
+ }
+
+ implicit val encodeUserRole = MappedEncoding[User.Role, Int](_.bit)
+ implicit val decodeUserRole = MappedEncoding[Int, User.Role] {
+ // 0 is treated as null for numeric types
+ case 0 => throw new NullPointerException("0 means no roles. A user must have a role")
+ case x => User.Role(x)
+ }
+
+ implicit val encodeEmail = MappedEncoding[Email, String](_.value.toString)
+ implicit val decodeEmail = MappedEncoding[String, Email](Email)
+
+ implicit val encodePasswordHash = MappedEncoding[PasswordHash, Array[Byte]](_.value)
+ implicit val decodePasswordHash = MappedEncoding[Array[Byte], PasswordHash](PasswordHash(_))
+
+ implicit val encodeUri = MappedEncoding[URI, String](_.toString)
+ implicit val decodeUri = MappedEncoding[String, URI](URI.create)
+
+ implicit val encodeCaseId = MappedEncoding[CaseId, String](_.id.toString)
+ implicit val decodeCaseId = MappedEncoding[String, CaseId](CaseId(_))
+
+ implicit val encodeFuzzyValue = {
+ MappedEncoding[FuzzyValue, String] {
+ case FuzzyValue.No => "No"
+ case FuzzyValue.Yes => "Yes"
+ case FuzzyValue.Maybe => "Maybe"
+ }
+ }
+ implicit val decodeFuzzyValue = MappedEncoding[String, FuzzyValue] {
+ case "Yes" => FuzzyValue.Yes
+ case "No" => FuzzyValue.No
+ case "Maybe" => FuzzyValue.Maybe
+ case x =>
+ Option(x).fold {
+ throw new NullPointerException("FuzzyValue is null") // See catch in optionDecoder
+ } { _ =>
+ throw new IllegalStateException(s"Unknown fuzzy value: $x")
+ }
+ }
+
+
+ implicit val encodeRecordRequestId = MappedEncoding[RecordRequestId, String](_.id.toString)
+ implicit val decodeRecordRequestId = MappedEncoding[String, RecordRequestId] { x =>
+ RecordRequestId(UUID.fromString(x))
+ }
+
+ final implicit class LocalDateTimeDbOps(val left: LocalDateTime) {
+
+ // scalastyle:off
+ def <=(right: LocalDateTime): Quoted[Boolean] = quote(infix"$left <= $right".as[Boolean])
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/db/Transactions.scala b/src/main/scala/xyz/driver/common/db/Transactions.scala
new file mode 100644
index 0000000..2f5a2cc
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/Transactions.scala
@@ -0,0 +1,23 @@
+package xyz.driver.common.db
+
+import xyz.driver.common.logging.PhiLogging
+
+import scala.concurrent.Future
+import scala.util.{Failure, Success, Try}
+
+class Transactions()(implicit context: SqlContext) extends PhiLogging {
+ def run[T](f: SqlContext => T): Future[T] = {
+ import context.executionContext
+
+ Future(context.transaction(f(context))).andThen {
+ case Failure(e) => logger.error(phi"Can't run a transaction: $e")
+ }
+ }
+
+ def runSync[T](f: SqlContext => T): Unit = {
+ Try(context.transaction(f(context))) match {
+ case Success(_) =>
+ case Failure(e) => logger.error(phi"Can't run a transaction: $e")
+ }
+ }
+}
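
A usage sketch (the body of the block is hypothetical; an already constructed SqlContext is assumed to be in implicit scope):

    import scala.concurrent.Future
    import xyz.driver.common.db.{SqlContext, Transactions}

    implicit val sqlContext: SqlContext = ??? // built via SqlContext(settings) at wiring time
    val transactions = new Transactions()

    // The whole block runs in a single database transaction;
    // a failure is logged and the returned future fails.
    val result: Future[Int] = transactions.run { ctx =>
      42 // hypothetical body: several repository writes sharing `ctx`
    }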
diff --git a/src/main/scala/xyz/driver/common/db/repositories/BridgeUploadQueueRepository.scala b/src/main/scala/xyz/driver/common/db/repositories/BridgeUploadQueueRepository.scala
new file mode 100644
index 0000000..e0d6ff2
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/repositories/BridgeUploadQueueRepository.scala
@@ -0,0 +1,24 @@
+package xyz.driver.common.db.repositories
+
+import xyz.driver.common.concurrent.BridgeUploadQueue
+import xyz.driver.common.domain.LongId
+
+import scala.concurrent.Future
+
+trait BridgeUploadQueueRepository extends Repository {
+
+ type EntityT = BridgeUploadQueue.Item
+ type IdT = LongId[EntityT]
+
+ def add(draft: EntityT): EntityT
+
+ def getById(id: LongId[EntityT]): Option[EntityT]
+
+ def isCompleted(kind: String, tag: String): Future[Boolean]
+
+ def getOne(kind: String): Future[Option[BridgeUploadQueue.Item]]
+
+ def update(entity: EntityT): EntityT
+
+ def delete(id: IdT): Unit
+}
diff --git a/src/main/scala/xyz/driver/common/db/repositories/Repository.scala b/src/main/scala/xyz/driver/common/db/repositories/Repository.scala
new file mode 100644
index 0000000..ae2a3e6
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/repositories/Repository.scala
@@ -0,0 +1,4 @@
+package xyz.driver.common.db.repositories
+
+// Placeholder for future use and for the migration to Postgres and Slick
+trait Repository extends RepositoryLogging
diff --git a/src/main/scala/xyz/driver/common/db/repositories/RepositoryLogging.scala b/src/main/scala/xyz/driver/common/db/repositories/RepositoryLogging.scala
new file mode 100644
index 0000000..cb2c438
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/db/repositories/RepositoryLogging.scala
@@ -0,0 +1,62 @@
+package xyz.driver.common.db.repositories
+
+import xyz.driver.common.logging._
+
+trait RepositoryLogging extends PhiLogging {
+
+ protected def logCreatedOne[T](x: T)(implicit toPhiString: T => PhiString): T = {
+ logger.info(phi"An entity was created: $x")
+ x
+ }
+
+ protected def logCreatedMultiple[T <: Iterable[_]](xs: T)(implicit toPhiString: T => PhiString): T = {
+ if (xs.nonEmpty) {
+ logger.info(phi"Entities were created: $xs")
+ }
+ xs
+ }
+
+ protected def logUpdatedOne(rowsAffected: Long): Long = {
+ rowsAffected match {
+ case 0 => logger.trace(phi"The entity is up to date")
+ case 1 => logger.info(phi"The entity was updated")
+ case x => logger.warn(phi"The ${Unsafe(x)} entities were updated")
+ }
+ rowsAffected
+ }
+
+ protected def logUpdatedOneUnimportant(rowsAffected: Long): Long = {
+ rowsAffected match {
+ case 0 => logger.trace(phi"The entity is up to date")
+ case 1 => logger.trace(phi"The entity was updated")
+ case x => logger.warn(phi"The ${Unsafe(x)} entities were updated")
+ }
+ rowsAffected
+ }
+
+ protected def logUpdatedMultiple(rowsAffected: Long): Long = {
+ rowsAffected match {
+ case 0 => logger.trace(phi"All entities are up to date")
+ case x => logger.info(phi"The ${Unsafe(x)} entities were updated")
+ }
+ rowsAffected
+ }
+
+ protected def logDeletedOne(rowsAffected: Long): Long = {
+ rowsAffected match {
+ case 0 => logger.trace(phi"The entity does not exist")
+ case 1 => logger.info(phi"The entity was deleted")
+ case x => logger.warn(phi"Deleted ${Unsafe(x)} entities, expected one")
+ }
+ rowsAffected
+ }
+
+ protected def logDeletedMultiple(rowsAffected: Long): Long = {
+ rowsAffected match {
+ case 0 => logger.trace(phi"Entities do not exist")
+ case x => logger.info(phi"Deleted ${Unsafe(x)} entities")
+ }
+ rowsAffected
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/domain/CaseId.scala b/src/main/scala/xyz/driver/common/domain/CaseId.scala
new file mode 100644
index 0000000..bb11f90
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/CaseId.scala
@@ -0,0 +1,10 @@
+package xyz.driver.common.domain
+
+import java.util.UUID
+
+case class CaseId(id: String)
+
+object CaseId {
+
+ def apply() = new CaseId(UUID.randomUUID().toString)
+}
diff --git a/src/main/scala/xyz/driver/common/domain/Category.scala b/src/main/scala/xyz/driver/common/domain/Category.scala
new file mode 100644
index 0000000..e130367
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/Category.scala
@@ -0,0 +1,21 @@
+package xyz.driver.common.domain
+
+import xyz.driver.common.logging._
+
+case class Category(id: LongId[Category], name: String)
+
+object Category {
+ implicit def toPhiString(x: Category): PhiString = {
+ import x._
+ phi"Category(id=$id, name=${Unsafe(name)})"
+ }
+}
+
+case class CategoryWithLabels(category: Category, labels: List[Label])
+
+object CategoryWithLabels {
+ implicit def toPhiString(x: CategoryWithLabels): PhiString = {
+ import x._
+ phi"CategoryWithLabels(category=$category, labels=$labels)"
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/domain/Email.scala b/src/main/scala/xyz/driver/common/domain/Email.scala
new file mode 100644
index 0000000..c3bcf3f
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/Email.scala
@@ -0,0 +1,3 @@
+package xyz.driver.common.domain
+
+case class Email(value: String)
diff --git a/src/main/scala/xyz/driver/common/domain/FuzzyValue.scala b/src/main/scala/xyz/driver/common/domain/FuzzyValue.scala
new file mode 100644
index 0000000..584f8f7
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/FuzzyValue.scala
@@ -0,0 +1,17 @@
+package xyz.driver.common.domain
+
+import xyz.driver.common.logging._
+import xyz.driver.common.utils.Utils
+
+sealed trait FuzzyValue
+object FuzzyValue {
+ case object Yes extends FuzzyValue
+ case object No extends FuzzyValue
+ case object Maybe extends FuzzyValue
+
+ val All: Set[FuzzyValue] = Set(Yes, No, Maybe)
+
+ def fromBoolean(x: Boolean): FuzzyValue = if (x) Yes else No
+
+ implicit def toPhiString(x: FuzzyValue): PhiString = Unsafe(Utils.getClassSimpleName(x.getClass))
+}
diff --git a/src/main/scala/xyz/driver/common/domain/Id.scala b/src/main/scala/xyz/driver/common/domain/Id.scala
new file mode 100644
index 0000000..9f9604e
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/Id.scala
@@ -0,0 +1,51 @@
+package xyz.driver.common.domain
+
+import java.util.UUID
+
+import xyz.driver.common.logging._
+
+sealed trait Id[+T]
+
+case class CompoundId[Id1 <: Id[_], Id2 <: Id[_]](part1: Id1, part2: Id2) extends Id[(Id1, Id2)]
+
+case class LongId[+T](id: Long) extends Id[T] {
+ override def toString: String = id.toString
+
+ def is(longId: Long): Boolean = {
+ id == longId
+ }
+}
+
+object LongId {
+ implicit def toPhiString[T](x: LongId[T]): PhiString = Unsafe(s"LongId(${x.id})")
+}
+
+case class StringId[+T](id: String) extends Id[T] {
+ override def toString: String = id
+
+ def is(stringId: String): Boolean = {
+ id == stringId
+ }
+}
+
+object StringId {
+ implicit def toPhiString[T](x: StringId[T]): PhiString = Unsafe(s"StringId(${x.id})")
+}
+
+case class UuidId[+T](id: UUID) extends Id[T] {
+ override def toString: String = id.toString
+}
+
+object UuidId {
+
+ /**
+ * @note Throws if `string` is not a valid UUID.
+ */
+ def apply[T](string: String): UuidId[T] = new UuidId[T](UUID.fromString(string))
+
+ def apply[T](): UuidId[T] = new UuidId[T](UUID.randomUUID())
+
+ implicit def ordering[T] = Ordering.by[UuidId[T], String](_.toString)
+
+ implicit def toPhiString[T](x: UuidId[T]): PhiString = Unsafe(s"UuidId(${x.id})")
+}
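
A brief sketch of the phantom-typed ids above (the entity type parameter is only a compile-time tag):

    import xyz.driver.common.domain.{Label, LongId, UuidId}

    val labelId: LongId[Label] = LongId(42L)
    labelId.is(42L)                  // true

    val freshId = UuidId[Label]()    // randomly generated UUID
    // UuidId[Label]("not-a-uuid")   // would throw (see the @note above)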
diff --git a/src/main/scala/xyz/driver/common/domain/Label.scala b/src/main/scala/xyz/driver/common/domain/Label.scala
new file mode 100644
index 0000000..2214216
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/Label.scala
@@ -0,0 +1,15 @@
+package xyz.driver.common.domain
+
+import xyz.driver.common.logging._
+
+case class Label(id: LongId[Label],
+ categoryId: LongId[Category],
+ name: String,
+ description: String)
+
+object Label {
+ implicit def toPhiString(x: Label): PhiString = {
+ import x._
+ phi"Label($id, categoryId=${Unsafe(categoryId)}, name=${Unsafe(name)}, description=${Unsafe(description)})"
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/domain/PasswordHash.scala b/src/main/scala/xyz/driver/common/domain/PasswordHash.scala
new file mode 100644
index 0000000..7b25799
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/PasswordHash.scala
@@ -0,0 +1,42 @@
+package xyz.driver.common.domain
+
+import java.nio.charset.Charset
+
+import org.mindrot.jbcrypt.BCrypt
+
+case class PasswordHash(value: Array[Byte]) {
+
+ lazy val hashString: String = new String(value, Charset.forName("UTF-8"))
+
+ override def toString: String = {
+ s"${this.getClass.getSimpleName}($hashString)"
+ }
+
+ override def equals(that: Any): Boolean = {
+ that match {
+ case thatHash: PasswordHash => java.util.Arrays.equals(this.value, thatHash.value)
+ case _ => false
+ }
+ }
+
+ override def hashCode(): Int = {
+ 42 + java.util.Arrays.hashCode(this.value)
+ }
+
+ def is(password: String): Boolean = {
+ BCrypt.checkpw(password, hashString)
+ }
+
+}
+
+object PasswordHash {
+
+ def apply(password: String): PasswordHash = {
+ new PasswordHash(getHash(password))
+ }
+
+ private def getHash(str: String): Array[Byte] = {
+ BCrypt.hashpw(str, BCrypt.gensalt()).getBytes(Charset.forName("UTF-8"))
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/domain/RecordRequestId.scala b/src/main/scala/xyz/driver/common/domain/RecordRequestId.scala
new file mode 100644
index 0000000..901ff66
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/RecordRequestId.scala
@@ -0,0 +1,16 @@
+package xyz.driver.common.domain
+
+import java.util.UUID
+
+import xyz.driver.common.logging._
+
+case class RecordRequestId(id: UUID) {
+ override def toString: String = id.toString
+}
+
+object RecordRequestId {
+
+ def apply() = new RecordRequestId(UUID.randomUUID())
+
+ implicit def toPhiString(x: RecordRequestId): PhiString = phi"${x.id}"
+}
diff --git a/src/main/scala/xyz/driver/common/domain/TextJson.scala b/src/main/scala/xyz/driver/common/domain/TextJson.scala
new file mode 100644
index 0000000..af18723
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/TextJson.scala
@@ -0,0 +1,14 @@
+package xyz.driver.common.domain
+
+import xyz.driver.common.logging._
+
+case class TextJson[+T](content: T) {
+ def map[U](f: T => U): TextJson[U] = copy(f(content))
+}
+
+object TextJson {
+
+ implicit def toPhiString[T](x: TextJson[T])(implicit inner: T => PhiString): PhiString = {
+ phi"TextJson(${x.content})"
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/domain/User.scala b/src/main/scala/xyz/driver/common/domain/User.scala
new file mode 100644
index 0000000..83d861f
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/domain/User.scala
@@ -0,0 +1,74 @@
+package xyz.driver.common.domain
+
+import java.time.LocalDateTime
+
+import xyz.driver.common.logging._
+import xyz.driver.common.domain.User.Role
+import xyz.driver.common.utils.Utils
+
+case class User(id: LongId[User],
+ email: Email,
+ name: String,
+ role: Role,
+ passwordHash: PasswordHash,
+ latestActivity: Option[LocalDateTime],
+ deleted: Option[LocalDateTime])
+
+object User {
+
+ sealed trait Role extends Product with Serializable {
+
+ /**
+ * Bit representation of this role
+ */
+ def bit: Int
+
+ def is(that: Role): Boolean = this == that
+
+ def oneOf(roles: Role*): Boolean = roles.contains(this)
+
+ def oneOf(roles: Set[Role]): Boolean = roles.contains(this)
+ }
+
+ object Role extends PhiLogging {
+ case object RecordAdmin extends Role { val bit = 1 << 0 }
+ case object RecordCleaner extends Role { val bit = 1 << 1 }
+ case object RecordOrganizer extends Role { val bit = 1 << 2 }
+ case object DocumentExtractor extends Role { val bit = 1 << 3 }
+ case object TrialSummarizer extends Role { val bit = 1 << 4 }
+ case object CriteriaCurator extends Role { val bit = 1 << 5 }
+ case object TrialAdmin extends Role { val bit = 1 << 6 }
+ case object EligibilityVerifier extends Role { val bit = 1 << 7 }
+ case object TreatmentMatchingAdmin extends Role { val bit = 1 << 8 }
+ case object RoutesCurator extends Role { val bit = 1 << 9 }
+
+ val RepRoles = Set[Role](RecordAdmin, RecordCleaner, RecordOrganizer, DocumentExtractor)
+
+ val TcRoles = Set[Role](TrialSummarizer, CriteriaCurator, TrialAdmin)
+
+ val TreatmentMatchingRoles = Set[Role](RoutesCurator, EligibilityVerifier, TreatmentMatchingAdmin)
+
+ val All = RepRoles ++ TcRoles ++ TreatmentMatchingRoles
+
+ def apply(bitMask: Int): Role = {
+ def extractRole(role: Role): Set[Role] =
+ if ((bitMask & role.bit) != 0) Set(role) else Set.empty[Role]
+
+ val roles = All.flatMap(extractRole)
+ roles.size match {
+ case 1 => roles.head
+ case _ =>
+ logger.error(phi"Can't convert a bit mask ${Unsafe(bitMask)} to any role")
+ throw new IllegalArgumentException()
+ }
+ }
+
+ implicit def toPhiString(x: Role): PhiString = Unsafe(Utils.getClassSimpleName(x.getClass))
+ }
+
+ implicit def toPhiString(x: User): PhiString = {
+ import x._
+ phi"User(id=$id, role=$role)"
+ }
+
+}
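
Because every role occupies a distinct bit, the database integer maps back to exactly one Role; a worked sketch (values are illustrative):

    import xyz.driver.common.domain.User.Role

    val role: Role = Role(1 << 2)                        // 4 decodes to RecordOrganizer
    role == Role.RecordOrganizer                         // true
    role.oneOf(Role.RepRoles)                            // true
    role.oneOf(Role.TrialAdmin, Role.CriteriaCurator)    // false

    // Role(0) or a mask covering several bits throws IllegalArgumentException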
diff --git a/src/main/scala/xyz/driver/common/error/DomainError.scala b/src/main/scala/xyz/driver/common/error/DomainError.scala
new file mode 100644
index 0000000..d277543
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/error/DomainError.scala
@@ -0,0 +1,31 @@
+package xyz.driver.common.error
+
+import xyz.driver.common.logging.{PhiString, Unsafe}
+import xyz.driver.common.utils.Utils
+
+trait DomainError {
+
+ protected def userMessage: String
+
+ def getMessage: String = userMessage
+
+}
+
+object DomainError {
+
+ // 404 error
+ trait NotFoundError extends DomainError
+
+ // 401 error
+ trait AuthenticationError extends DomainError
+
+ // 403 error
+ trait AuthorizationError extends DomainError
+
+ implicit def toPhiString(x: DomainError): PhiString = {
+ // userMessage may contain personal information,
+ // so we must prevent it from being printed in logs.
+ Unsafe(Utils.getClassSimpleName(x.getClass))
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/error/ExceptionFormatter.scala b/src/main/scala/xyz/driver/common/error/ExceptionFormatter.scala
new file mode 100644
index 0000000..33dd94c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/error/ExceptionFormatter.scala
@@ -0,0 +1,19 @@
+package xyz.driver.common.error
+
+import java.io.{ByteArrayOutputStream, PrintStream}
+
+object ExceptionFormatter {
+
+ def format(e: Throwable): String = s"$e\n${printStackTrace(e)}"
+
+ def printStackTrace(e: Throwable): String = {
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ e.printStackTrace(ps)
+
+ ps.close()
+ baos.toString()
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/error/FailedValidationException.scala b/src/main/scala/xyz/driver/common/error/FailedValidationException.scala
new file mode 100644
index 0000000..018ce58
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/error/FailedValidationException.scala
@@ -0,0 +1,5 @@
+package xyz.driver.common.error
+
+import xyz.driver.common.validation.ValidationError
+
+class FailedValidationException(val error: ValidationError) extends RuntimeException("The validation is failed")
diff --git a/src/main/scala/xyz/driver/common/error/IncorrectIdException.scala b/src/main/scala/xyz/driver/common/error/IncorrectIdException.scala
new file mode 100644
index 0000000..a91065c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/error/IncorrectIdException.scala
@@ -0,0 +1,3 @@
+package xyz.driver.common.error
+
+case class IncorrectIdException(message: String) extends Exception(message)
diff --git a/src/main/scala/xyz/driver/common/http/AsyncHttpClientFetcher.scala b/src/main/scala/xyz/driver/common/http/AsyncHttpClientFetcher.scala
new file mode 100644
index 0000000..5d54f0d
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/http/AsyncHttpClientFetcher.scala
@@ -0,0 +1,90 @@
+package xyz.driver.common.http
+
+import java.io.Closeable
+import java.net.URL
+import java.util.concurrent.{ExecutorService, Executors}
+
+import com.typesafe.scalalogging.StrictLogging
+import org.asynchttpclient._
+import org.slf4j.MDC
+import xyz.driver.common.concurrent.MdcThreadFactory
+import xyz.driver.common.utils.RandomUtils
+
+import scala.concurrent.duration.FiniteDuration
+import scala.concurrent.{ExecutionContext, Future, Promise}
+
+class AsyncHttpClientFetcher(settings: AsyncHttpClientFetcher.Settings)
+ extends HttpFetcher with Closeable with StrictLogging {
+
+ private val es: ExecutorService = {
+ val threadFactory = MdcThreadFactory.from(Executors.defaultThreadFactory())
+ Executors.newSingleThreadExecutor(threadFactory)
+ }
+
+ private implicit val executionContext = ExecutionContext.fromExecutor(es)
+
+ private def httpClientConfig: DefaultAsyncHttpClientConfig = {
+ val builder = new DefaultAsyncHttpClientConfig.Builder()
+ builder.setConnectTimeout(settings.connectTimeout.toMillis.toInt)
+ builder.setReadTimeout(settings.readTimeout.toMillis.toInt)
+ // builder.setThreadFactory(threadFactory) // Doesn't help to push MDC context into AsyncCompletionHandler
+ builder.build()
+ }
+
+ private val httpClient = new DefaultAsyncHttpClient(httpClientConfig)
+
+ override def apply(url: URL): Future[Array[Byte]] = {
+ val fingerPrint = RandomUtils.randomString(10)
+
+ // log all outgoing connections
+ logger.info("{}, apply({})", fingerPrint, url)
+ val promise = Promise[Response]()
+
+ httpClient.prepareGet(url.toString).execute(new AsyncCompletionHandler[Response] {
+ override def onCompleted(response: Response): Response = {
+ promise.success(response)
+ response
+ }
+
+ override def onThrowable(t: Throwable): Unit = {
+ promise.failure(t)
+ super.onThrowable(t)
+ }
+ })
+
+ // The promise's callbacks can run on a different thread (promises have their own ExecutionContext),
+ // so we restore the MDC context manually.
+ val parentMdcContext = MDC.getCopyOfContextMap
+ promise.future.flatMap { response =>
+ setContextMap(parentMdcContext)
+
+ if (response.getStatusCode == 200) {
+ // DO NOT LOG body, it could be PHI
+ // logger.trace(response.getResponseBody())
+ val bytes = response.getResponseBodyAsBytes
+ logger.debug("{}, size is {}B", fingerPrint, bytes.size.asInstanceOf[AnyRef])
+ Future.successful(bytes)
+ } else {
+ logger.error("{}, HTTP {}", fingerPrint, response.getStatusCode.asInstanceOf[AnyRef])
+ logger.trace(response.getResponseBody().take(100))
+ Future.failed(new IllegalStateException("An unexpected response from the server"))
+ }
+ }
+ }
+
+ private[this] def setContextMap(context: java.util.Map[String, String]): Unit =
+ Option(context).fold(MDC.clear())(MDC.setContextMap)
+
+ override def close(): Unit = {
+ httpClient.close()
+ es.shutdown()
+ }
+
+}
+
+object AsyncHttpClientFetcher {
+
+ case class Settings(connectTimeout: FiniteDuration,
+ readTimeout: FiniteDuration)
+
+}
diff --git a/src/main/scala/xyz/driver/common/http/AsyncHttpClientUploader.scala b/src/main/scala/xyz/driver/common/http/AsyncHttpClientUploader.scala
new file mode 100644
index 0000000..97c96cd
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/http/AsyncHttpClientUploader.scala
@@ -0,0 +1,116 @@
+package xyz.driver.common.http
+
+import java.io.Closeable
+import java.net.URI
+import java.util.concurrent.{ExecutorService, Executors}
+
+import xyz.driver.common.concurrent.MdcThreadFactory
+import xyz.driver.common.http.AsyncHttpClientUploader._
+import xyz.driver.common.utils.RandomUtils
+import com.typesafe.scalalogging.StrictLogging
+import org.asynchttpclient._
+import org.slf4j.MDC
+
+import scala.concurrent.duration.FiniteDuration
+import scala.concurrent.{ExecutionContext, Future, Promise}
+
+class AsyncHttpClientUploader(settings: Settings) extends Closeable with StrictLogging {
+
+ private val es: ExecutorService = {
+ val threadFactory = MdcThreadFactory.from(Executors.defaultThreadFactory())
+ Executors.newSingleThreadExecutor(threadFactory)
+ }
+
+ private implicit val executionContext = ExecutionContext.fromExecutor(es)
+
+ private def httpClientConfig: DefaultAsyncHttpClientConfig = {
+ val builder = new DefaultAsyncHttpClientConfig.Builder()
+ builder.setConnectTimeout(settings.connectTimeout.toMillis.toInt)
+ builder.setRequestTimeout(settings.requestTimeout.toMillis.toInt)
+ // builder.setThreadFactory(threadFactory) // Doesn't help to push MDC context into AsyncCompletionHandler
+ builder.build()
+ }
+
+ private val httpClient = new DefaultAsyncHttpClient(httpClientConfig)
+
+ def run(method: Method, uri: URI, contentType: String, data: String): Future[Unit] = {
+ // log all outgoing connections
+ val fingerPrint = RandomUtils.randomString(10)
+ logger.info("{}, apply(method={}, uri={}, contentType={})", fingerPrint, method, uri, contentType)
+ val promise = Promise[Response]()
+
+ val q = new RequestBuilder(method.toString)
+ .setUrl(uri.toString)
+ .setBody(data)
+
+ settings.defaultHeaders.foreach {
+ case (k, v) =>
+ q.setHeader(k, v)
+ }
+
+ q.addHeader("Content-Type", contentType)
+
+ httpClient.prepareRequest(q).execute(new AsyncCompletionHandler[Unit] {
+ override def onCompleted(response: Response): Unit = {
+ promise.success(response)
+ }
+
+ override def onThrowable(t: Throwable): Unit = {
+ promise.failure(t)
+ super.onThrowable(t)
+ }
+ })
+
+ // see AsyncHttpClientFetcher
+ val parentMdcContext = MDC.getCopyOfContextMap
+ promise.future.flatMap { response =>
+ setContextMap(parentMdcContext)
+
+ val statusCode = response.getStatusCode
+ // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#2xx_Success
+ if (statusCode >= 200 && statusCode < 300) {
+ logger.debug("{}, success", fingerPrint)
+ Future.successful(())
+ } else {
+ logger.error(
+ "{}, HTTP {}, BODY:\n{}",
+ fingerPrint,
+ response.getStatusCode.asInstanceOf[AnyRef],
+ response.getResponseBody.take(100)
+ )
+ Future.failed(new IllegalStateException("An unexpected response from the server"))
+ }
+ }
+ }
+
+ private[this] def setContextMap(context: java.util.Map[String, String]): Unit =
+ Option(context).fold(MDC.clear())(MDC.setContextMap)
+
+ override def close(): Unit = {
+ httpClient.close()
+ es.shutdown()
+ }
+
+}
+
+object AsyncHttpClientUploader {
+
+ case class Settings(connectTimeout: FiniteDuration,
+ requestTimeout: FiniteDuration,
+ defaultHeaders: Map[String, String] = Map.empty)
+
+ sealed trait Method
+
+ object Method {
+
+ case object Put extends Method {
+ override val toString = "PUT"
+ }
+
+ case object Post extends Method {
+ override val toString = "POST"
+ }
+
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/http/package.scala b/src/main/scala/xyz/driver/common/http/package.scala
new file mode 100644
index 0000000..3aff80c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/http/package.scala
@@ -0,0 +1,9 @@
+package xyz.driver.common
+
+import java.net.URL
+
+import scala.concurrent.Future
+
+package object http {
+ type HttpFetcher = URL => Future[Array[Byte]]
+}
diff --git a/src/main/scala/xyz/driver/common/logging/DefaultPhiLogger.scala b/src/main/scala/xyz/driver/common/logging/DefaultPhiLogger.scala
new file mode 100644
index 0000000..ca25c44
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/DefaultPhiLogger.scala
@@ -0,0 +1,17 @@
+package xyz.driver.common.logging
+
+import org.slf4j.{Logger => Underlying}
+
+class DefaultPhiLogger private[logging](underlying: Underlying) extends PhiLogger {
+
+ def error(message: PhiString): Unit = underlying.error(message.text)
+
+ def warn(message: PhiString): Unit = underlying.warn(message.text)
+
+ def info(message: PhiString): Unit = underlying.info(message.text)
+
+ def debug(message: PhiString): Unit = underlying.debug(message.text)
+
+ def trace(message: PhiString): Unit = underlying.trace(message.text)
+
+}
diff --git a/src/main/scala/xyz/driver/common/logging/Implicits.scala b/src/main/scala/xyz/driver/common/logging/Implicits.scala
new file mode 100644
index 0000000..e486cc1
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/Implicits.scala
@@ -0,0 +1,62 @@
+package xyz.driver.common.logging
+
+import java.io.File
+import java.net.{URI, URL}
+import java.nio.file.Path
+import java.time.LocalDateTime
+import java.util.UUID
+
+import scala.concurrent.duration.Duration
+
+trait Implicits {
+
+ // DO NOT ADD!
+ // phi"$fullName" is easier to write than phi"${Unsafe(fullName)}",
+ // but writing the second version shows that you know what you are doing.
+ // implicit def toPhiString(s: String): PhiString = Unsafe(s)
+
+ implicit def toPhiStringContext(sc: StringContext): PhiStringContext = new PhiStringContext(sc)
+
+ implicit def booleanToPhiString(x: Boolean): PhiString = Unsafe(x.toString)
+
+ implicit def uriToPhiString(x: URI): PhiString = Unsafe(x.toString)
+
+ implicit def urlToPhiString(x: URL): PhiString = Unsafe(x.toString)
+
+ implicit def pathToPhiString(x: Path): PhiString = Unsafe(x.toString)
+
+ implicit def fileToPhiString(x: File): PhiString = Unsafe(x.toString)
+
+ implicit def localDateTimeToPhiString(x: LocalDateTime): PhiString = Unsafe(x.toString)
+
+ implicit def durationToPhiString(x: Duration): PhiString = Unsafe(x.toString)
+
+ implicit def uuidToPhiString(x: UUID): PhiString = Unsafe(x.toString)
+
+ implicit def tuple2ToPhiString[T1, T2](x: (T1, T2))
+ (implicit inner1: T1 => PhiString,
+ inner2: T2 => PhiString): PhiString = x match {
+ case (a, b) => phi"($a, $b)"
+ }
+
+ implicit def tuple3ToPhiString[T1, T2, T3](x: (T1, T2, T3))
+ (implicit inner1: T1 => PhiString,
+ inner2: T2 => PhiString,
+ inner3: T3 => PhiString): PhiString = x match {
+ case (a, b, c) => phi"($a, $b, $c)"
+ }
+
+ implicit def optionToPhiString[T](opt: Option[T])(implicit inner: T => PhiString): PhiString = opt match {
+ case None => phi"None"
+ case Some(x) => phi"Some($x)"
+ }
+
+ implicit def iterableToPhiString[T](xs: Iterable[T])(implicit inner: T => PhiString): PhiString = {
+ Unsafe(xs.map(inner(_).text).mkString("Col(", ", ", ")"))
+ }
+
+ implicit def throwableToPhiString(x: Throwable): PhiString = {
+ Unsafe(Option(x.getMessage).getOrElse(x.getClass.getName))
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/logging/PhiLogger.scala b/src/main/scala/xyz/driver/common/logging/PhiLogger.scala
new file mode 100644
index 0000000..c8907a8
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/PhiLogger.scala
@@ -0,0 +1,15 @@
+package xyz.driver.common.logging
+
+trait PhiLogger {
+
+ def error(message: PhiString): Unit
+
+ def warn(message: PhiString): Unit
+
+ def info(message: PhiString): Unit
+
+ def debug(message: PhiString): Unit
+
+ def trace(message: PhiString): Unit
+
+}
diff --git a/src/main/scala/xyz/driver/common/logging/PhiLogging.scala b/src/main/scala/xyz/driver/common/logging/PhiLogging.scala
new file mode 100644
index 0000000..b8cdcf0
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/PhiLogging.scala
@@ -0,0 +1,20 @@
+package xyz.driver.common.logging
+
+import org.slf4j.LoggerFactory
+
+trait PhiLogging extends Implicits {
+
+ protected val logger: PhiLogger = new DefaultPhiLogger(LoggerFactory.getLogger(getClass.getName))
+
+ /**
+ * Logs failMessage at the error level if isSuccessful is false.
+ * @return isSuccessful
+ */
+ protected def loggedError(isSuccessful: Boolean, failMessage: PhiString): Boolean = {
+ if (!isSuccessful) {
+ logger.error(failMessage)
+ }
+ isSuccessful
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/logging/PhiString.scala b/src/main/scala/xyz/driver/common/logging/PhiString.scala
new file mode 100644
index 0000000..ce1b90c
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/PhiString.scala
@@ -0,0 +1,6 @@
+package xyz.driver.common.logging
+
+class PhiString(private[logging] val text: String) {
+ // scalastyle:off
+ @inline def +(that: PhiString) = new PhiString(this.text + that.text)
+}
diff --git a/src/main/scala/xyz/driver/common/logging/PhiStringContext.scala b/src/main/scala/xyz/driver/common/logging/PhiStringContext.scala
new file mode 100644
index 0000000..8b3c9d0
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/PhiStringContext.scala
@@ -0,0 +1,8 @@
+package xyz.driver.common.logging
+
+final class PhiStringContext(val sc: StringContext) extends AnyVal {
+ def phi(args: PhiString*): PhiString = {
+ val phiArgs = args.map(_.text)
+ new PhiString(sc.s(phiArgs: _*))
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/logging/Unsafe.scala b/src/main/scala/xyz/driver/common/logging/Unsafe.scala
new file mode 100644
index 0000000..c605c85
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/Unsafe.scala
@@ -0,0 +1,6 @@
+package xyz.driver.common.logging
+
+/**
+ * Use it with care!
+ */
+case class Unsafe[T](private[logging] val value: T) extends PhiString(Option(value).map(_.toString).getOrElse("<null>"))
diff --git a/src/main/scala/xyz/driver/common/logging/package.scala b/src/main/scala/xyz/driver/common/logging/package.scala
new file mode 100644
index 0000000..479f59e
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/logging/package.scala
@@ -0,0 +1,3 @@
+package xyz.driver.common
+
+package object logging extends Implicits
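
A minimal sketch of how these logging pieces fit together (the service and method are hypothetical):

    import xyz.driver.common.domain.{Label, LongId}
    import xyz.driver.common.logging._

    class LabelService extends PhiLogging {
      def rename(id: LongId[Label], newName: String): Unit = {
        // LongId has an implicit toPhiString and can be interpolated directly;
        // the raw name may be PHI, so it must be wrapped in Unsafe explicitly.
        logger.info(phi"Renaming label $id to ${Unsafe(newName)}")
      }
    }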
diff --git a/src/main/scala/xyz/driver/common/pdf/PdfRenderer.scala b/src/main/scala/xyz/driver/common/pdf/PdfRenderer.scala
new file mode 100644
index 0000000..9882f5d
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/pdf/PdfRenderer.scala
@@ -0,0 +1,13 @@
+package xyz.driver.common.pdf
+
+import java.nio.file.Path
+
+trait PdfRenderer {
+
+ def render(html: String, documentName: String, force: Boolean = false): Path
+
+ def delete(documentName: String): Unit
+
+ def getPath(fileName: String): Path
+
+}
diff --git a/src/main/scala/xyz/driver/common/pdf/WkHtmlToPdfRenderer.scala b/src/main/scala/xyz/driver/common/pdf/WkHtmlToPdfRenderer.scala
new file mode 100644
index 0000000..0e0b338
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/pdf/WkHtmlToPdfRenderer.scala
@@ -0,0 +1,106 @@
+package xyz.driver.common.pdf
+
+import java.io.IOException
+import java.nio.file._
+
+import io.github.cloudify.scala.spdf._
+import xyz.driver.common.logging._
+import xyz.driver.common.pdf.WkHtmlToPdfRenderer.Settings
+
+object WkHtmlToPdfRenderer {
+
+ final case class Settings(downloadsDir: String) {
+
+ lazy val downloadsPath: Path = getPathFrom(downloadsDir)
+
+ private def getPathFrom(x: String): Path = {
+ val dirPath = if (x.startsWith("/")) Paths.get(x)
+ else {
+ val workingDir = Paths.get(".")
+ workingDir.resolve(x)
+ }
+
+ dirPath.toAbsolutePath.normalize()
+ }
+
+ }
+
+}
+
+class WkHtmlToPdfRenderer(settings: Settings) extends PdfRenderer with PhiLogging {
+
+ private val pdf = Pdf(new PdfConfig {
+ disableJavascript := true
+ disableExternalLinks := true
+ disableInternalLinks := true
+ printMediaType := Some(true)
+ orientation := Portrait
+ pageSize := "A4"
+ lowQuality := true
+ })
+
+ override def render(html: String, documentName: String, force: Boolean = false): Path = {
+ checkedCreate(html, documentName, force)
+ }
+
+ override def delete(documentName: String): Unit = {
+ logger.trace(phi"delete(${Unsafe(documentName)})")
+
+ val file = getPath(documentName)
+ logger.debug(phi"File: $file")
+ if (Files.deleteIfExists(file)) {
+ logger.info(phi"Deleted")
+ } else {
+ logger.warn(phi"Doesn't exist")
+ }
+ }
+
+ override def getPath(documentName: String): Path = {
+ settings.downloadsPath.resolve(s"$documentName.pdf").toAbsolutePath
+ }
+
+ protected def checkedCreate[SourceT: SourceDocumentLike](src: SourceT, fileName: String, force: Boolean): Path = {
+ logger.trace(phi"checkedCreate(fileName=${Unsafe(fileName)}, force=$force)")
+
+ val dest = getPath(fileName)
+ logger.debug(phi"Destination file: $dest")
+
+ if (force || !dest.toFile.exists()) {
+ logger.trace(phi"Force refresh the file")
+ val newDocPath = forceCreate(src, dest)
+ logger.info(phi"Updated")
+ newDocPath
+ } else if (dest.toFile.exists()) {
+ logger.trace(phi"Already exists")
+ dest
+ } else {
+ logger.trace(phi"The file does not exist")
+ val newDocPath = forceCreate(src, dest)
+ logger.info(phi"Created")
+ newDocPath
+ }
+ }
+
+ protected def forceCreate[SourceT: SourceDocumentLike](src: SourceT, dest: Path): Path = {
+ logger.trace(phi"forceCreate[${Unsafe(src.getClass.getName)}](dest=$dest)")
+
+ val destTemp = Files.createTempFile("driver", ".pdf")
+ val destTempFile = destTemp.toFile
+
+ Files.createDirectories(dest.getParent)
+
+ val retCode = pdf.run(src, destTempFile)
+ lazy val pdfSize = destTempFile.length()
+ if (retCode != 0) {
+ // Try to google "wkhtmltopdf returns {retCode}"
+ throw new IOException(s"Cannot create the document, the return code is $retCode")
+ } else if (pdfSize == 0) {
+ // Anything could happen, e.g. https://github.com/wkhtmltopdf/wkhtmltopdf/issues/2540
+ throw new IOException("The pdf is empty")
+ } else {
+ logger.debug(phi"Size: ${Unsafe(pdfSize)}B")
+ Files.move(destTemp, dest, StandardCopyOption.REPLACE_EXISTING)
+ dest
+ }
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/resources/ResourcesStorage.scala b/src/main/scala/xyz/driver/common/resources/ResourcesStorage.scala
new file mode 100644
index 0000000..f52d992
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/resources/ResourcesStorage.scala
@@ -0,0 +1,39 @@
+package xyz.driver.common.resources
+
+import scala.io.{Codec, Source}
+
+trait ResourcesStorage {
+
+ /**
+ * @param resourcePath Must start with a leading /
+ */
+ def getFirstLine(resourcePath: String): String
+
+}
+
+object RealResourcesStorage extends ResourcesStorage {
+
+ def getFirstLine(resourcePath: String): String = {
+ val resourceUrl = getClass.getResource(resourcePath)
+ Option(resourceUrl) match {
+ case Some(url) =>
+ val source = Source.fromURL(resourceUrl)(Codec.UTF8)
+ try {
+ val lines = source.getLines()
+ if (lines.isEmpty) throw new RuntimeException(s"'$resourcePath' is empty")
+ else lines.next()
+ } finally {
+ source.close()
+ }
+ case None =>
+ throw new RuntimeException(s"Can not find the '$resourcePath'!")
+ }
+ }
+
+}
+
+object FakeResourcesStorage extends ResourcesStorage {
+
+ def getFirstLine(resourcePath: String): String = ""
+
+}
diff --git a/src/main/scala/xyz/driver/common/utils/Computation.scala b/src/main/scala/xyz/driver/common/utils/Computation.scala
new file mode 100644
index 0000000..a435afe
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/Computation.scala
@@ -0,0 +1,110 @@
+package xyz.driver.common.utils
+
+import scala.concurrent.{ExecutionContext, Future}
+
+/**
+ * Wraps a computation that may be aborted early with a final result.
+ *
+ * Success(either) - the computation will be continued.
+ * Failure(error) - the computation failed with an unhandled error.
+ *
+ * Either[Result, T]:
+ * Left(result) is a final, handled result; subsequent computations (map, flatMap) are ignored.
+ * Right(value) is the current result; functions in map/flatMap continue the computation.
+ *
+ * Example:
+ * {{{
+ * import scala.concurrent.ExecutionContext.Implicits.global
+ * import scala.concurrent.{ExecutionContext, Future}
+ * import com.drivergrp.server.com.drivergrp.server.common.utils.Computation
+ *
+ * def successful = for {
+ * x <- Computation.continue(1)
+ * y <- Computation.continue(2)
+ * } yield s"\$x + \$y"
+ *
+ * // Prints "Success(1 + 2)"
+ * successful.join.onComplete(print)
+ *
+ * def failed = for {
+ * x <- Computation.abort("Failed on x")
+ * _ = print("Second step")
+ * y <- Computation.continue(2)
+ * } yield s"\$x + \$y"
+ *
+ * // Prints "Success(Failed on x)"
+ * failed.join.onComplete(print)
+ * }}}
+ *
+ * TODO: Make future private
+ *
+ * @param future The underlying future that holds the current state of the computation.
+ * @tparam R Result type of an aborted computation.
+ * @tparam T Result type of a continued computation.
+ */
+final case class Computation[+R, +T](future: Future[Either[R, T]]) {
+
+ def flatMap[R2, T2](f: T => Computation[R2, T2])(implicit ec: ExecutionContext, ev: R <:< R2): Computation[R2, T2] = {
+ Computation(future.flatMap {
+ case Left(x) => Future.successful(Left(x))
+ case Right(x) => f(x).future
+ })
+ }
+
+ def processExceptions[R2](f: PartialFunction[Throwable, R2])
+ (implicit ev1: R <:< R2,
+ ec: ExecutionContext): Computation[R2, T] = {
+ val strategy = f.andThen(x => Left(x): Either[R2, T])
+ val castedFuture: Future[Either[R2, T]] = future.map {
+ case Left(x) => Left(x)
+ case Right(x) => Right(x)
+ }
+ Computation(castedFuture.recover(strategy))
+ }
+
+ def map[T2](f: T => T2)(implicit ec: ExecutionContext): Computation[R, T2] = flatMap { a =>
+ Computation.continue(f(a))
+ }
+
+ def andThen(f: T => Any)(implicit ec: ExecutionContext): Computation[R, T] = map { a =>
+ f(a)
+ a
+ }
+
+ def filter(f: T => Boolean)(implicit ec: ExecutionContext): Computation[R, T] = map { a =>
+ if (f(a)) a
+ else throw new NoSuchElementException("When filtering")
+ }
+
+ def withFilter(f: T => Boolean)(implicit ec: ExecutionContext): Computation[R, T] = filter(f)
+
+ def foreach[T2](f: T => T2)(implicit ec: ExecutionContext): Unit = future.foreach {
+ case Right(x) => f(x)
+ case _ =>
+ }
+
+ def toFuture[R2](resultFormatter: T => R2)(implicit ec: ExecutionContext, ev: R <:< R2): Future[R2] = future.map {
+ case Left(x) => x
+ case Right(x) => resultFormatter(x)
+ }
+
+ def toFuture[R2](implicit ec: ExecutionContext, ev1: R <:< R2, ev2: T <:< R2): Future[R2] = future.map {
+ case Left(x) => x
+ case Right(x) => x
+ }
+
+}
+
+object Computation {
+
+ def continue[T](x: T): Computation[Nothing, T] = Computation(Future.successful(Right(x)))
+
+ def abort[R](result: R): Computation[R, Nothing] = Computation(Future.successful(Left(result)))
+
+ def fail(exception: Throwable): Computation[Nothing, Nothing] = Computation(Future.failed(exception))
+
+ def fromFuture[T](input: Future[T])(implicit ec: ExecutionContext): Computation[Nothing, T] = Computation {
+ input.map { x => Right(x) }
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/utils/FutureUtils.scala b/src/main/scala/xyz/driver/common/utils/FutureUtils.scala
new file mode 100644
index 0000000..6933c63
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/FutureUtils.scala
@@ -0,0 +1,19 @@
+package xyz.driver.common.utils
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Try
+
+object FutureUtils {
+
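+ /**
+ * Runs `f` with an execution context that executes tasks on the calling thread and returns the
+ * (by then completed) result. Intended for tests; if `f` crosses a real asynchronous boundary,
+ * the returned future may not be completed and `value.get` will throw.
+ * Illustrative example, not from the original sources:
+ * {{{
+ * FutureUtils.executeSynchronously(implicit ec => Future(21 * 2)) // Success(42)
+ * }}}
+ */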
+ def executeSynchronously[T](f: ExecutionContext => Future[T]): Try[T] = {
+ val future = f {
+ new ExecutionContext {
+ override def reportFailure(cause: Throwable): Unit = cause.printStackTrace()
+
+ override def execute(runnable: Runnable): Unit = runnable.run()
+ }
+ }
+ future.value.get
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/utils/Implicits.scala b/src/main/scala/xyz/driver/common/utils/Implicits.scala
new file mode 100644
index 0000000..bdccaac
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/Implicits.scala
@@ -0,0 +1,22 @@
+package xyz.driver.common.utils
+
+import scala.collection.generic.CanBuildFrom
+
+object Implicits {
+
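+ /**
+ * Appends `value` to the collection only when `cond` evaluates to true.
+ * Illustrative example, not from the original sources:
+ * {{{
+ * import xyz.driver.common.utils.Implicits._
+ *
+ * Seq(1, 2).condAppend(cond = true, value = 3) // Seq(1, 2, 3)
+ * Seq(1, 2).condAppend(cond = false, value = 3) // Seq(1, 2)
+ * }}}
+ */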
+ final class ConditionalAppend[U, T[U] <: TraversableOnce[U]](val c: T[U]) extends AnyVal {
+ def condAppend(cond: => Boolean, value: U)(implicit cbf: CanBuildFrom[T[U], U, T[U]]): T[U] = {
+ val col = cbf()
+ if (cond) {
+ ((col ++= c) += value).result
+ } else {
+ c.asInstanceOf[T[U]]
+ }
+ }
+ }
+
+ implicit def traversableConditionalAppend[U, T[U] <: TraversableOnce[U]](c: T[U]): ConditionalAppend[U, T] =
+ new ConditionalAppend[U, T](c)
+
+ implicit def toMapOps[K, V](x: Map[K, V]): MapOps[K, V] = new MapOps(x)
+}
diff --git a/src/main/scala/xyz/driver/common/utils/JsonSerializer.scala b/src/main/scala/xyz/driver/common/utils/JsonSerializer.scala
new file mode 100644
index 0000000..936a983
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/JsonSerializer.scala
@@ -0,0 +1,27 @@
+package xyz.driver.common.utils
+
+import java.text.SimpleDateFormat
+
+import com.fasterxml.jackson.annotation.JsonInclude
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
+
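+/**
+ * Thin Jackson-based JSON (de)serializer with the Scala and JSR-310 modules enabled.
+ * Illustrative example, not from the original sources:
+ * {{{
+ * val json = JsonSerializer.serialize(Map("a" -> 1)) // """{"a":1}"""
+ * val back = JsonSerializer.deserialize[Map[String, Int]](json) // Map("a" -> 1)
+ * }}}
+ */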
+object JsonSerializer {
+
+ private val mapper = new ObjectMapper() with ScalaObjectMapper
+ mapper.registerModule(DefaultScalaModule)
+ mapper.registerModule(new JavaTimeModule)
+ mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"))
+ mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
+
+ def serialize(value: Any): String = {
+ mapper.writeValueAsString(value)
+ }
+
+ def deserialize[T](value: String)(implicit m: Manifest[T]): T = {
+ mapper.readValue(value)
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/utils/MapOps.scala b/src/main/scala/xyz/driver/common/utils/MapOps.scala
new file mode 100644
index 0000000..759fdea
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/MapOps.scala
@@ -0,0 +1,10 @@
+package xyz.driver.common.utils
+
+final class MapOps[K, V](val self: Map[K, V]) extends AnyVal {
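+ /**
+ * Inverts the map, grouping together all keys that share the same value.
+ * Illustrative example, not from the original sources (uses the `toMapOps` implicit from Implicits):
+ * {{{
+ * import xyz.driver.common.utils.Implicits._
+ *
+ * Map(1 -> "a", 2 -> "a", 3 -> "b").swap // Map("a" -> Set(1, 2), "b" -> Set(3))
+ * }}}
+ */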
+ def swap: Map[V, Set[K]] = {
+ self
+ .toList
+ .groupBy { case (_, v) => v }
+ .mapValues(_.map { case (k, _) => k }.toSet)
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/utils/RandomUtils.scala b/src/main/scala/xyz/driver/common/utils/RandomUtils.scala
new file mode 100644
index 0000000..d4cd4dc
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/RandomUtils.scala
@@ -0,0 +1,20 @@
+package xyz.driver.common.utils
+
+import java.util.concurrent.ThreadLocalRandom
+
+import scala.collection._
+
+object RandomUtils {
+
+ private def Random = ThreadLocalRandom.current()
+
+ private val chars: Seq[Char] = ('0' to '9') ++ ('a' to 'z')
+
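+ /**
+ * Generates a random string of the requested length using lowercase latin letters and digits,
+ * e.g. randomString(6) might return "k3x09a" (illustrative, not from the original sources).
+ */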
+ def randomString(len: Int): String = {
+ (0 until len).map({ _ =>
+ val i = Random.nextInt(0, chars.size)
+ chars(i)
+ })(breakOut)
+ }
+
+}
diff --git a/src/main/scala/xyz/driver/common/utils/ServiceUtils.scala b/src/main/scala/xyz/driver/common/utils/ServiceUtils.scala
new file mode 100644
index 0000000..dd309fb
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/ServiceUtils.scala
@@ -0,0 +1,32 @@
+package xyz.driver.common.utils
+
+import xyz.driver.common.db.SearchFilterBinaryOperation.Eq
+import xyz.driver.common.db.SearchFilterExpr
+import xyz.driver.common.db.SearchFilterExpr.{Atom, Dimension}
+import xyz.driver.common.logging._
+
+import scala.util.{Failure, Success, Try}
+
+
+object ServiceUtils extends PhiLogging {
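+ /**
+ * Searches the filter expression tree for an equality (Eq) atom on the given field/dimension.
+ * Illustrative sketch, not from the original sources:
+ * {{{
+ * val filter = SearchFilterExpr.Intersection(Seq(
+ * Atom.Binary("id", Eq, "42"),
+ * Atom.Binary("name", Eq, "x")
+ * ))
+ *
+ * ServiceUtils.findEqFilter(filter, "id") // expected: Some(Atom.Binary("id", Eq, "42"))
+ * }}}
+ */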
+ def findEqFilter(filter: SearchFilterExpr, fieldName: String): Option[SearchFilterExpr] = {
+ findEqFilter(filter, Dimension(None, fieldName))
+ }
+
+ def findEqFilter(filter: SearchFilterExpr, dimension: Dimension): Option[SearchFilterExpr] = {
+ filter.find {
+ case Atom.Binary(`dimension`, Eq, _) => true // backticks: match the method parameter, not a new binding
+ case _ => false
+ }
+ }
+
+ def convertIdInFilterToLong(value: AnyRef): Option[Long] = {
+ Try(value.toString.toLong) match {
+ case Success(id) =>
+ Option(id)
+ case Failure(e) =>
+ logger.error(phi"Incorrect id format in filter $e")
+ None
+ }
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/utils/Utils.scala b/src/main/scala/xyz/driver/common/utils/Utils.scala
new file mode 100644
index 0000000..39f1294
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/utils/Utils.scala
@@ -0,0 +1,23 @@
+package xyz.driver.common.utils
+
+import java.time.LocalDateTime
+
+object Utils {
+
+ implicit val localDateTimeOrdering: Ordering[LocalDateTime] = Ordering.fromLessThan(_ isBefore _)
+
+ /**
+ * Hack to avoid scala compiler bug with getSimpleName
+ * @see https://issues.scala-lang.org/browse/SI-2034
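+ * {{{
+ * Utils.getClassSimpleName(classOf[String]) // "String" (illustrative, not from the original sources)
+ * }}}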
+ */
+ def getClassSimpleName(klass: Class[_]): String = {
+ try {
+ klass.getSimpleName
+ } catch {
+ case _: InternalError =>
+ val fullName = klass.getName.stripSuffix("$")
+ val fullClassName = fullName.substring(fullName.lastIndexOf(".") + 1)
+ fullClassName.substring(fullClassName.lastIndexOf("$") + 1)
+ }
+ }
+}
diff --git a/src/main/scala/xyz/driver/common/validation/ValidationError.scala b/src/main/scala/xyz/driver/common/validation/ValidationError.scala
new file mode 100644
index 0000000..6db445d
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/validation/ValidationError.scala
@@ -0,0 +1,3 @@
+package xyz.driver.common.validation
+
+final case class ValidationError(message: String)
diff --git a/src/main/scala/xyz/driver/common/validation/Validators.scala b/src/main/scala/xyz/driver/common/validation/Validators.scala
new file mode 100644
index 0000000..8d807f4
--- /dev/null
+++ b/src/main/scala/xyz/driver/common/validation/Validators.scala
@@ -0,0 +1,41 @@
+package xyz.driver.common.validation
+
+import xyz.driver.common.logging._
+import xyz.driver.common.utils.JsonSerializer
+
+import scala.util.control.NonFatal
+
+object Validators extends PhiLogging {
+
+ type Validator[Input, Refined] = Input => Either[ValidationError, Refined]
+
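+ /**
+ * Builds a validator from a partial function; inputs outside its domain fail with `message`.
+ * Illustrative example, not from the original sources:
+ * {{{
+ * val positive = Validators.generic[Int, Int]("value must be positive") { case x if x > 0 => x }
+ *
+ * positive(1) // Right(1)
+ * positive(-1) // Left(ValidationError("value must be positive"))
+ * }}}
+ */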
+ def generic[T, R](message: String)(f: PartialFunction[T, R]): Validator[T, R] = { value =>
+ if (f.isDefinedAt(value)) Right(f(value))
+ else Left(ValidationError(message))
+ }
+
+ def nonEmpty[T](field: String): Validator[Option[T], T] = generic(s"$field is empty") {
+ case Some(x) => x
+ }
+
+ def nonEmptyString(field: String): Validator[String, String] = generic(s"$field is empty") {
+ case x if x.nonEmpty => x
+ }
+
+ def deserializableTo[Refined](field: String)
+ (value: String)
+ (implicit m: Manifest[Refined]): Either[ValidationError, Refined] = {
+ try {
+ Right(JsonSerializer.deserialize[Refined](value))
+ } catch {
+ case NonFatal(e) =>
+ logger.error(phi"Can not deserialize the ${Unsafe(field)}: $e")
+ Left(ValidationError(s"$field is invalid"))
+ }
+ }
+
+ def success[T](result: T): Either[Nothing, T] = Right(result)
+
+ def fail(message: String): Either[ValidationError, Nothing] = Left(ValidationError(message))
+
+}
diff --git a/src/test/scala/xyz/driver/common/BaseSuite.scala b/src/test/scala/xyz/driver/common/BaseSuite.scala
new file mode 100644
index 0000000..b4c3d03
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/BaseSuite.scala
@@ -0,0 +1,51 @@
+package xyz.driver.common
+
+import java.time.{LocalDateTime, ZoneId}
+
+import org.scalatest.FreeSpecLike
+import org.scalatest.concurrent.ScalaFutures
+import org.scalatest.time.{Millis, Span}
+import xyz.driver.common.db.{MysqlQueryBuilder, SearchFilterExpr, SqlContext, Transactions}
+import xyz.driver.common.domain.{Email, LongId, PasswordHash, User}
+import xyz.driver.common.error.UnexpectedFilterException
+import xyz.driver.common.utils.DiffUtils
+
+import scala.concurrent.ExecutionContext.Implicits._
+import scala.concurrent.Future
+
+trait BaseSuite extends FreeSpecLike with DiffUtils with ScalaFutures {
+
+ implicit val defaultPatience = PatienceConfig(timeout = Span(1000, Millis), interval = Span(20, Millis))
+ implicit val sqlContext = new MockSqlContext(global)
+
+ def sampleUser(role: User.Role,
+ email: String = "test@example.com",
+ password: String = "123") = User(
+ id = LongId(2001),
+ email = Email(email),
+ name = "Test",
+ role = role,
+ passwordHash = PasswordHash(password),
+ latestActivity = Some(LocalDateTime.now(ZoneId.of("Z"))),
+ deleted = None
+ )
+
+ def createMockQueryBuilder[T](isExpectedFilter: SearchFilterExpr => Boolean): MysqlQueryBuilder[T] = {
+ MockQueryBuilder[T] {
+ case (filter, _, _) if isExpectedFilter(filter) =>
+ Future.successful(Seq.empty)
+ case (filter, _, _) =>
+ Future.failed(new UnexpectedFilterException(s"Filter is unexpected: $filter"))
+ } {
+ case _ =>
+ Future.successful((0, Option.empty[LocalDateTime]))
+ }
+ }
+
+ def transactions = new Transactions {
+ override def run[T](f: (SqlContext) => T): Future[T] = {
+ Future(f(sqlContext))
+ }
+ }
+
+}
diff --git a/src/test/scala/xyz/driver/common/Mocks.scala b/src/test/scala/xyz/driver/common/Mocks.scala
new file mode 100644
index 0000000..480ab48
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/Mocks.scala
@@ -0,0 +1,89 @@
+package xyz.driver.common
+
+import java.io.{Closeable, PrintWriter}
+import java.net.URL
+import java.sql.Connection
+import java.util.logging.Logger
+import javax.sql.DataSource
+
+import com.typesafe.config.ConfigFactory
+import xyz.driver.common.db._
+import xyz.driver.common.http.HttpFetcher
+
+import scala.concurrent.{ExecutionContext, Future}
+
+class MockDataSource extends DataSource with Closeable {
+ override def getConnection: Connection = throw new NotImplementedError("MockDataSource.getConnection")
+ override def getConnection(username: String, password: String): Connection = {
+ throw new NotImplementedError(s"MockDataSource.getConnection($username, $password)")
+ }
+ override def close(): Unit = throw new NotImplementedError("MockDataSource.close")
+ override def setLogWriter(out: PrintWriter): Unit = throw new NotImplementedError("MockDataSource.setLogWriter")
+ override def getLoginTimeout: Int = throw new NotImplementedError("MockDataSource.getLoginTimeout")
+ override def setLoginTimeout(seconds: Int): Unit = throw new NotImplementedError("MockDataSource.setLoginTimeout")
+ override def getParentLogger: Logger = throw new NotImplementedError("MockDataSource.getParentLogger")
+ override def getLogWriter: PrintWriter = throw new NotImplementedError("MockDataSource.getLogWriter")
+ override def unwrap[T](iface: Class[T]): T = throw new NotImplementedError("MockDataSource.unwrap")
+ override def isWrapperFor(iface: Class[_]): Boolean = throw new NotImplementedError("MockDataSource.isWrapperFor")
+}
+
+object MockSqlContext {
+
+ val Settings = SqlContext.Settings(
+ credentials = SqlContext.DbCredentials(
+ user = "test",
+ password = "test",
+ host = "localhost",
+ port = 3248,
+ dbName = "test",
+ dbCreateFlag = false,
+ dbContext = "test",
+ connectionParams = "",
+ url = ""
+ ),
+ connection = ConfigFactory.empty(),
+ connectionAttemptsOnStartup = 1,
+ threadPoolSize = 10
+ )
+
+}
+
+class MockSqlContext(ec: ExecutionContext) extends SqlContext(new MockDataSource, MockSqlContext.Settings) {
+ override implicit val executionContext = ec
+ override protected def withConnection[T](f: Connection => T) = {
+ throw new NotImplementedError("MockSqlContext.withConnection")
+ }
+}
+
+class MockFactory()(implicit val sqlContext: SqlContext) {
+ val MockHttpFetcher: HttpFetcher = (url: URL) => {
+ Future.successful(Array.empty[Byte])
+ }
+}
+
+object MockQueryBuilder {
+
+ type MockRunnerIn = (SearchFilterExpr, Sorting, Option[Pagination])
+ type MockRunnerOut[T] = Future[Seq[T]]
+ type MockCountRunnerOut = Future[QueryBuilder.CountResult]
+
+ def apply[T](matcher: PartialFunction[MockRunnerIn, MockRunnerOut[T]])
+ (countMatcher: PartialFunction[MockRunnerIn, MockCountRunnerOut])
+ (implicit context: SqlContext): MysqlQueryBuilder[T] = {
+ def runner(parameters: QueryBuilderParameters): MockRunnerOut[T] = {
+ matcher((parameters.filter, parameters.sorting, parameters.pagination))
+ }
+ def countRunner(parameters: QueryBuilderParameters): MockCountRunnerOut = {
+ countMatcher((parameters.filter, parameters.sorting, parameters.pagination))
+ }
+ MysqlQueryBuilder[T](
+ tableName = "",
+ lastUpdateFieldName = Option.empty[String],
+ nullableFields = Set.empty[String],
+ links = Set.empty[TableLink],
+ runner = runner _,
+ countRunner = countRunner _
+ )(context.executionContext)
+ }
+}
+
diff --git a/src/test/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala b/src/test/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala
new file mode 100644
index 0000000..e81d0b3
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala
@@ -0,0 +1,221 @@
+package xyz.driver.common.concurrent
+
+import java.util.concurrent.ThreadLocalRandom
+
+import xyz.driver.common.BaseSuite
+import xyz.driver.common.concurrent.BridgeUploadQueue.Item
+import xyz.driver.common.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy
+import xyz.driver.common.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy.{OnAttempt, OnComplete}
+import xyz.driver.common.db.repositories.BridgeUploadQueueRepository
+import xyz.driver.common.domain.LongId
+
+import scala.concurrent.Future
+import scala.concurrent.duration.DurationInt
+
+class BridgeUploadQueueRepositoryAdapterSuite extends BaseSuite {
+
+ // IDEA has some issues with the imports here
+ private implicit val executionContext = scala.concurrent.ExecutionContext.global
+
+ "Strategy" - {
+ "LimitExponential" - {
+ "calculateNextInterval" - {
+ val strategy = Strategy.LimitExponential(
+ startInterval = 10.seconds,
+ intervalFactor = 1.4,
+ maxInterval = 50.seconds,
+ onComplete = OnComplete.Delete
+ )
+
+ "a new interval should be greater than the previous one if the limit not reached" in {
+ val previous = strategy.on(1)
+ val current = strategy.on(2)
+
+ (previous, current) match {
+ case (OnAttempt.Continue(a), OnAttempt.Continue(b)) => assert(a < b)
+ case x => fail(s"Unexpected result: $x")
+ }
+ }
+
+ "should limit intervals" in {
+ assert(strategy.on(20) == OnAttempt.Continue(strategy.maxInterval))
+ }
+ }
+ }
+ }
+
+ "tryRetry" - {
+
+ "when all attempts are not out" - {
+
+ val defaultStrategy = Strategy.Constant(10.seconds)
+
+ "should return an updated item" in {
+ val repository = new BridgeUploadQueueRepository {
+ override def update(draft: EntityT): EntityT = draft
+ override def delete(id: IdT): Unit = {}
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ val item = defaultItem
+ val r = adapter.tryRetry(item).futureValue
+ assert(r.isDefined)
+ assert(!r.contains(item))
+ }
+
+ "should add an item with increased attempts" in {
+ val item = defaultItem
+
+ val repository = new BridgeUploadQueueRepository {
+ override def update(draft: EntityT): EntityT = {
+ assert(draft.attempts === (item.attempts + 1), "repository.update")
+ draft
+ }
+ override def delete(id: IdT): Unit = {}
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ adapter.tryRetry(item).isReadyWithin(100.millis)
+ }
+
+ "should remove an old item" in {
+ val item = defaultItem
+
+ val repository = new BridgeUploadQueueRepository {
+ override def update(draft: EntityT): EntityT = draft
+ override def delete(id: IdT): Unit = {
+ assert(id == item.id, "repository.delete")
+ }
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ adapter.tryRetry(item).isReadyWithin(100.millis)
+ }
+
+ "should update time of the next attempt" in {
+ val item = defaultItem
+
+ val repository = new BridgeUploadQueueRepository {
+ override def update(draft: EntityT): EntityT = {
+ assert(draft.nextAttempt.isAfter(item.nextAttempt), "repository.update")
+ draft
+ }
+ override def delete(id: IdT): Unit = {}
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ adapter.tryRetry(item).isReadyWithin(100.millis)
+ }
+
+ }
+
+ "when all attempts are out" - {
+
+ val defaultStrategy = Strategy.Ignore
+
+ "should not return an item" in {
+ val repository = new BridgeUploadQueueRepository {
+ override def delete(id: IdT): Unit = {}
+ override def update(entity: EntityT): EntityT = fail("update should not be used!")
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ val r = adapter.tryRetry(defaultItem).futureValue
+ assert(r.isEmpty)
+ }
+
+ "should not add any item to the queue" in {
+ val repository = new BridgeUploadQueueRepository {
+ override def update(draft: EntityT): EntityT = throw new IllegalAccessException("update should not be called")
+ override def delete(id: IdT): Unit = {}
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ adapter.tryRetry(defaultItem).isReadyWithin(100.millis)
+ }
+
+ "should remove the item from the queue" in {
+ val repository = new BridgeUploadQueueRepository {
+ override def delete(id: IdT): Unit = {
+ assert(id == defaultItem.id, "repository.delete")
+ }
+ override def update(entity: EntityT): EntityT = fail("update should not be used!")
+ override def add(draft: EntityT): EntityT = fail("add should not be used!")
+ override def getById(id: LongId[EntityT]): Option[EntityT] = fail("getById should not be used!")
+ override def isCompleted(kind: String, tag: String): Future[Boolean] = fail("isCompleted should not be used!")
+ override def getOne(kind: String): Future[Option[Item]] = fail("getOne should not be used!")
+ }
+
+ val adapter = new BridgeUploadQueueRepositoryAdapter(
+ strategy = defaultStrategy,
+ repository = repository,
+ transactions = transactions
+ )
+
+ adapter.tryRetry(defaultItem).isReadyWithin(100.millis)
+ }
+
+ }
+
+ }
+
+ private def defaultItem = BridgeUploadQueue.Item(
+ "test",
+ ThreadLocalRandom.current().nextInt().toString
+ )
+
+}
diff --git a/src/test/scala/xyz/driver/common/db/QueryBuilderParametersSuite.scala b/src/test/scala/xyz/driver/common/db/QueryBuilderParametersSuite.scala
new file mode 100644
index 0000000..e49ccd9
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/db/QueryBuilderParametersSuite.scala
@@ -0,0 +1,249 @@
+package xyz.driver.common.db
+
+import java.time.LocalDateTime
+
+import io.getquill.MysqlEscape
+import org.scalatest.FreeSpecLike
+import xyz.driver.common.db.QueryBuilder.TableData
+import xyz.driver.common.domain.{Email, LongId, User}
+
+class QueryBuilderParametersSuite extends FreeSpecLike {
+
+ import SearchFilterBinaryOperation._
+ import SearchFilterExpr.{Dimension => _, _}
+ import SearchFilterNAryOperation._
+ import Sorting._
+ import SortingOrder._
+
+ val tableName = "Entity"
+
+ case class Entity(id: LongId[Entity],
+ name: String,
+ email: Email,
+ optionUser: Option[LongId[User]],
+ date: LocalDateTime,
+ optionDate: Option[LocalDateTime],
+ kindId: Long)
+
+ def queryBuilderParameters = MysqlQueryBuilderParameters(
+ tableData = TableData(
+ tableName = tableName,
+ nullableFields = Set("optionUser", "optionDate")
+ ),
+ links = Map(
+ "Kind" -> TableLink("kindId", "Kind", "id"),
+ "User" -> TableLink("optionUser", "User", "id")
+ )
+ )
+
+ val queryBasis =
+ s"""select `$tableName`.*
+ |from `$tableName`""".stripMargin.trim
+
+ "toSql" - {
+ "should generate correct SQL query" - {
+ "with default parameters" in {
+ val (sql, _) = queryBuilderParameters.toSql(namingStrategy = MysqlEscape)
+ assert(sql == queryBasis)
+ }
+
+ "with filtering: " - {
+ "single atom filter" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Atom.Binary("name", Eq, "x")).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where `$tableName`.`name` = ?""".stripMargin)
+ }
+
+ "single atom filter for optional field with NotEq operation" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Atom.Binary("optionUser", NotEq, "x")).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where (`$tableName`.`optionUser` is null or `$tableName`.`optionUser` != ?)""".stripMargin)
+ }
+
+ "single atom filter for field with IN operation" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Atom.NAry("date", In, Seq("x", "x", "x"))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where `$tableName`.`date` in (?, ?, ?)""".stripMargin)
+ }
+
+ "multiple intersected filters" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Intersection(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x")
+ ))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where (`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?)""".stripMargin)
+ }
+
+ "multiple intersected nested filters" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Intersection(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x"),
+ Intersection(Seq(
+ Atom.Binary("optionUser", Eq, "x"),
+ Atom.Binary("date", LtEq, "x")
+ ))
+ ))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"$queryBasis\nwhere (`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?" +
+ s" and (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))")
+ }
+
+ "multiple unionized filters" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Union(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x")
+ ))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where (`$tableName`.`name` > ? or `$tableName`.`optionDate` >= ?)""".stripMargin.trim)
+ }
+
+ "multiple unionized nested filters" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Union(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x"),
+ Union(Seq(
+ Atom.Binary("optionUser", Eq, "x"),
+ Atom.Binary("date", LtEq, "x")
+ ))
+ ))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |where (`$tableName`.`name` > ? or `$tableName`.`optionDate` >= ? or (`$tableName`.`optionUser` = ? or `$tableName`.`date` <= ?))""".stripMargin)
+ }
+
+ "multiple unionized and intersected nested filters" in {
+ val (sql, _) = queryBuilderParameters.copy(filter = Union(Seq(
+ Intersection(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x")
+ )),
+ Intersection(Seq(
+ Atom.Binary("optionUser", Eq, "x"),
+ Atom.Binary("date", LtEq, "x")
+ ))
+ ))).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"$queryBasis\nwhere ((`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?) " +
+ s"or (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))")
+ }
+
+ "single field from foreign table" in {
+ val (sql, _) = queryBuilderParameters
+ .copy(filter = Atom.Binary(SearchFilterExpr.Dimension(Some("Kind"), "name"), Eq, "x"))
+ .toSql(namingStrategy = MysqlEscape)
+ val pattern =
+ s"""select `$tableName`.*
+ |from `$tableName`
+ |inner join `Kind` on `Entity`.`kindId` = `Kind`.`id`
+ |where `Kind`.`name` = ?""".stripMargin
+ assert(sql == pattern)
+ }
+ }
+
+ "with sorting:" - {
+ "single field sorting" in {
+ val (sql, _) = queryBuilderParameters.copy(sorting = Dimension(None, "name", Ascending)).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"""$queryBasis
+ |order by `$tableName`.`name` asc""".stripMargin)
+ }
+
+ "single foreign sorting field" in {
+ val (sql, _) = queryBuilderParameters.copy(sorting = Dimension(Some("Kind"), "name", Ascending)).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"""select `$tableName`.*
+ |from `$tableName`
+ |inner join `Kind` on `Entity`.`kindId` = `Kind`.`id`
+ |order by `Kind`.`name` asc""".stripMargin)
+ }
+
+ "multiple fields sorting" in {
+ val (sql, _) = queryBuilderParameters.copy(sorting = Sequential(Seq(
+ Dimension(None, "name", Ascending),
+ Dimension(None, "date", Descending)
+ ))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |order by `$tableName`.`name` asc, `$tableName`.`date` desc""".stripMargin)
+ }
+
+ "multiple foreign sorting field" in {
+ val (sql, _) = queryBuilderParameters.copy(sorting = Sequential(Seq(
+ Dimension(Some("Kind"), "name", Ascending),
+ Dimension(Some("User"), "name", Descending)
+ ))).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"""select `$tableName`.*
+ |from `$tableName`
+ |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
+ |inner join `User` on `$tableName`.`optionUser` = `User`.`id`
+ |order by `Kind`.`name` asc, `User`.`name` desc""".stripMargin)
+ }
+
+ "multiple field sorting (including foreign tables)" in {
+ val (sql, _) = queryBuilderParameters.copy(sorting = Sequential(Seq(
+ Dimension(Some("Kind"), "name", Ascending),
+ Dimension(None, "date", Descending)
+ ))).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"""select `$tableName`.*
+ |from `$tableName`
+ |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
+ |order by `Kind`.`name` asc, `$tableName`.`date` desc""".stripMargin)
+ }
+ }
+
+ "with pagination" in {
+ val (sql, _) = queryBuilderParameters.copy(pagination = Some(Pagination(5, 3))).toSql(namingStrategy = MysqlEscape)
+ assert(sql ==
+ s"""$queryBasis
+ |limit 10, 5""".stripMargin)
+ }
+
+ "combined" in {
+ val filter = Union(Seq(
+ Intersection(Seq(
+ Atom.Binary("name", Gt, "x"),
+ Atom.Binary("optionDate", GtEq, "x")
+ )),
+ Intersection(Seq(
+ Atom.Binary("optionUser", Eq, "x"),
+ Atom.Binary("date", LtEq, "x")
+ ))
+ ))
+ val sorting = Sequential(Seq(
+ Dimension(Some("Kind"), "name", Ascending),
+ Dimension(None, "name", Ascending),
+ Dimension(None, "date", Descending)
+ ))
+
+ val (sql, _) = queryBuilderParameters.copy(
+ filter = filter,
+ sorting = sorting,
+ pagination = Some(Pagination(5, 3))
+ ).toSql(namingStrategy = MysqlEscape)
+
+ assert(sql ==
+ s"""select `$tableName`.*
+ |from `$tableName`
+ |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
+ |where ((`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?) or (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))
+ |order by `Kind`.`name` asc, `$tableName`.`name` asc, `$tableName`.`date` desc
+ |limit 10, 5""".stripMargin)
+ }
+
+ }
+ }
+
+}
diff --git a/src/test/scala/xyz/driver/common/db/SearchFilterExprSuite.scala b/src/test/scala/xyz/driver/common/db/SearchFilterExprSuite.scala
new file mode 100644
index 0000000..3073b61
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/db/SearchFilterExprSuite.scala
@@ -0,0 +1,32 @@
+package xyz.driver.common.db
+
+import org.scalatest.{FreeSpecLike, MustMatchers}
+
+class SearchFilterExprSuite extends FreeSpecLike with MustMatchers {
+
+ "replace" - {
+ "all entities are changed" in {
+ val ast = SearchFilterExpr.Union(Seq(
+ SearchFilterExpr.Intersection(Seq(
+ SearchFilterExpr.Atom.Binary("foo", SearchFilterBinaryOperation.Gt, "10"),
+ SearchFilterExpr.Atom.Binary("foo", SearchFilterBinaryOperation.Lt, "20")
+ )),
+ SearchFilterExpr.Atom.NAry("bar", SearchFilterNAryOperation.In, Seq("x", "y", "z")),
+ SearchFilterExpr.Atom.Binary("foo", SearchFilterBinaryOperation.Eq, "40")
+ ))
+
+ val newAst = ast.replace {
+ case x: SearchFilterExpr.Atom.Binary if x.dimension.name == "foo" =>
+ x.copy(dimension = x.dimension.copy(name = "bar"))
+ }
+
+ val result = newAst.find {
+ case x: SearchFilterExpr.Atom.Binary => x.dimension.name == "foo"
+ case _ => false
+ }
+
+ result mustBe empty
+ }
+ }
+
+}
diff --git a/src/test/scala/xyz/driver/common/error/UnexpectedFilterException.scala b/src/test/scala/xyz/driver/common/error/UnexpectedFilterException.scala
new file mode 100644
index 0000000..0562b8e
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/error/UnexpectedFilterException.scala
@@ -0,0 +1,3 @@
+package xyz.driver.common.error
+
+class UnexpectedFilterException(message: String) extends RuntimeException(message)
diff --git a/src/test/scala/xyz/driver/common/logging/PhiStringContextSuite.scala b/src/test/scala/xyz/driver/common/logging/PhiStringContextSuite.scala
new file mode 100644
index 0000000..de60cc9
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/logging/PhiStringContextSuite.scala
@@ -0,0 +1,32 @@
+package xyz.driver.common.logging
+
+import org.scalatest.FreeSpecLike
+
+class PhiStringContextSuite extends FreeSpecLike {
+
+ class Foo(x: Int, y: String) {
+ val z: Boolean = true
+ }
+
+ case class Bar(y: Boolean)
+
+ implicit def fooToPhiString(foo: Foo): PhiString = new PhiString(s"Foo(z=${foo.z})")
+
+ "should not compile if there is no PhiString implicit" in assertDoesNotCompile(
+ """val bar = Bar(true)
+ |phi"bar is $bar"""".stripMargin
+ )
+
+ "should compile if there is a PhiString implicit" in assertCompiles(
+ """val foo = new Foo(1, "test")
+ |println(phi"foo is $foo}")""".stripMargin
+ )
+
+ "should not contain private info" in {
+ val foo = new Foo(42, "test")
+ val result = phi"foo is $foo".text
+ assert(!result.contains("test"))
+ assert(!result.contains("42"))
+ }
+
+}
diff --git a/src/test/scala/xyz/driver/common/pdf/MockPdfRenderer.scala b/src/test/scala/xyz/driver/common/pdf/MockPdfRenderer.scala
new file mode 100644
index 0000000..c22817f
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/pdf/MockPdfRenderer.scala
@@ -0,0 +1,25 @@
+package xyz.driver.common.pdf
+
+import java.nio.file.{Path, Paths}
+
+import xyz.driver.common.logging._
+
+object MockPdfRenderer extends PdfRenderer with PhiLogging {
+
+ private lazy val defaultDocument: Path = {
+ val uri = getClass.getResource("/pdf/example.pdf").toURI
+ Paths.get(uri)
+ }
+
+ override def render(html: String, documentName: String, force: Boolean = false): Path = {
+ logger.trace(phi"render(html, documentName=${Unsafe(documentName)})")
+ defaultDocument
+ }
+
+ override def delete(documentName: String): Unit = {
+ logger.trace(phi"delete(${Unsafe(documentName)})")
+ }
+
+ override def getPath(documentName: String): Path = defaultDocument
+
+}
diff --git a/src/test/scala/xyz/driver/common/utils/DiffUtils.scala b/src/test/scala/xyz/driver/common/utils/DiffUtils.scala
new file mode 100644
index 0000000..06199bb
--- /dev/null
+++ b/src/test/scala/xyz/driver/common/utils/DiffUtils.scala
@@ -0,0 +1,52 @@
+package xyz.driver.common.utils
+
+import java.net.URI
+import java.time.{LocalDate, LocalDateTime}
+
+import ai.x.diff._
+import org.scalatest.Assertions
+import xyz.driver.common.domain.PasswordHash
+
+import scala.io.AnsiColor
+
+trait DiffUtils {
+
+ this: Assertions =>
+
+ def assertIdentical[T: DiffShow](left: T, right: T): Unit = {
+ val diff = DiffShow.diff(left, right)
+ assert(diff.isIdentical, s"\n${AnsiColor.RESET}$diff") // reset red color
+ }
+
+ implicit def localTimeDiffShow: DiffShow[LocalDateTime] = new DiffShow[LocalDateTime] {
+ def show(x: LocalDateTime): String = s"LocalTime($x)"
+ def diff(left: LocalDateTime, right: LocalDateTime): Comparison = {
+ if (left.isEqual(right)) Identical(show(left))
+ else Different(showChange(left, right))
+ }
+ }
+
+ implicit def localDateDiffShow: DiffShow[LocalDate] = new DiffShow[LocalDate] {
+ def show(x: LocalDate): String = s"LocalDate($x)"
+ def diff(left: LocalDate, right: LocalDate): Comparison = {
+ if (left.isEqual(right)) Identical(show(left))
+ else Different(showChange(left, right))
+ }
+ }
+
+ implicit def urlDiffShow: DiffShow[URI] = new DiffShow[URI] {
+ def show(x: URI): String = s"URI($x)"
+ def diff(left: URI, right: URI): Comparison = {
+ if (left.equals(right)) Identical(show(left))
+ else Different(showChange(left, right))
+ }
+ }
+
+ implicit def passwordHashDiffShow: DiffShow[PasswordHash] = new DiffShow[PasswordHash] {
+ def show(x: PasswordHash): String = s"PasswordHash($x)"
+ def diff(left: PasswordHash, right: PasswordHash): Comparison = {
+ if (left.equals(right)) Identical(show(left))
+ else Different(showChange(left, right))
+ }
+ }
+}