author     Kseniya Tomskikh <ktomskih@datamonsters.co>  2017-10-23 15:34:13 +0700
committer  Kseniya Tomskikh <ktomskih@datamonsters.co>  2017-10-23 17:15:39 +0700
commit     e3b8e6ce8f659bff81b9a4bc3a79e3d2c3dd734f
tree       430986fa7935b1bd03659f4542ce15803b617e51 /src
parent     1d2131e98330284ce5a3dcfc0cfd13e012914d6a
download   rest-query-e3b8e6ce8f659bff81b9a4bc3a79e3d2c3dd734f.tar.gz
           rest-query-e3b8e6ce8f659bff81b9a4bc3a79e3d2c3dd734f.tar.bz2
           rest-query-e3b8e6ce8f659bff81b9a4bc3a79e3d2c3dd734f.zip
Removed quill query builder
Diffstat (limited to 'src')
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapter.scala  140
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/DbCommand.scala  15
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/DbCommandFactory.scala  14
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/DbIo.scala  13
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/EntityExtractorDerivation.scala  66
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/FakeDbIo.scala  9
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/JdbcDbIo.scala  28
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala  90
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/MysqlQueryBuilder.scala  88
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala  27
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala  108
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/QueryBuilder.scala  340
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/SlickQueryBuilder.scala  2
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/repositories/BridgeUploadQueueRepository.scala  21
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/repositories/Repository.scala  4
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/repositories/RepositoryLogging.scala  62
-rw-r--r--  src/test/scala/xyz/driver/pdsuicommon/BaseSuite.scala  12
-rw-r--r--  src/test/scala/xyz/driver/pdsuicommon/Mocks.scala  51
-rw-r--r--  src/test/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala  285
-rw-r--r--  src/test/scala/xyz/driver/pdsuicommon/db/QueryBuilderParametersSuite.scala  310
20 files changed, 14 insertions, 1671 deletions
diff --git a/src/main/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapter.scala b/src/main/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapter.scala
deleted file mode 100644
index 3bf9192..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapter.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-package xyz.driver.pdsuicommon.concurrent
-
-import java.time.LocalDateTime
-import java.time.temporal.ChronoUnit
-
-import xyz.driver.pdsuicommon.concurrent.BridgeUploadQueue.Item
-import xyz.driver.pdsuicommon.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy
-import xyz.driver.pdsuicommon.db._
-import xyz.driver.pdsuicommon.db.repositories.BridgeUploadQueueRepository
-import xyz.driver.pdsuicommon.logging._
-
-import scala.concurrent.duration.{Duration, FiniteDuration}
-import scala.concurrent.{ExecutionContext, Future}
-import scala.util.Try
-
-object BridgeUploadQueueRepositoryAdapter {
-
- /**
- * Defines how we work with the queue when a user attempts to remove/tryRetry an item.
- */
- sealed trait Strategy {
-
- def onComplete: Strategy.OnComplete
-
- def on(attempt: Int): Strategy.OnAttempt
-
- }
-
- object Strategy {
-
- /**
- * Retries forever, but caps the interval between attempts.
- */
- final case class LimitExponential(startInterval: FiniteDuration,
- intervalFactor: Double,
- maxInterval: FiniteDuration,
- onComplete: OnComplete)
- extends Strategy {
-
- override def on(attempt: Int): OnAttempt = {
- OnAttempt.Continue(intervalFor(attempt).min(maxInterval))
- }
-
- private def intervalFor(attempt: Int): Duration = {
- Try(startInterval * Math.pow(intervalFactor, attempt.toDouble))
- .getOrElse(maxInterval)
- }
- }
-
- /**
- * Used only in tests.
- */
- final case class Stop(onComplete: OnComplete = OnComplete.Delete) extends Strategy {
-
- override def on(attempt: Int) = OnAttempt.Complete
-
- }
-
- /**
- * Used only in tests.
- */
- final case class Constant(interval: FiniteDuration) extends Strategy {
-
- override val onComplete = OnComplete.Delete
-
- override def on(attempt: Int) = OnAttempt.Continue(interval)
-
- }
-
- sealed trait OnComplete
- object OnComplete {
- case object Delete extends OnComplete
- case object Mark extends OnComplete
-
- implicit def toPhiString(x: OnComplete): PhiString = Unsafe(x.toString)
- }
-
- sealed trait OnAttempt
- object OnAttempt {
- case object Complete extends OnAttempt
- final case class Continue(interval: Duration) extends OnAttempt
-
- implicit def toPhiString(x: OnAttempt): PhiString = Unsafe(x.toString)
- }
- }
-}
-
-class BridgeUploadQueueRepositoryAdapter(strategy: Strategy, repository: BridgeUploadQueueRepository, dbIo: DbIo)(
- implicit executionContext: ExecutionContext)
- extends BridgeUploadQueue with PhiLogging {
-
- override def add(item: Item): Future[Item] = dbIo.runAsync(repository.add(item))
-
- override def get(kind: String): Future[Option[Item]] = dbIo.runAsync(repository.getOne(kind))
-
- override def complete(kind: String, tag: String): Future[Unit] = {
- import Strategy.OnComplete._
-
- strategy.onComplete match {
- case Delete => dbIo.runAsync(repository.delete(kind, tag))
- case Mark =>
- dbIo.runAsyncTx {
- repository.getById(kind, tag) match {
- case Some(x) => repository.update(x.copy(completed = true))
- case None => throw new RuntimeException(s"Can not find the task: kind=$kind, tag=$tag")
- }
- }
- }
- }
-
- /**
- * Tries to continue the task or complete it
- */
- override def tryRetry(item: Item): Future[Option[Item]] = {
- import Strategy.OnAttempt._
-
- logger.trace(phi"tryRetry($item)")
-
- val newAttempts = item.attempts + 1
- val action = strategy.on(newAttempts)
- logger.debug(phi"Action for ${Unsafe(newAttempts)}: $action")
-
- action match {
- case Continue(newInterval) =>
- val draftItem = item.copy(
- attempts = newAttempts,
- nextAttempt = LocalDateTime.now().plus(newInterval.toMillis, ChronoUnit.MILLIS)
- )
-
- logger.debug(draftItem)
- dbIo.runAsync {
- Some(repository.update(draftItem))
- }
-
- case Complete =>
- logger.warn(phi"All attempts are out for $item, complete the task")
- complete(item.kind, item.tag).map(_ => None)
- }
- }
-}
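
The retry schedule defined by Strategy.LimitExponential above is easiest to check with concrete numbers. Below is a minimal, self-contained sketch of the removed interval formula, using the same values as the deleted BridgeUploadQueueRepositoryAdapterSuite further down in this diff (10 s start, factor 1.4, 50 s cap); everything else in it is illustration only.

import scala.concurrent.duration._

// Sketch only: the interval formula from the removed Strategy.LimitExponential,
// reduced to a standalone program so the backoff schedule can be verified by hand.
object BackoffSketch extends App {
  val start: FiniteDuration = 10.seconds
  val factor                = 1.4
  val max: FiniteDuration   = 50.seconds

  // Same formula as the removed intervalFor, capped the way on(attempt) caps it.
  def intervalFor(attempt: Int): Duration =
    (start * math.pow(factor, attempt.toDouble)).min(max)

  // attempt 1 -> 14 s, 2 -> 19.6 s, 3 -> 27.44 s, 4 -> ~38.4 s, 5 and up -> capped at 50 s
  (1 to 6).foreach(a => println(s"attempt $a: ${intervalFor(a)}"))
}
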
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/DbCommand.scala b/src/main/scala/xyz/driver/pdsuicommon/db/DbCommand.scala
deleted file mode 100644
index 0af104e..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/DbCommand.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import scala.concurrent.Future
-
-trait DbCommand {
- def runSync(): Unit
- def runAsync(transactions: DbIo): Future[Unit]
-}
-
-object DbCommand {
- val Empty: DbCommand = new DbCommand {
- override def runSync(): Unit = {}
- override def runAsync(transactions: DbIo): Future[Unit] = Future.successful(())
- }
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/DbCommandFactory.scala b/src/main/scala/xyz/driver/pdsuicommon/db/DbCommandFactory.scala
deleted file mode 100644
index 1e2fb74..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/DbCommandFactory.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import scala.concurrent.{ExecutionContext, Future}
-
-trait DbCommandFactory[T] {
- def createCommand(orig: T)(implicit ec: ExecutionContext): Future[DbCommand]
-}
-
-object DbCommandFactory {
- def empty[T]: DbCommandFactory[T] = new DbCommandFactory[T] {
- override def createCommand(orig: T)(implicit ec: ExecutionContext): Future[DbCommand] =
- Future.successful(DbCommand.Empty)
- }
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/DbIo.scala b/src/main/scala/xyz/driver/pdsuicommon/db/DbIo.scala
deleted file mode 100644
index 7c290d1..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/DbIo.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import scala.concurrent.Future
-
-/**
- * Where queries should run
- */
-trait DbIo {
- def runAsync[T](f: => T): Future[T]
- def runSync[T](f: => T): T = f
- def runAsyncTx[T](f: => T): Future[T]
- def runSyncTx[T](f: => T): Unit
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/EntityExtractorDerivation.scala b/src/main/scala/xyz/driver/pdsuicommon/db/EntityExtractorDerivation.scala
deleted file mode 100644
index 02ba322..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/EntityExtractorDerivation.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.sql.ResultSet
-
-import io.getquill.NamingStrategy
-import io.getquill.dsl.EncodingDsl
-
-import scala.language.experimental.macros
-import scala.reflect.macros.blackbox
-
-trait EntityExtractorDerivation[Naming <: NamingStrategy] { this: EncodingDsl =>
-
- /*
- Simple Quill extractor derivation for [[T]]
- Only case classes are supported; type parameters are not.
- */
- def entityExtractor[T]: (ResultSet => T) = macro EntityExtractorDerivation.impl[T]
-}
-
-object EntityExtractorDerivation {
- def impl[T: c.WeakTypeTag](c: blackbox.Context): c.Tree = {
- import c.universe._
- val namingStrategy = c.prefix.actualType
- .baseType(c.weakTypeOf[EntityExtractorDerivation[NamingStrategy]].typeSymbol)
- .typeArgs
- .head
- .typeSymbol
- .companion
- val functionBody = {
- val tpe = weakTypeOf[T]
- val resultOpt = tpe.decls.collectFirst {
- // Find first constructor of T
- case cons: MethodSymbol if cons.isConstructor =>
- // Create param list for constructor
- val params = cons.paramLists.flatten.map { param =>
- val t = param.typeSignature
- val paramName = param.name.toString
- val col = q"$namingStrategy.column($paramName)"
- // Resolve implicit decoders (from SqlContext) and apply ResultSet for each
- val d = q"implicitly[${c.prefix}.Decoder[$t]]"
- // Minus 1 because Quill JDBC decoders add one to the index.
- // ¯\_(ツ)_/¯
- val i = q"row.findColumn($col) - 1"
- val decoderName = TermName(paramName + "Decoder")
- val valueName = TermName(paramName + "Value")
- (
- q"val $decoderName = $d",
- q"val $valueName = $decoderName($i, row)",
- valueName
- )
- }
- // Call constructor with param list
- q"""
- ..${params.map(_._1)}
- ..${params.map(_._2)}
- new $tpe(..${params.map(_._3)})
- """
- }
- resultOpt match {
- case Some(result) => result
- case None => c.abort(c.enclosingPosition, s"Can not derive extractor for $tpe. Constructor not found.")
- }
- }
- q"(row: java.sql.ResultSet) => $functionBody"
- }
-}
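
The macro above turns a case class into a plain ResultSet => T function by resolving the context's Quill decoders column by column. A minimal usage sketch, assuming a MySqlContext (deleted below) is in scope; the Account case class and its columns are made up for illustration.

import java.sql.ResultSet
import xyz.driver.pdsuicommon.db.MySqlContext

// Sketch only: deriving a row extractor with the removed entityExtractor macro.
// `Account` is hypothetical; any context that mixes in EntityExtractorDerivation
// exposes entityExtractor the same way.
object ExtractorSketch {
  final case class Account(id: Long, email: String)

  def accountExtractor(sqlContext: MySqlContext): ResultSet => Account =
    sqlContext.entityExtractor[Account] // roughly: rs => new Account(<decoded id>, <decoded email>)
}
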
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/FakeDbIo.scala b/src/main/scala/xyz/driver/pdsuicommon/db/FakeDbIo.scala
deleted file mode 100644
index ac42a34..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/FakeDbIo.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import scala.concurrent.Future
-
-object FakeDbIo extends DbIo {
- override def runAsync[T](f: => T): Future[T] = Future.successful(f)
- override def runAsyncTx[T](f: => T): Future[T] = Future.successful(f)
- override def runSyncTx[T](f: => T): Unit = f
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/JdbcDbIo.scala b/src/main/scala/xyz/driver/pdsuicommon/db/JdbcDbIo.scala
deleted file mode 100644
index 44f177c..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/JdbcDbIo.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import xyz.driver.pdsuicommon.logging._
-
-import scala.concurrent.Future
-import scala.util.{Failure, Success, Try}
-
-class JdbcDbIo(sqlContext: TransactionalContext) extends DbIo with PhiLogging {
-
- override def runAsync[T](f: => T): Future[T] = {
- Future(f)(sqlContext.executionContext)
- }
-
- override def runAsyncTx[T](f: => T): Future[T] = {
- import sqlContext.executionContext
-
- Future(sqlContext.transaction(f)).andThen {
- case Failure(e) => logger.error(phi"Can't run a transaction: $e")
- }
- }
-
- override def runSyncTx[T](f: => T): Unit = {
- Try(sqlContext.transaction(f)) match {
- case Success(_) =>
- case Failure(e) => logger.error(phi"Can't run a transaction: $e")
- }
- }
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala b/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala
deleted file mode 100644
index 9d2664d..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.io.Closeable
-import java.time._
-import java.util.concurrent.Executors
-import javax.sql.DataSource
-
-import com.typesafe.config.Config
-import io.getquill._
-import xyz.driver.pdsuicommon.concurrent.MdcExecutionContext
-import xyz.driver.pdsuicommon.db.MySqlContext.Settings
-import xyz.driver.pdsuicommon.error.IncorrectIdException
-import xyz.driver.pdsuicommon.logging._
-
-import scala.concurrent.ExecutionContext
-import scala.util.control.NonFatal
-import scala.util.{Failure, Success, Try}
-
-object MySqlContext extends PhiLogging {
-
- final case class DbCredentials(user: String,
- password: String,
- host: String,
- port: Int,
- dbName: String,
- dbCreateFlag: Boolean,
- dbContext: String,
- connectionParams: String,
- url: String)
-
- final case class Settings(credentials: DbCredentials, connection: Config, threadPoolSize: Int)
-
- def apply(settings: Settings): MySqlContext = {
- // Prevent leaking credentials to a log
- Try(JdbcContextConfig(settings.connection).dataSource) match {
- case Success(dataSource) => new MySqlContext(dataSource, settings)
- case Failure(NonFatal(e)) =>
- logger.error(phi"Can not load dataSource, error: ${Unsafe(e.getClass.getName)}")
- throw new IllegalArgumentException("Can not load dataSource from config. Check your database and config")
- }
- }
-}
-
-class MySqlContext(dataSource: DataSource with Closeable, settings: Settings)
- extends MysqlJdbcContext[MysqlEscape](dataSource) with TransactionalContext
- with EntityExtractorDerivation[Literal] {
-
- private val tpe = Executors.newFixedThreadPool(settings.threadPoolSize)
-
- implicit val executionContext: ExecutionContext = {
- val orig = ExecutionContext.fromExecutor(tpe)
- MdcExecutionContext.from(orig)
- }
-
- override def close(): Unit = {
- super.close()
- tpe.shutdownNow()
- }
-
- /**
- * Overridden because the Quill JDBC optionDecoder passes null into decoders.
- * If a custom decoder has no special null handling, it will fail.
- *
- * @see https://github.com/getquill/quill/issues/535
- */
- implicit override def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
- decoder(
- sqlType = d.sqlType,
- row =>
- index => {
- try {
- val res = d(index - 1, row)
- if (row.wasNull) {
- None
- } else {
- Some(res)
- }
- } catch {
- case _: NullPointerException => None
- case _: IncorrectIdException => None
- }
- }
- )
-
- final implicit class LocalDateTimeDbOps(val left: LocalDateTime) {
-
- // scalastyle:off
- def <=(right: LocalDateTime): Quoted[Boolean] = quote(infix"$left <= $right".as[Boolean])
- }
-}
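
The optionDecoder override above exists because the stock Quill JDBC optionDecoder hands null straight to the underlying decoder; the workaround is to read the column first and only then consult ResultSet.wasNull. For reference, the same pattern in plain JDBC, with a hypothetical column name and no Quill involved.

import java.sql.ResultSet

// Sketch only: the plain-JDBC wasNull pattern that the removed optionDecoder
// override is built on.
object WasNullSketch {
  def readOptionalLong(rs: ResultSet, column: String): Option[Long] = {
    val raw = rs.getLong(column)          // JDBC returns 0 for SQL NULL...
    if (rs.wasNull()) None else Some(raw) // ...so wasNull() must be checked right after the read
  }
}
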
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/MysqlQueryBuilder.scala b/src/main/scala/xyz/driver/pdsuicommon/db/MysqlQueryBuilder.scala
deleted file mode 100644
index e2936e3..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/MysqlQueryBuilder.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.sql.ResultSet
-
-import xyz.driver.pdsuicommon.logging._
-import io.getquill.{MySQLDialect, MysqlEscape}
-
-import scala.collection.breakOut
-
-object MysqlQueryBuilder extends PhiLogging {
- import xyz.driver.pdsuicommon.db.QueryBuilder._
-
- def apply[T](tableName: String,
- lastUpdateFieldName: Option[String],
- nullableFields: Set[String],
- links: Set[TableLink],
- runner: Runner[T],
- countRunner: CountRunner): MysqlQueryBuilder[T] = {
- val parameters = MysqlQueryBuilderParameters(
- tableData = TableData(tableName, lastUpdateFieldName, nullableFields),
- links = links.map(x => x.foreignTableName -> x)(breakOut)
- )
- new MysqlQueryBuilder[T](parameters)(runner, countRunner)
- }
-
- def apply[T](tableName: String,
- lastUpdateFieldName: Option[String],
- nullableFields: Set[String],
- links: Set[TableLink],
- extractor: (ResultSet) => T)(implicit sqlContext: MySqlContext): MysqlQueryBuilder[T] = {
-
- val runner: Runner[T] = { parameters =>
- val (sql, binder) = parameters.toSql(namingStrategy = MysqlEscape)
- logger.trace(phi"Query for execute: ${Unsafe(sql)}")
- sqlContext.executeQuery[T](sql, binder, { resultSet =>
- extractor(resultSet)
- })
- }
-
- val countRunner: CountRunner = { parameters =>
- val (sql, binder) = parameters.toSql(countQuery = true, namingStrategy = MysqlEscape)
- logger.trace(phi"Query for execute: ${Unsafe(sql)}")
- sqlContext
- .executeQuery[CountResult](
- sql,
- binder, { resultSet =>
- val count = resultSet.getInt(1)
- val lastUpdate = if (parameters.tableData.lastUpdateFieldName.isDefined) {
- Option(sqlContext.localDateTimeDecoder.decoder(2, resultSet))
- } else None
-
- (count, lastUpdate)
- }
- )
- .head
- }
-
- apply[T](
- tableName = tableName,
- lastUpdateFieldName = lastUpdateFieldName,
- nullableFields = nullableFields,
- links = links,
- runner = runner,
- countRunner = countRunner
- )
- }
-}
-
-class MysqlQueryBuilder[T](parameters: MysqlQueryBuilderParameters)(implicit runner: QueryBuilder.Runner[T],
- countRunner: QueryBuilder.CountRunner)
- extends QueryBuilder[T, MySQLDialect, MysqlEscape](parameters) {
-
- def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
- new MysqlQueryBuilder[T](parameters.copy(filter = newFilter))
- }
-
- def withSorting(newSorting: Sorting): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
- new MysqlQueryBuilder[T](parameters.copy(sorting = newSorting))
- }
-
- def withPagination(newPagination: Pagination): QueryBuilder[T, MySQLDialect, MysqlEscape] = {
- new MysqlQueryBuilder[T](parameters.copy(pagination = Some(newPagination)))
- }
-
- def resetPagination: QueryBuilder[T, MySQLDialect, MysqlEscape] = {
- new MysqlQueryBuilder[T](parameters.copy(pagination = None))
- }
-}
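
Taken together with the QueryBuilder base class deleted further down, the removed builder was used roughly as follows. This is a reconstruction from the signatures visible in this diff only; the Record case class, the table name and the column names are hypothetical.

import java.sql.ResultSet
import xyz.driver.pdsuicommon.db._

// Sketch only: typical wiring of the removed MysqlQueryBuilder.
object MysqlQueryBuilderSketch {
  final case class Record(id: Long, name: String)

  def listRecords(filter: SearchFilterExpr, sorting: Sorting, pagination: Pagination)(
      implicit sqlContext: MySqlContext): (Seq[Record], Int) = {
    val extractor: ResultSet => Record = rs => Record(rs.getLong("id"), rs.getString("name"))
    val builder = MysqlQueryBuilder[Record](
      tableName = "Record",
      lastUpdateFieldName = None,
      nullableFields = Set.empty,
      links = Set.empty,
      extractor = extractor
    )
    // runWithCount comes from the QueryBuilder base class deleted further down in this diff.
    val (items, total, _) = builder
      .withFilter(filter)
      .withSorting(sorting)
      .withPagination(pagination)
      .runWithCount
    (items, total)
  }
}
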
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala
index 7bdfd1b..bb8d322 100644
--- a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala
+++ b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala
@@ -1,15 +1,12 @@
package xyz.driver.pdsuicommon.db
import java.io.Closeable
-import java.time._
-import java.util.UUID
import java.util.concurrent.Executors
import javax.sql.DataSource
import io.getquill._
import xyz.driver.pdsuicommon.concurrent.MdcExecutionContext
import xyz.driver.pdsuicommon.db.PostgresContext.Settings
-import xyz.driver.pdsuicommon.domain.UuidId
import xyz.driver.pdsuicommon.logging._
import scala.concurrent.ExecutionContext
@@ -35,8 +32,7 @@ object PostgresContext extends PhiLogging {
}
class PostgresContext(val dataSource: DataSource with Closeable, settings: Settings)
- extends PostgresJdbcContext[SnakeCase](dataSource) with TransactionalContext
- with EntityExtractorDerivation[SnakeCase] {
+ extends PostgresJdbcContext[SnakeCase](dataSource) with TransactionalContext {
private val tpe = Executors.newFixedThreadPool(settings.threadPoolSize)
@@ -49,25 +45,4 @@ class PostgresContext(val dataSource: DataSource with Closeable, settings: Setti
super.close()
tpe.shutdownNow()
}
-
- /**
- * Usable for QueryBuilder's extractors
- */
- def timestampToLocalDateTime(timestamp: java.sql.Timestamp): LocalDateTime = {
- LocalDateTime.ofInstant(timestamp.toInstant, ZoneOffset.UTC)
- }
-
- implicit def encodeUuidId[T] = MappedEncoding[UuidId[T], String](_.toString)
- implicit def decodeUuidId[T] = MappedEncoding[String, UuidId[T]] { uuid =>
- UuidId[T](UUID.fromString(uuid))
- }
-
- def decodeOptUuidId[T] = MappedEncoding[Option[String], Option[UuidId[T]]] {
- case Some(x) => Option(x).map(y => UuidId[T](UUID.fromString(y)))
- case None => None
- }
-
- implicit def decodeUuid[T] = MappedEncoding[String, UUID] { uuid =>
- UUID.fromString(uuid)
- }
}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala
deleted file mode 100644
index 0ddf811..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.sql.ResultSet
-
-import io.getquill.{PostgresDialect, PostgresEscape}
-import xyz.driver.pdsuicommon.db.PostgresQueryBuilder.SmartPostgresEscape
-
-import scala.collection.breakOut
-
-object PostgresQueryBuilder {
-
- import xyz.driver.pdsuicommon.db.QueryBuilder._
-
- trait SmartPostgresEscape extends PostgresEscape {
- override def column(s: String): String =
- if (s.startsWith("$")) s else super.column(s)
- override def default(s: String): String =
- s.split("\\.").map(ss => s""""$ss"""").mkString(".")
- }
-
- object SmartPostgresEscape extends SmartPostgresEscape
-
- type Escape = SmartPostgresEscape
- val Escape = SmartPostgresEscape
-
- def apply[T](tableName: String,
- lastUpdateFieldName: Option[String],
- nullableFields: Set[String],
- links: Set[TableLink],
- runner: Runner[T],
- countRunner: CountRunner): PostgresQueryBuilder[T] = {
- val parameters = PostgresQueryBuilderParameters(
- tableData = TableData(tableName, lastUpdateFieldName, nullableFields),
- links = links.map(x => x.foreignTableName -> x)(breakOut)
- )
- new PostgresQueryBuilder[T](parameters)(runner, countRunner)
- }
-
- def apply[T](tableName: String,
- lastUpdateFieldName: Option[String],
- nullableFields: Set[String],
- links: Set[TableLink],
- extractor: ResultSet => T)(implicit sqlContext: PostgresContext): PostgresQueryBuilder[T] = {
- apply(tableName, QueryBuilderParameters.AllFields, lastUpdateFieldName, nullableFields, links, extractor)
- }
-
- def apply[T](tableName: String,
- fields: Set[String],
- lastUpdateFieldName: Option[String],
- nullableFields: Set[String],
- links: Set[TableLink],
- extractor: ResultSet => T)(implicit sqlContext: PostgresContext): PostgresQueryBuilder[T] = {
-
- val runner: Runner[T] = { parameters =>
- val (sql, binder) = parameters.toSql(countQuery = false, fields = fields, namingStrategy = SmartPostgresEscape)
- sqlContext.executeQuery[T](sql, binder, { resultSet =>
- extractor(resultSet)
- })
- }
-
- val countRunner: CountRunner = { parameters =>
- val (sql, binder) = parameters.toSql(countQuery = true, namingStrategy = SmartPostgresEscape)
- sqlContext
- .executeQuery[CountResult](
- sql,
- binder, { resultSet =>
- val count = resultSet.getInt(1)
- val lastUpdate = if (parameters.tableData.lastUpdateFieldName.isDefined) {
- Option(resultSet.getTimestamp(2)).map(sqlContext.timestampToLocalDateTime)
- } else None
-
- (count, lastUpdate)
- }
- )
- .head
- }
-
- apply[T](
- tableName = tableName,
- lastUpdateFieldName = lastUpdateFieldName,
- nullableFields = nullableFields,
- links = links,
- runner = runner,
- countRunner = countRunner
- )
- }
-}
-
-class PostgresQueryBuilder[T](parameters: PostgresQueryBuilderParameters)(implicit runner: QueryBuilder.Runner[T],
- countRunner: QueryBuilder.CountRunner)
- extends QueryBuilder[T, PostgresDialect, PostgresQueryBuilder.Escape](parameters) {
-
- def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
- new PostgresQueryBuilder[T](parameters.copy(filter = newFilter))
- }
-
- def withSorting(newSorting: Sorting): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
- new PostgresQueryBuilder[T](parameters.copy(sorting = newSorting))
- }
-
- def withPagination(newPagination: Pagination): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
- new PostgresQueryBuilder[T](parameters.copy(pagination = Some(newPagination)))
- }
-
- def resetPagination: QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
- new PostgresQueryBuilder[T](parameters.copy(pagination = None))
- }
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/QueryBuilder.scala b/src/main/scala/xyz/driver/pdsuicommon/db/QueryBuilder.scala
deleted file mode 100644
index 0bf1ed6..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/QueryBuilder.scala
+++ /dev/null
@@ -1,340 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.sql.PreparedStatement
-import java.time.LocalDateTime
-
-import io.getquill.NamingStrategy
-import io.getquill.context.sql.idiom.SqlIdiom
-import xyz.driver.pdsuicommon.db.Sorting.{Dimension, Sequential}
-import xyz.driver.pdsuicommon.db.SortingOrder.{Ascending, Descending}
-
-import scala.collection.mutable.ListBuffer
-
-object QueryBuilder {
-
- type Runner[T] = QueryBuilderParameters => Seq[T]
-
- type CountResult = (Int, Option[LocalDateTime])
-
- type CountRunner = QueryBuilderParameters => CountResult
-
- /**
- * Binder for PreparedStatement
- */
- type Binder = PreparedStatement => PreparedStatement
-
- final case class TableData(tableName: String,
- lastUpdateFieldName: Option[String] = None,
- nullableFields: Set[String] = Set.empty)
-
- val AllFields = Set("*")
-
-}
-
-final case class TableLink(keyColumnName: String, foreignTableName: String, foreignKeyColumnName: String)
-
-object QueryBuilderParameters {
- val AllFields = Set("*")
-}
-
-sealed trait QueryBuilderParameters {
-
- def tableData: QueryBuilder.TableData
- def links: Map[String, TableLink]
- def filter: SearchFilterExpr
- def sorting: Sorting
- def pagination: Option[Pagination]
-
- def findLink(tableName: String): TableLink = links.get(tableName) match {
- case None => throw new IllegalArgumentException(s"Cannot find a link for `$tableName`")
- case Some(link) => link
- }
-
- def toSql(countQuery: Boolean = false, namingStrategy: NamingStrategy): (String, QueryBuilder.Binder) = {
- toSql(countQuery, QueryBuilderParameters.AllFields, namingStrategy)
- }
-
- def toSql(countQuery: Boolean, fields: Set[String], namingStrategy: NamingStrategy): (String, QueryBuilder.Binder) = {
- val escapedTableName = namingStrategy.table(tableData.tableName)
- val fieldsSql: String = if (countQuery) {
- val suffix: String = tableData.lastUpdateFieldName match {
- case Some(lastUpdateField) => s", max($escapedTableName.${namingStrategy.column(lastUpdateField)})"
- case None => ""
- }
- "count(*)" + suffix
- } else {
- if (fields == QueryBuilderParameters.AllFields) {
- s"$escapedTableName.*"
- } else {
- fields
- .map { field =>
- s"$escapedTableName.${namingStrategy.column(field)}"
- }
- .mkString(", ")
- }
- }
- val (where, bindings) = filterToSql(escapedTableName, filter, namingStrategy)
- val orderBy = sortingToSql(escapedTableName, sorting, namingStrategy)
-
- val limitSql = limitToSql()
-
- val sql = new StringBuilder()
- sql.append("select ")
- sql.append(fieldsSql)
- sql.append("\nfrom ")
- sql.append(escapedTableName)
-
- val filtersTableLinks: Seq[TableLink] = {
- import SearchFilterExpr._
- def aux(expr: SearchFilterExpr): Seq[TableLink] = expr match {
- case Atom.TableName(tableName) => List(findLink(tableName))
- case Intersection(xs) => xs.flatMap(aux)
- case Union(xs) => xs.flatMap(aux)
- case _ => Nil
- }
- aux(filter)
- }
-
- val sortingTableLinks: Seq[TableLink] = Sorting.collect(sorting) {
- case Dimension(Some(foreignTableName), _, _) => findLink(foreignTableName)
- }
-
- // Combine links from sorting and filter without duplicates
- val foreignTableLinks = (filtersTableLinks ++ sortingTableLinks).distinct
-
- foreignTableLinks.foreach {
- case TableLink(keyColumnName, foreignTableName, foreignKeyColumnName) =>
- val escapedForeignTableName = namingStrategy.table(foreignTableName)
-
- sql.append("\ninner join ")
- sql.append(escapedForeignTableName)
- sql.append(" on ")
-
- sql.append(escapedTableName)
- sql.append('.')
- sql.append(namingStrategy.column(keyColumnName))
-
- sql.append(" = ")
-
- sql.append(escapedForeignTableName)
- sql.append('.')
- sql.append(namingStrategy.column(foreignKeyColumnName))
- }
-
- if (where.nonEmpty) {
- sql.append("\nwhere ")
- sql.append(where)
- }
-
- if (orderBy.nonEmpty && !countQuery) {
- sql.append("\norder by ")
- sql.append(orderBy)
- }
-
- if (limitSql.nonEmpty && !countQuery) {
- sql.append("\n")
- sql.append(limitSql)
- }
-
- (sql.toString, binder(bindings))
- }
-
- /**
- * Converts a filter expression to a SQL expression.
- *
- * @return The SQL string and the list of values to bind in the prepared statement.
- */
- protected def filterToSql(escapedTableName: String,
- filter: SearchFilterExpr,
- namingStrategy: NamingStrategy): (String, List[AnyRef]) = {
- import SearchFilterBinaryOperation._
- import SearchFilterExpr._
-
- def isNull(string: AnyRef) = Option(string).isEmpty || string.toString.toLowerCase == "null"
-
- def placeholder(field: String) = "?"
-
- def escapeDimension(dimension: SearchFilterExpr.Dimension) = {
- val tableName = dimension.tableName.fold(escapedTableName)(namingStrategy.table)
- s"$tableName.${namingStrategy.column(dimension.name)}"
- }
-
- def filterToSqlMultiple(operands: Seq[SearchFilterExpr]) = operands.collect {
- case x if !SearchFilterExpr.isEmpty(x) => filterToSql(escapedTableName, x, namingStrategy)
- }
-
- filter match {
- case x if isEmpty(x) =>
- ("", List.empty)
-
- case AllowAll =>
- ("1", List.empty)
-
- case DenyAll =>
- ("0", List.empty)
-
- case Atom.Binary(dimension, Eq, value) if isNull(value) =>
- (s"${escapeDimension(dimension)} is NULL", List.empty)
-
- case Atom.Binary(dimension, NotEq, value) if isNull(value) =>
- (s"${escapeDimension(dimension)} is not NULL", List.empty)
-
- case Atom.Binary(dimension, NotEq, value) if tableData.nullableFields.contains(dimension.name) =>
- // In MySQL NULL <> Any === NULL
- // So, to handle NotEq for nullable fields we need a more complex SQL expression.
- // http://dev.mysql.com/doc/refman/5.7/en/working-with-null.html
- val escapedColumn = escapeDimension(dimension)
- val sql = s"($escapedColumn is null or $escapedColumn != ${placeholder(dimension.name)})"
- (sql, List(value))
-
- case Atom.Binary(dimension, op, value) =>
- val operator = op match {
- case Eq => "="
- case NotEq => "!="
- case Like => "like"
- case Gt => ">"
- case GtEq => ">="
- case Lt => "<"
- case LtEq => "<="
- }
- (s"${escapeDimension(dimension)} $operator ${placeholder(dimension.name)}", List(value))
-
- case Atom.NAry(dimension, op, values) =>
- val sqlOp = op match {
- case SearchFilterNAryOperation.In => "in"
- case SearchFilterNAryOperation.NotIn => "not in"
- }
-
- val bindings = ListBuffer[AnyRef]()
- val sqlPlaceholder = placeholder(dimension.name)
- val formattedValues = if (values.nonEmpty) {
- values
- .map { value =>
- bindings += value
- sqlPlaceholder
- }
- .mkString(", ")
- } else "NULL"
- (s"${escapeDimension(dimension)} $sqlOp ($formattedValues)", bindings.toList)
-
- case Intersection(operands) =>
- val (sql, bindings) = filterToSqlMultiple(operands).unzip
- (sql.mkString("(", " and ", ")"), bindings.flatten.toList)
-
- case Union(operands) =>
- val (sql, bindings) = filterToSqlMultiple(operands).unzip
- (sql.mkString("(", " or ", ")"), bindings.flatten.toList)
- }
- }
-
- protected def limitToSql(): String
-
- /**
- * @param escapedMainTableName Should be escaped
- */
- protected def sortingToSql(escapedMainTableName: String, sorting: Sorting, namingStrategy: NamingStrategy): String = {
- sorting match {
- case Dimension(optSortingTableName, field, order) =>
- val sortingTableName = optSortingTableName.map(namingStrategy.table).getOrElse(escapedMainTableName)
- val fullName = s"$sortingTableName.${namingStrategy.column(field)}"
-
- s"$fullName ${orderToSql(order)}"
-
- case Sequential(xs) =>
- xs.map(sortingToSql(escapedMainTableName, _, namingStrategy)).mkString(", ")
- }
- }
-
- protected def orderToSql(x: SortingOrder): String = x match {
- case Ascending => "asc"
- case Descending => "desc"
- }
-
- protected def binder(bindings: List[AnyRef])(bind: PreparedStatement): PreparedStatement = {
- bindings.zipWithIndex.foreach {
- case (binding, index) =>
- bind.setObject(index + 1, binding)
- }
-
- bind
- }
-
-}
-
-final case class PostgresQueryBuilderParameters(tableData: QueryBuilder.TableData,
- links: Map[String, TableLink] = Map.empty,
- filter: SearchFilterExpr = SearchFilterExpr.Empty,
- sorting: Sorting = Sorting.Empty,
- pagination: Option[Pagination] = None)
- extends QueryBuilderParameters {
-
- def limitToSql(): String = {
- pagination.map { pagination =>
- val startFrom = (pagination.pageNumber - 1) * pagination.pageSize
- s"limit ${pagination.pageSize} OFFSET $startFrom"
- } getOrElse ""
- }
-
-}
-
-/**
- * @param links Links to other tables, grouped by foreignTableName
- */
-final case class MysqlQueryBuilderParameters(tableData: QueryBuilder.TableData,
- links: Map[String, TableLink] = Map.empty,
- filter: SearchFilterExpr = SearchFilterExpr.Empty,
- sorting: Sorting = Sorting.Empty,
- pagination: Option[Pagination] = None)
- extends QueryBuilderParameters {
-
- def limitToSql(): String =
- pagination
- .map { pagination =>
- val startFrom = (pagination.pageNumber - 1) * pagination.pageSize
- s"limit $startFrom, ${pagination.pageSize}"
- }
- .getOrElse("")
-
-}
-
-abstract class QueryBuilder[T, D <: SqlIdiom, N <: NamingStrategy](val parameters: QueryBuilderParameters)(
- implicit runner: QueryBuilder.Runner[T],
- countRunner: QueryBuilder.CountRunner) {
-
- def run: Seq[T] = runner(parameters)
-
- def runCount: QueryBuilder.CountResult = countRunner(parameters)
-
- /**
- * Runs the query and returns the total number of matching rows, ignoring pagination.
- */
- def runWithCount: (Seq[T], Int, Option[LocalDateTime]) = {
- val (total, lastUpdate) = runCount
- (run, total, lastUpdate)
- }
-
- def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, D, N]
-
- def withFilter(filter: Option[SearchFilterExpr]): QueryBuilder[T, D, N] = {
- filter.fold(this)(withFilter)
- }
-
- def resetFilter: QueryBuilder[T, D, N] = withFilter(SearchFilterExpr.Empty)
-
- def withSorting(newSorting: Sorting): QueryBuilder[T, D, N]
-
- def withSorting(sorting: Option[Sorting]): QueryBuilder[T, D, N] = {
- sorting.fold(this)(withSorting)
- }
-
- def resetSorting: QueryBuilder[T, D, N] = withSorting(Sorting.Empty)
-
- def withPagination(newPagination: Pagination): QueryBuilder[T, D, N]
-
- def withPagination(pagination: Option[Pagination]): QueryBuilder[T, D, N] = {
- pagination.fold(this)(withPagination)
- }
-
- def resetPagination: QueryBuilder[T, D, N]
-
-}
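
One detail worth keeping from the two parameter classes above: the MySQL and Postgres flavours differ only in how limitToSql renders pagination. A standalone sketch of that arithmetic, matching the "limit 10, 5" expectation in the deleted QueryBuilderParametersSuite at the end of this diff (page size 5, page number 3).

// Sketch only: the pagination arithmetic from the two removed limitToSql implementations.
object LimitSqlSketch extends App {
  def mysqlLimit(pageSize: Int, pageNumber: Int): String = {
    val startFrom = (pageNumber - 1) * pageSize
    s"limit $startFrom, $pageSize" // MySQL style
  }

  def postgresLimit(pageSize: Int, pageNumber: Int): String = {
    val startFrom = (pageNumber - 1) * pageSize
    s"limit $pageSize OFFSET $startFrom" // Postgres style
  }

  assert(mysqlLimit(pageSize = 5, pageNumber = 3) == "limit 10, 5")
  assert(postgresLimit(pageSize = 5, pageNumber = 3) == "limit 5 OFFSET 10")
}
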
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/SlickQueryBuilder.scala b/src/main/scala/xyz/driver/pdsuicommon/db/SlickQueryBuilder.scala
index dc03a52..7366151 100644
--- a/src/main/scala/xyz/driver/pdsuicommon/db/SlickQueryBuilder.scala
+++ b/src/main/scala/xyz/driver/pdsuicommon/db/SlickQueryBuilder.scala
@@ -70,7 +70,7 @@ sealed trait SlickQueryBuilderParameters {
}
def toSql(countQuery: Boolean = false)(implicit profile: JdbcProfile): SQLActionBuilder = {
- toSql(countQuery, QueryBuilderParameters.AllFields)
+ toSql(countQuery, SlickQueryBuilderParameters.AllFields)
}
def toSql(countQuery: Boolean, fields: Set[String])(implicit profile: JdbcProfile): SQLActionBuilder = {
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/BridgeUploadQueueRepository.scala b/src/main/scala/xyz/driver/pdsuicommon/db/repositories/BridgeUploadQueueRepository.scala
deleted file mode 100644
index 4c25afa..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/BridgeUploadQueueRepository.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package xyz.driver.pdsuicommon.db.repositories
-
-import xyz.driver.pdsuicommon.concurrent.BridgeUploadQueue
-import xyz.driver.pdsuicommon.db.MysqlQueryBuilder
-
-trait BridgeUploadQueueRepository extends Repository {
-
- type EntityT = BridgeUploadQueue.Item
-
- def add(draft: EntityT): EntityT
-
- def getById(kind: String, tag: String): Option[EntityT]
-
- def getOne(kind: String): Option[BridgeUploadQueue.Item]
-
- def update(entity: EntityT): EntityT
-
- def delete(kind: String, tag: String): Unit
-
- def buildQuery: MysqlQueryBuilder[EntityT]
-}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/Repository.scala b/src/main/scala/xyz/driver/pdsuicommon/db/repositories/Repository.scala
deleted file mode 100644
index d671e80..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/Repository.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package xyz.driver.pdsuicommon.db.repositories
-
-// For further usage and migration to Postgres and slick
-trait Repository extends RepositoryLogging
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/RepositoryLogging.scala b/src/main/scala/xyz/driver/pdsuicommon/db/repositories/RepositoryLogging.scala
deleted file mode 100644
index d1ec1da..0000000
--- a/src/main/scala/xyz/driver/pdsuicommon/db/repositories/RepositoryLogging.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package xyz.driver.pdsuicommon.db.repositories
-
-import xyz.driver.pdsuicommon.logging._
-
-trait RepositoryLogging extends PhiLogging {
-
- protected def logCreatedOne[T](x: T)(implicit toPhiString: T => PhiString): T = {
- logger.info(phi"An entity was created: $x")
- x
- }
-
- protected def logCreatedMultiple[T <: Iterable[_]](xs: T)(implicit toPhiString: T => PhiString): T = {
- if (xs.nonEmpty) {
- logger.info(phi"Entities were created: $xs")
- }
- xs
- }
-
- protected def logUpdatedOne(rowsAffected: Long): Long = {
- rowsAffected match {
- case 0 => logger.trace(phi"The entity is up to date")
- case 1 => logger.info(phi"The entity was updated")
- case x => logger.warn(phi"The ${Unsafe(x)} entities were updated")
- }
- rowsAffected
- }
-
- protected def logUpdatedOneUnimportant(rowsAffected: Long): Long = {
- rowsAffected match {
- case 0 => logger.trace(phi"The entity is up to date")
- case 1 => logger.trace(phi"The entity was updated")
- case x => logger.warn(phi"The ${Unsafe(x)} entities were updated")
- }
- rowsAffected
- }
-
- protected def logUpdatedMultiple(rowsAffected: Long): Long = {
- rowsAffected match {
- case 0 => logger.trace(phi"All entities are up to date")
- case x => logger.info(phi"The ${Unsafe(x)} entities were updated")
- }
- rowsAffected
- }
-
- protected def logDeletedOne(rowsAffected: Long): Long = {
- rowsAffected match {
- case 0 => logger.trace(phi"The entity does not exist")
- case 1 => logger.info(phi"The entity was deleted")
- case x => logger.warn(phi"Deleted ${Unsafe(x)} entities, expected one")
- }
- rowsAffected
- }
-
- protected def logDeletedMultiple(rowsAffected: Long): Long = {
- rowsAffected match {
- case 0 => logger.trace(phi"Entities do not exist")
- case x => logger.info(phi"Deleted ${Unsafe(x)} entities")
- }
- rowsAffected
- }
-
-}
diff --git a/src/test/scala/xyz/driver/pdsuicommon/BaseSuite.scala b/src/test/scala/xyz/driver/pdsuicommon/BaseSuite.scala
index 7c9d8c4..29e7610 100644
--- a/src/test/scala/xyz/driver/pdsuicommon/BaseSuite.scala
+++ b/src/test/scala/xyz/driver/pdsuicommon/BaseSuite.scala
@@ -5,15 +5,12 @@ import java.time.{LocalDateTime, ZoneId}
import org.scalatest.FreeSpecLike
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Span}
-import xyz.driver.pdsuicommon.db._
import xyz.driver.pdsuicommon.domain._
-import xyz.driver.pdsuicommon.error.UnexpectedFilterException
import xyz.driver.pdsuicommon.utils.DiffUtils
trait BaseSuite extends FreeSpecLike with DiffUtils with ScalaFutures {
implicit val defaultPatience = PatienceConfig(timeout = Span(1000, Millis), interval = Span(20, Millis))
- implicit val sqlContext = new MockMySqlContext()
def sampleUser(role: User.Role, email: String = "test@example.com", password: String = "123") = User(
id = StringId("2001"),
@@ -23,13 +20,4 @@ trait BaseSuite extends FreeSpecLike with DiffUtils with ScalaFutures {
latestActivity = Some(LocalDateTime.now(ZoneId.of("Z"))),
deleted = None
)
-
- def createMockQueryBuilder[T](isExpectedFilter: SearchFilterExpr => Boolean): MysqlQueryBuilder[T] = {
- MockQueryBuilder[T] {
- case (filter, _, _) if isExpectedFilter(filter) => Seq.empty
- case (filter, _, _) => throw new UnexpectedFilterException(s"Filter is unexpected: $filter")
- } {
- case _ => (0, Option.empty[LocalDateTime])
- }
- }
}
diff --git a/src/test/scala/xyz/driver/pdsuicommon/Mocks.scala b/src/test/scala/xyz/driver/pdsuicommon/Mocks.scala
index 51d39e5..699020c 100644
--- a/src/test/scala/xyz/driver/pdsuicommon/Mocks.scala
+++ b/src/test/scala/xyz/driver/pdsuicommon/Mocks.scala
@@ -6,7 +6,7 @@ import java.sql.Connection
import java.util.logging.Logger
import javax.sql.DataSource
-import com.typesafe.config.ConfigFactory
+import xyz.driver.pdsuicommon.db.SlickQueryBuilder.TableData
import xyz.driver.pdsuicommon.db._
import xyz.driver.pdsuicommon.http.HttpFetcher
@@ -27,32 +27,7 @@ class MockDataSource extends DataSource with Closeable {
override def isWrapperFor(iface: Class[_]): Boolean = throw new NotImplementedError("MockDataSource.isWrapperFor")
}
-object MockMySqlContext {
-
- val Settings = MySqlContext.Settings(
- credentials = MySqlContext.DbCredentials(
- user = "test",
- password = "test",
- host = "localhost",
- port = 3248,
- dbName = "test",
- dbCreateFlag = false,
- dbContext = "test",
- connectionParams = "",
- url = ""
- ),
- connection = ConfigFactory.empty(),
- threadPoolSize = 10
- )
-}
-
-class MockMySqlContext() extends MySqlContext(new MockDataSource, MockMySqlContext.Settings) {
- override protected def withConnection[T](f: Connection => T): Nothing = {
- throw new NotImplementedError("MockSqlContext.withConnection")
- }
-}
-
-class MockFactory()(implicit val sqlContext: MySqlContext) {
+class MockFactory()(implicit val sqlContext: PostgresContext) {
val MockHttpFetcher: HttpFetcher = { (url: URL) =>
Future.successful(Array.empty[Byte])
}
@@ -61,28 +36,26 @@ class MockFactory()(implicit val sqlContext: MySqlContext) {
object MockQueryBuilder {
type MockRunnerIn = (SearchFilterExpr, Sorting, Option[Pagination])
- type MockRunnerOut[T] = Seq[T]
- type MockCountRunnerOut = QueryBuilder.CountResult
+ type MockRunnerOut[T] = Future[Seq[T]]
+ type MockCountRunnerOut = SlickQueryBuilder.CountResult
def apply[T](matcher: PartialFunction[MockRunnerIn, MockRunnerOut[T]])(
countMatcher: PartialFunction[MockRunnerIn, MockCountRunnerOut])(
- implicit context: MySqlContext): MysqlQueryBuilder[T] = {
+ implicit context: PostgresContext): SlickQueryBuilder[T] = {
- val runner: QueryBuilder.Runner[T] = { parameters =>
+ val runner: SlickQueryBuilder.Runner[T] = { parameters =>
matcher((parameters.filter, parameters.sorting, parameters.pagination))
}
- val countRunner: QueryBuilder.CountRunner = { parameters =>
+ val countRunner: SlickQueryBuilder.CountRunner = { parameters =>
countMatcher((parameters.filter, parameters.sorting, parameters.pagination))
}
- MysqlQueryBuilder[T](
- tableName = "",
- lastUpdateFieldName = Option.empty[String],
- nullableFields = Set.empty[String],
- links = Set.empty[TableLink],
- runner = runner,
- countRunner = countRunner
+ val parameters = SlickPostgresQueryBuilderParameters(
+ databaseName = "test",
+ tableData = TableData("", None, Set.empty[String]),
+ links = Map.empty
)
+ new SlickPostgresQueryBuilder(parameters)(runner, countRunner)
}
}
diff --git a/src/test/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala b/src/test/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala
deleted file mode 100644
index 8b38316..0000000
--- a/src/test/scala/xyz/driver/pdsuicommon/concurrent/BridgeUploadQueueRepositoryAdapterSuite.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-package xyz.driver.pdsuicommon.concurrent
-
-import java.util.concurrent.ThreadLocalRandom
-
-import xyz.driver.pdsuicommon.BaseSuite
-import xyz.driver.pdsuicommon.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy
-import xyz.driver.pdsuicommon.concurrent.BridgeUploadQueueRepositoryAdapter.Strategy.{OnAttempt, OnComplete}
-import xyz.driver.pdsuicommon.db.{FakeDbIo, MysqlQueryBuilder}
-import xyz.driver.pdsuicommon.db.repositories.BridgeUploadQueueRepository
-
-import scala.concurrent.Future
-import scala.concurrent.duration.DurationInt
-
-class BridgeUploadQueueRepositoryAdapterSuite extends BaseSuite {
-
- // IDEA has some issues with imports here
- private implicit val executionContext = scala.concurrent.ExecutionContext.global
-
- "Strategy" - {
- "LimitExponential" - {
- "on" - {
- val strategy = Strategy.LimitExponential(
- startInterval = 10.seconds,
- intervalFactor = 1.4,
- maxInterval = 50.seconds,
- onComplete = OnComplete.Delete
- )
-
- "a new interval should be greater than the previous one if the limit not reached" in {
- val previous = strategy.on(1)
- val current = strategy.on(2)
-
- (previous, current) match {
- case (OnAttempt.Continue(a), OnAttempt.Continue(b)) => assert(a < b)
- case x => fail(s"Unexpected result: $x")
- }
- }
-
- "should limit intervals" in {
- assert(strategy.on(20) == OnAttempt.Continue(strategy.maxInterval))
- }
-
- "should not fail, if there is many attempts" in {
- assert(strategy.on(1000) == OnAttempt.Continue(strategy.maxInterval))
- }
- }
- }
- }
-
- "complete" - {
- "onComplete == mark" - {
- "should update the item" in {
- var done = false
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
-
- override def delete(kind: String, tag: String): Unit = throw new IllegalStateException("Impossible call")
-
- override def update(entity: EntityT): EntityT = {
- assert(entity.kind == item.kind, "repository.delete, kind")
- assert(entity.tag == item.tag, "repository.delete, tag")
- done = true
- entity
- }
-
- override def getById(kind: String, tag: String): Option[EntityT] = Some(item)
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = Strategy.Stop(OnComplete.Mark),
- repository = repository,
- dbIo = FakeDbIo
- )
-
- assert(adapter.complete(item.kind, item.tag).isReadyWithin(100.millis))
- assert(done)
- }
- }
-
- "onComplete == delete" - {
- "should delete the item" in {
- var done = false
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
-
- override def delete(kind: String, tag: String): Unit = {
- assert(kind == item.kind, "repository.delete, kind")
- assert(tag == item.tag, "repository.delete, tag")
- done = true
- }
- override def update(entity: EntityT): EntityT = throw new IllegalStateException("Impossible call")
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = Strategy.Stop(OnComplete.Delete),
- repository = repository,
- dbIo = FakeDbIo
- )
-
- assert(adapter.complete(item.kind, item.tag).isReadyWithin(100.millis))
- assert(done)
- }
- }
- }
-
- "tryRetry" - {
-
- "when all attempts are not out" - {
-
- val defaultStrategy = Strategy.Constant(10.seconds)
-
- "should return an updated item" in {
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
-
- override def update(draft: EntityT): EntityT = draft
- override def delete(kind: String, tag: String): Unit = throw new IllegalAccessError(s"kind=$kind, tag=$tag")
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- )
-
- val item = defaultItem
- val r = adapter.tryRetry(item).futureValue
- assert(r.isDefined)
- assert(!r.contains(item))
- }
-
- "should update an item with increased attempts" in {
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
-
- override def update(draft: EntityT): EntityT = {
- assert(draft.attempts === (item.attempts + 1), "repository.add")
- draft
- }
- override def delete(kind: String, tag: String): Unit = throw new IllegalAccessError(s"kind=$kind, tag=$tag")
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- )
-
- assert(adapter.tryRetry(item).isReadyWithin(100.millis))
- }
-
- "should remove an old item" in {
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
- override def update(draft: EntityT): EntityT = draft
- override def delete(kind: String, tag: String): Unit = {
- assert(kind == item.kind, "repository.delete, kind")
- assert(tag == item.tag, "repository.delete, tag")
- }
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- )
-
- assert(adapter.tryRetry(item).isReadyWithin(100.millis))
- }
-
- "should update time of the next attempt" in {
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
-
- override def update(draft: EntityT): EntityT = {
- assert(draft.nextAttempt.isAfter(item.nextAttempt), "repository.add")
- draft
- }
- override def delete(kind: String, tag: String): Unit = throw new IllegalAccessError(s"kind=$kind, tag=$tag")
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- )
-
- assert(adapter.tryRetry(item).isReadyWithin(100.millis))
- }
-
- }
-
- "when all attempts are out" - {
-
- val defaultStrategy = Strategy.Stop()
-
- "should not return an item" in {
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
- override def update(entity: EntityT): EntityT = fail("update should not be used!")
-
- override def delete(kind: String, tag: String): Unit = {}
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- )
-
- val r = adapter.tryRetry(defaultItem).futureValue
- assert(r.isEmpty)
- }
-
- "should complete the item" in {
- var taskWasCompleted = false
- val item = defaultItem
-
- val repository = new BridgeUploadQueueRepository {
- override def add(draft: EntityT): EntityT = draft
- override def getOne(kind: String): Option[EntityT] = fail("getOne should not be used!")
- override def buildQuery: MysqlQueryBuilder[EntityT] = fail("buildQuery should not be used!")
- override def getById(kind: String, tag: String): Option[EntityT] = fail("getById should not be used!")
- override def update(entity: EntityT): EntityT = fail("update should not be used!")
-
- override def delete(kind: String, tag: String): Unit = {}
- }
-
- val adapter = new BridgeUploadQueueRepositoryAdapter(
- strategy = defaultStrategy,
- repository = repository,
- dbIo = FakeDbIo
- ) {
- override def complete(kind: String, tag: String): Future[Unit] = Future {
- assert(kind == item.kind, "adapter.complete, kind")
- assert(tag == item.tag, "adapter.complete, tag")
- taskWasCompleted = true
- }
- }
-
- val r = adapter.tryRetry(item).futureValue
- assert(r.isEmpty)
- assert(taskWasCompleted)
- }
-
- }
-
- }
-
- private def defaultItem = BridgeUploadQueue.Item(
- "test",
- ThreadLocalRandom.current().nextInt().toString
- )
-
-}
diff --git a/src/test/scala/xyz/driver/pdsuicommon/db/QueryBuilderParametersSuite.scala b/src/test/scala/xyz/driver/pdsuicommon/db/QueryBuilderParametersSuite.scala
deleted file mode 100644
index 2c23b92..0000000
--- a/src/test/scala/xyz/driver/pdsuicommon/db/QueryBuilderParametersSuite.scala
+++ /dev/null
@@ -1,310 +0,0 @@
-package xyz.driver.pdsuicommon.db
-
-import java.time.LocalDateTime
-
-import io.getquill.MysqlEscape
-import org.scalatest.FreeSpecLike
-import xyz.driver.pdsuicommon.db.QueryBuilder.TableData
-import xyz.driver.pdsuicommon.domain._
-
-class QueryBuilderParametersSuite extends FreeSpecLike {
-
- import SearchFilterBinaryOperation._
- import SearchFilterExpr.{Dimension => _, _}
- import SearchFilterNAryOperation._
- import Sorting._
- import SortingOrder._
-
- val tableName = "Entity"
-
- case class Entity(id: LongId[Entity],
- name: String,
- email: Email,
- optionUser: Option[StringId[User]],
- date: LocalDateTime,
- optionDate: Option[LocalDateTime],
- kindId: Long)
-
- def queryBuilderParameters = MysqlQueryBuilderParameters(
- tableData = TableData(
- tableName = tableName,
- nullableFields = Set("optionUser", "optionDate")
- ),
- links = Map(
- "Kind" -> TableLink("kindId", "Kind", "id"),
- "User" -> TableLink("optionUser", "User", "id")
- )
- )
-
- val queryBasis =
- s"""select `$tableName`.*
- |from `$tableName`""".stripMargin.trim
-
- "toSql" - {
- "should generate correct SQL query" - {
- "with default parameters" in {
- val (sql, _) = queryBuilderParameters.toSql(namingStrategy = MysqlEscape)
- assert(sql == queryBasis)
- }
-
- "with filtering: " - {
- "single atom filter" in {
- val (sql, _) =
- queryBuilderParameters.copy(filter = Atom.Binary("name", Eq, "x")).toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where `$tableName`.`name` = ?""".stripMargin)
- }
-
- "single atom filter for optional field with NotEq operation" in {
- val (sql, _) = queryBuilderParameters
- .copy(filter = Atom.Binary("optionUser", NotEq, "x"))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where (`$tableName`.`optionUser` is null or `$tableName`.`optionUser` != ?)""".stripMargin)
- }
-
- "single atom filter for field with IN operation" in {
- val (sql, _) = queryBuilderParameters
- .copy(filter = Atom.NAry("date", In, Seq("x", "x", "x")))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where `$tableName`.`date` in (?, ?, ?)""".stripMargin)
- }
-
- "multiple intersected filters" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- filter = Intersection(
- Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x")
- )))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where (`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?)""".stripMargin)
- }
-
- "multiple intersected nested filters" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- filter = Intersection(
- Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x"),
- Intersection(Seq(
- Atom.Binary("optionUser", Eq, "x"),
- Atom.Binary("date", LtEq, "x")
- ))
- )))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"$queryBasis\nwhere (`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?" +
- s" and (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))")
- }
-
- "multiple unionized filters" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- filter = Union(
- Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x")
- )))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where (`$tableName`.`name` > ? or `$tableName`.`optionDate` >= ?)""".stripMargin.trim)
- }
-
- "multiple unionized nested filters" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- filter = Union(
- Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x"),
- Union(Seq(
- Atom.Binary("optionUser", Eq, "x"),
- Atom.Binary("date", LtEq, "x")
- ))
- )))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |where (`$tableName`.`name` > ? or `$tableName`.`optionDate` >= ? or (`$tableName`.`optionUser` = ? or `$tableName`.`date` <= ?))""".stripMargin)
- }
-
- "multiple unionized and intersected nested filters" in {
- val (sql, _) = queryBuilderParameters
- .copy(filter = Union(Seq(
- Intersection(Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x")
- )),
- Intersection(Seq(
- Atom.Binary("optionUser", Eq, "x"),
- Atom.Binary("date", LtEq, "x")
- ))
- )))
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"$queryBasis\nwhere ((`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?) " +
- s"or (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))")
- }
-
- "single field from foreign table" in {
- val (sql, _) = queryBuilderParameters
- .copy(filter = Atom.Binary(SearchFilterExpr.Dimension(Some("Kind"), "name"), Eq, "x"))
- .toSql(namingStrategy = MysqlEscape)
- val pattern =
- s"""select `$tableName`.*
- |from `$tableName`
- |inner join `Kind` on `Entity`.`kindId` = `Kind`.`id`
- |where `Kind`.`name` = ?""".stripMargin
- assert(sql == pattern)
- }
- }
-
- "with sorting:" - {
- "single field sorting" in {
- val (sql, _) = queryBuilderParameters
- .copy(sorting = Dimension(None, "name", Ascending))
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"""$queryBasis
- |order by `$tableName`.`name` asc""".stripMargin)
- }
-
- "single foreign sorting field" in {
- val (sql, _) = queryBuilderParameters
- .copy(sorting = Dimension(Some("Kind"), "name", Ascending))
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"""select `$tableName`.*
- |from `$tableName`
- |inner join `Kind` on `Entity`.`kindId` = `Kind`.`id`
- |order by `Kind`.`name` asc""".stripMargin)
- }
-
- "multiple fields sorting" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- sorting = Sequential(
- Seq(
- Dimension(None, "name", Ascending),
- Dimension(None, "date", Descending)
- )))
- .toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |order by `$tableName`.`name` asc, `$tableName`.`date` desc""".stripMargin)
- }
-
- "multiple foreign sorting field" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- sorting = Sequential(
- Seq(
- Dimension(Some("Kind"), "name", Ascending),
- Dimension(Some("User"), "name", Descending)
- )))
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"""select `$tableName`.*
- |from `$tableName`
- |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
- |inner join `User` on `$tableName`.`optionUser` = `User`.`id`
- |order by `Kind`.`name` asc, `User`.`name` desc""".stripMargin)
- }
-
- "multiple field sorting (including foreign tables)" in {
- val (sql, _) = queryBuilderParameters
- .copy(
- sorting = Sequential(
- Seq(
- Dimension(Some("Kind"), "name", Ascending),
- Dimension(None, "date", Descending)
- )))
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"""select `$tableName`.*
- |from `$tableName`
- |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
- |order by `Kind`.`name` asc, `$tableName`.`date` desc""".stripMargin)
- }
- }
-
- "with pagination" in {
- val (sql, _) =
- queryBuilderParameters.copy(pagination = Some(Pagination(5, 3))).toSql(namingStrategy = MysqlEscape)
- assert(
- sql ==
- s"""$queryBasis
- |limit 10, 5""".stripMargin)
- }
-
- "combined" in {
- val filter = Union(
- Seq(
- Intersection(
- Seq(
- Atom.Binary("name", Gt, "x"),
- Atom.Binary("optionDate", GtEq, "x")
- )),
- Intersection(
- Seq(
- Atom.Binary("optionUser", Eq, "x"),
- Atom.Binary("date", LtEq, "x")
- ))
- ))
- val sorting = Sequential(
- Seq(
- Dimension(Some("Kind"), "name", Ascending),
- Dimension(None, "name", Ascending),
- Dimension(None, "date", Descending)
- ))
-
- val (sql, _) = queryBuilderParameters
- .copy(
- filter = filter,
- sorting = sorting,
- pagination = Some(Pagination(5, 3))
- )
- .toSql(namingStrategy = MysqlEscape)
-
- assert(
- sql ==
- s"""select `$tableName`.*
- |from `$tableName`
- |inner join `Kind` on `$tableName`.`kindId` = `Kind`.`id`
- |where ((`$tableName`.`name` > ? and `$tableName`.`optionDate` >= ?) or (`$tableName`.`optionUser` = ? and `$tableName`.`date` <= ?))
- |order by `Kind`.`name` asc, `$tableName`.`name` asc, `$tableName`.`date` desc
- |limit 10, 5""".stripMargin)
- }
-
- }
- }
-
-}