author    kseniya <ktomskih@datamonsters.co>  2017-09-20 18:01:15 +0700
committer kseniya <ktomskih@datamonsters.co>  2017-09-20 18:01:15 +0700
commit    9968eaefa2a97ebe495fa51b640e31c78db61ac6 (patch)
tree      4eed12a4ebb2829e336a3da673c7c8462e7ab845 /src/main/scala/xyz/driver/pdsuicommon
parent    d5ecec043a3d70dd09bda8a79fcd188f411b47df (diff)
parent    d4b18efda238f506103dddbf3b400ae17c797276 (diff)
Merge branch 'master' into slick-query-builder
Diffstat (limited to 'src/main/scala/xyz/driver/pdsuicommon')
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala                    5
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala                73
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala          108
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/http/Directives.scala                  121
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/parsers/DimensionsParser.scala          30
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/parsers/PaginationParser.scala          29
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/parsers/ParseQueryArgException.scala     3
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/parsers/SearchFilterParser.scala        152
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/parsers/SortingParser.scala              64
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/utils/CustomSwaggerJsonFormats.scala     55
-rw-r--r--  src/main/scala/xyz/driver/pdsuicommon/utils/Utils.scala                         20
11 files changed, 656 insertions, 4 deletions
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala b/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala
index c547bf4..9d2664d 100644
--- a/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala
+++ b/src/main/scala/xyz/driver/pdsuicommon/db/MySqlContext.scala
@@ -28,10 +28,7 @@ object MySqlContext extends PhiLogging {
connectionParams: String,
url: String)
- final case class Settings(credentials: DbCredentials,
- connection: Config,
- connectionAttemptsOnStartup: Int,
- threadPoolSize: Int)
+ final case class Settings(credentials: DbCredentials, connection: Config, threadPoolSize: Int)
def apply(settings: Settings): MySqlContext = {
// Prevent leaking credentials to a log
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala
new file mode 100644
index 0000000..7bdfd1b
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresContext.scala
@@ -0,0 +1,73 @@
+package xyz.driver.pdsuicommon.db
+
+import java.io.Closeable
+import java.time._
+import java.util.UUID
+import java.util.concurrent.Executors
+import javax.sql.DataSource
+
+import io.getquill._
+import xyz.driver.pdsuicommon.concurrent.MdcExecutionContext
+import xyz.driver.pdsuicommon.db.PostgresContext.Settings
+import xyz.driver.pdsuicommon.domain.UuidId
+import xyz.driver.pdsuicommon.logging._
+
+import scala.concurrent.ExecutionContext
+import scala.util.control.NonFatal
+import scala.util.{Failure, Success, Try}
+
+object PostgresContext extends PhiLogging {
+
+ final case class Settings(connection: com.typesafe.config.Config,
+ connectionAttemptsOnStartup: Int,
+ threadPoolSize: Int)
+
+ def apply(settings: Settings): PostgresContext = {
+ // Prevent leaking credentials to a log
+ Try(JdbcContextConfig(settings.connection).dataSource) match {
+ case Success(dataSource) => new PostgresContext(dataSource, settings)
+ case Failure(NonFatal(e)) =>
+ logger.error(phi"Can not load dataSource, error: ${Unsafe(e.getClass.getName)}")
+ throw new IllegalArgumentException("Can not load dataSource from config. Check your database and config", e)
+ }
+ }
+
+}
+
+class PostgresContext(val dataSource: DataSource with Closeable, settings: Settings)
+ extends PostgresJdbcContext[SnakeCase](dataSource) with TransactionalContext
+ with EntityExtractorDerivation[SnakeCase] {
+
+ private val tpe = Executors.newFixedThreadPool(settings.threadPoolSize)
+
+ implicit val executionContext: ExecutionContext = {
+ val orig = ExecutionContext.fromExecutor(tpe)
+ MdcExecutionContext.from(orig)
+ }
+
+ override def close(): Unit = {
+ super.close()
+ tpe.shutdownNow()
+ }
+
+ /**
+ * Usable for QueryBuilder's extractors
+ */
+ def timestampToLocalDateTime(timestamp: java.sql.Timestamp): LocalDateTime = {
+ LocalDateTime.ofInstant(timestamp.toInstant, ZoneOffset.UTC)
+ }
+
+ implicit def encodeUuidId[T] = MappedEncoding[UuidId[T], String](_.toString)
+ implicit def decodeUuidId[T] = MappedEncoding[String, UuidId[T]] { uuid =>
+ UuidId[T](UUID.fromString(uuid))
+ }
+
+ def decodeOptUuidId[T] = MappedEncoding[Option[String], Option[UuidId[T]]] {
+ case Some(x) => Option(x).map(y => UuidId[T](UUID.fromString(y)))
+ case None => None
+ }
+
+ implicit def decodeUuid[T] = MappedEncoding[String, UUID] { uuid =>
+ UUID.fromString(uuid)
+ }
+}
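
Usage sketch (illustrative, not part of this commit): wiring the new PostgresContext from a Typesafe config block. The "db.postgres" path and the pool/attempt values are assumptions; the Settings fields and PostgresContext.apply come from the code above.

    import com.typesafe.config.ConfigFactory
    import xyz.driver.pdsuicommon.db.PostgresContext

    // hypothetical config path; any Config block accepted by JdbcContextConfig works
    val dbConfig = ConfigFactory.load().getConfig("db.postgres")
    val settings = PostgresContext.Settings(
      connection = dbConfig,
      connectionAttemptsOnStartup = 5,
      threadPoolSize = 8
    )
    // throws IllegalArgumentException if the DataSource cannot be created from the config
    val postgresContext = PostgresContext(settings)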
diff --git a/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala
new file mode 100644
index 0000000..0ddf811
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/db/PostgresQueryBuilder.scala
@@ -0,0 +1,108 @@
+package xyz.driver.pdsuicommon.db
+
+import java.sql.ResultSet
+
+import io.getquill.{PostgresDialect, PostgresEscape}
+import xyz.driver.pdsuicommon.db.PostgresQueryBuilder.SmartPostgresEscape
+
+import scala.collection.breakOut
+
+object PostgresQueryBuilder {
+
+ import xyz.driver.pdsuicommon.db.QueryBuilder._
+
+ trait SmartPostgresEscape extends PostgresEscape {
+ override def column(s: String): String =
+ if (s.startsWith("$")) s else super.column(s)
+ override def default(s: String): String =
+ s.split("\\.").map(ss => s""""$ss"""").mkString(".")
+ }
+
+ object SmartPostgresEscape extends SmartPostgresEscape
+
+ type Escape = SmartPostgresEscape
+ val Escape = SmartPostgresEscape
+
+ def apply[T](tableName: String,
+ lastUpdateFieldName: Option[String],
+ nullableFields: Set[String],
+ links: Set[TableLink],
+ runner: Runner[T],
+ countRunner: CountRunner): PostgresQueryBuilder[T] = {
+ val parameters = PostgresQueryBuilderParameters(
+ tableData = TableData(tableName, lastUpdateFieldName, nullableFields),
+ links = links.map(x => x.foreignTableName -> x)(breakOut)
+ )
+ new PostgresQueryBuilder[T](parameters)(runner, countRunner)
+ }
+
+ def apply[T](tableName: String,
+ lastUpdateFieldName: Option[String],
+ nullableFields: Set[String],
+ links: Set[TableLink],
+ extractor: ResultSet => T)(implicit sqlContext: PostgresContext): PostgresQueryBuilder[T] = {
+ apply(tableName, QueryBuilderParameters.AllFields, lastUpdateFieldName, nullableFields, links, extractor)
+ }
+
+ def apply[T](tableName: String,
+ fields: Set[String],
+ lastUpdateFieldName: Option[String],
+ nullableFields: Set[String],
+ links: Set[TableLink],
+ extractor: ResultSet => T)(implicit sqlContext: PostgresContext): PostgresQueryBuilder[T] = {
+
+ val runner: Runner[T] = { parameters =>
+ val (sql, binder) = parameters.toSql(countQuery = false, fields = fields, namingStrategy = SmartPostgresEscape)
+ sqlContext.executeQuery[T](sql, binder, { resultSet =>
+ extractor(resultSet)
+ })
+ }
+
+ val countRunner: CountRunner = { parameters =>
+ val (sql, binder) = parameters.toSql(countQuery = true, namingStrategy = SmartPostgresEscape)
+ sqlContext
+ .executeQuery[CountResult](
+ sql,
+ binder, { resultSet =>
+ val count = resultSet.getInt(1)
+ val lastUpdate = if (parameters.tableData.lastUpdateFieldName.isDefined) {
+ Option(resultSet.getTimestamp(2)).map(sqlContext.timestampToLocalDateTime)
+ } else None
+
+ (count, lastUpdate)
+ }
+ )
+ .head
+ }
+
+ apply[T](
+ tableName = tableName,
+ lastUpdateFieldName = lastUpdateFieldName,
+ nullableFields = nullableFields,
+ links = links,
+ runner = runner,
+ countRunner = countRunner
+ )
+ }
+}
+
+class PostgresQueryBuilder[T](parameters: PostgresQueryBuilderParameters)(implicit runner: QueryBuilder.Runner[T],
+ countRunner: QueryBuilder.CountRunner)
+ extends QueryBuilder[T, PostgresDialect, PostgresQueryBuilder.Escape](parameters) {
+
+ def withFilter(newFilter: SearchFilterExpr): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
+ new PostgresQueryBuilder[T](parameters.copy(filter = newFilter))
+ }
+
+ def withSorting(newSorting: Sorting): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
+ new PostgresQueryBuilder[T](parameters.copy(sorting = newSorting))
+ }
+
+ def withPagination(newPagination: Pagination): QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
+ new PostgresQueryBuilder[T](parameters.copy(pagination = Some(newPagination)))
+ }
+
+ def resetPagination: QueryBuilder[T, PostgresDialect, SmartPostgresEscape] = {
+ new PostgresQueryBuilder[T](parameters.copy(pagination = None))
+ }
+}
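
Usage sketch (illustrative, not part of this commit): building a query with the extractor-based apply and refining it with the with* methods. TrialRow, its extractor and the table name are hypothetical; the builder API comes from the code above.

    import java.sql.ResultSet
    import xyz.driver.pdsuicommon.db._

    final case class TrialRow(id: Long, status: String) // hypothetical row type

    implicit val postgresContext: PostgresContext = ??? // provided by the application

    val trials = PostgresQueryBuilder[TrialRow](
      tableName = "trial",                       // hypothetical table
      lastUpdateFieldName = Some("last_update"),
      nullableFields = Set.empty,
      links = Set.empty,
      extractor = (rs: ResultSet) => TrialRow(rs.getLong("id"), rs.getString("status"))
    )

    val page = trials
      .withFilter(SearchFilterExpr.Atom.Binary(SearchFilterExpr.Dimension(None, "status"),
                                               SearchFilterBinaryOperation.Eq, "New"))
      .withSorting(Sorting.Sequential(Seq(Sorting.Dimension(None, "last_update", SortingOrder.Descending))))
      .withPagination(Pagination(10, 1)) // pageSize, pageNumber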
diff --git a/src/main/scala/xyz/driver/pdsuicommon/http/Directives.scala b/src/main/scala/xyz/driver/pdsuicommon/http/Directives.scala
new file mode 100644
index 0000000..e9a4132
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/http/Directives.scala
@@ -0,0 +1,121 @@
+package xyz.driver.pdsuicommon.http
+
+import akka.http.scaladsl.server._
+import akka.http.scaladsl.server.Directives._
+import akka.http.scaladsl.model._
+import xyz.driver.core.rest.ContextHeaders
+import xyz.driver.entities.users.AuthUserInfo
+import xyz.driver.pdsuicommon.auth._
+import xyz.driver.pdsuicommon.error._
+import xyz.driver.pdsuicommon.error.DomainError._
+import xyz.driver.pdsuicommon.error.ErrorsResponse.ResponseError
+import xyz.driver.pdsuicommon.parsers._
+import xyz.driver.pdsuicommon.db.{Pagination, Sorting, SearchFilterExpr}
+import xyz.driver.pdsuicommon.domain._
+import xyz.driver.pdsuicommon.serialization.PlayJsonSupport._
+import xyz.driver.core.rest.AuthProvider
+import scala.util.control._
+
+import scala.util._
+
+trait Directives {
+
+ val paginated: Directive1[Pagination] = parameterSeq.flatMap { params =>
+ PaginationParser.parse(params) match {
+ case Success(pagination) => provide(pagination)
+ case Failure(ex) =>
+ reject(ValidationRejection("invalid pagination parameter", Some(ex)))
+ }
+ }
+
+ def sorted(validDimensions: Set[String] = Set.empty): Directive1[Sorting] = parameterSeq.flatMap { params =>
+ SortingParser.parse(validDimensions, params) match {
+ case Success(sorting) => provide(sorting)
+ case Failure(ex) =>
+ reject(ValidationRejection("invalid sorting parameter", Some(ex)))
+ }
+ }
+
+ val dimensioned: Directive1[Dimensions] = parameterSeq.flatMap { params =>
+ DimensionsParser.tryParse(params) match {
+ case Success(dims) => provide(dims)
+ case Failure(ex) =>
+ reject(ValidationRejection("invalid dimension parameter", Some(ex)))
+ }
+ }
+
+ val searchFiltered: Directive1[SearchFilterExpr] = parameterSeq.flatMap { params =>
+ SearchFilterParser.parse(params) match {
+ case Success(sorting) => provide(sorting)
+ case Failure(ex) =>
+ reject(ValidationRejection("invalid filter parameter", Some(ex)))
+ }
+ }
+
+ def StringIdInPath[T]: PathMatcher1[StringId[T]] =
+ PathMatchers.Segment.map((id) => StringId(id.toString))
+
+ def LongIdInPath[T]: PathMatcher1[LongId[T]] =
+ PathMatchers.LongNumber.map((id) => LongId(id))
+
+ def UuidIdInPath[T]: PathMatcher1[UuidId[T]] =
+ PathMatchers.JavaUUID.map((id) => UuidId(id))
+
+ def failFast[A](reply: A): A = reply match {
+ case err: NotFoundError => throw new NotFoundException(err.getMessage)
+ case err: AuthenticationError => throw new AuthenticationException(err.getMessage)
+ case err: AuthorizationError => throw new AuthorizationException(err.getMessage)
+ case err: DomainError => throw new DomainException(err.getMessage)
+ case other => other
+ }
+
+ def domainExceptionHandler(req: RequestId): ExceptionHandler = {
+ def errorResponse(ex: Throwable) =
+ ErrorsResponse(Seq(ResponseError(None, ex.getMessage, ErrorCode.Unspecified)), req)
+ ExceptionHandler {
+ case ex: AuthenticationException => complete(StatusCodes.Unauthorized -> errorResponse(ex))
+ case ex: AuthorizationException => complete(StatusCodes.Forbidden -> errorResponse(ex))
+ case ex: NotFoundException => complete(StatusCodes.NotFound -> errorResponse(ex))
+ case ex: DomainException => complete(StatusCodes.BadRequest -> errorResponse(ex))
+ case NonFatal(ex) => complete(StatusCodes.InternalServerError -> errorResponse(ex))
+ }
+ }
+
+ def domainRejectionHandler(req: RequestId): RejectionHandler = {
+ def wrapContent(message: String) = {
+ import play.api.libs.json._
+ val err = ErrorsResponse(Seq(ResponseError(None, message, ErrorCode.Unspecified)), req)
+ val text = Json.stringify(implicitly[Writes[ErrorsResponse]].writes(err))
+ HttpEntity(ContentTypes.`application/json`, text)
+ }
+ RejectionHandler.default.mapRejectionResponse {
+ case res @ HttpResponse(_, _, ent: HttpEntity.Strict, _) =>
+ res.copy(entity = wrapContent(ent.data.utf8String))
+ case x => x // pass through all other types of responses
+ }
+ }
+
+ val tracked: Directive1[RequestId] = optionalHeaderValueByName(ContextHeaders.TrackingIdHeader) flatMap {
+ case Some(id) => provide(RequestId(id))
+ case None => provide(RequestId())
+ }
+
+ val domainResponse: Directive0 = tracked.flatMap { id =>
+ handleExceptions(domainExceptionHandler(id)) & handleRejections(domainRejectionHandler(id))
+ }
+
+ implicit class AuthProviderWrapper(provider: AuthProvider[AuthUserInfo]) {
+ val authenticated: Directive1[AuthenticatedRequestContext] = (provider.authorize() & tracked) tflatMap {
+ case (core, requestId) =>
+ provide(
+ new AuthenticatedRequestContext(
+ core.authenticatedUser,
+ requestId,
+ core.contextHeaders(ContextHeaders.AuthenticationTokenHeader)
+ ))
+ }
+ }
+
+}
+
+object Directives extends Directives
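
Route sketch (illustrative, not part of this commit): composing the new directives in an akka-http route. The path, the sortable dimension names and the placeholder response are hypothetical; the directives themselves come from the code above.

    import akka.http.scaladsl.server.Directives._
    import xyz.driver.pdsuicommon.http.Directives._

    val trialsRoute =
      path("trials") {
        get {
          domainResponse {
            (paginated & sorted(Set("lastUpdate", "status")) & searchFiltered) {
              (pagination, sorting, filter) =>
                complete(s"filter=$filter sorting=$sorting pagination=$pagination") // placeholder response
            }
          }
        }
      }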
diff --git a/src/main/scala/xyz/driver/pdsuicommon/parsers/DimensionsParser.scala b/src/main/scala/xyz/driver/pdsuicommon/parsers/DimensionsParser.scala
new file mode 100644
index 0000000..17c09ed
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/parsers/DimensionsParser.scala
@@ -0,0 +1,30 @@
+package xyz.driver.pdsuicommon.parsers
+
+import scala.util.{Failure, Success, Try}
+
+class Dimensions(private val xs: Set[String] = Set.empty) {
+ def contains(x: String): Boolean = xs.isEmpty || xs.contains(x)
+}
+
+object DimensionsParser {
+
+ @deprecated("play-akka transition", "0")
+ def tryParse(query: Map[String, Seq[String]]): Try[Dimensions] =
+ tryParse(query.toSeq.flatMap {
+ case (key, values) =>
+ values.map(value => key -> value)
+ })
+
+ def tryParse(query: Seq[(String, String)]): Try[Dimensions] = {
+ query.collect { case ("dimensions", value) => value } match {
+ case Nil => Success(new Dimensions())
+
+ case x +: Nil =>
+ val raw: Set[String] = x.split(",").view.map(_.trim).filter(_.nonEmpty).to[Set]
+ Success(new Dimensions(raw))
+
+ case xs =>
+ Failure(new IllegalArgumentException(s"Dimensions are specified ${xs.size} times"))
+ }
+ }
+}
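
Behaviour sketch (not part of this commit), based on the code above; the dimension names are examples only.

    import xyz.driver.pdsuicommon.parsers.DimensionsParser

    DimensionsParser.tryParse(Seq("dimensions" -> "id, name")) // Success: contains("id") and contains("name")
    DimensionsParser.tryParse(Seq.empty)                       // Success: empty Dimensions, contains(...) is always true
    DimensionsParser.tryParse(Seq("dimensions" -> "id", "dimensions" -> "name")) // Failure: specified twice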
diff --git a/src/main/scala/xyz/driver/pdsuicommon/parsers/PaginationParser.scala b/src/main/scala/xyz/driver/pdsuicommon/parsers/PaginationParser.scala
new file mode 100644
index 0000000..b59b1a5
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/parsers/PaginationParser.scala
@@ -0,0 +1,29 @@
+package xyz.driver.pdsuicommon.parsers
+
+import xyz.driver.pdsuicommon.db._
+import scala.util._
+
+object PaginationParser {
+
+ @deprecated("play-akka transition", "0")
+ def parse(query: Map[String, Seq[String]]): Try[Pagination] =
+ parse(query.toSeq.flatMap {
+ case (key, values) =>
+ values.map(value => key -> value)
+ })
+
+ def parse(query: Seq[(String, String)]): Try[Pagination] = {
+ val IntString = """(\d+)""".r
+ def validate(field: String, default: Int) = query.collectFirst { case (`field`, size) => size } match {
+ case Some(IntString(x)) if x.toInt > 0 => x.toInt
+ case Some(IntString(x)) => throw new ParseQueryArgException((field, s"must be greater than zero (found $x)"))
+ case Some(str) => throw new ParseQueryArgException((field, s"must be an integer (found $str)"))
+ case None => default
+ }
+
+ Try {
+ Pagination(validate("pageSize", Pagination.Default.pageSize),
+ validate("pageNumber", Pagination.Default.pageNumber))
+ }
+ }
+}
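
Behaviour sketch (not part of this commit), based on the code above:

    import xyz.driver.pdsuicommon.parsers.PaginationParser

    PaginationParser.parse(Seq("pageSize" -> "20", "pageNumber" -> "3")) // Success(Pagination(20, 3))
    PaginationParser.parse(Seq.empty)                                    // Success with Pagination.Default values
    PaginationParser.parse(Seq("pageSize" -> "0"))                       // Failure(ParseQueryArgException(...))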
diff --git a/src/main/scala/xyz/driver/pdsuicommon/parsers/ParseQueryArgException.scala b/src/main/scala/xyz/driver/pdsuicommon/parsers/ParseQueryArgException.scala
new file mode 100644
index 0000000..64b3d2e
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/parsers/ParseQueryArgException.scala
@@ -0,0 +1,3 @@
+package xyz.driver.pdsuicommon.parsers
+
+class ParseQueryArgException(val errors: (String, String)*) extends Exception(errors.mkString(","))
diff --git a/src/main/scala/xyz/driver/pdsuicommon/parsers/SearchFilterParser.scala b/src/main/scala/xyz/driver/pdsuicommon/parsers/SearchFilterParser.scala
new file mode 100644
index 0000000..8aff397
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/parsers/SearchFilterParser.scala
@@ -0,0 +1,152 @@
+package xyz.driver.pdsuicommon.parsers
+
+import xyz.driver.pdsuicommon.utils.Implicits.{toCharOps, toStringOps}
+import fastparse.all._
+import fastparse.core.Parsed
+import fastparse.parsers.Intrinsics.CharPred
+import xyz.driver.pdsuicommon.db.{SearchFilterBinaryOperation, SearchFilterExpr, SearchFilterNAryOperation}
+import xyz.driver.pdsuicommon.utils.Utils._
+
+import scala.util.Try
+
+@SuppressWarnings(Array("org.wartremover.warts.Product", "org.wartremover.warts.Serializable"))
+object SearchFilterParser {
+
+ private object BinaryAtomFromTuple {
+ def unapply(input: (SearchFilterExpr.Dimension, (String, String))): Option[SearchFilterExpr.Atom.Binary] = {
+ val (dimensionName, (strOperation, value)) = input
+ parseOperation(strOperation.toLowerCase).map { op =>
+ SearchFilterExpr.Atom.Binary(dimensionName, op, value.safeTrim)
+ }
+ }
+ }
+
+ private object NAryAtomFromTuple {
+ // Compiler warning: unchecked since it is eliminated by erasure, if we use Seq[String]
+ def unapply(input: (SearchFilterExpr.Dimension, (String, Seq[_]))): Option[SearchFilterExpr.Atom.NAry] = {
+ val (dimensionName, (strOperation, xs)) = input
+ if (strOperation.toLowerCase == "in") {
+ val values = xs.asInstanceOf[Seq[String]].map(_.safeTrim)
+ Some(SearchFilterExpr.Atom.NAry(dimensionName, SearchFilterNAryOperation.In, values))
+ } else {
+ None
+ }
+ }
+ }
+
+ private val operationsMapping = {
+ import xyz.driver.pdsuicommon.db.SearchFilterBinaryOperation._
+
+ Map[String, SearchFilterBinaryOperation](
+ "eq" -> Eq,
+ "noteq" -> NotEq,
+ "like" -> Like,
+ "gt" -> Gt,
+ "gteq" -> GtEq,
+ "lt" -> Lt,
+ "lteq" -> LtEq
+ )
+ }
+
+ private def parseOperation(x: String): Option[SearchFilterBinaryOperation] = operationsMapping.get(x)
+
+ private val whitespaceParser = P(CharPred(_.isSafeWhitespace))
+
+ val dimensionParser: Parser[SearchFilterExpr.Dimension] = {
+ val identParser = P(
+ CharPred(c => c.isLetterOrDigit)
+ .rep(min = 1)).!.map(s => SearchFilterExpr.Dimension(None, toSnakeCase(s)))
+ val pathParser = P(identParser.! ~ "." ~ identParser.!) map {
+ case (left, right) =>
+ SearchFilterExpr.Dimension(Some(toSnakeCase(left)), toSnakeCase(right))
+ }
+ P(pathParser | identParser)
+ }
+
+ private val commonOperatorParser: Parser[String] = {
+ P(IgnoreCase("eq") | IgnoreCase("like") | IgnoreCase("noteq")).!
+ }
+
+ private val numericOperatorParser: Parser[String] = {
+ P((IgnoreCase("gt") | IgnoreCase("lt")) ~ IgnoreCase("eq").?).!
+ }
+
+ private val naryOperatorParser: Parser[String] = P(IgnoreCase("in")).!
+
+ private val isPositiveParser: Parser[Boolean] = P(CharIn("-+").!.?).map {
+ case Some("-") => false
+ case _ => true
+ }
+
+ // Exclude Unicode "digits"
+ private val digitsParser: Parser[String] = P(CharIn('0' to '9').rep(min = 1).!)
+
+ // @TODO Make complex checking here
+ private val numberParser: Parser[String] = P(isPositiveParser ~ digitsParser.! ~ ("." ~ digitsParser).!.?).map {
+ case (false, intPart, Some(fracPart)) => s"-$intPart.${fracPart.tail}"
+ case (false, intPart, None) => s"-$intPart"
+ case (_, intPart, Some(fracPart)) => s"$intPart.${fracPart.tail}"
+ case (_, intPart, None) => s"$intPart"
+ }
+
+ private val nAryValueParser: Parser[String] = P(CharPred(_ != ',').rep(min = 1).!)
+
+ private val binaryAtomParser: Parser[SearchFilterExpr.Atom.Binary] = P(
+ dimensionParser ~ whitespaceParser ~ (
+ (commonOperatorParser.! ~/ whitespaceParser ~/ AnyChar.rep(min = 1).!)
+ | (numericOperatorParser.! ~/ whitespaceParser ~/ numberParser.!)
+ ) ~ End
+ ).map {
+ case BinaryAtomFromTuple(atom) => atom
+ }
+
+ private val nAryAtomParser: Parser[SearchFilterExpr.Atom.NAry] = P(
+ dimensionParser ~ whitespaceParser ~ (
+ naryOperatorParser ~/ whitespaceParser ~/ nAryValueParser.!.rep(min = 1, sep = ",")
+ ) ~ End
+ ).map {
+ case NAryAtomFromTuple(atom) => atom
+ }
+
+ private val atomParser: Parser[SearchFilterExpr.Atom] = P(binaryAtomParser | nAryAtomParser)
+
+ @deprecated("play-akka transition", "0")
+ def parse(query: Map[String, Seq[String]]): Try[SearchFilterExpr] =
+ parse(query.toSeq.flatMap {
+ case (key, values) =>
+ values.map(value => key -> value)
+ })
+
+ def parse(query: Seq[(String, String)]): Try[SearchFilterExpr] = Try {
+ query.toList.collect { case ("filters", value) => value } match {
+ case Nil => SearchFilterExpr.Empty
+
+ case head :: Nil =>
+ atomParser.parse(head) match {
+ case Parsed.Success(x, _) => x
+ case e: Parsed.Failure => throw new ParseQueryArgException("filters" -> formatFailure(1, e))
+ }
+
+ case xs =>
+ val parsed = xs.map(x => atomParser.parse(x))
+ val failures: Seq[String] = parsed.zipWithIndex.collect {
+ case (e: Parsed.Failure, index) => formatFailure(index, e)
+ }
+
+ if (failures.isEmpty) {
+ val filters = parsed.collect {
+ case Parsed.Success(x, _) => x
+ }
+
+ SearchFilterExpr.Intersection.create(filters: _*)
+ } else {
+ throw new ParseQueryArgException("filters" -> failures.mkString("; "))
+ }
+ }
+ }
+
+ private def formatFailure(sectionIndex: Int, e: Parsed.Failure): String = {
+ s"section $sectionIndex: ${ParseError.msg(e.extra.input, e.extra.traced.expected, e.index)}"
+ }
+
+}
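
Filter grammar sketch (not part of this commit): each "filters" value is "<dimension> <operator> <value>" or "<dimension> in <v1>,<v2>,..."; the dimension names below are examples only.

    import xyz.driver.pdsuicommon.parsers.SearchFilterParser

    SearchFilterParser.parse(Seq("filters" -> "status eq New"))              // binary atom (eq/noteq/like)
    SearchFilterParser.parse(Seq("filters" -> "previousTrialId gteq 42"))    // numeric comparison (gt/gteq/lt/lteq)
    SearchFilterParser.parse(Seq("filters" -> "condition in Breast,Lung"))   // n-ary "in"
    SearchFilterParser.parse(Seq("filters" -> "status eq New",
                                 "filters" -> "isUpdated eq true"))          // several atoms are intersected (AND)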
diff --git a/src/main/scala/xyz/driver/pdsuicommon/parsers/SortingParser.scala b/src/main/scala/xyz/driver/pdsuicommon/parsers/SortingParser.scala
new file mode 100644
index 0000000..4bfc669
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/parsers/SortingParser.scala
@@ -0,0 +1,64 @@
+package xyz.driver.pdsuicommon.parsers
+
+import xyz.driver.pdsuicommon.db.{Sorting, SortingOrder}
+import fastparse.all._
+import fastparse.core.Parsed
+import xyz.driver.pdsuicommon.utils.Utils._
+
+import scala.util.Try
+
+object SortingParser {
+
+ private val sortingOrderParser: Parser[SortingOrder] = P("-".!.?).map {
+ case Some(_) => SortingOrder.Descending
+ case None => SortingOrder.Ascending
+ }
+
+ private def dimensionSortingParser(validDimensions: Seq[String]): Parser[Sorting.Dimension] = {
+ P(sortingOrderParser ~ StringIn(validDimensions: _*).!).map {
+ case (sortingOrder, field) =>
+ val prefixedFields = field.split("\\.", 2)
+ prefixedFields.size match {
+ case 1 => Sorting.Dimension(None, toSnakeCase(field), sortingOrder)
+ case 2 =>
+ Sorting.Dimension(Some(prefixedFields.head).map(toSnakeCase),
+ toSnakeCase(prefixedFields.last),
+ sortingOrder)
+ }
+ }
+ }
+
+ private def sequentialSortingParser(validDimensions: Seq[String]): Parser[Sorting.Sequential] = {
+ P(dimensionSortingParser(validDimensions).rep(min = 1, sep = ",") ~ End).map { dimensions =>
+ Sorting.Sequential(dimensions)
+ }
+ }
+
+ @deprecated("play-akka transition", "0")
+ def parse(validDimensions: Set[String], query: Map[String, Seq[String]]): Try[Sorting] =
+ parse(validDimensions, query.toSeq.flatMap {
+ case (key, values) =>
+ values.map(value => key -> value)
+ })
+
+ def parse(validDimensions: Set[String], query: Seq[(String, String)]): Try[Sorting] = Try {
+ query.toList.collect { case ("sort", value) => value } match {
+ case Nil => Sorting.Sequential(Seq.empty)
+
+ case rawSorting :: Nil =>
+ val parser = sequentialSortingParser(validDimensions.toSeq)
+ parser.parse(rawSorting) match {
+ case Parsed.Success(x, _) => x
+ case e: Parsed.Failure =>
+ throw new ParseQueryArgException("sort" -> formatFailure(e))
+ }
+
+ case _ => throw new ParseQueryArgException("sort" -> "multiple sections are not allowed")
+ }
+ }
+
+ private def formatFailure(e: Parsed.Failure): String = {
+ ParseError.msg(e.extra.input, e.extra.traced.expected, e.index)
+ }
+
+}
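
Sorting grammar sketch (not part of this commit): a single "sort" parameter with comma-separated dimensions and a "-" prefix for descending order; the dimension names below are examples only.

    import xyz.driver.pdsuicommon.parsers.SortingParser

    SortingParser.parse(Set("lastUpdate", "previousStatus"), Seq("sort" -> "-lastUpdate,previousStatus"))
    // Success(Sorting.Sequential(Seq(
    //   Sorting.Dimension(None, "last_update", SortingOrder.Descending),
    //   Sorting.Dimension(None, "previous_status", SortingOrder.Ascending))))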
diff --git a/src/main/scala/xyz/driver/pdsuicommon/utils/CustomSwaggerJsonFormats.scala b/src/main/scala/xyz/driver/pdsuicommon/utils/CustomSwaggerJsonFormats.scala
new file mode 100644
index 0000000..c1a2c7c
--- /dev/null
+++ b/src/main/scala/xyz/driver/pdsuicommon/utils/CustomSwaggerJsonFormats.scala
@@ -0,0 +1,55 @@
+package xyz.driver.pdsuicommon.utils
+
+import java.time.{LocalDate, LocalDateTime}
+
+import io.swagger.models.properties.Property
+import spray.json.JsValue
+import xyz.driver.pdsuicommon.domain.{LongId, StringId, UuidId}
+import xyz.driver.pdsuidomain.entities._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.arm._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.criterion._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.intervention._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.hypothesis._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.studydesign._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.trial._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.trialhistory._
+import xyz.driver.pdsuidomain.formats.json.sprayformats.trialissue._
+import xyz.driver.core.swagger.CustomSwaggerJsonConverter._
+import xyz.driver.pdsuidomain.services.CriterionService.RichCriterion
+
+object CustomSwaggerJsonFormats {
+
+ val customCommonProperties = Map[Class[_], Property](
+ classOf[LocalDateTime] -> stringProperty(example = Some("2010-12-31'T'18:59:59Z")),
+ classOf[LocalDate] -> stringProperty(example = Some("2010-12-31")),
+ classOf[UuidId[_]] -> stringProperty(example = Some("370b0450-35cb-4aab-ba74-0145be75add5")),
+ classOf[StringId[_]] -> stringProperty(),
+ classOf[LongId[_]] -> stringProperty()
+ )
+ val customTrialCurationProperties = Map[Class[_], Property](
+ classOf[Trial.Status] -> stringProperty(),
+ classOf[Trial.Condition] -> stringProperty(),
+ classOf[TrialHistory.Action] -> stringProperty(),
+ classOf[TrialHistory.State] -> stringProperty()
+ ) ++ customCommonProperties
+
+ val customTrialCurationObjectsExamples = Map[Class[_], JsValue](
+ classOf[Trial] -> trialWriter.write(xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextTrial()),
+ classOf[Arm] -> armFormat.write(xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextArm()),
+ classOf[TrialHistory] -> trialHistoryFormat.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextTrialHistory()),
+ classOf[TrialIssue] -> trialIssueWriter.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextTrialIssue()),
+ classOf[RichCriterion] -> richCriterionFormat.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextRichCriterion()),
+ classOf[InterventionWithArms] -> interventionWriter.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextInterventionWithArms()),
+ classOf[InterventionType] -> interventionTypeFormat.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextInterventionType()),
+ classOf[Hypothesis] -> hypothesisFormat.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextHypothesis()),
+ classOf[StudyDesign] -> studyDesignFormat.write(
+ xyz.driver.pdsuidomain.fakes.entities.trialcuration.nextStudyDesign())
+ )
+
+}
diff --git a/src/main/scala/xyz/driver/pdsuicommon/utils/Utils.scala b/src/main/scala/xyz/driver/pdsuicommon/utils/Utils.scala
index 02c9e28..63b0572 100644
--- a/src/main/scala/xyz/driver/pdsuicommon/utils/Utils.scala
+++ b/src/main/scala/xyz/driver/pdsuicommon/utils/Utils.scala
@@ -1,6 +1,7 @@
package xyz.driver.pdsuicommon.utils
import java.time.LocalDateTime
+import java.util.regex.{Matcher, Pattern}
object Utils {
@@ -20,4 +21,23 @@ object Utils {
fullClassName.substring(fullClassName.lastIndexOf("$") + 1)
}
}
+
+ def toSnakeCase(str: String): String =
+ str
+ .replaceAll("([A-Z]+)([A-Z][a-z])", "$1_$2")
+ .replaceAll("([a-z\\d])([A-Z])", "$1_$2")
+ .toLowerCase
+
+ def toCamelCase(str: String): String = {
+ val sb = new StringBuffer()
+ def loop(m: Matcher): Unit = if (m.find()) {
+ m.appendReplacement(sb, m.group(1).toUpperCase())
+ loop(m)
+ }
+ val m: Matcher = Pattern.compile("_(.)").matcher(str)
+ loop(m)
+ m.appendTail(sb)
+ sb.toString
+ }
+
}
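
Behaviour sketch (not part of this commit) for the two new helpers, derived from the regexes above:

    Utils.toSnakeCase("previousTrialId")   // "previous_trial_id"
    Utils.toSnakeCase("HTMLParser")        // "html_parser"
    Utils.toCamelCase("previous_trial_id") // "previousTrialId"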