about summary refs log tree commit diff
diff options
context:
space:
mode:
authorTim Gushue <tim@drivergrp.com>2016-11-15 15:07:38 -0800
committerTim Gushue <tim@drivergrp.com>2016-11-15 15:07:38 -0800
commitb73041763e7e3ecb0e046c7c5ce117f73ba36f3c (patch)
treee5d01953d6508966e8715cd9bade3c03724260f9
parent929aa3f88643f4e4746e6016f9ad962b727bce2c (diff)
downloadslick-codegen-plugin-b73041763e7e3ecb0e046c7c5ce117f73ba36f3c.tar.gz
slick-codegen-plugin-b73041763e7e3ecb0e046c7c5ce117f73ba36f3c.tar.bz2
slick-codegen-plugin-b73041763e7e3ecb0e046c7c5ce117f73ba36f3c.zip
TM-251 refactor(codegen) Wrapped the whitelist in an option
-rw-r--r--  src/main/scala/CodegenPlugin.scala | 2
-rw-r--r--  src/main/scala/NamespacedCodegen.scala | 22
2 files changed, 13 insertions, 11 deletions
diff --git a/src/main/scala/CodegenPlugin.scala b/src/main/scala/CodegenPlugin.scala
index 366ef19..2b5ceb5 100644
--- a/src/main/scala/CodegenPlugin.scala
+++ b/src/main/scala/CodegenPlugin.scala
@@ -19,7 +19,7 @@ object CodegenPlugin extends AutoPlugin {
Generator.run(
new java.net.URI(codegenURI.value),
codegenPackage.value,
- codegenSchemaWhitelist.value,
+ Option(codegenSchemaWhitelist.value),
codegenOutputPath.value,
codegenForeignKeys.value
)
diff --git a/src/main/scala/NamespacedCodegen.scala b/src/main/scala/NamespacedCodegen.scala
index b435830..4487361 100644
--- a/src/main/scala/NamespacedCodegen.scala
+++ b/src/main/scala/NamespacedCodegen.scala
@@ -16,14 +16,14 @@ import slick.model.{Column, Model, Table}
object Generator {
- def run(uri: URI, pkg: String, schemaNames: List[String], outputPath: String, manualForeignKeys: Map[(String, String), (String, String)]) = {
+ def run(uri: URI, pkg: String, schemaNames: Option[List[String]], outputPath: String, manualForeignKeys: Map[(String, String), (String, String)]) = {
val dc: DatabaseConfig[JdbcProfile] = DatabaseConfig.forURI[JdbcProfile](uri)
- val parsedSchemas: Map[String, List[String]] = SchemaParser.parse(schemaNames)
- val dbModel: Model = Await.result(dc.db.run(SchemaParser.createModel(dc.driver, parsedSchemas)), Duration.Inf)
+ val parsedSchemasOpt: Option[Map[String, List[String]]] = SchemaParser.parse(schemaNames)
+ val dbModel: Model = Await.result(dc.db.run(SchemaParser.createModel(dc.driver, parsedSchemasOpt)), Duration.Inf)
val generator = new Generator(uri, pkg, dbModel, outputPath, manualForeignKeys)
val generatedCode = generator.code
- parsedSchemas.keys.map(schemaName => FileHelpers.schemaOutputPath(outputPath, schemaName))
+ parsedSchemasOpt.getOrElse(Map()).keys.map(schemaName => FileHelpers.schemaOutputPath(outputPath, schemaName))
}
}
@@ -209,17 +209,19 @@ object SchemaParser {
tcMappings.map{case (from, to) => ({getTableColumn(from); from}, getTableColumn(to))}
}
- def parse(schemaTableNames: List[String]): Map[String, List[String]] =
- schemaTableNames.map(_.split('.'))
+ def parse(schemaTableNamesOpt: Option[List[String]]): Option[Map[String, List[String]]] =
+ schemaTableNamesOpt.map( tNames =>
+ tNames.map(_.split('.'))
.groupBy(_.head)
.mapValues(_.flatMap(_.tail))
+ )
- def createModel(jdbcProfile: JdbcProfile, mappedSchemas: Map[String, List[String]]): DBIO[Model] = {
+ def createModel(jdbcProfile: JdbcProfile, mappedSchemasOpt: Option[Map[String, List[String]]]): DBIO[Model] = {
val allTables: DBIO[Vector[MTable]] = MTable.getTables
- if (mappedSchemas.isEmpty) {
- jdbcProfile.createModel(Some(allTables))
- } else {
+ if (mappedSchemasOpt.isEmpty) jdbcProfile.createModel(Some(allTables))
+ else {
+ val mappedSchemas = mappedSchemasOpt.get
val filteredTables: DBIO[Vector[MTable]] = allTables.map(
(tables: Vector[MTable]) => tables.filter(table =>
table.name.schema.flatMap(mappedSchemas.get).exists(ts =>