com.zaxxer.hikari.HikariDataSource Scala Examples

The following examples show how to use com.zaxxer.hikari.HikariDataSource. They are drawn from open-source Scala projects; the source file and originating project are noted above each example.
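Before the project examples, here is a minimal sketch of the lifecycle they all share: build a HikariConfig, construct the pool, borrow and return connections, and close the pool on shutdown. The H2 in-memory URL and credentials below are placeholders, not taken from any project on this page.

import com.zaxxer.hikari.{HikariConfig, HikariDataSource}

object MinimalHikariExample extends App {
  val config = new HikariConfig()
  config.setJdbcUrl("jdbc:h2:mem:example;DB_CLOSE_DELAY=-1") // placeholder in-memory database
  config.setUsername("sa")
  config.setPassword("")
  config.setMaximumPoolSize(2)

  val ds = new HikariDataSource(config)
  val conn = ds.getConnection
  try {
    val rs = conn.createStatement().executeQuery("SELECT 1")
    rs.next()
    println(rs.getInt(1)) // prints 1
  } finally {
    conn.close() // returns the proxy connection to the pool
  }
  ds.close() // shuts the pool itself down
}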
Example 1
Source File: PostgresDDLGeneratorTest.scala    From maha   with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core.ddl

import java.util.UUID

import com.yahoo.maha.core._
import com.yahoo.maha.jdbc.JdbcConnection
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}



class PostgresDDLGeneratorTest extends BaseDDLGeneratorTest {

  val postgresDDLGenerator = new PostgresDDLGenerator

  private var dataSource: HikariDataSource = null
  private var jdbcConnection: JdbcConnection = null

  override protected def beforeAll(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl("jdbc:h2:mem:" + UUID.randomUUID().toString.replace("-",
      "") + ";MODE=PostgreSQL;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("sa")
    config.setMaximumPoolSize(1)
    dataSource = new HikariDataSource(config)
    jdbcConnection = new JdbcConnection(dataSource)
  }

  override protected def afterAll(): Unit = {
    dataSource.close()
  }

  def removePartitionBy(ddl : String) : String = {
    val partitionString1 = """PARTITION BY LIST(stats_date)
                             |( PARTITION p_default VALUES(TO_DATE('01-JAN-1970 00:00:00', 'DD-MON-YYYY HH24:MI:SS'))
                             |)
                             |;""".stripMargin
    ddl.replace(partitionString1, ";")
  }

  test("test ddl for fact") {
    val postgresFacts = pubFact.factList.filter(f => f.engine.equals(PostgresEngine))
    val ddlMap : Map[String, String] = postgresFacts.map(fact => fact.name -> removePartitionBy(postgresDDLGenerator.toDDL(fact))).toMap
    assert(ddlMap.keySet.contains("pg_ad_k_stats"),
      "Postgres DDL Generator should generate ddl for postgres table cb_ad_k_stats")
    assert(!ddlMap.keySet.contains("ad_k_stats"),
      "Postgres DDL Generator should not generate ddl for hive table ad_k_stats")

    ddlMap.foreach {
      case(fact, ddl) =>
        val result = jdbcConnection.execute(ddl)
        assert(result.isSuccess && result.toOption.get === false, result.failed.toString)
    }
  }

  test("test ddl for dimension") {
    val postgresDims = pubDim.dimList.filter(d => d.engine.equals(PostgresEngine))
    val ddlMap : Map[String, String] = postgresDims.map(dim => dim.name -> removePartitionBy(postgresDDLGenerator.toDDL(dim))).toMap
    assert(ddlMap.keySet.contains("postgres_advertiser"),
      "Postgres DDL Generator should generate ddl for postgres table postgres_advertiser")
    assert(!ddlMap.keySet.contains("cache_advertiser"),
      "Postgres DDL Generator should not generate ddl for hive table cache_advertiser")

    ddlMap.foreach {
      case(dim, ddl) =>
        val result = jdbcConnection.execute(ddl)
        assert(result.isSuccess && result.toOption.get === false, result.failed.toString)
    }
  }



} 
Example 2
Source File: GenericConnectionPool.scala    From airframe   with Apache License 2.0
package wvlet.airframe.jdbc
import java.sql.Connection

import com.zaxxer.hikari.{HikariConfig, HikariDataSource}


class GenericConnectionPool(val config: DbConfig) extends ConnectionPool {

  protected val dataSource: HikariDataSource = {
    val connectionPoolConfig = new HikariConfig

    // Set default JDBC parameters
    connectionPoolConfig.setMaximumPoolSize(config.connectionPool.maxPoolSize) // HikariCP default = 10
    connectionPoolConfig.setAutoCommit(config.connectionPool.autoCommit)       // Enable auto-commit

    connectionPoolConfig.setDriverClassName(config.jdbcDriverName)
    config.user.foreach(u => connectionPoolConfig.setUsername(u))
    config.password.foreach(p => connectionPoolConfig.setPassword(p))

    config.`type` match {
      case "postgresql" =>
        if (config.postgres.useSSL) {
          connectionPoolConfig.addDataSourceProperty("ssl", "true")
          connectionPoolConfig.addDataSourceProperty("sslfactory", config.postgres.sslFactory)
        }
      case _ => // no driver-specific data source properties for other databases
    }

    if (config.host.isEmpty) {
      throw new IllegalArgumentException(s"missing jdbc host: ${config}")
    }

    connectionPoolConfig.setJdbcUrl(config.jdbcUrl)

    info(s"jdbc URL: ${connectionPoolConfig.getJdbcUrl}")
    new HikariDataSource(config.connectionPool.hikariConfig(connectionPoolConfig))
  }

  override def withConnection[U](body: Connection => U): U = {
    val conn = dataSource.getConnection
    try {
      body(conn)
    } finally {
      // Return the connection to the pool
      conn.close()
    }
  }

  override def stop: Unit = {
    info(s"Closing the connection pool for ${config.jdbcUrl}")
    dataSource.close()
  }
} 
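withConnection above is the loan pattern: close() on a connection borrowed from a HikariDataSource returns the pooled proxy rather than closing the physical connection. A usage sketch (constructing the airframe DbConfig is project-specific and assumed here):

// assuming a configured pool: val pool = new GenericConnectionPool(dbConfig)
pool.withConnection { conn =>
  val stmt = conn.createStatement()
  try stmt.execute("select 1")
  finally stmt.close()
}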
Example 3
Source File: MysqlHikariCpConnection.scala    From estuary   with Apache License 2.0
package com.neighborhood.aka.laplace.estuary.core.source

import com.zaxxer.hikari.{HikariConfig, HikariDataSource}

import scala.util.{Success, Try}

// NOTE: the class declaration was elided in this excerpt. The wrapper below,
// owning the pool and the insertSql helper used in main, is an assumed
// reconstruction rather than the original source.
class MysqlHikariCpConnection(config: HikariConfig) {

  private lazy val ds: HikariDataSource = new HikariDataSource(config)

  def insertSql(sql: String): Try[Int] = Try {
    val conn = ds.getConnection()
    try conn.createStatement().executeUpdate(sql)
    finally conn.close()
  }

  def insertBatchSql(sqls: List[String]): Try[List[Int]] = if (sqls.isEmpty) Success(List.empty) else Try {

    val conn = ds.getConnection()
    try {
      conn.setAutoCommit(false)
      val statement = conn.createStatement()
      sqls.foreach(sql => statement.addBatch(sql))
      val re = statement.executeBatch()
      conn.commit()
      statement.clearBatch()
      re.toList
    } catch {
      case e: Throwable =>
        e.printStackTrace()
        throw e
    } finally {
      conn.close()
    }


  }

  def getConnection: java.sql.Connection = ds.getConnection()
}

object MysqlHikariCpConnection {
  def main(args: Array[String]): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl("jdbc:mysql://10.10.50.195")
    config.setUsername("root")
    config.setPassword("puhui123!")

    val cp = new MysqlHikariCpConnection(config)
    val a = cp.insertSql("insert into cif_monitor.test1234(name) VALUES('2')")

  }
} 
Example 4
Source File: DatabaseConnection.scala    From cave   with MIT License
package com.cave.metrics.data.postgresql

import com.cave.metrics.data.AwsConfig

import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.apache.commons.logging.LogFactory

import scala.slick.driver.PostgresDriver.simple._


abstract class DatabaseConnection(awsConfig: AwsConfig) {

  val log = LogFactory.getLog(classOf[DatabaseConnection])
  val ds = new HikariDataSource(getDatabaseConfig)
  val db = {

    val database = Database.forDataSource(ds)
    log.debug( s"""
    Db connection initialized.
      driver: ${awsConfig.rdsJdbcDatabaseClass}
      user:   ${awsConfig.rdsJdbcDatabaseUser}
      pass:   [REDACTED]
    """.stripMargin)

    ds.getConnection.close()

    database
  }

  def closeDbConnection(): Unit = ds.close()

  private[this] def getDatabaseConfig: HikariConfig = {
    val config = new HikariConfig
    config.setMaximumPoolSize(awsConfig.rdsJdbcDatabasePoolSize)

    val className = awsConfig.rdsJdbcDatabaseClass
    config.setDataSourceClassName(className)

    if (className.contains("postgres")) {
      config.addDataSourceProperty("serverName", awsConfig.rdsJdbcDatabaseServer)
      config.addDataSourceProperty("databaseName", awsConfig.rdsJdbcDatabaseName)
      config.addDataSourceProperty("portNumber", awsConfig.rdsJdbcDatabasePort)
    } else {
      config.addDataSourceProperty("url", awsConfig.rdsJdbcDatabaseUrl)
    }

    config.addDataSourceProperty("user", awsConfig.rdsJdbcDatabaseUser)
    config.addDataSourceProperty("password", awsConfig.rdsJdbcDatabasePassword)

    config
  }
} 
Example 5
Source File: SqlComponent.scala    From fusion-data   with Apache License 2.0
package mass.db.slick

import akka.Done
import akka.actor.ActorSystem
import com.typesafe.scalalogging.StrictLogging
import com.zaxxer.hikari.HikariDataSource
import fusion.core.extension.FusionCore
import fusion.jdbc.{ FusionJdbc, JdbcTemplate }
import javax.inject.{ Inject, Singleton }
import slick.basic.DatabasePublisher

import scala.concurrent.Future
import scala.util.Failure


@Singleton
class SqlComponent @Inject() (val profile: PgProfile, classicSystem: ActorSystem) extends StrictLogging {
  import profile.api._
  val dataSource: HikariDataSource = FusionJdbc(classicSystem).component
  val db = databaseForDataSource(dataSource)
  val jdbcTemplate: JdbcTemplate = JdbcTemplate(dataSource)
  FusionCore(classicSystem).shutdowns.beforeActorSystemTerminate("StopSqlManager") { () =>
    Future {
      db.close()
      Done
    }(classicSystem.dispatcher)
  }

  def runTransaction[R, E <: Effect.Write](a: DBIOAction[R, NoStream, E]): Future[R] =
    wrapperLogging(db.run(a.transactionally))

  def run[R](a: DBIOAction[R, NoStream, Nothing]): Future[R] = wrapperLogging(db.run(a))

  def stream[T](a: DBIOAction[_, Streaming[T], Nothing]): DatabasePublisher[T] = db.stream(a)

  def streamTransaction[T, E <: Effect.Write](a: DBIOAction[_, Streaming[T], E]): DatabasePublisher[T] =
    db.stream(a.transactionally)

  @inline private def wrapperLogging[T](f: Future[T]): Future[T] =
    f.andThen { case Failure(e) => logger.warn(s"Slick run error [${e.toString}].") }(db.ioExecutionContext)

  override def toString = s"SqlSystem($dataSource, $jdbcTemplate, $db)"
} 
Example 6
Source File: TestSchema.scala    From fusion-data   with Apache License 2.0
package mass.connector.sql

import com.zaxxer.hikari.HikariDataSource
import fusion.jdbc.util.JdbcUtils

object TestSchema {
  lazy val postgres: HikariDataSource = JdbcUtils.createHikariDataSource(
    "poolName" -> "postgres",
    "maximumPoolSize" -> "2",
    "dataSourceClassName" -> "org.postgresql.ds.PGSimpleDataSource",
    "dataSource.serverName" -> "localhost",
    "dataSource.portNumber" -> "5432",
    "dataSource.databaseName" -> "massdata",
    "dataSource.user" -> "massdata",
    "dataSource.password" -> "massdata")

  lazy val mysql: HikariDataSource = JdbcUtils.createHikariDataSource(
    "poolName" -> "mysql",
    "maximumPoolSize" -> "2",
    "jdbcUrl" -> "jdbc:mysql://localhost:3306/massdata?useSSL=false&characterEncoding=utf8",
    "username" -> "massdata",
    "password" -> "Massdata.2018",
    "dataSource.cachePrepStmts" -> "true",
    "dataSource.prepStmtCacheSize" -> "250",
    "dataSource.prepStmtCacheSqlLimit" -> "2048")
} 
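Note the two configuration styles in TestSchema: the postgres pool sets dataSourceClassName and forwards dataSource.*-prefixed entries (serverName, portNumber, user, password) to the underlying DataSource, while the mysql pool goes through a plain jdbcUrl with username/password. Both are standard HikariCP conventions; the dataSource. prefix is HikariCP's mechanism for passing arbitrary properties through to the JDBC DataSource or driver.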
Example 7
Source File: PostgresSchema.scala    From fusion-data   with Apache License 2.0
package mass.connector.sql.schema

import com.zaxxer.hikari.HikariDataSource
import fusion.jdbc.JdbcTemplate
import fusion.jdbc.util.JdbcUtils
import helloscala.common.util.AsBoolean

import scala.collection.immutable
import scala.util.control.NonFatal


class PostgresSchema private (jdbcTemplate: JdbcTemplate) extends SQLSchema {
  import mass.connector.sql.schema.PostgresSchema._

  override def listTable(schemaName: String): immutable.Seq[TableInfo] =
    jdbcTemplate.listForObject(
      s"select * from information_schema.tables where table_schema = ?",
      List(schemaName),
      rs => tableInfo(jdbcTemplate, JdbcUtils.resultSetToMap(rs)))

  override def listColumn(tableName: String, schemaName: String): immutable.Seq[ColumnInfo] =
    jdbcTemplate.listForObject(
      "select * from information_schema.columns where table_schema = ? and table_name = ?",
      List(schemaName, tableName),
      rs => columnInfo(JdbcUtils.resultSetToMap(rs)))
}

object PostgresSchema {
  def listColumn(jdbcTemplate: JdbcTemplate, tableName: String, schemaName: String): immutable.Seq[ColumnInfo] =
    jdbcTemplate.listForObject(
      "select * from information_schema.columns where table_schema = '?' and table_name = '?'",
      List(schemaName, tableName),
      rs => columnInfo(JdbcUtils.resultSetToMap(rs)))

  def tableInfo(jdbcTemplate: JdbcTemplate, _data: Map[String, AnyRef]): TableInfo =
    TableInfo(
      _data("table_schema").toString,
      _data("table_name").toString,
      _data("table_type").toString,
      AsBoolean.unapply(_data("is_insertable_into")).getOrElse(true))

  def columnInfo(_data: Map[String, AnyRef]): ColumnInfo = {
    val helper = new InfoHelper(_data) {}
    try {
      ColumnInfo(
        _data("table_schema").toString,
        _data("table_name").toString,
        _data("column_name").toString,
        helper.asInt('ordinalPosition).get,
        helper.asString("column_default"),
        helper.asBoolean('isNullable).getOrElse(true),
        helper.asString("data_type").getOrElse(""),
        helper.asInt('characterMaximumLength),
        helper.asInt('characterOctetLength),
        helper.asInt('numericPrecision),
        helper.asInt('numericPrecisionRadix),
        helper.asInt('numericScale),
        helper.asInt('datetimePrecision),
        helper.asBoolean('isUpdatable))
    } catch {
      case NonFatal(e) =>
        println(_data("data_type"))
        println(_data)
        e.printStackTrace()
        throw e
    }
  }

  def apply(dataSource: HikariDataSource): PostgresSchema =
    apply(JdbcTemplate(dataSource, true, true, false))

  def apply(jdbcTemplate: JdbcTemplate): PostgresSchema =
    new PostgresSchema(jdbcTemplate)
} 
Example 8
Source File: SQLConnector.scala    From fusion-data   with Apache License 2.0
package mass.connector.sql

import com.zaxxer.hikari.HikariDataSource
import fusion.jdbc.JdbcTemplate
import fusion.jdbc.util.JdbcUtils
import mass.connector.ConnectorType.ConnectorType
import mass.connector.{ Connector, ConnectorSetting, ConnectorType }


final case class SQLConnector(name: String, setting: ConnectorSetting) extends Connector {
  override def `type`: ConnectorType = ConnectorType.JDBC

  lazy val dataSource: HikariDataSource = JdbcUtils.createHikariDataSource(configuration)
  lazy val jdbcTemplate = JdbcTemplate(
    dataSource,
    configuration.getOrElse("use-transaction", true),
    configuration.getOrElse("ignore-warnings", true),
    configuration.getOrElse("allow-print-log", false))

  override def close(): Unit = dataSource.close()
} 
Example 9
Source File: FlywayUtils.scala    From gospeak   with Apache License 2.0
package gospeak.infra.services.storage.sql.utils

import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import gospeak.core.services.storage.DbConf
import org.flywaydb.core.Flyway

object FlywayUtils {
  def build(conf: DbConf): Flyway = {
    val config = new HikariConfig()
    conf match {
      case c: DbConf.H2 =>
        config.setDriverClassName("org.h2.Driver")
        config.setJdbcUrl(c.url)
      case c: DbConf.PostgreSQL =>
        config.setDriverClassName("org.postgresql.Driver")
        config.setJdbcUrl(c.url)
        config.setUsername(c.user)
        config.setPassword(c.pass.decode)
    }
    Flyway.configure()
      .dataSource(new HikariDataSource(config))
      .locations("classpath:sql")
      .load()
  }
} 
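A usage sketch; the exact shape of the DbConf.H2 constructor is assumed here for illustration:

// assumed: DbConf.H2 wraps a JDBC URL
val flyway = FlywayUtils.build(DbConf.H2("jdbc:h2:mem:gospeak;DB_CLOSE_DELAY=-1"))
flyway.migrate() // applies the migrations found under classpath:sql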
Example 10
Source File: HikariDataSourceTransactor.scala    From mist   with Apache License 2.0
package io.hydrosphere.mist.master.store

import java.util.concurrent.{ExecutorService, Executors, Future, TimeUnit}

import cats.arrow.FunctionK
import cats.effect._
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie.util.transactor.Transactor
import doobie.util.transactor.Transactor.Aux
import io.hydrosphere.mist.utils.Logger

import scala.concurrent.ExecutionContext



  // NOTE: this excerpt elides the class declaration and transactor setup;
  // `ds`, `ce`, `te`, `awaitShutdown`, `logger`, and `shutdownExecutorService`
  // are members of the omitted surrounding class.
  def shutdown(): Unit = {
    if (!ds.isClosed) {
      logger.info("Closing Hikari data source")
      ds.close()
    } else {
      logger.warn("Hikari datasource had not been properly initialized before closing")
    }

    shutdownExecutorService(awaitShutdown, ce, "connections EC")
    shutdownExecutorService(awaitShutdown, te, "tx EC")
  }
} 
Example 11
Source File: MySqlConnector.scala    From vinyldns   with Apache License 2.0
package vinyldns.mysql

import cats.effect.IO
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.flywaydb.core.Flyway
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._

object MySqlConnector {

  private val logger = LoggerFactory.getLogger("MySqlConnector")

  def runDBMigrations(config: MySqlConnectionConfig): IO[Unit] = {
    val migrationConnectionSettings = MySqlDataSourceSettings(
      "flywayConnectionPool",
      config.driver,
      config.migrationUrl,
      config.user,
      config.password,
      minimumIdle = Some(3)
    )

    getDataSource(migrationConnectionSettings).map { migrationDataSource =>
      logger.info("Running migrations to ready the databases")

      val migration = new Flyway()
      migration.setDataSource(migrationDataSource)
      // flyway changed the default schema table name in v5.0.0
      // this allows to revert to an old naming convention if needed
      config.migrationSchemaTable.foreach { tableName =>
        migration.setTable(tableName)
      }

      val placeholders = Map("dbName" -> config.name)
      migration.setPlaceholders(placeholders.asJava)
      migration.setSchemas(config.name)

      // Runs flyway migrations
      migration.migrate()
      logger.info("migrations complete")
    }
  }

  def getDataSource(settings: MySqlDataSourceSettings): IO[HikariDataSource] = IO {

    logger.error(s"Initializing data source with settings: $settings")

    val dsConfig = new HikariConfig()

    dsConfig.setPoolName(settings.poolName)
    dsConfig.setDriverClassName(settings.driver)
    dsConfig.setJdbcUrl(settings.url)
    dsConfig.setUsername(settings.user)
    dsConfig.setPassword(settings.password)

    settings.connectionTimeoutMillis.foreach(dsConfig.setConnectionTimeout)
    settings.idleTimeout.foreach(dsConfig.setIdleTimeout)
    settings.maximumPoolSize.foreach(dsConfig.setMaximumPoolSize)
    settings.maxLifetime.foreach(dsConfig.setMaxLifetime)
    settings.minimumIdle.foreach(dsConfig.setMinimumIdle)
    dsConfig.setRegisterMbeans(settings.registerMbeans)

    settings.mySqlProperties.foreach {
      case (k, v) => dsConfig.addDataSourceProperty(k, v)
    }

    new HikariDataSource(dsConfig)
  }
} 
Example 12
Source File: JdbcContextConfig.scala    From quill   with Apache License 2.0
package io.getquill

import com.typesafe.config.Config
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import java.util.Properties
import scala.util.control.NonFatal

case class JdbcContextConfig(config: Config) {

  def configProperties = {
    import scala.jdk.CollectionConverters._
    val p = new Properties
    for (entry <- config.entrySet.asScala)
      p.setProperty(entry.getKey, entry.getValue.unwrapped.toString)
    p
  }

  def dataSource =
    try
      new HikariDataSource(new HikariConfig(configProperties))
    catch {
      case NonFatal(ex) =>
        throw new IllegalStateException(s"Failed to load data source for config: '$config'", ex)
    }
} 
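For example, with a Typesafe Config block whose keys are HikariCP bean properties (the HOCON below is illustrative, not from the quill sources):

import com.typesafe.config.ConfigFactory
import io.getquill.JdbcContextConfig

val cfg = ConfigFactory.parseString(
  """|jdbcUrl = "jdbc:h2:mem:quill_example"
     |username = sa
     |maximumPoolSize = 4
     |""".stripMargin)

val ds = JdbcContextConfig(cfg).dataSource // a live HikariDataSource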
Example 13
Source File: JDBCJournal.scala    From zio-actors   with Apache License 2.0
package zio.actors.persistence.jdbc

import cats.effect.Blocker
import com.zaxxer.hikari.HikariDataSource
import doobie._
import doobie.hikari.HikariTransactor
import doobie.implicits._
import zio.{ IO, Promise, Runtime, Task, UIO, ZIO }
import zio.actors.ActorSystemUtils
import zio.actors.persistence.PersistenceId.PersistenceId
import zio.actors.persistence.journal.Journal
import zio.actors.persistence.jdbc.JDBCConfig.DbConfig
import zio.blocking.Blocking
import zio.interop.catz._

private[actors] final class JDBCJournal[Ev](tnx: Transactor[Task]) extends Journal[Ev] {

  override def persistEvent(persistenceId: PersistenceId, event: Ev): Task[Unit] =
    for {
      bytes <- ActorSystemUtils.objToByteArray(event)
      _     <- SqlEvents.persistEvent(persistenceId, bytes).run.transact(tnx)
    } yield ()

  override def getEvents(persistenceId: PersistenceId): Task[Seq[Ev]] =
    for {
      bytes  <- SqlEvents.getEventsById(persistenceId).to[Seq].transact(tnx)
      events <- IO.collectAll(bytes.map(ActorSystemUtils.objFromByteArray(_).map(_.asInstanceOf[Ev])))
    } yield events

}

private[actors] object JDBCJournal {

  private lazy val runtime           = Runtime.default
  private lazy val transactorPromise = runtime.unsafeRun(Promise.make[Exception, HikariTransactor[Task]])

  def getJournal[Ev](actorSystemName: String, configStr: String): Task[JDBCJournal[Ev]] =
    for {
      dbConfig <- JDBCConfig.getDbConfig(actorSystemName, configStr)
      tnx      <- getTransactor(dbConfig)
    } yield new JDBCJournal[Ev](tnx)

  private def makeTransactor(dbConfig: DbConfig): ZIO[Blocking, Throwable, HikariTransactor[Task]] =
    ZIO.runtime[Blocking].flatMap { implicit rt =>
      for {
        transactEC <- UIO(rt.environment.get.blockingExecutor.asEC)
        connectEC   = rt.platform.executor.asEC
        ds          = new HikariDataSource()
        _           = ds.setJdbcUrl(dbConfig.dbURL.value)
        _           = ds.setUsername(dbConfig.dbUser.value)
        _           = ds.setPassword(dbConfig.dbPass.value)
        transactor <- IO.effect(HikariTransactor.apply[Task](ds, connectEC, Blocker.liftExecutionContext(transactEC)))
      } yield transactor
    }

  private def getTransactor(dbConfig: DbConfig): Task[HikariTransactor[Task]] =
    transactorPromise.poll.flatMap {
      case Some(value) => value
      case None        =>
        for {
          newTnx <- makeTransactor(dbConfig).provideLayer(Blocking.live)
          _      <- transactorPromise.succeed(newTnx)
        } yield newTnx
    }

} 
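Note how getTransactor memoizes through transactorPromise: the first call builds the HikariTransactor and completes the promise, while every later call finds the completed promise via poll and reuses the same underlying connection pool.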
Example 14
Source File: Config.scala    From franklin   with Apache License 2.0
package com.azavea.franklin.database

import cats.effect._
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie.util.transactor.Transactor

import scala.util.Properties

object DatabaseConfig {
  val jdbcDriver: String = "org.postgresql.Driver"

  val jdbcNoDBUrl: String =
    Properties.envOrElse(
      "POSTGRES_URL",
      "jdbc:postgresql://localhost/"
    )

  val jdbcDBName: String =
    Properties.envOrElse("POSTGRES_NAME", "franklin")
  val jdbcUrl: String = jdbcNoDBUrl + jdbcDBName
  val dbUser: String  = Properties.envOrElse("POSTGRES_USER", "franklin")

  val dbPassword: String =
    Properties.envOrElse("POSTGRES_PASSWORD", "franklin")

  val dbStatementTimeout: String =
    Properties.envOrElse("POSTGRES_STATEMENT_TIMEOUT", "30000")

  val dbMaximumPoolSize: Int =
    Properties.envOrElse("POSTGRES_DB_POOL_SIZE", "5").toInt

  def nonHikariTransactor[F[_]: Async](databaseName: String)(implicit cs: ContextShift[F]) = {
    Transactor.fromDriverManager[F](
      "org.postgresql.Driver",
      jdbcNoDBUrl + databaseName,
      dbUser,
      dbPassword
    )
  }

  val hikariConfig = new HikariConfig()
  hikariConfig.setPoolName("franklin-pool")
  hikariConfig.setMaximumPoolSize(dbMaximumPoolSize)
  hikariConfig.setConnectionInitSql(
    s"SET statement_timeout = ${dbStatementTimeout};"
  )
  hikariConfig.setJdbcUrl(jdbcUrl)
  hikariConfig.setUsername(dbUser)
  hikariConfig.setPassword(dbPassword)
  hikariConfig.setDriverClassName(jdbcDriver)

  val hikariDS = new HikariDataSource(hikariConfig)
} 
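One detail worth calling out: setConnectionInitSql makes HikariCP run the given SQL once on each new physical connection before it enters the pool, so every pooled connection carries the statement_timeout for its lifetime.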
Example 15
Source File: FlywayMigrations.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store

import com.daml.logging.{ContextualizedLogger, LoggingContext}
import com.daml.platform.configuration.ServerRole
import com.daml.platform.store.FlywayMigrations._
import com.daml.platform.store.dao.HikariConnection
import com.daml.resources.ResourceOwner
import com.zaxxer.hikari.HikariDataSource
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.MigrationVersion
import org.flywaydb.core.api.configuration.FluentConfiguration

import scala.concurrent.duration.DurationInt
import scala.concurrent.{ExecutionContext, Future}

class FlywayMigrations(jdbcUrl: String)(implicit logCtx: LoggingContext) {
  private val logger = ContextualizedLogger.get(this.getClass)

  private val dbType = DbType.jdbcType(jdbcUrl)

  def validate()(implicit executionContext: ExecutionContext): Future[Unit] =
    dataSource.use { ds =>
      Future {
        val flyway = configurationBase(dbType).dataSource(ds).load()
        logger.info("Running Flyway validation...")
        flyway.validate()
        logger.info("Flyway schema validation finished successfully.")
      }
    }

  def migrate(allowExistingSchema: Boolean = false)(
      implicit executionContext: ExecutionContext
  ): Future[Unit] =
    dataSource.use { ds =>
      Future {
        val flyway = configurationBase(dbType)
          .dataSource(ds)
          .baselineOnMigrate(allowExistingSchema)
          .baselineVersion(MigrationVersion.fromVersion("0"))
          .load()
        logger.info("Running Flyway migration...")
        val stepsTaken = flyway.migrate()
        logger.info(s"Flyway schema migration finished successfully, applying $stepsTaken steps.")
      }
    }

  def reset()(implicit executionContext: ExecutionContext): Future[Unit] =
    dataSource.use { ds =>
      Future {
        val flyway = configurationBase(dbType).dataSource(ds).load()
        logger.info("Running Flyway clean...")
        flyway.clean()
        logger.info("Flyway schema clean finished successfully.")
      }
    }

  private def dataSource: ResourceOwner[HikariDataSource] =
    HikariConnection.owner(
      serverRole = ServerRole.IndexMigrations,
      jdbcUrl = jdbcUrl,
      minimumIdle = 2,
      maxPoolSize = 2,
      connectionTimeout = 250.millis,
      metrics = None,
    )
}

object FlywayMigrations {
  def configurationBase(dbType: DbType): FluentConfiguration =
    Flyway.configure().locations("classpath:db/migration/" + dbType.name)
} 
Example 16
Source File: implicits.scala    From freestyle   with Apache License 2.0
package examples.todolist
package runtime

import java.util.Properties

import cats.effect.IO
import cats.Monad
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie._
import doobie.hikari._
import doobie.hikari.implicits._
import doobie.implicits._
import examples.todolist.persistence._
import examples.todolist.persistence.runtime._

import scala.concurrent.ExecutionContext

object implicits extends ExecutionImplicits with RepositoryHandlersImplicits with DoobieImplicits

trait RepositoryHandlersImplicits {

  implicit def appRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): AppRepository.Handler[F] =
    new AppRepositoryHandler[F]

  implicit def todoItemRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TodoItemRepository.Handler[F] =
    new TodoItemRepositoryHandler[F]

  implicit def todoListRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TodoListRepository.Handler[F] =
    new TodoListRepositoryHandler[F]

  implicit def tagRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TagRepository.Handler[F] =
    new TagRepositoryHandler[F]

}

trait DoobieImplicits {

  implicit val xa: HikariTransactor[IO] =
    HikariTransactor[IO](new HikariDataSource(new HikariConfig(new Properties {
      setProperty("driverClassName", "org.h2.Driver")
      setProperty("jdbcUrl", "jdbc:h2:mem:todo")
      setProperty("username", "sa")
      setProperty("password", "")
      setProperty("maximumPoolSize", "10")
      setProperty("minimumIdle", "10")
      setProperty("idleTimeout", "600000")
      setProperty("connectionTimeout", "30000")
      setProperty("connectionTestQuery", "SELECT 1")
      setProperty("maxLifetime", "1800000")
      setProperty("autoCommit", "true")
    })))
}

trait ExecutionImplicits {

  implicit val ec: ExecutionContext =
    scala.concurrent.ExecutionContext.Implicits.global

} 
Example 17
Source File: implicits.scala    From freestyle   with Apache License 2.0
package examples.todolist
package runtime

import cats._
import cats.effect.IO
import com.twitter.util._
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie._
import doobie.hikari._
import doobie.hikari.implicits._
import doobie.implicits._
import examples.todolist.persistence._
import examples.todolist.persistence.runtime._
import java.util.Properties
import scala.concurrent.ExecutionContext

object implicits extends ProductionImplicits


trait ProductionImplicits {

  implicit val xa: HikariTransactor[IO] =
    HikariTransactor[IO](new HikariDataSource(new HikariConfig(new Properties {
      setProperty("driverClassName", "org.h2.Driver")
      setProperty("jdbcUrl", "jdbc:h2:mem:todo")
      setProperty("username", "sa")
      setProperty("password", "")
      setProperty("maximumPoolSize", "10")
      setProperty("minimumIdle", "10")
      setProperty("idleTimeout", "600000")
      setProperty("connectionTimeout", "30000")
      setProperty("connectionTestQuery", "SELECT 1")
      setProperty("maxLifetime", "1800000")
      setProperty("autoCommit", "true")
    })))

  implicit val task2Future: IO ~> Future = new (IO ~> Future) {
    override def apply[A](fa: IO[A]): Future[A] = {
      val promise = new Promise[A]()
      fa.unsafeRunAsync(_.fold(promise.setException, promise.setValue))
      promise
    }
  }

  implicit def appRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): AppRepository.Handler[F] =
    new AppRepositoryHandler[F]

  implicit def todoItemRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TodoItemRepository.Handler[F] =
    new TodoItemRepositoryHandler[F]

  implicit def todoListRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TodoListRepository.Handler[F] =
    new TodoListRepositoryHandler[F]

  implicit def tagRepositoryHandler[F[_]: Monad](
      implicit T: Transactor[F]): TagRepository.Handler[F] =
    new TagRepositoryHandler[F]

  implicit val executionContext: ExecutionContext =
    scala.concurrent.ExecutionContext.Implicits.global

} 
Example 18
Source File: OracleDDLGeneratorTest.scala    From maha   with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core.ddl

import java.util.UUID

import com.yahoo.maha.core._
import com.yahoo.maha.jdbc.JdbcConnection
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}



class OracleDDLGeneratorTest extends BaseDDLGeneratorTest {

  val oracleDDLGenerator = new OracleDDLGenerator

  private var dataSource: HikariDataSource = null
  private var jdbcConnection: JdbcConnection = null

  override protected def beforeAll(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl("jdbc:h2:mem:" + UUID.randomUUID().toString.replace("-",
      "") + ";MODE=Oracle;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("sa")
    config.setMaximumPoolSize(1)
    dataSource = new HikariDataSource(config)
    jdbcConnection = new JdbcConnection(dataSource)
  }

  override protected def afterAll(): Unit = {
    dataSource.close()
  }

  def removePartitionBy(ddl : String) : String = {
    val partitionString1 = """PARTITION BY LIST(stats_date)
                             |( PARTITION p_default VALUES(TO_DATE('01-JAN-1970 00:00:00', 'DD-MON-YYYY HH24:MI:SS'))
                             |)
                             |;""".stripMargin
    ddl.replace(partitionString1, ";")
  }

  test("test ddl for fact") {
    val oracleFacts = pubFact.factList.filter(f => f.engine.equals(OracleEngine))
    val ddlMap : Map[String, String] = oracleFacts.map(fact => fact.name -> removePartitionBy(oracleDDLGenerator.toDDL(fact))).toMap
    assert(ddlMap.keySet.contains("cb_ad_k_stats"),
      "Oracle DDL Generator should generate ddl for oracle table cb_ad_k_stats")
    assert(!ddlMap.keySet.contains("ad_k_stats"),
      "Oracle DDL Generator should not generate ddl for hive table ad_k_stats")

    ddlMap.foreach {
      case(fact, ddl) =>
        val result = jdbcConnection.execute(ddl)
        assert(result.isSuccess && result.toOption.get === false, result.failed.toString)
    }
  }

  test("test ddl for dimension") {
    val oracleDims = pubDim.dimList.filter(d => d.engine.equals(OracleEngine))
    val ddlMap : Map[String, String] = oracleDims.map(dim => dim.name -> removePartitionBy(oracleDDLGenerator.toDDL(dim))).toMap
    assert(ddlMap.keySet.contains("oracle_advertiser"),
      "Oracle DDL Generator should generate ddl for oracle table oracle_advertiser")
    assert(!ddlMap.keySet.contains("cache_advertiser"),
      "Oracle DDL Generator should not generate ddl for hive table cache_advertiser")

    ddlMap.foreach {
      case(dim, ddl) =>
        val result = jdbcConnection.execute(ddl)
        assert(result.isSuccess && result.toOption.get === false, result.failed.toString)
    }
  }



} 
Example 19
Source File: DataSource.scala    From maha   with Apache License 2.0
// Copyright 2018, Oath Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package utils

import java.util.Properties

import com.zaxxer.hikari.{HikariDataSource, HikariConfig}

case class DatabaseConnDetails (jdbcUrl: String, dbUsername: String, dbPassword: String, maxPoolSize: Int)

object HikariCpDataSource {

  private var dataSource: Option[HikariDataSource] = None

  def get(databaseConnDetails: DatabaseConnDetails): Option[HikariDataSource] = {

    val properties = new Properties()
    properties.put("oracle.net.CONNECT_TIMEOUT",5000.asInstanceOf[AnyRef])
    properties.put("oracle.jdbc.ReadTimeout",60000.asInstanceOf[AnyRef])

    val config = new HikariConfig()
    config.setJdbcUrl(databaseConnDetails.jdbcUrl)
    config.setUsername(databaseConnDetails.dbUsername)
    config.setPassword(databaseConnDetails.dbPassword)
    config.setMaximumPoolSize(databaseConnDetails.maxPoolSize)
    config.setValidationTimeout(1000)
    config.setMaxLifetime(1800000)
    config.setConnectionTimeout(60000)
    config.setIdleTimeout(60000)
    config.setDataSourceProperties(properties)

    // store the pool so close() can shut it down later
    dataSource = Option(new HikariDataSource(config))
    dataSource
  }

  def close() = {
      dataSource.foreach(_.close())
  }

} 
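A usage sketch (the Oracle connection details are placeholders):

val details = DatabaseConnDetails(
  jdbcUrl = "jdbc:oracle:thin:@//db-host:1521/SERVICE", // placeholder URL
  dbUsername = "app_user",
  dbPassword = "app_pass",
  maxPoolSize = 4)
val ds: Option[HikariDataSource] = HikariCpDataSource.get(details)
ds.foreach(pool => println(pool.getPoolName))
HikariCpDataSource.close()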
Example 20
Source File: BaseJobServiceTest.scala    From maha   with Apache License 2.0
// Copyright 2018, Yahoo Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.

package com.yahoo.maha.job.service

import java.util.UUID

import com.google.common.io.Closer
import com.yahoo.maha.core.DailyGrain
import com.yahoo.maha.jdbc.JdbcConnection
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.{FunSuite, Matchers}


trait BaseJobServiceTest extends FunSuite with Matchers {

  protected var dataSource: Option[HikariDataSource] = None
  protected var jdbcConnection: Option[JdbcConnection] = None
  protected val closer : Closer = Closer.create()

  final val REGISTRY = "er"
  protected[this] val fromDate : String = DailyGrain.toFormattedString(DateTime.now(DateTimeZone.UTC).minusDays(7))
  protected[this] val toDate : String = DailyGrain.toFormattedString(DateTime.now(DateTimeZone.UTC))
  val h2dbId : String = UUID.randomUUID().toString.replace("-","")

  initJdbcToH2()

  val mahaJobWorkerTable =
    s"""
       | create table maha_worker_job(
       | jobId NUMBER(10) PRIMARY KEY,
       | jobType VARCHAR(100),
       | jobStatus VARCHAR(100),
       | jobResponse VARCHAR(100),
       | numAcquired NUMBER(2),
       | createdTimestamp TIMESTAMP,
       | acquiredTimestamp TIMESTAMP,
       | endedTimestamp TIMESTAMP,
       | jobParentId NUMBER(10),
       | jobRequest VARCHAR(100),
       | hostname VARCHAR(100),
       | cubeName VARCHAR(100),
       | isDeleted NUMBER(1)
       | );
     """.stripMargin
  val now = new DateTime()

  def initJdbcToH2(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl(s"jdbc:h2:mem:$h2dbId;MODE=Oracle;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("h2.test.database.password")
    config.setMaximumPoolSize(1)
    dataSource = Option(new HikariDataSource(config))
    jdbcConnection = dataSource.map(JdbcConnection(_))
    assert(jdbcConnection.isDefined)
  }

  val result = jdbcConnection.get.execute(mahaJobWorkerTable)
  assert(result.isSuccess, s"Failed to create job table $result")

} 
Example 21
Source File: DataSourceConnectionPoolFactoryTest.scala    From maha   with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service.factory

import java.util.UUID

import javax.sql.DataSource
import com.yahoo.maha.jdbc.JdbcConnection
import com.yahoo.maha.service.{DefaultMahaServiceConfigContext, MahaServiceConfigContext}
import com.zaxxer.hikari.HikariDataSource
import org.json4s.JValue
import org.json4s.jackson.JsonMethods._
import org.scalatest.{FunSuite, Matchers}


class DataSourceConnectionPoolFactoryTest extends BaseFactoryTest {
  implicit val context: MahaServiceConfigContext = DefaultMahaServiceConfigContext()


  test("Test Creation of HikariDataSource") {
    val uuid = UUID.randomUUID().toString.replace("-","")
    val jsonString =   s"""
                         |{
                         |"driverClassName" : "org.h2.Driver",
                         |"jdbcUrl" : "jdbc:h2:mem:$uuid;MODE=Oracle;DB_CLOSE_DELAY=-1",
                         |"username" : "sa",
                         |"passwordProviderFactoryClassName" : "com.yahoo.maha.service.factory.PassThroughPasswordProviderFactory",
                         |"passwordProviderConfig" : [{"key" : "value"}],
                         |"passwordKey" : "h2.test.database.password",
                         |"poolName" : "test-pool",
                         |"maximumPoolSize" : 10,
                         |"minimumIdle" : 1,
                         |"autoCommit": true,
                         |"connectionTestQuery" : "SELECT 1 FROM DUAL",
                         |"validationTimeout" : 1000000,
                         |"idleTimeout" : 1000000,
                         |"maxLifetime" : 10000000,
                         |"dataSourceProperties": [{"key": "propertyKey" , "value": "propertyValue"}]
                         |}
                       """.stripMargin

    val factoryResult =  getFactory[DataSourceFactory]("com.yahoo.maha.service.factory.HikariDataSourceFactory", closer)
    assert(factoryResult.isSuccess)
    val factory = factoryResult.toOption.get
    val json = parse(jsonString)
    val generatorResult = factory.fromJson(json)
    assert(generatorResult.isSuccess, generatorResult)
    assert(generatorResult.toList.head.isInstanceOf[DataSource])
    generatorResult.foreach {
      ds =>
        val connection = new JdbcConnection(ds)
        assert(ds.asInstanceOf[HikariDataSource].getIdleTimeout == 1000000)
        assert(ds.asInstanceOf[HikariDataSource].getPoolName == "test-pool")
        val ddlResult = connection.executeUpdate("create table test(key varchar(20), value varchar(20));")
        assert(ddlResult.isSuccess)
    }
    assert(KvPair.fieldJSONW.write(KvPair("1", "2")).isInstanceOf[JValue])
  }
} 
Example 22
Source File: ExampleMahaService.scala    From maha   with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.api.jersey.example

import java.io.File
import java.util.UUID

import com.yahoo.maha.core.ddl.OracleDDLGenerator
import com.yahoo.maha.jdbc.{JdbcConnection, List, Seq}
import com.yahoo.maha.service.{DefaultMahaService, MahaService, MahaServiceConfig}
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import grizzled.slf4j.Logging
import org.apache.commons.io.FileUtils
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

object ExampleMahaService extends Logging {

  val REGISTRY_NAME = "academic"

  private var dataSource: Option[HikariDataSource] = None
  private var jdbcConnection: Option[JdbcConnection] = None
  val h2dbId = UUID.randomUUID().toString.replace("-","")
  val today: String = DateTimeFormat.forPattern("yyyy-MM-dd").print(DateTime.now())
  val yesterday: String = DateTimeFormat.forPattern("yyyy-MM-dd").print(DateTime.now().minusDays(1))

  def initJdbcToH2(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl(s"jdbc:h2:mem:$h2dbId;MODE=Oracle;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("h2.test.database.password")
    config.setMaximumPoolSize(2)
    dataSource = Option(new HikariDataSource(config))
    jdbcConnection = dataSource.map(new JdbcConnection(_))
    assert(jdbcConnection.isDefined, "Failed to connect to h2 local server")
  }

  def getMahaService(scope: String = "main"): MahaService = {
    val jsonString = FileUtils.readFileToString(new File(s"src/$scope/resources/maha-service-config.json"))
      .replaceAll("h2dbId", s"$h2dbId")

    initJdbcToH2()

    val mahaServiceResult = MahaServiceConfig.fromJson(jsonString.getBytes("utf-8"))
    if (mahaServiceResult.isFailure) {
      mahaServiceResult.leftMap {
          res =>
          error(s"Failed to launch Example MahaService, MahaService Error list is: ${res.list.toList}")
      }
    }
    val mahaServiceConfig = mahaServiceResult.toOption.get
    val mahaService: MahaService = new DefaultMahaService(mahaServiceConfig)
    stageStudentData(mahaServiceConfig)
    mahaService
  }

  def stageStudentData(mahaServiceConfig: MahaServiceConfig) : Unit = {

    val ddlGenerator = new OracleDDLGenerator
    val erRegistryConfig = mahaServiceConfig.registry.get(ExampleMahaService.REGISTRY_NAME).get
    val erRegistry = erRegistryConfig.registry
    erRegistry.factMap.values.foreach {
      publicFact =>
        publicFact.factList.foreach {
          fact=>
            val ddl = ddlGenerator.toDDL(fact)
            assert(jdbcConnection.get.executeUpdate(ddl).isSuccess)
        }
    }

    val insertSql =
      """
        INSERT INTO student_grade_sheet (year, section_id, student_id, class_id, total_marks, date, comment)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      """

    val rows: List[Seq[Any]] = List(
      Seq(1, 100, 213, 200, 125, ExampleMahaService.today, "some comment")
    )

    rows.foreach {
      row =>
        val result = jdbcConnection.get.executeUpdate(insertSql, row)
        assert(result.isSuccess)
    }
    var count = 0
    jdbcConnection.get.queryForObject("select * from student_grade_sheet") {
      rs =>
        while (rs.next()) {
          count += 1
        }
    }
    assert(rows.size == count)
  }
} 
Example 23
Source File: DataSourceConnectionProviderSpec.scala    From hydra   with Apache License 2.0
package hydra.sql

import java.sql.SQLException
import java.util.Properties

import com.typesafe.config.ConfigFactory
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._


class DataSourceConnectionProviderSpec
    extends Matchers
    with AnyFlatSpecLike
    with BeforeAndAfterAll {

  val properties = new Properties
  val cfg = ConfigFactory.load().getConfig("db-cfg")
  cfg
    .entrySet()
    .asScala
    .foreach(e => properties.setProperty(e.getKey(), cfg.getString(e.getKey())))

  private val hikariConfig = new HikariConfig(properties)

  private val ds = new HikariDataSource(hikariConfig)

  override def afterAll() = ds.close()

  "The DataSourceConnectionProvider" should "establish a connection" in {
    val p = new DataSourceConnectionProvider(ds)
    p.getConnection().isValid(1) shouldBe true
  }

  it should "close the connection" in {
    val p = new DataSourceConnectionProvider(ds)
    p.close()
    intercept[SQLException](p.getConnection())
  }

  "The DriverManagerConnectionProvider" should "be configured properly" in {
    val config = ConfigFactory.parseString("""
        |connection.url = url
        |connection.user = test
        |connection.password = password
        |connection.max.retries = 20
        |connection.retry.backoff = 10s
      """.stripMargin)

    val c = DriverManagerConnectionProvider(config)
    c.password shouldBe "password"
    c.connectionUrl shouldBe "url"
    c.username shouldBe "test"
    c.retryBackoff.toSeconds shouldBe 10
    c.maxConnectionAttempts shouldBe 20
    c.close()
  }

  it should "return a new connection" in {
    val config = ConfigFactory.parseString(
      """
        |connection.url = "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"
        |connection.max.retries = 2
        |connection.retry.backoff = 1s
      """.stripMargin
    )

    val c = DriverManagerConnectionProvider(config)
    c.getConnection() should not be null
    c.getNewConnection() should not be null
    c.close()
    c.connection.isValid(2) shouldBe false
  }
} 
Example 24
Source File: FlywayMigrator.scala    From zorechka-bot   with MIT License
package com.wix.zorechka.repos

import com.zaxxer.hikari.HikariDataSource
import doobie.hikari.HikariTransactor
import org.flywaydb.core.Flyway
import zio.console.{Console, putStrLn}
import zio.{Task, ZIO}

trait FlywayMigrator {
  val flywayMigrator: FlywayMigrator.Service
}

object FlywayMigrator {

  trait Service {
    def migrate(dbTransactor: HikariTransactor[Task]): ZIO[Console, Throwable, Unit]
  }

  trait Live extends FlywayMigrator {

    val flywayMigrator: Service = new Service {
      override def migrate(dbTransactor: HikariTransactor[Task]): ZIO[Console, Throwable, Unit] = for {
        _ <- putStrLn("Starting Flyway migration")
        _ <- dbTransactor.configure(dataSource => loadFlyWayAndMigrate(dataSource))
        _ <- putStrLn("Finished Flyway migration")
      } yield ()
    }

    private def loadFlyWayAndMigrate(dataSource: HikariDataSource) = ZIO.effect {
      Flyway.configure()
        .dataSource(dataSource)
        .load()
        .migrate()
    }
  }

  def migrate(dbTransactor: HikariTransactor[Task]): ZIO[FlywayMigrator with Console, Throwable, Unit] =
    ZIO.accessM[FlywayMigrator with Console](_.flywayMigrator.migrate(dbTransactor))
}