slick.jdbc.JdbcProfile Scala Examples

The following examples show how to use slick.jdbc.JdbcProfile. Each example lists its source file, the project it comes from, and that project's license.
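
All of the examples share one core pattern: take a JdbcProfile as a value, import profile.api._, and write queries that stay independent of the concrete database. Here is a minimal, self-contained sketch of that pattern; the UserDao class and users table are illustrative and do not come from any of the projects below.

import scala.concurrent.Future
import slick.jdbc.JdbcProfile

// A DAO that works with any Slick profile (H2, MySQL, Postgres, ...)
// because it only touches the database through the injected profile.
class UserDao(val profile: JdbcProfile, db: JdbcProfile#Backend#Database) {
  import profile.api._

  class Users(tag: Tag) extends Table[(Long, String)](tag, "users") {
    def id   = column[Long]("id", O.PrimaryKey)
    def name = column[String]("name")
    def *    = (id, name)
  }
  val users = TableQuery[Users]

  def byId(id: Long): Future[Option[(Long, String)]] =
    db.run(users.filter(_.id === id).result.headOption)
}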
Example 1
Source File: ReservationViewEndpoint.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.sales.app

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import ecommerce.sales.ReadEndpoint
import ecommerce.sales.view.ReservationDao
import org.json4s.Formats
import pl.newicom.dddd.view.sql.SqlViewStore
import slick.jdbc.JdbcProfile

import scala.concurrent.ExecutionContext

class ReservationViewEndpoint(implicit ec: ExecutionContext, profile: JdbcProfile, formats: Formats) extends ReadEndpoint {

  lazy val dao = new ReservationDao

  def route(viewStore: SqlViewStore): Route = {
    path("reservation" / "all") {
      get {
        complete {
          viewStore.run {
            dao.all
          }
        }
      }
    } ~
    path("reservation" / Segment) { id =>
      get {
        onSuccess(viewStore.run(dao.byId(id))) {
          case Some(res) => complete(res)
          case None => complete(StatusCodes.NotFound -> "unknown reservation")
        }
      }
    }

  }

} 
Example 2
Source File: Tables.scala    From warp-core   with MIT License
package com.workday.warp.persistence

import com.workday.warp.common.CoreWarpProperty.WARP_DATABASE_DRIVER
import com.workday.warp.common.exception.WarpConfigurationException
import slick.jdbc.JdbcProfile

// The enclosing `Drivers` object was elided from this excerpt; it is
// reconstructed here with the standard JDBC driver class names that the
// pattern matches below evidently compare against.
object Drivers {
  val mysql: String   = "com.mysql.jdbc.Driver"
  val cjMysql: String = "com.mysql.cj.jdbc.Driver"
  val h2: String      = "org.h2.Driver"

  def unsupportedDriverException(driver: String): WarpConfigurationException = {
    new WarpConfigurationException(s"unsupported persistence driver: $driver. must be one of (${Drivers.mysql}, ${Drivers.h2})")
  }
}

trait CommonTables {
  // pass to row constructors as a dummy id when we're inserting something that has an autoinc
  val nullId: Int = 0

  val disableForeignKeys: String = WARP_DATABASE_DRIVER.value match {
    case Drivers.mysql | Drivers.cjMysql => "SET FOREIGN_KEY_CHECKS=0;"
    case Drivers.h2 => "SET REFERENTIAL_INTEGRITY FALSE;"
    case unsupported => throw Drivers.unsupportedDriverException(unsupported)
  }

  val enableForeignKeys: String = WARP_DATABASE_DRIVER.value match {
    case Drivers.mysql | Drivers.cjMysql => "SET FOREIGN_KEY_CHECKS=1;"
    case Drivers.h2 => "SET REFERENTIAL_INTEGRITY TRUE;"
    case unsupported => throw Drivers.unsupportedDriverException(unsupported)
  }
}


trait HasProfile {
  // shameless assumption
  val profile: JdbcProfile = WARP_DATABASE_DRIVER.value match {
    case Drivers.mysql | Drivers.cjMysql => com.workday.warp.persistence.mysql.WarpMySQLProfile
    case Drivers.h2 => slick.jdbc.H2Profile
    case unsupported => throw Drivers.unsupportedDriverException(unsupported)
  }
} 
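
A hypothetical consumer of the trait above; mixing in HasProfile is enough to get a config-selected profile and its api._ in scope (the MEASUREMENT table in this sketch is illustrative):

object MeasurementQueries extends HasProfile {
  import profile.api._

  // Plain SQL works against whichever profile the config selected.
  def countRows: DBIO[Int] =
    sql"SELECT COUNT(*) FROM MEASUREMENT".as[Int].head
}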
Example 3
Source File: BankAccountSpecSupport.scala    From akka-ddd-cqrs-es-example   with MIT License
package com.github.j5ik2o.bank.adaptor.util

import akka.actor.ActorSystem
import com.github.j5ik2o.bank.adaptor.generator.IdGenerator
import com.github.j5ik2o.bank.domain.model.{ BankAccountId, BankAccountName }
import org.scalacheck.Gen
import org.sisioh.baseunits.scala.money.Money
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

trait BankAccountSpecSupport {

  val system: ActorSystem

  lazy val dbConfig: DatabaseConfig[JdbcProfile] =
    DatabaseConfig.forConfig[JdbcProfile](path = "slick", system.settings.config)

  lazy val bankAccountIdGenerator: IdGenerator[BankAccountId] =
    IdGenerator.ofBankAccountId(dbConfig.profile, dbConfig.db)

  val bankAccountNameGen: Gen[BankAccountName] =
    Gen.alphaStr.suchThat(v => v.nonEmpty && v.length <= 256).map(BankAccountName)
  val depositMoneyGen: Gen[Money]  = Gen.choose(1L, 100L).map(v => Money.yens(BigDecimal(v)))
  val withdrawMoneyGen: Gen[Money] = Gen.choose(1L, 50L).map(v => Money.yens(BigDecimal(v)))

  val bankAccountOldNameAndNewNameGen: Gen[(BankAccountName, BankAccountName)] = for {
    oldName <- bankAccountNameGen
    newName <- bankAccountNameGen
  } yield (oldName, newName)

  val bankAccountNameAndDepositMoneyGen: Gen[(BankAccountName, Money)] = for {
    name    <- bankAccountNameGen
    deposit <- depositMoneyGen
  } yield (name, deposit)

  val bankAccountNameAndDepositMoneyAndWithDrawMoneyGen: Gen[(BankAccountName, Money, Money)] = (for {
    name     <- bankAccountNameGen
    deposit  <- depositMoneyGen
    withdraw <- withdrawMoneyGen
  } yield (name, deposit, withdraw)).suchThat { case (_, deposit, withdraw) => deposit > withdraw }

} 
Example 4
Source File: IdGeneratorOnJDBC.scala    From akka-ddd-cqrs-es-example   with MIT License
package com.github.j5ik2o.bank.adaptor.generator

import com.github.j5ik2o.bank.domain.model.{ BankAccountEventId, BankAccountId }
import slick.jdbc.JdbcProfile

import scala.concurrent.{ ExecutionContext, Future }

abstract class AbstractIdGeneratorOnJDBC[ID](val profile: JdbcProfile, val db: JdbcProfile#Backend#Database)
    extends IdGenerator[ID] {
  import profile.api._

  val tableName: String

  protected def internalGenerateId()(
      implicit ec: ExecutionContext
  ): Future[Long] = {
    val action = for {
      updateResult <- sqlu"UPDATE #${tableName} SET id = LAST_INSERT_ID(id+1)"
      _            <- if (updateResult == 1) DBIO.successful(Some(updateResult)) else DBIO.successful(None)
      selectResult <- sql"SELECT LAST_INSERT_ID() AS id".as[Long].headOption
    } yield selectResult
    db.run(action.transactionally).flatMap {
      case Some(id) => Future.successful(id)
      case None     => Future.failed(new Exception("Occurred id generation error"))
    }
  }
}

class BankAccountEventIdGeneratorOnJDBC(profile: JdbcProfile, db: JdbcProfile#Backend#Database)
    extends AbstractIdGeneratorOnJDBC[BankAccountEventId](profile, db) {
  override val tableName: String = "bank_account_event_id_sequence_number"

  override def generateId()(implicit ec: ExecutionContext): Future[BankAccountEventId] =
    internalGenerateId().map(BankAccountEventId)
}

class BankAccountIdGeneratorOnJDBC(profile: JdbcProfile, db: JdbcProfile#Backend#Database)
    extends AbstractIdGeneratorOnJDBC[BankAccountId](profile, db) {
  override val tableName: String = "bank_account_id_sequence_number"

  override def generateId()(implicit ec: ExecutionContext): Future[BankAccountId] =
    internalGenerateId().map(BankAccountId)
} 
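
A sketch of wiring one of these generators, reusing the "slick" config path from Example 3. Note that internalGenerateId relies on MySQL's LAST_INSERT_ID, so the configured profile is assumed to be MySQL; the GeneratorDemo object is illustrative.

import scala.concurrent.ExecutionContext.Implicits.global

import com.github.j5ik2o.bank.adaptor.generator.BankAccountIdGeneratorOnJDBC
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

object GeneratorDemo extends App {
  // Load the profile and database from the "slick" config path.
  val dbConfig  = DatabaseConfig.forConfig[JdbcProfile]("slick")
  val generator = new BankAccountIdGeneratorOnJDBC(dbConfig.profile, dbConfig.db)

  generator.generateId().foreach(id => println(s"generated: $id"))
}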
Example 5
Source File: Main.scala    From akka-ddd-cqrs-es-example   with MIT License
package com.github.j5ik2o.bank.apiServer

import akka.actor.{ ActorRef, ActorSystem }
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import com.github.j5ik2o.bank.adaptor.aggregate.{ BankAccountAggregateFlowsImpl, ShardedBankAccountAggregates }
import com.github.j5ik2o.bank.adaptor.controller.Routes
import com.github.j5ik2o.bank.adaptor.dao.BankAccountReadModelFlowsImpl
import com.github.j5ik2o.bank.adaptor.generator.IdGenerator
import com.github.j5ik2o.bank.adaptor.readJournal.JournalReaderImpl
import com.github.j5ik2o.bank.useCase.{ BankAccountAggregateUseCase, BankAccountReadModelUseCase }
import com.typesafe.config.{ Config, ConfigFactory }
import pureconfig._
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

import scala.concurrent.ExecutionContextExecutor

object Main extends App {
  val rootConfig: Config                    = ConfigFactory.load()
  val dbConfig: DatabaseConfig[JdbcProfile] = DatabaseConfig.forConfig[JdbcProfile](path = "slick", rootConfig)

  implicit val system: ActorSystem                        = ActorSystem("bank-system", config = rootConfig)
  implicit val materializer: ActorMaterializer            = ActorMaterializer()
  implicit val executionContext: ExecutionContextExecutor = system.dispatcher

  val bankAccountIdGenerator = IdGenerator.ofBankAccountId(dbConfig.profile, dbConfig.db)

  val bankAccountAggregatesRef: ActorRef =
    system.actorOf(ShardedBankAccountAggregates.props, ShardedBankAccountAggregates.name)

  val bankAccountAggregateUseCase: BankAccountAggregateUseCase = new BankAccountAggregateUseCase(
    new BankAccountAggregateFlowsImpl(bankAccountAggregatesRef)
  )

  val bankAccountReadModelUseCase: BankAccountReadModelUseCase =
    new BankAccountReadModelUseCase(new BankAccountReadModelFlowsImpl(dbConfig.profile, dbConfig.db),
                                    new JournalReaderImpl())

  val routes: Routes = Routes(bankAccountIdGenerator, bankAccountAggregateUseCase, bankAccountReadModelUseCase)

  val ApiServerConfig(host, port) =
    loadConfigOrThrow[ApiServerConfig](system.settings.config.getConfig("bank.api-server"))

  val bindingFuture = Http().bindAndHandle(routes.root, host, port)

  sys.addShutdownHook {
    bindingFuture
      .flatMap(_.unbind())
      .onComplete(_ => system.terminate())
  }
} 
Example 6
Source File: Main.scala    From akka-ddd-cqrs-es-example   with MIT License
package com.github.j5ik2o.bank.readModelUpdater

import akka.actor.ActorSystem
import com.github.j5ik2o.bank.adaptor.dao.BankAccountReadModelFlowsImpl
import com.github.j5ik2o.bank.adaptor.readJournal.JournalReaderImpl
import com.github.j5ik2o.bank.useCase.BankAccountReadModelUseCase
import com.typesafe.config.ConfigFactory
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

object Main extends App {
  val rootConfig                   = ConfigFactory.load()
  implicit val system: ActorSystem = ActorSystem("bank-system", config = rootConfig)
  val dbConfig                     = DatabaseConfig.forConfig[JdbcProfile](path = "slick", rootConfig)

  new BankAccountReadModelUseCase(new BankAccountReadModelFlowsImpl(dbConfig.profile, dbConfig.db),
                                  new JournalReaderImpl())
    .execute()

  sys.addShutdownHook {
    system.terminate()
  }
} 
Example 7
Source File: Slick3SpecSupport.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.util

import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Suite }
import slick.basic.DatabaseConfig
import slick.jdbc.SetParameter.SetUnit
import slick.jdbc.{ JdbcProfile, SQLActionBuilder }

import scala.concurrent.Future

trait Slick3SpecSupport extends BeforeAndAfter with BeforeAndAfterAll with ScalaFutures with JdbcSpecSupport {
  self: Suite with FlywayWithMySQLSpecSupport =>

  private var _dbConfig: DatabaseConfig[JdbcProfile] = _

  private var _profile: JdbcProfile = _

  protected def dbConfig = _dbConfig

  protected def profile = _profile

  after {
    implicit val ec = dbConfig.db.executor.executionContext
    val futures = tables.map { table =>
      val q = SQLActionBuilder(List(s"TRUNCATE TABLE $table"), SetUnit).asUpdate
      dbConfig.db.run(q)
    }
    Future.sequence(futures).futureValue
  }

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    val config = ConfigFactory.parseString(s"""
         |dddbase {
         |  profile = "slick.jdbc.MySQLProfile$$"
         |  db {
         |    connectionPool = disabled
         |    driver = "com.mysql.jdbc.Driver"
         |    url = "jdbc:mysql://localhost:$jdbcPort/dddbase?useSSL=false"
         |    user = "dddbase"
         |    password = "dddbase"
         |  }
         |}
      """.stripMargin)
    _dbConfig = DatabaseConfig.forConfig[JdbcProfile]("dddbase", config)
    _profile = dbConfig.profile
  }

  override protected def afterAll(): Unit = {
    dbConfig.db.shutdown
    super.afterAll()
  }

} 
Example 8
Source File: AbstractUserAccountRepositoryBySlick.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.slick

import com.github.j5ik2o.dddbase.example.dao.slick.UserAccountComponent
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ BySlick, UserAccountRepository }
import com.github.j5ik2o.dddbase.slick.{
  AggregateMultiReadFeature,
  AggregateMultiWriteFeature,
  AggregateSingleReadFeature,
  AggregateSingleWriteFeature
}
import monix.eval.Task
import slick.jdbc.JdbcProfile
import slick.lifted.Rep

abstract class AbstractUserAccountRepositoryBySlick(val profile: JdbcProfile, val db: JdbcProfile#Backend#Database)
    extends UserAccountRepository[BySlick]
    with AggregateSingleReadFeature
    with AggregateMultiReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiWriteFeature
    with UserAccountComponent {
  override type RecordType = UserAccountRecord
  override type TableType  = UserAccounts
  override protected val dao = UserAccountDao

  override protected def byCondition(id: IdType): TableType => Rep[Boolean] = {
    import profile.api._
    _.id === id.value
  }

  override protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean] = {
    import profile.api._
    _.id.inSet(ids.map(_.value))
  }

  override protected def convertToAggregate: UserAccountRecord => Task[UserAccount] = { record =>
    Task.pure {
      UserAccount(
        id = UserAccountId(record.id),
        status = Status.withName(record.status),
        emailAddress = EmailAddress(record.email),
        password = HashedPassword(record.password),
        firstName = record.firstName,
        lastName = record.lastName,
        createdAt = record.createdAt,
        updatedAt = record.updatedAt
      )
    }
  }

  override protected def convertToRecord: UserAccount => Task[UserAccountRecord] = { aggregate =>
    Task.pure {
      UserAccountRecord(
        id = aggregate.id.value,
        status = aggregate.status.entryName,
        email = aggregate.emailAddress.value,
        password = aggregate.password.value,
        firstName = aggregate.firstName,
        lastName = aggregate.lastName,
        createdAt = aggregate.createdAt,
        updatedAt = aggregate.updatedAt
      )
    }
  }

} 
Example 9
Source File: AbstractUserMessageRepositoryBySlick.scala    From scala-ddd-base   with MIT License
package com.github.j5ik2o.dddbase.example.repository.slick
import com.github.j5ik2o.dddbase.example.dao.slick.UserMessageComponent
import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ BySlick, UserMessageRepository }
import com.github.j5ik2o.dddbase.slick._
import monix.eval.Task
import slick.jdbc.JdbcProfile
import slick.lifted.Rep

abstract class AbstractUserMessageRepositoryBySlick(val profile: JdbcProfile, val db: JdbcProfile#Backend#Database)
    extends UserMessageRepository[BySlick]
    with AggregateSingleReadFeature
    with AggregateMultiReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiWriteFeature
    with UserMessageComponent {

  override type RecordType = UserMessageRecord
  override type TableType  = UserMessages
  override protected val dao = UserMessageDao

  override protected def byCondition(id: IdType): TableType => Rep[Boolean] = { v =>
    import profile.api._
    v.userId === id.userId && v.messageId === id.messageId
  }

  override protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean] = { v =>
    import profile.api._
    ids
      .map { id =>
        v.userId === id.userId && v.messageId === id.messageId
      }
      .reduceLeft(_ || _)
  }

  override protected def convertToAggregate: UserMessageRecord => Task[UserMessage] = { record =>
    Task.pure {
      UserMessage(
        id = UserMessageId(record.userId, record.messageId),
        status = Status.withName(record.status),
        message = record.message,
        createdAt = record.createdAt,
        updatedAt = record.updatedAt
      )
    }
  }

  override protected def convertToRecord: UserMessage => Task[UserMessageRecord] = { aggregate =>
    Task.pure {
      UserMessageRecord(
        messageId = aggregate.id.messageId,
        userId = aggregate.id.userId,
        status = aggregate.status.entryName,
        message = aggregate.message,
        createdAt = aggregate.createdAt,
        updatedAt = aggregate.updatedAt
      )
    }
  }

} 
Example 10
Source File: ShipmentViewEndpoint.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.shipping.app

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import ecommerce.shipping.view.ShipmentDao
import ecommerce.shipping.ReadEndpoint
import org.json4s.Formats
import pl.newicom.dddd.view.sql.SqlViewStore

import scala.concurrent.ExecutionContext
import slick.jdbc.JdbcProfile

class ShipmentViewEndpoint(implicit val ec: ExecutionContext, profile: JdbcProfile, formats: Formats) extends ReadEndpoint {

  lazy val dao = new ShipmentDao

  def route(viewStore: SqlViewStore): Route = {
    path("shipment" / "all") {
      get {
        complete {
          viewStore.run {
            dao.all
          }
        }
      }
    } ~
    path("shipment" / Segment) { id =>
      get {
        onSuccess(viewStore.run(dao.byId(id))) {
          case Some(res) => complete(res)
          case None => complete(StatusCodes.NotFound -> "unknown shipment")
        }
      }
    } ~
    path("shipment" / "order" / Segment) { id =>
      get {
        onSuccess(viewStore.run(dao.byOrderId(id))) {
          case seq if seq.isEmpty =>
            complete(StatusCodes.NotFound -> "unknown order")
          case orders =>
            complete(orders)
        }
      }
    }

  }
} 
Example 11
Source File: ShippingViewUpdateService.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.shipping

import ecommerce.shipping.view.{ShipmentDao, ShipmentProjection}
import pl.newicom.dddd.messaging.event.EventStoreProvider
import pl.newicom.dddd.view.sql.{SqlViewStore, SqlViewUpdateConfig, SqlViewUpdateService}
import pl.newicom.eventstore.EventSourceProvider
import slick.dbio.DBIO
import slick.jdbc.JdbcProfile

class ShippingViewUpdateService(viewStore: SqlViewStore)(override implicit val profile: JdbcProfile)
  extends SqlViewUpdateService(viewStore) with EventStoreProvider with EventSourceProvider {

  lazy val shipmentDao: ShipmentDao = new ShipmentDao()

  override def vuConfigs: Seq[SqlViewUpdateConfig] = {
    List(
      SqlViewUpdateConfig("shipping-shipments", ShippingOfficeId, new ShipmentProjection(shipmentDao))
    )
  }

  override def viewUpdateInitAction: DBIO[Unit] = {
      super.viewUpdateInitAction >>
        shipmentDao.ensureSchemaCreated
  }
} 
Example 12
Source File: SlickJdbcScheduledMessagesRepository.scala    From reliable-http-client   with Apache License 2.0
package rhttpc.transport.amqpjdbc.slick

import java.sql.Timestamp

import rhttpc.transport.amqpjdbc.{MessageToSchedule, ScheduledMessage, ScheduledMessagesRepository}
import slick.jdbc.{JdbcBackend, JdbcProfile}

import scala.concurrent.{ExecutionContext, Future}

private[amqpjdbc] class SlickJdbcScheduledMessagesRepository(profile: JdbcProfile, db: JdbcBackend.Database)
                                                            (implicit ec: ExecutionContext) extends ScheduledMessagesRepository {

  class V1_001__AddingPropertiesToScheduledMessagesMigration extends AddingPropertiesToScheduledMessagesMigration {
    override protected val profile: JdbcProfile = SlickJdbcScheduledMessagesRepository.this.profile
  }

  val messagesMigration = new V1_001__AddingPropertiesToScheduledMessagesMigration

  import messagesMigration._
  import profile.api._

  override def save(msg: MessageToSchedule): Future[Unit] = {
    import msg._
    val action = for {
      currentTimestamp <- sql"select current_timestamp".as[Timestamp].head
      plannedRun = new Timestamp(currentTimestamp.getTime + msg.delay.toMillis)
      messageToAdd = ScheduledMessage(None, queueName, content, properties, plannedRun)
      _                <- scheduledMessages += messageToAdd
    } yield ()
    db.run(action.transactionally)
  }

  override def fetchMessagesShouldByRun(queueName: String, batchSize: Int)
                                       (onMessages: (Seq[ScheduledMessage]) => Future[Any]): Future[Int] = {
    def drain(): Future[Int] = {
      val fetchAction = for {
        currentTimestamp <- sql"select current_timestamp".as[Timestamp].head
        fetched <- scheduledMessages.filter { msg =>
          msg.queueName === queueName &&
            msg.plannedRun <= currentTimestamp
        }.sortBy(_.plannedRun.desc).take(batchSize).result
      } yield fetched

      def consumeAction(fetched: Seq[ScheduledMessage]) = {
        val fetchedIds = fetched.flatMap(_.id)
        for {
          deleted <- scheduledMessages.filter(_.id inSet fetchedIds).delete
          _ <- {
            if (deleted != fetched.size) {
              DBIO.failed(ConcurrentFetchException)
            } else {
              DBIO.successful(())
            }
          }
          _ <- DBIO.from(onMessages(fetched))
        } yield fetched.size
      }

      val consumedFuture = for {
        fetched <- db.run(fetchAction.transactionally)
        consumed <- db.run(consumeAction(fetched).transactionally)
      } yield consumed

      val consumedRecovered = consumedFuture.recover {
        case ConcurrentFetchException => 0
      }

      for {
        consumed <- consumedRecovered
        consumedNext <- {
          if (consumed == batchSize)
            drain()
          else
            Future.successful(0)
        }
      } yield consumed + consumedNext
    }
    drain()
  }

  override def queuesStats(names: Set[String]): Future[Map[String, Int]] = {
    val action = scheduledMessages
      .filter(_.queueName inSet names)
      .groupBy(_.queueName).map {
      case (queueName, msgs) =>
        (queueName, msgs.size)
    }.result
    db.run(action).map(_.toMap)
  }
}

case object ConcurrentFetchException extends Exception("Concurrent fetch detected") 
Example 13
Source File: HttpService.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.sales

import akka.actor.{Actor, ActorLogging, Props}
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives
import pl.newicom.dddd.streams.ImplicitMaterializer
import akka.util.Timeout
import com.typesafe.config.Config
import ecommerce.sales.app.ReservationViewEndpoint
import io.github.lhotari.akka.http.health.HealthEndpoint.createDefaultHealthRoute
import org.json4s.Formats
import pl.newicom.dddd.serialization.JsonSerHints._
import pl.newicom.dddd.view.sql.SqlViewStore

import scala.concurrent.duration.FiniteDuration
import slick.jdbc.{JdbcProfile, PostgresProfile}

object HttpService {
  def props(interface: String, port: Int, askTimeout: FiniteDuration): Props =
    Props(new HttpService(interface, port)(askTimeout))
}

class HttpService(interface: String, port: Int)(implicit askTimeout: Timeout) extends Actor with ActorLogging
  with SalesReadFrontConfiguration with ImplicitMaterializer with Directives {

  import context.dispatcher

  implicit val formats: Formats = fromConfig(config)
  implicit val profile: JdbcProfile = PostgresProfile

  Http(context.system).bindAndHandle(route, interface, port)

  log.info(s"Listening on $interface:$port")

  override def receive: Receive = Actor.emptyBehavior
  override def config: Config = context.system.settings.config

  lazy val endpoints: ReservationViewEndpoint = new ReservationViewEndpoint

  private def route = pathPrefix("ecommerce" / "sales") {
    createDefaultHealthRoute() ~
    provide(new SqlViewStore(config))(endpoints)
  }

} 
Example 14
Source File: SalesViewUpdateService.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.sales

import ecommerce.sales.view.{ReservationDao, ReservationProjection}
import pl.newicom.dddd.view.sql.{SqlViewStore, SqlViewUpdateConfig, SqlViewUpdateService}
import pl.newicom.eventstore.{EventSourceProvider, EventStoreProvider}
import slick.dbio.DBIO
import slick.jdbc.JdbcProfile

class SalesViewUpdateService(viewStore: SqlViewStore)(override implicit val profile: JdbcProfile)
  extends SqlViewUpdateService(viewStore) with EventStoreProvider with EventSourceProvider {

  lazy val reservationDao: ReservationDao = new ReservationDao

  override def vuConfigs: Seq[SqlViewUpdateConfig] = {
    List(
      SqlViewUpdateConfig("sales-reservations", ReservationOfficeId, new ReservationProjection(resevationDao))
    )
  }

  override def viewUpdateInitAction: DBIO[Unit] = {
      super.viewUpdateInitAction >>
        reservationDao.ensureSchemaCreated
  }
} 
Example 15
Source File: ReservationDao.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.sales.view

import java.sql.Date
import ecommerce.sales.ReservationStatus
import ReservationStatus.ReservationStatus
import pl.newicom.dddd.aggregate.EntityId
import slick.jdbc.meta.MTable._
import scala.concurrent.ExecutionContext
import slick.jdbc.JdbcProfile

class ReservationDao(implicit val profile: JdbcProfile, ec: ExecutionContext)  {
  import profile.api._

  implicit val reservationStatusColumnType = MappedColumnType.base[ReservationStatus, String](
    { c => c.toString },
    { s => ReservationStatus.withName(s)}
  )

  val ReservationsTableName = "reservations"

  class Reservations(tag: Tag) extends Table[ReservationView](tag, ReservationsTableName) {
    def id = column[EntityId]("ID", O.PrimaryKey)
    def clientId = column[EntityId]("CLIENT_ID")
    def status = column[ReservationStatus]("STATUS")
    def createDate = column[Date]("CREATE_DATE")
    def * = (id, clientId, status, createDate) <> (ReservationView.tupled, ReservationView.unapply)
  }

  val reservations = TableQuery[Reservations]

  // The `by_id` and `by_client_id` query helpers are referenced below but
  // were elided from this excerpt; reconstructed from their usage.
  private def by_id(id: EntityId) = reservations.filter(_.id === id)
  private def by_client_id(clientId: EntityId) = reservations.filter(_.clientId === clientId)

  def createOrUpdate(view: ReservationView) = {
    reservations.insertOrUpdate(view)
  }

  def updateStatus(viewId: EntityId, status: ReservationStatus.Value) = {
    reservations.filter(_.id === viewId).map(_.status).update(status)
  }

  def all =  reservations.result

  def byId(id: EntityId) = by_id(id).result.headOption

  def byClientId(clientId: EntityId) = by_client_id(clientId).result

  def remove(id: EntityId) = by_id(id).delete

  def ensureSchemaDropped =
    getTables(ReservationsTableName).headOption.flatMap {
      case Some(table) => reservations.schema.drop.map(_ => ())
      case None => DBIO.successful(())
    }

  def ensureSchemaCreated =
    getTables(ReservationsTableName).headOption.flatMap {
      case Some(table) => DBIO.successful(())
      case None => reservations.schema.create.map(_ => ())
    }

} 
Example 16
Source File: SlickTestEntityReadSide.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.scaladsl.persistence.slick

import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor.ReadSideHandler
import com.lightbend.lagom.scaladsl.persistence.TestEntity.Evt
import com.lightbend.lagom.scaladsl.persistence.AggregateEventTag
import com.lightbend.lagom.scaladsl.persistence.EventStreamElement
import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor
import com.lightbend.lagom.scaladsl.persistence.TestEntity

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import slick.jdbc.JdbcBackend.Database
import slick.jdbc.JdbcProfile

trait Tables {
  val profile: JdbcProfile
  import profile.api._
  implicit val ec: ExecutionContext

  case class TestCount(id: String, count: Long)
  class TestCounts(tag: Tag) extends Table[TestCount](tag, "testcounts") {
    def id    = column[String]("id", O.PrimaryKey)
    def count = column[Long]("count")
    def *     = (id, count) <> (TestCount.tupled, TestCount.unapply)
  }
  lazy val testCounts: TableQuery[TestCounts] = TableQuery[TestCounts]

  def createTable: DBIO[_] =
    testCounts.schema.createIfNotExists

  def countUpdate(id: String, diff: Int = 1): DBIO[_] = {
    val q: Query[TestCounts, TestCount, Seq] = testCounts.filter(_.id === id)
    for {
      select <- q.result
      updated <- select.headOption match {
        case Some(testCount) =>
          q.update(testCount.copy(count = testCount.count + diff))
        case None =>
          testCounts += TestCount(id, diff)
      }
    } yield updated
  }
}

object SlickTestEntityReadSide {
  class TestEntityReadSideProcessor(readSide: SlickReadSide, db: Database, val profile: JdbcProfile)(
      implicit val ec: ExecutionContext
  ) extends ReadSideProcessor[TestEntity.Evt]
      with Tables {
    def buildHandler(): ReadSideHandler[TestEntity.Evt] =
      readSide
        .builder[TestEntity.Evt]("test-entity-read-side")
        .setGlobalPrepare(createTable)
        .setEventHandler(updateCount)
        .build()

    def aggregateTags: Set[AggregateEventTag[Evt]] = TestEntity.Evt.aggregateEventShards.allTags

    def updateCount(event: EventStreamElement[TestEntity.Appended]) = countUpdate(event.entityId, 1)
  }
}

class SlickTestEntityReadSide(db: Database, val profile: JdbcProfile)(implicit val ec: ExecutionContext)
    extends Tables {
  import profile.api._

  def getAppendCount(id: String): Future[Long] = db.run {
    testCounts
      .filter(_.id === id)
      .map(_.count)
      .result
      .headOption
      .map(_.getOrElse(0L))
  }
} 
Example 17
Source File: JdbcProfileComponent.scala    From bitcoin-s   with MIT License
package org.bitcoins.db

import java.nio.file.{Files, Path, Paths}

import org.bitcoins.core.util.BitcoinSLogger
import slick.basic.DatabaseConfig
import slick.jdbc.JdbcProfile

trait JdbcProfileComponent[+ConfigType <: AppConfig] extends BitcoinSLogger {

  def appConfig: ConfigType

  // `dbPath` is used by `createDbFileIfDNE` below but was elided from this
  // excerpt; reconstructed from usage, assuming a `db.path` key that
  // parallels the `db.name` key read below.
  lazy val dbPath: Path = {
    val pathStr = appConfig.config.getString(s"${appConfig.moduleName}.db.path")
    Paths.get(pathStr)
  }
  // todo: what happens to this if we
  // dont use SQLite?
  lazy val dbName: String = {
    appConfig.config.getString(s"${appConfig.moduleName}.db.name")
  }

  private def createDbFileIfDNE(): Unit = {
    //should add a check in here that we are using sqlite
    if (!Files.exists(dbPath)) {
      val _ = {
        logger.debug(s"Creating database directory=$dbPath")
        Files.createDirectories(dbPath)
        val dbFilePath = dbPath.resolve(dbName)
        logger.debug(s"Creating database file=$dbFilePath")
        Files.createFile(dbFilePath)
      }

      ()
    }
  }
} 
Example 18
Source File: DBConfigProvider.scala    From CodeAnalyzerTutorial   with Apache License 2.0
package tutor.repo

import slick.dbio.{DBIOAction, NoStream}
import slick.jdbc.{H2Profile, JdbcProfile, OracleProfile}

import scala.concurrent.Future

trait DBConfigProvider {
  val jdbcProfile: JdbcProfile
  def run[T](action: DBIOAction[T, NoStream, Nothing]): Future[T]
}

trait OracleDB extends DBConfigProvider {
  val jdbcProfile: JdbcProfile = OracleProfile
}

trait H2DB extends DBConfigProvider {
  val jdbcProfile: JdbcProfile = H2Profile

  def run[T](action: DBIOAction[T, NoStream, Nothing]): Future[T] = {
    import jdbcProfile.api._
    import scala.concurrent.ExecutionContext.Implicits.global

    val db = Database.forConfig("h2mem1")
    // Close the database only after the action has completed; closing it
    // eagerly in a finally block would shut it down while the returned
    // Future is still running.
    db.run(action).andThen { case _ => db.close() }
  }
} 
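
A minimal sketch of using the H2DB trait, assuming an h2mem1 entry exists in the configuration and a users table has been created; the UserCountRepo object and its query are illustrative.

import scala.concurrent.Future
import tutor.repo.H2DB

object UserCountRepo extends H2DB {
  import jdbcProfile.api._

  // Any DBIO action can be passed to `run`, which manages the database
  // lifecycle itself.
  def countUsers(): Future[Int] =
    run(sql"SELECT COUNT(*) FROM users".as[Int].head)
}

Since run opens and closes the database on every call, this design suits small examples and tests rather than production use.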
Example 19
Source File: SlickJdbcSpec.scala    From reliable-http-client   with Apache License 2.0
package rhttpc.transport.amqpjdbc.slick.helpers

import com.typesafe.config.ConfigFactory
import org.scalatest._
import slick.jdbc.{HsqldbProfile, JdbcBackend, JdbcProfile}

trait SlickJdbcSpec extends fixture.FlatSpec {
  import scala.concurrent.ExecutionContext.Implicits.global

  private val _profile = HsqldbProfile

  protected def profile: JdbcProfile = _profile

  override protected def withFixture(test: OneArgTest): Outcome = {
    val config = ConfigFactory.load()
    val db = JdbcBackend.Database.forConfig("db", config)
    try {
      new DatabaseInitializer(db).initDatabase()
      test(createFixture(db))
    } finally {
      db.close()
    }
  }

  protected def createFixture(db: JdbcBackend.Database): FixtureParam
} 
Example 20
Source File: package.scala    From reliable-http-client   with Apache License 2.0
package rhttpc.transport

import akka.actor.ActorSystem
import com.rabbitmq.client.Connection
import slick.jdbc.{JdbcBackend, JdbcProfile}

package object amqpjdbc {

  implicit def transport(implicit actorSystem: ActorSystem,
                         connection: Connection,
                         profile: JdbcProfile,
                         db: JdbcBackend.Database): PubSubTransport =
    AmqpJdbcTransport(
      connection = connection,
      profile = profile,
      db = db
    )

} 
Example 21
Source File: SlickJdbcMigration.scala    From reliable-http-client   with Apache License 2.0
package rhttpc.transport.amqpjdbc.slick

import java.io.PrintWriter
import java.lang.reflect.{InvocationHandler, Method, Proxy}
import java.sql.Connection
import java.util.logging.Logger

import javax.sql.DataSource
import org.flywaydb.core.api.migration.{BaseJavaMigration, Context}
import slick.jdbc.JdbcProfile

import scala.concurrent.Await
import scala.concurrent.duration._

trait SlickJdbcMigration extends BaseJavaMigration {

  protected val profile: JdbcProfile

  import profile.api._

  def migrateActions: DBIOAction[Any, NoStream, _ <: Effect]

  override final def migrate(context: Context): Unit = {
    val database = Database.forDataSource(new AlwaysUsingSameConnectionDataSource(context.getConnection), None)
    Await.result(database.run(migrateActions), 10.minutes)
  }

}

class AlwaysUsingSameConnectionDataSource(conn: Connection) extends DataSource {
  private val notClosingConnection = Proxy.newProxyInstance(
    ClassLoader.getSystemClassLoader,
    Array[Class[_]](classOf[Connection]),
    SuppressCloseHandler
  ).asInstanceOf[Connection]

  object SuppressCloseHandler extends InvocationHandler {
    override def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = {
      if (method.getName != "close") {
        method.invoke(conn, args : _*)
      } else {
        null
      }
    }
  }

  override def getConnection: Connection = notClosingConnection
  override def getConnection(username: String, password: String): Connection = notClosingConnection
  override def unwrap[T](iface: Class[T]): T = conn.unwrap(iface)
  override def isWrapperFor(iface: Class[_]): Boolean = conn.isWrapperFor(iface)

  override def setLogWriter(out: PrintWriter): Unit = ???
  override def getLoginTimeout: Int = ???
  override def setLoginTimeout(seconds: Int): Unit = ???
  override def getParentLogger: Logger = ???
  override def getLogWriter: PrintWriter = ???
}