com.datastax.driver.core.Row Scala Examples

The following examples show how to use com.datastax.driver.core.Row, the DataStax Java driver's representation of a single row in a query result. Each example is excerpted from an open-source project; the header above each one names the source file, the project it comes from, and its license.
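Before the project examples, here is a minimal, self-contained sketch of the basic Row access pattern with the 3.x driver; the contact point is a placeholder, and system.local is queried because it exists on every node:

import com.datastax.driver.core.{Cluster, Row, Session}
import scala.collection.JavaConverters._

object RowBasics extends App {
  val cluster: Cluster = Cluster.builder().addContactPoint("127.0.0.1").build()
  val session: Session = cluster.connect()
  try {
    val rows: Seq[Row] = session.execute("SELECT release_version FROM system.local").all().asScala
    rows.foreach { row =>
      // Typed getters look a column up by name (or by zero-based index).
      println(row.getString("release_version"))
    }
  } finally {
    session.close()
    cluster.close()
  }
}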
Example 1
Source File: EventsByInterval.scala    From spark-streaming-demo   with Apache License 2.0
package com.datastax.examples.meetup

import com.datastax.driver.core.{Cluster, Session, Row}
import com.websudos.phantom.CassandraTable
import com.websudos.phantom.Implicits._
import scala.concurrent.Future


case class EventModel (
                event: String,
                interval: String,
                dimension: String,
                subtotal: Long
            )

sealed class EventRecord extends CassandraTable[EventRecord, EventModel]
{
  override val tableName = "events_by_interval"
  object event extends StringColumn(this) with PartitionKey[String]
  object interval extends StringColumn(this) with ClusteringOrder[String] with Descending
  object dimension extends StringColumn(this) with ClusteringOrder[String] with Ascending
  object subtotal extends CounterColumn(this)

  override def fromRow(row: Row): EventModel = {
    EventModel(
      event(row),
      interval(row),
      dimension(row),
      subtotal(row)
    )
  }
}

object Event extends EventRecord
{
  val keyspace = "demo"
  val cluster = Cluster.builder().addContactPoint("127.0.0.1").build()
  implicit val session = cluster.connect(keyspace)

//  def hourly(hashtag: String): Future[Seq[(String, Long)]] = {
//    select (_.interval, _.subtotal) where (_.event eqs hashtag) and (_.interval gte "M") and (_.interval lt "N") limit 60 fetch
//  }

  def dimensions(event: String, interval: String): Future[Seq[(String, Long)]] = {
    select (_.dimension, _.subtotal) where (_.event eqs event) and (_.interval eqs interval) limit 500 fetch
  }

} 
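A hypothetical call site for the accessor above; the event name and interval are illustrative values, and blocking with Await is for demonstration only:

import scala.concurrent.Await
import scala.concurrent.duration._

val subtotals: Seq[(String, Long)] =
  Await.result(Event.dimensions("spark-meetup", "2015-02-01T00"), 5.seconds)
subtotals.foreach { case (dimension, subtotal) => println(s"$dimension: $subtotal") }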
Example 2
Source File: Database.scala    From gemini   with GNU General Public License v3.0
package tech.sourced.gemini

import com.datastax.driver.core.{Row, Session}
import com.datastax.driver.core.querybuilder.QueryBuilder

import scala.collection.JavaConverters._

case class MetaCols(sha: String, repo: String, commit: String, path: String)
case class HashtablesCols(sha: String, hashtable: String, value: String)
case class FeaturesDocsCols(id: String, docs: String)
case class FeaturesFreqCols(id: String, feature: String, weight: String)


object Database {
  def findFilesByHash(sha: String, conn: Session, keyspace: String, tables: Tables): Iterable[RepoFile] = {
    val query = QueryBuilder.select().all().from(keyspace, tables.meta)
      .where(QueryBuilder.eq(tables.metaCols.sha, sha))

    conn.execute(query).asScala.map(rowToRepoFile(tables))
  }

  def rowToRepoFile(tables: Tables)(row: Row): RepoFile = {
    val cols = tables.metaCols
    RepoFile(row.getString(cols.repo), row.getString(cols.commit), row.getString(cols.path), row.getString(cols.sha))
  }
} 
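Note the curried signature of rowToRepoFile: applying it to tables alone yields a plain Row => RepoFile function, which is why it can be passed directly to .map over the executed result set in findFilesByHash.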
Example 3
Source File: CassandraStreamContext.scala    From quill   with Apache License 2.0
package io.getquill

import com.datastax.driver.core.{ Cluster, ResultSet, Row }
import com.typesafe.config.Config
import io.getquill.context.cassandra.util.FutureConversions._
import io.getquill.util.{ ContextLogger, LoadConfig }
import monix.eval.Task
import monix.execution.Scheduler
import monix.execution.Scheduler.Implicits
import monix.reactive.Observable

import scala.jdk.CollectionConverters._
import scala.util.{ Failure, Success }

class CassandraStreamContext[N <: NamingStrategy](
  naming:                     N,
  cluster:                    Cluster,
  keyspace:                   String,
  preparedStatementCacheSize: Long
)
  extends CassandraClusterSessionContext[N](naming, cluster, keyspace, preparedStatementCacheSize) {

  def this(naming: N, config: CassandraContextConfig) = this(naming, config.cluster, config.keyspace, config.preparedStatementCacheSize)
  def this(naming: N, config: Config) = this(naming, CassandraContextConfig(config))
  def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))

  private val logger = ContextLogger(classOf[CassandraStreamContext[_]])

  override type Result[T] = Observable[T]
  override type RunQueryResult[T] = T
  override type RunQuerySingleResult[T] = T
  override type RunActionResult = Unit
  override type RunBatchActionResult = Unit

  protected def page(rs: ResultSet): Task[Iterable[Row]] = Task.defer {
    val available = rs.getAvailableWithoutFetching
    val page = rs.asScala.take(available)

    if (rs.isFullyFetched)
      Task.now(page)
    else
      Task.fromFuture(rs.fetchMoreResults().asScala(Implicits.global)).map(_ => page)
  }

  def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor): Observable[T] = {

    Observable
      .fromTask(prepareRowAndLog(cql, prepare))
      .mapEvalF(p => session.executeAsync(p).asScala(Implicits.global))
      .flatMap(Observable.fromAsyncStateAction((rs: ResultSet) => page(rs).map((_, rs)))(_))
      .takeWhile(_.nonEmpty)
      .flatMap(Observable.fromIterable)
      .map(extractor)
  }

  def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor): Observable[T] =
    executeQuery(cql, prepare, extractor)

  def executeAction[T](cql: String, prepare: Prepare = identityPrepare): Observable[Unit] = {
    Observable
      .fromTask(prepareRowAndLog(cql, prepare))
      .mapEvalF(p => session.executeAsync(p).asScala(Implicits.global))
      .map(_ => ())
  }

  def executeBatchAction(groups: List[BatchGroup]): Observable[Unit] =
    Observable.fromIterable(groups).flatMap {
      case BatchGroup(cql, prepare) =>
        Observable.fromIterable(prepare)
          .flatMap(executeAction(cql, _))
          .map(_ => ())
    }

  private def prepareRowAndLog(cql: String, prepare: Prepare = identityPrepare): Task[PrepareRow] = {
    Task.async0[PrepareRow] { (scheduler, callback) =>
      implicit val executor: Scheduler = scheduler

      super.prepareAsync(cql)
        .map(prepare)
        .onComplete {
          case Success((params, bs)) =>
            logger.logQuery(cql, params)
            callback.onSuccess(bs)
          case Failure(ex) =>
            callback.onError(ex)
        }
    }
  }
} 
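A sketch of how such a context is typically wired up; the SnakeCase naming, the "ctx" config prefix, and the Person table are assumptions rather than part of the example above:

import io.getquill.{CassandraStreamContext, SnakeCase}
import monix.reactive.Observable

case class Person(name: String, age: Int)

// "ctx" must point at a Cassandra section of application.conf (assumed here).
lazy val ctx = new CassandraStreamContext(SnakeCase, "ctx")
import ctx._

// On a stream context, run(...) emits one element per Row, paging through
// the ResultSet exactly as page(...) above does.
val adults: Observable[Person] = ctx.run(query[Person].filter(p => p.age > 18))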
Example 4
Source File: CassandraMonixContext.scala    From quill   with Apache License 2.0
package io.getquill

import com.datastax.driver.core.{ Cluster, ResultSet, Row }
import com.typesafe.config.Config
import io.getquill.context.cassandra.CqlIdiom
import io.getquill.context.monix.{ MonixContext, Runner }
import io.getquill.util.{ ContextLogger, LoadConfig }
import io.getquill.context.cassandra.util.FutureConversions._
import monix.eval.Task
import monix.execution.Scheduler
import monix.reactive.Observable

import scala.jdk.CollectionConverters._
import scala.util.{ Failure, Success }

class CassandraMonixContext[N <: NamingStrategy](
  naming:                     N,
  cluster:                    Cluster,
  keyspace:                   String,
  preparedStatementCacheSize: Long
)
  extends CassandraClusterSessionContext[N](naming, cluster, keyspace, preparedStatementCacheSize)
  with MonixContext[CqlIdiom, N] {

  // not using this here
  override val effect = Runner.default

  def this(naming: N, config: CassandraContextConfig) = this(naming, config.cluster, config.keyspace, config.preparedStatementCacheSize)
  def this(naming: N, config: Config) = this(naming, CassandraContextConfig(config))
  def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))

  private val logger = ContextLogger(classOf[CassandraMonixContext[_]])

  override type StreamResult[T] = Observable[T]
  override type RunActionResult = Unit
  override type Result[T] = Task[T]

  override type RunQueryResult[T] = List[T]
  override type RunQuerySingleResult[T] = T
  override type RunBatchActionResult = Unit

  protected def page(rs: ResultSet): Task[Iterable[Row]] = Task.defer {
    val available = rs.getAvailableWithoutFetching
    val page = rs.asScala.take(available)

    if (rs.isFullyFetched)
      Task.now(page)
    else
      Task.fromFuture(rs.fetchMoreResults().asScalaWithDefaultGlobal).map(_ => page)
  }

  def streamQuery[T](fetchSize: Option[Int], cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor): Observable[T] = {

    Observable
      .fromTask(prepareRowAndLog(cql, prepare))
      .mapEvalF(p => session.executeAsync(p).asScalaWithDefaultGlobal)
      .flatMap(Observable.fromAsyncStateAction((rs: ResultSet) => page(rs).map((_, rs)))(_))
      .takeWhile(_.nonEmpty)
      .flatMap(Observable.fromIterable)
      .map(extractor)
  }

  def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor): Task[List[T]] = {
    streamQuery[T](None, cql, prepare, extractor)
      .foldLeftL(List[T]())({ case (l, r) => r +: l }).map(_.reverse)
  }

  def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor): Task[T] =
    executeQuery(cql, prepare, extractor).map(handleSingleResult(_))

  def executeAction[T](cql: String, prepare: Prepare = identityPrepare): Task[Unit] = {
    prepareRowAndLog(cql, prepare)
      .flatMap(r => Task.fromFuture(session.executeAsync(r).asScalaWithDefaultGlobal))
      .map(_ => ())
  }

  def executeBatchAction(groups: List[BatchGroup]): Task[Unit] =
    Observable.fromIterable(groups).flatMap {
      case BatchGroup(cql, prepare) =>
        Observable.fromIterable(prepare)
          .flatMap(prep => Observable.fromTask(executeAction(cql, prep)))
          .map(_ => ())
    }.completedL

  private def prepareRowAndLog(cql: String, prepare: Prepare = identityPrepare): Task[PrepareRow] = {
    Task.async0[PrepareRow] { (scheduler, callback) =>
      implicit val executor: Scheduler = scheduler

      super.prepareAsync(cql)
        .map(prepare)
        .onComplete {
          case Success((params, bs)) =>
            logger.logQuery(cql, params)
            callback.onSuccess(bs)
          case Failure(ex) =>
            callback.onError(ex)
        }
    }
  }
} 
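Compared with the stream context of Example 3, this context materializes query results: executeQuery folds the underlying Observable into a Task[List[T]], prepending each row and reversing once at the end to keep the fold linear, while streamQuery still exposes the raw Observable.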
Example 5
Source File: CassandraTraceRecordReadResultListener.scala    From haystack-traces   with Apache License 2.0
package com.expedia.www.haystack.trace.storage.backends.cassandra.store

import com.codahale.metrics.{Meter, Timer}
import com.datastax.driver.core.exceptions.NoHostAvailableException
import com.datastax.driver.core.{ResultSet, ResultSetFuture, Row}
import com.expedia.open.tracing.api.Trace
import com.expedia.open.tracing.backend.TraceRecord
import com.expedia.www.haystack.trace.storage.backends.cassandra.client.CassandraTableSchema
import com.google.protobuf.ByteString
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.JavaConverters._
import scala.concurrent.Promise
import scala.util.{Failure, Success, Try}

object CassandraTraceRecordReadResultListener {
  protected val LOGGER: Logger = LoggerFactory.getLogger(classOf[CassandraTraceRecordReadResultListener])
}

class CassandraTraceRecordReadResultListener(asyncResult: ResultSetFuture,
                                             timer: Timer.Context,
                                             failure: Meter,
                                             promise: Promise[Seq[TraceRecord]]) extends Runnable {

  import CassandraTraceRecordReadResultListener._

  override def run(): Unit = {
    timer.close()

    Try(asyncResult.get)
      .flatMap(tryGetTraceRows)
      .flatMap(mapTraceRecords)
    match {
      case Success(records) =>
        promise.success(records)
      case Failure(ex) =>
        if (fatalError(ex)) {
          LOGGER.error("Fatal error in reading from cassandra, tearing down the app", ex)
        } else {
          LOGGER.error("Failed in reading the record from cassandra", ex)
        }
        failure.mark()
        promise.failure(ex)
    }
  }

  private def fatalError(ex: Throwable): Boolean = {
    if (ex.isInstanceOf[NoHostAvailableException]) true else ex.getCause != null && fatalError(ex.getCause)
  }

  private def tryGetTraceRows(resultSet: ResultSet): Try[Seq[Row]] = {
    val rows = resultSet.all().asScala
    if (rows.isEmpty) Failure(new RuntimeException()) else Success(rows)
  }

  private def mapTraceRecords(rows: Seq[Row]): Try[List[TraceRecord]] = {
    Try {
      rows.map(row => {
        val spanBytes = row.getBytes(CassandraTableSchema.SPANS_COLUMN_NAME).array()
        val timeStamp = row.getLong(CassandraTableSchema.TIMESTAMP_COLUMN_NAME)
        val traceId = row.getString(CassandraTableSchema.ID_COLUMN_NAME)
        val record = TraceRecord.newBuilder()
          .setSpans(ByteString.copyFrom(spanBytes))
          .setTimestamp(timeStamp)
          .setTraceId(traceId)
          .build()
        record
      }).toList
    }
  }
} 
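The listener is meant to be registered on the driver's ResultSetFuture, which extends Guava's ListenableFuture. A hypothetical registration, where session, selectStatement, and the metric names are assumptions:

import java.util.concurrent.Executors
import com.codahale.metrics.MetricRegistry
import scala.concurrent.Promise

val registry = new MetricRegistry
val promise = Promise[Seq[TraceRecord]]()
val asyncResult = session.executeAsync(selectStatement) // assumed Session and Statement

asyncResult.addListener(
  new CassandraTraceRecordReadResultListener(
    asyncResult,
    registry.timer("cassandra.read.time").time(), // Timer.Context, closed in run()
    registry.meter("cassandra.read.failures"),
    promise
  ),
  Executors.newSingleThreadExecutor()
)
// promise.future now completes with the decoded records, or fails as logged above.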
Example 6
Source File: CassFormatDecoderVersionSpecific.scala    From scala-cass   with MIT License
package com.weather.scalacass

import com.datastax.driver.core.{ Row, TupleValue }
import com.google.common.reflect.TypeToken

object CassFormatDecoderVersionSpecific extends CassFormatDecoderVersionSpecific {
  def codecCassFormatDecoder[T <: AnyRef](_typeToken: TypeToken[T]) = new CassFormatDecoder[T] {
    type From = T
    val typeToken = _typeToken
    def f2t(f: From) = Right(f)
    def extract(r: Row, name: String) = r get (name, typeToken)
    def tupleExtract(tup: TupleValue, pos: Int) = tup get (pos, typeToken)
  }
}
trait CassFormatDecoderVersionSpecific extends LowPriorityCassFormatDecoder {
  import CassFormatDecoder.{ sameTypeCassFormatDecoder, safeConvertCassFormatDecoder }
  implicit val dateFormat: CassFormatDecoder[java.util.Date] =
    sameTypeCassFormatDecoder[java.util.Date](TypeToken.of(classOf[java.util.Date]), _ getTimestamp _, _ getTimestamp _)
  implicit val datastaxLocalDateFormat: CassFormatDecoder[com.datastax.driver.core.LocalDate] =
    sameTypeCassFormatDecoder[com.datastax.driver.core.LocalDate](TypeToken.of(classOf[com.datastax.driver.core.LocalDate]), _ getDate _, _ getDate _)
  implicit val timeFormat: CassFormatDecoder[Time] = safeConvertCassFormatDecoder[Time, java.lang.Long](TypeToken.of(classOf[java.lang.Long]), Time(_), _ getTime _, _ getTime _)
} 
Example 7
Source File: Nullable.scala    From scala-cass   with MIT License
package com.weather.scalacass

import com.datastax.driver.core.{ DataType, Row, TupleValue }

sealed trait Nullable[+A] {
  def toOption: Option[A]
}
final case class Is[+A](x: A) extends Nullable[A] {
  def toOption: Option[A] = Some(x)
}
case object IsNotNull extends Nullable[Nothing] {
  def toOption: Option[Nothing] = None
}
case object IsNull extends Nullable[Nothing] {
  def toOption: Option[Nothing] = None
}

object Nullable {
  def apply[A](x: A): Nullable[A] = if (x.isNull) IsNull else Is(x)
  def empty[A]: Nullable[A] = IsNull
  implicit def nullable2iterable[A](xo: Nullable[A]): Iterable[A] = xo.toOption.toList

  implicit class NullableOption[+A](val opt: Option[A]) extends AnyVal {
    def toNullable: Nullable[A] = opt.fold[Nullable[A]](IsNull)(Is.apply)
  }
  implicit def option2nullable[A](opt: Option[A]): Nullable[A] = opt.toNullable
  implicit def nullable2option[A](nullable: Nullable[A]): Option[A] = nullable.toOption

  implicit def encoder[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[Nullable[A]] = new CassFormatEncoder[Nullable[A]] {
    type From = Nullable[underlying.From]

    def cassDataType: DataType = underlying.cassDataType

    def encode(f: Nullable[A]): Result[Nullable[underlying.From]] = f match {
      case Is(x)     => underlying.encode(x).right.map(Is.apply)
      case IsNotNull => Right(IsNotNull)
      case IsNull    => Right(IsNull)
    }

    override def withQuery(instance: Nullable[A], name: String): String = instance match {
      case v: Is[A]  => super.withQuery(v, name)
      case IsNotNull => s"$name!=NULL"
      case IsNull    => s"$name=NULL"
    }
  }

  implicit def decoder[A](implicit underlying: CassFormatDecoder[A]): CassFormatDecoder[Nullable[A]] = new CassFormatDecoder[Nullable[A]] {
    type From = underlying.From
    val typeToken = underlying.typeToken
    def f2t(f: From): Result[Nullable[A]] = underlying.f2t(f).right.map(Is.apply)
    def extract(r: Row, name: String): From = underlying.extract(r, name)

    override def decode(r: Row, name: String): Result[Nullable[A]] = super.decode(r, name) match {
      case Left(Recoverable(_)) => Right(IsNull)
      case other                => other
    }
    def tupleExtract(tup: TupleValue, pos: Int): From = underlying.tupleExtract(tup, pos)

    override def tupleDecode(tup: TupleValue, pos: Int): Result[Nullable[A]] = super.tupleDecode(tup, pos) match {
      case Left(Recoverable(_)) => Right(IsNull)
      case other                => other
    }
  }
} 
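A quick illustration of the three states and the Option bridges; the values are illustrative:

import com.weather.scalacass.{Is, IsNull, Nullable}
import com.weather.scalacass.Nullable._

val present: Nullable[String] = Nullable("alice")    // Is("alice")
val absent: Nullable[String] = Option.empty[String]  // IsNull, via option2nullable
assert(present.toOption.contains("alice"))
assert(absent.toOption.isEmpty)
// IsNotNull matters only on the encoding side: withQuery renders it as `name!=NULL`,
// whereas IsNull renders as `name=NULL`.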
Example 8
Source File: CCCassFormatDecoder.scala    From scala-cass   with MIT License
package com.weather.scalacass

import com.datastax.driver.core.Row
import shapeless.labelled.{ FieldType, field }
import shapeless.{ ::, HList, HNil, LabelledGeneric, Lazy, Witness }

abstract class DerivedCCCassFormatDecoder[T] extends CCCassFormatDecoder[T]

object DerivedCCCassFormatDecoder {
  implicit val hNilDecoder: DerivedCCCassFormatDecoder[HNil] = new DerivedCCCassFormatDecoder[HNil] {
    def decode(r: Row): Result[HNil] = Right(HNil)
  }

  implicit def hConsDecoder[K <: Symbol, H, T <: HList](implicit w: Witness.Aux[K], tdH: Lazy[CassFormatDecoder[H]], tdT: Lazy[DerivedCCCassFormatDecoder[T]]): DerivedCCCassFormatDecoder[FieldType[K, H] :: T] =
    new DerivedCCCassFormatDecoder[FieldType[K, H] :: T] {
      def decode(r: Row) = for {
        h <- tdH.value.decode(r, w.value.name.toString).right
        t <- tdT.value.decode(r).right
      } yield field[K](h) :: t
    }

  implicit def ccConverter[T, Repr](implicit gen: LabelledGeneric.Aux[T, Repr], hListDecoder: Lazy[DerivedCCCassFormatDecoder[Repr]]): DerivedCCCassFormatDecoder[T] =
    new DerivedCCCassFormatDecoder[T] {
      def decode(r: Row): Result[T] = hListDecoder.value.decode(r).right.map(gen.from)
    }
}

trait CCCassFormatDecoder[T] { self =>
  private[scalacass] def decode(r: Row): Result[T]
  final def map[U](f: T => U): CCCassFormatDecoder[U] = new CCCassFormatDecoder[U] {
    def decode(r: Row): Result[U] = self.decode(r).right.map(f)
  }
  final def flatMap[U](f: T => Result[U]): CCCassFormatDecoder[U] = new CCCassFormatDecoder[U] {
    def decode(r: Row): Result[U] = self.decode(r).right.flatMap(f)
  }

  final def as(r: Row): T = decode(r) match {
    case Right(v)  => v
    case Left(exc) => throw exc
  }
  final def getOrElse(r: Row)(default: => T): T = decode(r).right.getOrElse(default)
  final def attemptAs(r: Row): Result[T] = decode(r)
}

object CCCassFormatDecoder extends ProductCCCassFormatDecoders {
  implicit def derive[T](implicit derived: Lazy[DerivedCCCassFormatDecoder[T]]): CCCassFormatDecoder[T] = derived.value
  def apply[T](implicit decoder: CCCassFormatDecoder[T]) = decoder

  implicit def optionalCodec[T](implicit decoder: CCCassFormatDecoder[T]): CCCassFormatDecoder[Option[T]] =
    new CCCassFormatDecoder[Option[T]] {
      private[scalacass] def decode(r: Row): Result[Option[T]] = decoder.decode(r) match {
        case Left(Recoverable(_)) => Right(None)
        case other                => other.right.map(Option.apply)
      }
    }
} 
Example 9
Source File: syntax.scala    From scala-cass   with MIT License
package com.weather.scalacass

import com.datastax.driver.core.Row

object syntax {
  implicit class RichRow(val r: Row) extends AnyVal {
    def as[T](name: String)(implicit d: CassFormatDecoder[T]): T = d.as(r)(name)
    def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Option[T] = d.as(r)(name)
    def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): T = d.as(r)(name).getOrElse(default)
    def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Result[T] = d.attemptAs(r)(name)

    def as[T](implicit ccd: CCCassFormatDecoder[T]): T = ccd.as(r)
    def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Option[T] = ccd.as(r)
    def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): T = ccd.as(r).getOrElse(default)
    def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Result[T] = ccd.decode(r)
  }

  implicit class RichIterator(val it: Iterator[Row]) extends AnyVal {
    def as[T](name: String)(implicit d: CassFormatDecoder[T]): Iterator[T] = it.map(r => d.as(r)(name))
    def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Iterator[Option[T]] = it.map(r => d.as(r)(name))
    def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): Iterator[T] = it.map(r => d.as(r)(name).getOrElse(default))
    def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Iterator[Result[T]] = it.map(r => d.attemptAs(r)(name))

    def as[T](implicit ccd: CCCassFormatDecoder[T]): Iterator[T] = it.map(r => ccd.as(r))
    def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Iterator[Option[T]] = it.map(r => ccd.as(r))
    def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): Iterator[T] = it.map(r => ccd.as(r).getOrElse(default))
    def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Iterator[Result[T]] = it.map(r => ccd.attemptAs(r))
  }

  implicit class RichOption(val opt: Option[Row]) extends AnyVal {
    def as[T](name: String)(implicit d: CassFormatDecoder[T]): Option[T] = opt.map(r => d.as(r)(name))
    def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Option[Option[T]] = opt.map(r => d.as(r)(name))
    def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): Option[T] = opt.map(r => d.as(r)(name).getOrElse(default))
    def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Option[Result[T]] = opt.map(r => d.attemptAs(r)(name))

    def as[T](implicit ccd: CCCassFormatDecoder[T]): Option[T] = opt.map(r => ccd.as(r))
    def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Option[Option[T]] = opt.map(r => ccd.as(r))
    def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): Option[T] = opt.map(r => ccd.as(r).getOrElse(default))
    def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Option[Result[T]] = opt.map(r => ccd.attemptAs(r))
  }

  type UpdateBehavior[F[_], A] = ScalaSession.UpdateBehavior[F, A]
  val UpdateBehavior = ScalaSession.UpdateBehavior

  type Star = ScalaSession.Star
  val Star = ScalaSession.Star

  type NoQuery = ScalaSession.NoQuery
  val NoQuery = ScalaSession.NoQuery

  type NoUpdate = ScalaSession.NoUpdate
  val NoUpdate = ScalaSession.NoUpdate
} 
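With these enrichments in scope, reading from a Row looks like the following; row, Person, and the column names are illustrative:

import com.weather.scalacass.syntax._

case class Person(name: String, age: Int)

def describe(row: com.datastax.driver.core.Row): Unit = {
  val name: String = row.as[String]("name")           // throws if missing or null
  val age: Option[Int] = row.getAs[Int]("age")        // None if missing or null
  val city: String = row.getOrElse("city", "unknown") // falls back to the default
  val person: Person = row.as[Person]                 // whole-row decode via Example 8's derivation
  println(s"$name $age $city $person")
}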
Example 10
Source File: ChirpRepository.scala    From lagom-scala-chirper   with Apache License 2.0
package sample.chirper.chirp.impl

import java.time.Instant

import akka.NotUsed
import akka.stream.scaladsl.Source
import com.datastax.driver.core.Row
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession
import sample.chirper.chirp.api.Chirp

import scala.collection.immutable.Seq
import scala.concurrent.{ExecutionContext, Future}


trait ChirpRepository {

  def getHistoricalChirps(userIds: Seq[String], timestamp: Long): Source[Chirp, NotUsed]

  def getRecentChirps(userIds: Seq[String]): Future[Seq[Chirp]]

}

class ChirpRepositoryImpl(
                           db: CassandraSession
                         )(implicit val ec: ExecutionContext) extends ChirpRepository {

  private val NumRecentChirps = 10
  private val SelectHistoricalChirps = "SELECT * FROM chirp WHERE userId = ? AND timestamp >= ? ORDER BY timestamp ASC"
  private val SelectRecentChirps = "SELECT * FROM chirp WHERE userId = ? ORDER BY timestamp DESC LIMIT ?"

  override def getHistoricalChirps(userIds: Seq[String], timestamp: Long): Source[Chirp, NotUsed] = {
    val sources = userIds.map(getHistoricalChirps(_, timestamp))
    // Chirps from one user are ordered by timestamp, but chirps from different
    // users are not ordered. That can be improved by implementing a smarter
    // merge that takes the timestamps into account.
    Source(sources).flatMapMerge(sources.size, identity)
  }

  override def getRecentChirps(userIds: Seq[String]): Future[Seq[Chirp]] =
    Future
    .sequence(userIds.map(getRecentChirps))
    .map(_.flatten)
    .map(limitRecentChirps)

  // Helpers -----------------------------------------------------------------------------------------------------------

  private def getHistoricalChirps(userId: String, timestamp: Long): Source[Chirp, NotUsed] =
    db.select(
      SelectHistoricalChirps,
      userId,
      Long.box(timestamp)
    ).map(mapChirp)

  private def getRecentChirps(userId: String) =
    db.selectAll(
      SelectRecentChirps,
      userId,
      Int.box(NumRecentChirps)
    ).map(_.map(mapChirp))

  private def mapChirp(row: Row): Chirp = Chirp(
    row.getString("userId"),
    row.getString("message"),
    Instant.ofEpochMilli(row.getLong("timestamp")),
    row.getString("uuid")
  )

  private def limitRecentChirps(all: Seq[Chirp]): Seq[Chirp] = {
    // FIXME: this can be streamed
    val limited = all
      .sortWith(_.timestamp.toEpochMilli < _.timestamp.toEpochMilli)
      .take(NumRecentChirps)
    limited.reverse
  }

} 
Example 11
Source File: Main.scala    From troy   with Apache License 2.0
package demo6

import java.util.UUID
import com.datastax.driver.core.{Row, Cluster, Session}
import troy.dsl._
import troy.driver.DSL._

import scala.concurrent.Await
import scala.concurrent.duration.Duration

case class Post(id: UUID, title: String)

object Main extends App {
  import scala.concurrent.ExecutionContext.Implicits.global

  val port: Int = 9042
  val host: String = "127.0.0.1"

  private val cluster =
    new Cluster.Builder().addContactPoints(host).withPort(port).build()

  implicit val session: Session = cluster.connect()

  val create = withSchema {
    (authorId: String, title: String) =>
      cql"""
         INSERT INTO test.posts (author_id , post_id , post_title )
         VALUES ( $authorId, now(), $title);
       """.prepared.executeAsync
  }

  val listByAuthor = withSchema {
    (authorId: String) =>
      cql"""
         SELECT post_id, post_title
         FROM test.posts
         WHERE author_id = $authorId
       """
        .prepared
        .executeAsync
        .as(Post)
  }

  println(Await.result(create("test", "title"), Duration(1, "second")))
  println(Await.result(listByAuthor("test"), Duration(1, "second")))

  session.close()
  cluster.close()
} 
Example 12
Source File: Main.scala    From troy   with Apache License 2.0
package demo2

import java.util.UUID
import com.datastax.driver.core.{Row, Cluster, Session}
import troy.dsl._
import troy.driver.DSL._

import scala.concurrent.Await
import scala.concurrent.duration.Duration

case class Post(id: UUID, title: String)

object Main extends App {
  import scala.concurrent.ExecutionContext.Implicits.global

  val port: Int = 9042
  val host: String = "127.0.0.1"

  private val cluster =
    new Cluster.Builder().addContactPoints(host).withPort(port).build()

  implicit val session: Session = cluster.connect()

  val listByAuthor = withSchema {
    (authorId: String) =>
      cql"""
         SELECT post_id, post_title
         FROM test.posts
         WHERE author_id = $authorId
       """
        .prepared
        .executeAsync
        .as(Post)
  }

  val result = listByAuthor("test")
  println(Await.result(result, Duration(1, "second")))

  session.close()
  cluster.close()
} 
Example 13
Source File: Main.scala    From troy   with Apache License 2.0
package demo3

import java.util.UUID
import com.datastax.driver.core.{Row, Cluster, Session}
import troy.dsl._
import troy.driver.DSL._

import scala.concurrent.Await
import scala.concurrent.duration.Duration

case class Post(id: UUID, title: String)

object Main extends App {
  import scala.concurrent.ExecutionContext.Implicits.global

  val port: Int = 9042
  val host: String = "127.0.0.1"

  private val cluster =
    new Cluster.Builder().addContactPoints(host).withPort(port).build()

  implicit val session: Session = cluster.connect()

  val get = withSchema {
    (authorId: String, postId: UUID) =>
      cql"""
         SELECT post_id, post_title
         FROM test.posts
         WHERE author_id = $authorId AND post_id = $postId
       """
        .prepared
        .executeAsync
        .oneOption
        .as(Post)
  }

  val result = get("test", UUID.fromString("a4a70900-24e1-11df-8924-001ff3591711"))
  val maybePost: Option[Post] = Await.result(result, Duration(1, "second"))
  println(maybePost.map(_.title).getOrElse("Post not found"))

  session.close()
  cluster.close()
} 
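Examples 11 through 13 are variations on one troy pattern: executeAsync.as(Post) decodes every returned Row, yielding a Future[Seq[Post]], while inserting oneOption before as(Post), as in Example 13, decodes at most one row into a Future[Option[Post]].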
Example 14
Source File: package.scala    From troy   with Apache License 2.0
package troy

import com.datastax.driver.core.{ Row, ResultSet, Statement }

import scala.annotation.compileTimeOnly
import scala.concurrent.Future

package object meta {
  implicit class RichStringContext(val context: StringContext) extends AnyVal {
    @compileTimeOnly("cql Strings can be used only inside troy.dsl.withSchema block")
    def cql(args: Any*): MacroDSL.TroyCql = ???
  }

  implicit class MacroDsl_RichStatement(val statement: Statement) extends ParsingOps {
    type ParseAs[R] = Future[Seq[R]]
  }

  implicit class MacroDsl_RichFutureBoundStatement(val xxx: Future[Statement]) extends ParsingOps {
    type ParseAs[R] = Future[Seq[R]]
  }

  implicit class MacroDsl_RichResultSet(val xxx: ResultSet) extends ParsingOps {
    type ParseAs[R] = Seq[R]
  }

  implicit class MacroDsl_RichFutureOfResultSet(val xxx: Future[ResultSet]) extends ParsingOps {
    type ParseAs[R] = Future[Seq[R]]
  }

  implicit class MacroDsl_RichFutureOfSeqOfRow(val xxx: Future[Seq[Row]]) extends ParsingOps {
    type ParseAs[R] = Future[Seq[R]]
  }

  implicit class MacroDsl_RichFutureOfOptionOfRow(val xxx: Future[Option[Row]]) extends ParsingOps {
    type ParseAs[R] = Future[Option[R]]
  }

  implicit class MacroDsl_RichSeqOfRow(val xxx: Seq[Row]) extends ParsingOps {
    type ParseAs[R] = Seq[R]
  }

  implicit class MacroDsl_RichJavaListOfRow(val xxx: java.util.List[Row]) extends ParsingOps {
    type ParseAs[R] = Seq[R]
  }

  implicit class MacroDsl_RichOptionOfRow(val xxx: Option[Row]) extends ParsingOps {
    type ParseAs[R] = Option[R]
  }
} 
Example 15
Source File: Dsl.scala    From troy   with Apache License 2.0
package troy
package driver

import com.datastax.driver.core.{ Session, Row, ResultSet, Statement }

import scala.concurrent.{ ExecutionContext, Future }

object DSL {
  import JavaConverters._
  import scala.collection.JavaConverters._

  implicit class ExternalDSL_RichStatement(val statement: Statement) extends AnyVal {
    def executeAsync(implicit session: Session, executionContext: ExecutionContext): Future[ResultSet] =
      session.executeAsync(statement).asScala

    def execute(implicit session: Session): ResultSet =
      session.execute(statement)

    def all(implicit session: Session, executionContext: ExecutionContext): Future[Seq[Row]] =
      statement.executeAsync.all

    def oneOption(implicit session: Session, executionContext: ExecutionContext): Future[Option[Row]] =
      statement.executeAsync.oneOption
  }

  implicit class ExternalDSL_FutureOfRichStatement(val statement: Future[Statement]) extends AnyVal {
    def executeAsync(implicit session: Session, executionContext: ExecutionContext): Future[ResultSet] =
      statement.flatMap(_.executeAsync)

    def all(implicit session: Session, executionContext: ExecutionContext): Future[Seq[Row]] =
      statement.executeAsync.all

    def oneOption(implicit session: Session, executionContext: ExecutionContext): Future[Option[Row]] =
      statement.executeAsync.oneOption
  }

  implicit class RichResultSet(val resultSet: ResultSet) extends AnyVal {
    def all() =
      resultSet.all.asScala

    def oneOption() =
      Option(resultSet.one)
  }

  implicit class RichFutureOfResultSet(val resultSet: Future[ResultSet]) extends AnyVal {
    def all(implicit executionContext: ExecutionContext): Future[Seq[Row]] =
      resultSet.map(_.all.asScala)

    def oneOption(implicit executionContext: ExecutionContext): Future[Option[Row]] =
      resultSet.map(r => Option(r.one()))
  }
} 
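A small sketch of these enrichments in action; the statement text is illustrative, and the implicit Session and ExecutionContext the methods require are supplied explicitly:

import com.datastax.driver.core.{Cluster, Session, SimpleStatement}
import troy.driver.DSL._
import scala.concurrent.ExecutionContext.Implicits.global

implicit val session: Session =
  Cluster.builder().addContactPoint("127.0.0.1").build().connect()

val stmt = new SimpleStatement("SELECT release_version FROM system.local")

val rows = stmt.all          // Future[Seq[Row]] via ExternalDSL_RichStatement
val first = stmt.oneOption   // Future[Option[Row]]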
Example 16
Source File: CassandraOffsetStore.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cassandra

import akka.Done
import akka.actor.ActorSystem
import akka.persistence.cassandra.session.scaladsl.CassandraSession
import akka.persistence.query.NoOffset
import akka.persistence.query.Offset
import akka.persistence.query.Sequence
import akka.persistence.query.TimeBasedUUID
import akka.util.Timeout
import com.datastax.driver.core.BoundStatement
import com.datastax.driver.core.PreparedStatement
import com.datastax.driver.core.Row
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.internal.persistence.cluster.ClusterStartupTask
import com.lightbend.lagom.spi.persistence.OffsetDao
import com.lightbend.lagom.spi.persistence.OffsetStore

import scala.concurrent.Future


private[lagom] final class CassandraOffsetDao(
    session: CassandraSession,
    statement: PreparedStatement,
    eventProcessorId: String,
    tag: String,
    override val loadedOffset: Offset
) extends OffsetDao {
  override def saveOffset(offset: Offset): Future[Done] = {
    session.executeWrite(bindSaveOffset(offset))
  }
  def bindSaveOffset(offset: Offset): BoundStatement = {
    offset match {
      case NoOffset            => statement.bind(eventProcessorId, tag, null, null)
      case seq: Sequence       => statement.bind(eventProcessorId, tag, null, java.lang.Long.valueOf(seq.value))
      case uuid: TimeBasedUUID => statement.bind(eventProcessorId, tag, uuid.value, null)
    }
  }
}
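Note that bindSaveOffset binds both the timeuuid and the long column on every save, passing null for whichever representation the given Offset does not carry, so a single prepared statement serves all three offset types.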