java.time.OffsetDateTime Scala Examples

The following examples show how to use java.time.OffsetDateTime. You can vote up the examples you find useful or vote down those you don't, and follow the link above each example to view the original project or source file.
Example 1
Source File: PersistenceRecord.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.persistence

import java.time.OffsetDateTime

import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import io.vamp.common.notification.NotificationProvider
import io.vamp.common.{ Artifact, Config, Namespace, NamespaceProvider }
import io.vamp.model.Model
import io.vamp.persistence.notification.UnknownDataFormatException
import org.json4s.Formats
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write

import scala.util.Try

object PersistenceRecord {

  /** Record without an artifact payload (marks a deletion of `name`/`kind`). */
  def apply(name: String, kind: String): PersistenceRecord = build(name, kind, None)

  /** Record carrying the serialized artifact payload. */
  def apply(name: String, kind: String, artifact: String): PersistenceRecord = build(name, kind, Option(artifact))

  // Single place that stamps the model version, instance uuid and current time.
  private def build(name: String, kind: String, artifact: Option[String]): PersistenceRecord =
    PersistenceRecord(Model.version, Model.uuid, OffsetDateTime.now(), name, kind, artifact)
}

// Envelope persisted for every artifact operation: model/runtime metadata plus the
// optional serialized artifact (artifact = None marks a deletion, see PersistenceDataReader).
case class PersistenceRecord(version: String, instance: String, timestamp: OffsetDateTime, name: String, kind: String, artifact: Option[String])

// Base class for pluggable payload transformations; concrete classes are instantiated
// reflectively by PersistenceRecordMarshaller with the active Namespace.
abstract class PersistenceRecordTransformer(namespace: Namespace) {

  // Whether this transformer's output depends on the current time; default false.
  def timeDependent: Boolean = false

  // Reverse transformation, applied when reading a stored payload.
  def read(input: String): String

  // Forward transformation, applied when writing a payload to the store.
  def write(input: String): String

}

trait PersistenceRecordMarshaller {
  this: NamespaceProvider ⇒

  protected val transformersPath = "vamp.persistence.transformers.classes"

  // Transformers are instantiated reflectively from the configured class names; each
  // class must expose a single-argument constructor taking a Namespace.
  private lazy val transformers = {
    val classNames =
      if (Config.has(transformersPath)(namespace)()) Config.stringList(transformersPath)()
      else Nil
    classNames.map { className ⇒
      Class
        .forName(className)
        .getConstructor(classOf[Namespace])
        .newInstance(namespace)
        .asInstanceOf[PersistenceRecordTransformer]
    }
  }

  // True when at least one configured transformer reports time-dependent output.
  lazy val timeDependent: Boolean = transformers.exists(_.timeDependent)

  /** Serializes the record to JSON, then pipes it through every transformer in order. */
  def marshallRecord(record: PersistenceRecord): String = {
    val json = write(record)(SerializationFormat(OffsetDateTimeSerializer))
    transformers.foldLeft(json) { (payload, transformer) ⇒ transformer.write(payload) }
  }

  /** Runs the transformer chain in reverse, then deserializes the resulting JSON. */
  def unmarshallRecord(source: String): PersistenceRecord = {
    val decoded = transformers.reverse.foldLeft(source) { (payload, transformer) ⇒ transformer.read(payload) }
    implicit val format: Formats = SerializationFormat(OffsetDateTimeSerializer)
    Serialization.read[PersistenceRecord](decoded)
  }
}

trait PersistenceDataReader extends PersistenceRecordMarshaller with PersistenceMarshaller {
  this: PersistenceApi with NamespaceProvider with NotificationProvider ⇒

  // Stores the unmarshalled artifact of the given kind; returns the stored artifact.
  protected def dataSet(artifact: Artifact, kind: String): Artifact

  // Removes the named artifact of the given kind from the store.
  protected def dataDelete(name: String, kind: String): Unit

  // Decodes a raw record and applies it: a present artifact payload is unmarshalled and
  // stored, an absent one is treated as a deletion. Returns the decoded record.
  protected def dataRead(data: String): PersistenceRecord = {
    // NOTE(review): getOrElse discards the original parse failure and raises an
    // UnknownDataFormatException with an empty message — consider surfacing the cause.
    val record = Try(unmarshallRecord(data)).getOrElse(throwException(UnknownDataFormatException("")))
    record.artifact match {
      case Some(content) ⇒ unmarshall(record.kind, content).map(a ⇒ dataSet(a, record.kind)).getOrElse(throwException(UnknownDataFormatException(record.kind)))
      case None          ⇒ dataDelete(record.name, record.kind)
    }
    record
  }
}
Example 2
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.async

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

// Quill encoders for the async context: each encoder appends the (optionally
// converted) value to the prepared-statement row.
trait Encoders {
  this: AsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]

  // NOTE(review): this alias is declared but the constructors below take
  // DecoderSqlType (defined elsewhere in the context) — presumably the same
  // underlying SqlTypes.SqlTypes; confirm and unify.
  type EncoderSqlType = SqlTypes.SqlTypes

  // Pairs a base encoder with the SQL type it targets; delegates encoding unchanged.
  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T])
    extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  // Identity encoder for values the row accepts as-is.
  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  // Encoder that converts the value with `f` before appending it to the row.
  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  // Derives an Encoder[I] from an Encoder[O] through a MappedEncoding[I, O].
  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  // Option support: None is encoded as SQL NULL, Some delegates to the base encoder.
  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  // Primitive and common JVM types, passed through unchanged.
  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal](SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  // java.time values are bridged to their Joda equivalents via MappedEncoding.
  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  // Converts via epoch millis interpreted in the system default zone.
  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
}
Example 3
Source File: DateTimeSpecs.scala    From guardrail   with MIT License 5 votes vote down vote up
package dateTime.server.springMvc.dateTime

import java.time.{ LocalDate, OffsetDateTime }
import java.util.concurrent.CompletableFuture

import org.junit.runner.RunWith
import org.mockito.{ ArgumentMatchersSugar, MockitoSugar }
import org.scalatest.{ BeforeAndAfterAll, FreeSpec, Matchers }
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.autoconfigure.EnableAutoConfiguration
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.ComponentScan
import org.springframework.http.MediaType
import org.springframework.test.context.TestContextManager
import org.springframework.test.context.junit4.SpringRunner
import org.springframework.test.web.servlet.MockMvc
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders.{ asyncDispatch, post, get}
import org.springframework.test.web.servlet.result.MockMvcResultHandlers.print
import org.springframework.test.web.servlet.result.MockMvcResultMatchers.{ request, status }
import spring.test.TestApplication

// Spring MVC round-trip test: verifies that JSR-310 date/time query parameters
// (OffsetDateTime and LocalDate, both required and optional) reach the handler intact.
@RunWith(classOf[SpringRunner])
@SpringBootTest(classes = Array(classOf[TestApplication]))
@AutoConfigureMockMvc
@ComponentScan
@EnableAutoConfiguration
class DateTimeSpecs extends FreeSpec with Matchers with BeforeAndAfterAll with MockitoSugar with ArgumentMatchersSugar {
  @Autowired var mvc: MockMvc                    = _
  @Autowired var handlerMock: DateTimeHandler = _

  // Triggers Spring's dependency injection on this test instance.
  new TestContextManager(this.getClass).prepareTestInstance(this)

  "test jsre310 stuff" - {
    "dates everywhere" in {

      val offsetDtNow = OffsetDateTime.now
      val localDtNow = LocalDate.now

      // Stub the handler to answer NoContent only when all four parameters arrive
      // with exactly the values sent below (i.e. parsing must be lossless).
      when(
        handlerMock.getSomething(
          eqTo(offsetDtNow),
          eqTo(java.util.Optional.of(offsetDtNow)),
          eqTo(localDtNow),
          eqTo(java.util.Optional.of(localDtNow))
        )
      ).thenReturn(CompletableFuture.completedFuture(DateTimeHandler.GetSomethingResponse.NoContent))

      // The endpoint is async: the first perform starts async processing,
      // then asyncDispatch replays the request to obtain the final status.
      val mvcResult = mvc
        .perform(
          get("/foo")
            .param("dateTime", offsetDtNow.toString)
            .param("optionalDateTime", offsetDtNow.toString)
            .param("date", localDtNow.toString)
            .param("optionalDate", localDtNow.toString)
            .contentType(MediaType.APPLICATION_JSON)
        )
        .andExpect(request.asyncStarted)
        .andReturn

      mvc.perform(asyncDispatch(mvcResult)).andDo(print()).andExpect(status.isNoContent)
    }
  }
}
Example 4
Source File: GithubIssue389.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package com.sksamuel.avro4s.github

import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter

import com.sksamuel.avro4s.{AvroSchema, Decoder, Encoder}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

// Regression test for avro4s issue 389: OffsetDateTime must round-trip through the
// string-backed logical type using ISO-8601 offset formatting.
class GithubIssue389 extends AnyWordSpec with Matchers {

  "OffsetDateTime" must {

    // Exercise the current instant plus both extremes of the representable range.
    val samples = List(OffsetDateTime.now(), OffsetDateTime.MAX, OffsetDateTime.MIN)

    "generate a schema with a logical type backed by a string" in {
      val parser = new org.apache.avro.Schema.Parser()
      val expected = parser.parse(this.getClass.getResourceAsStream("/github/github_389.json"))
      AvroSchema[OffsetDateTime] shouldBe expected
    }

    "encode to an iso formatted String" in {
      samples.foreach { datetime =>
        Encoder[OffsetDateTime].encode(datetime) shouldBe datetime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)
      }
    }

    "decode an iso formatted String to an equivalent OffsetDatetime object" in {
      samples.foreach { datetime =>
        val isoString = datetime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)
        Decoder[OffsetDateTime].decode(isoString) shouldBe datetime
      }
    }

    "round trip encode and decode into an equivalent object" in {
      samples.foreach { datetime =>
        Decoder[OffsetDateTime].decode(Encoder[OffsetDateTime].encode(datetime)) shouldBe datetime
      }
    }

  }
}
Example 5
Source File: CustomScalars.scala    From graphql-gateway   with Apache License 2.0 5 votes vote down vote up
package sangria.gateway.schema

import java.time.format.DateTimeFormatter
import java.time.{Instant, OffsetDateTime, ZoneOffset, ZonedDateTime}

import sangria.schema._
import sangria.ast
import sangria.validation.ValueCoercionViolation

import scala.util.{Failure, Success, Try}

// Custom GraphQL scalar for ISO-8601 date-times, exposed as the `DateTime` type.
object CustomScalars {
  implicit val DateTimeType = ScalarType[ZonedDateTime]("DateTime",
    description = Some("DateTime is a scalar value that represents an ISO8601 formatted date and time."),
    // Output is rendered as an ISO instant (UTC) string.
    coerceOutput = (date, _) ⇒ DateTimeFormatter.ISO_INSTANT.format(date),
    // Variables: accept a string and parse it; anything else is a violation.
    coerceUserInput = {
      case s: String ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    },
    // Literals: accept a StringValue AST node and parse it the same way.
    coerceInput = {
      case ast.StringValue(s, _, _, _, _) ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    })

  // BUG FIX: `DateTimeFormatter.parse(s)` returns a raw TemporalAccessor, never a
  // ZonedDateTime, so the previous `.asInstanceOf[ZonedDateTime]` cast threw a
  // ClassCastException for every input and parsing could never succeed.
  // Parse through `ZonedDateTime.parse` with the same formatter instead.
  def parseDateTime(s: String): Try[ZonedDateTime] =
    Try(ZonedDateTime.parse(s, DateTimeFormatter.ISO_ZONED_DATE_TIME))

  case object DateCoercionViolation extends ValueCoercionViolation("Date value expected")
}
Example 6
Source File: Entity.scala    From template-site   with MIT License 5 votes vote down vote up
package models

import java.time.OffsetDateTime

import models.db.{AccountRole, Tables}

// Generic wrapper pairing a database row id with its decoded payload.
case class Entity[T](id:Int, data:T)

// An account as exposed to the application layer.
case class Account(name: String, email: String, role: AccountRole.Value) {
  // True when the account carries the admin role.
  def isAdmin: Boolean = role == AccountRole.admin
}

object Account {
  /** Converts a database row into an Entity-wrapped Account. */
  def apply(row: Tables.AccountRow): Entity[Account] = {
    val account = Account(name = row.name, email = row.email, role = row.role)
    Entity(id = row.id, data = account)
  }
}

case class Message(content:String, tagSet:Set[String]) {
  /** Builds an unsaved database row (id = -1) stamped with the current time. */
  def toRow() = {
    val timestamp = OffsetDateTime.now()
    Tables.MessageRow(
      id = -1,
      content = content,
      tagList = tagSet.toList,
      createdAt = timestamp,
      updatedAt = timestamp
    )
  }
}

object Message {
  /** Wraps a database row into an Entity, keeping only the domain fields. */
  def apply(row: Tables.MessageRow): Entity[Message] = {
    val message = Message(content = row.content, tagSet = row.tagList.toSet)
    Entity(id = row.id, data = message)
  }

  /** Form binding: trims the content and splits the comma-separated tag string. */
  def formApply(content:String, tags:String):Message = {
    val tagSet = tags.split(",").iterator.map(_.trim).filter(_.nonEmpty).toSet
    Message(content.trim, tagSet)
  }

  /** Form unbinding: joins the tag set back into a comma-separated string. */
  def formUnapply(m:Message):Option[(String, String)] =
    Some((m.content, m.tagSet.mkString(",")))
}
Example 7
Source File: offsetdatetime.scala    From cats-time   with MIT License 5 votes vote down vote up
package io.chrisdavenport.cats.time.instances

import cats._
import cats.implicits._
import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter
import java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME

trait offsetdatetime {
  /** Builds a Show instance that renders with the supplied formatter. */
  final def showOffsetDateTime(formatter: DateTimeFormatter): Show[OffsetDateTime] =
    Show[String].contramap(_.format(formatter))

  // Explicit type annotation added: without it the compiler infers the type of this
  // public implicit, which is fragile for binary compatibility and not permitted for
  // implicit definitions in Scala 3. The annotated type matches the previous behavior.
  implicit final val offsetdatetimeInstances: Show[OffsetDateTime] with Order[OffsetDateTime] with Hash[OffsetDateTime] =
    new Show[OffsetDateTime] with Order[OffsetDateTime] with Hash[OffsetDateTime]{
      override def hash(x: OffsetDateTime): Int = x.hashCode
      override def compare(x: OffsetDateTime, y: OffsetDateTime): Int = x.compareTo(y)
      // Rendered as an ISO-8601 offset date-time string.
      override def show(x: OffsetDateTime): String = x.format(ISO_OFFSET_DATE_TIME)
    }
}

// Companion object so instances can be imported via `offsetdatetime._`.
object offsetdatetime extends offsetdatetime
Example 8
Source File: ParameterMappers.scala    From neotypes   with MIT License 5 votes vote down vote up
package neotypes
package implicits.mappers

import java.time.{Duration, LocalDate, LocalDateTime, LocalTime, Period, OffsetDateTime, OffsetTime, ZonedDateTime}
import java.util.UUID

import mappers.ParameterMapper

import org.neo4j.driver.v1.Value
import org.neo4j.driver.v1.types.{IsoDuration, Point}

import scala.collection.Iterable
import scala.jdk.CollectionConverters._

// ParameterMapper instances for the types accepted as Neo4j query parameters.
// Driver-native types pass through `identity`; JVM primitives are boxed.
trait ParameterMappers {
  implicit final val BooleanParameterMapper: ParameterMapper[Boolean] =
    ParameterMapper.fromCast(Boolean.box)

  implicit final val ByteArrayParameterMapper: ParameterMapper[Array[Byte]] =
    ParameterMapper.identity

  implicit final val DoubleParameterMapper: ParameterMapper[Double] =
    ParameterMapper.fromCast(Double.box)

  implicit final val DurationParameterMapper: ParameterMapper[Duration] =
    ParameterMapper.identity

  implicit final val FloatParameterMapper: ParameterMapper[Float] =
    ParameterMapper.fromCast(Float.box)

  implicit final val IntParameterMapper: ParameterMapper[Int] =
    ParameterMapper.fromCast(Int.box)

  implicit final val IsoDurationParameterMapper: ParameterMapper[IsoDuration] =
    ParameterMapper.identity

  implicit final val LocalDateParameterMapper: ParameterMapper[LocalDate] =
    ParameterMapper.identity

  implicit final val LocalDateTimeParameterMapper: ParameterMapper[LocalDateTime] =
    ParameterMapper.identity

  implicit final val LocalTimeParameterMapper: ParameterMapper[LocalTime] =
    ParameterMapper.identity

  implicit final val LongParameterMapper: ParameterMapper[Long] =
    ParameterMapper.fromCast(Long.box)

  implicit final val OffsetDateTimeParameterMapper: ParameterMapper[OffsetDateTime] =
    ParameterMapper.identity

  implicit final val OffsetTimeParameterMapper: ParameterMapper[OffsetTime] =
    ParameterMapper.identity

  implicit final val PeriodParameterMapper: ParameterMapper[Period] =
    ParameterMapper.identity

  implicit final val PointParameterMapper: ParameterMapper[Point] =
    ParameterMapper.identity

  implicit final val StringParameterMapper: ParameterMapper[String] =
    ParameterMapper.identity

  // UUIDs are sent as their canonical string representation.
  implicit final val UUIDParameterMapper: ParameterMapper[UUID] =
    ParameterMapper[String].contramap(_.toString)

  implicit final val ValueParameterMapper: ParameterMapper[Value] =
    ParameterMapper.identity

  implicit final val ZonedDateTimeParameterMapper: ParameterMapper[ZonedDateTime] =
    ParameterMapper.identity

  // Maps each element with the element mapper and exposes the result as a java.util.List.
  private final def iterableParameterMapper[T](mapper: ParameterMapper[T]): ParameterMapper[Iterable[T]] =
    ParameterMapper.fromCast { col =>
      col.iterator.map(v => mapper.toQueryParam(v).underlying).asJava
    }

  // Any collection viewable as Iterable[T] gets a mapper derived from its element mapper.
  implicit final def collectionParameterMapper[T, C[_]](implicit mapper: ParameterMapper[T], ev: C[T] <:< Iterable[T]): ParameterMapper[C[T]] =
    iterableParameterMapper(mapper).contramap(ev)

  // Maps each value with the value mapper and exposes the result as a java.util.Map.
  private final def iterableMapParameterMapper[V](mapper: ParameterMapper[V]): ParameterMapper[Iterable[(String, V)]] =
    ParameterMapper.fromCast { col =>
      col.iterator.map {
        case (key, v) => key -> mapper.toQueryParam(v).underlying
      }.toMap.asJava
    }

  implicit final def mapParameterMapper[V, M[_, _]](implicit mapper: ParameterMapper[V], ev: M[String, V] <:< Iterable[(String, V)]): ParameterMapper[M[String, V]] =
    iterableMapParameterMapper(mapper).contramap(ev)

  // None is sent as null; Some unwraps to the underlying mapped value.
  implicit final def optionAnyRefParameterMapper[T](implicit mapper: ParameterMapper[T]): ParameterMapper[Option[T]] =
    ParameterMapper.fromCast { optional =>
      optional.map(v => mapper.toQueryParam(v).underlying).orNull
    }
}
Example 9
Source File: LogApiController.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.operation.controller

import java.time.{ OffsetDateTime, ZoneId }
import java.util.Date

import akka.actor.{ ActorRef, Props }
import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.{ Cancel, Request }
import akka.stream.scaladsl.Source
import ch.qos.logback.classic.spi.ILoggingEvent
import akka.http.scaladsl.model.sse.ServerSentEvent
import io.vamp.common.Namespace
import io.vamp.common.akka._
import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import org.json4s.native.Serialization._

import scala.concurrent.duration.FiniteDuration

// A single log line captured from Logback, timestamped for streaming to clients.
case class LogEvent(logger: String, level: String, message: String, timestamp: OffsetDateTime)

// Streams Logback log events to HTTP clients as server-sent events (SSE).
trait LogApiController extends AbstractController {

  // SSE `event:` field used for all emitted log entries.
  private val eventType = "log"

  // Creates an SSE source backed by an actor subscribed to the LogPublisherHub.
  // NOTE(review): ActorPublisher is deprecated in newer Akka Streams releases;
  // consider Source.queue / Source.actorRef when upgrading.
  def sourceLog(level: String, logger: Option[String], keepAlivePeriod: FiniteDuration)(implicit namespace: Namespace): Source[ServerSentEvent, ActorRef] = {
    Source.actorPublisher[ServerSentEvent](Props(new ActorPublisher[ServerSentEvent] {
      def receive: Receive = {
        // First downstream demand: subscribe to the hub; each logging event is
        // serialized to JSON and wrapped in a ServerSentEvent.
        case Request(_) ⇒ openLogStream(self, level, logger, { event ⇒
          ServerSentEvent(write(encode(event))(SerializationFormat(OffsetDateTimeSerializer)), eventType)
        })
        case Cancel                                  ⇒ closeLogStream(self)
        // Forward only while demand exists; otherwise the event is dropped.
        case sse: ServerSentEvent if totalDemand > 0 ⇒ onNext(sse)
        case _                                       ⇒
      }

    })).keepAlive(keepAlivePeriod, () ⇒ ServerSentEvent.heartbeat)
  }

  // Subscribes `to` to the log hub with the given level/logger filter and encoder.
  def openLogStream(to: ActorRef, level: String, logger: Option[String], encoder: (ILoggingEvent) ⇒ AnyRef)(implicit namespace: Namespace): Unit = {
    LogPublisherHub.subscribe(to, level, logger, encoder)
  }

  def closeLogStream(to: ActorRef): Unit = LogPublisherHub.unsubscribe(to)

  // Converts a Logback event into the LogEvent model with a UTC timestamp.
  def encode(loggingEvent: ILoggingEvent) = LogEvent(
    loggingEvent.getLoggerName,
    loggingEvent.getLevel.toString,
    loggingEvent.getFormattedMessage,
    OffsetDateTime.ofInstant(new Date(loggingEvent.getTimeStamp).toInstant, ZoneId.of("UTC"))
  )
}
Example 10
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jasync

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

// Quill encoders for the jasync context: each encoder appends the (optionally
// converted) value to the prepared-statement row.
trait Encoders {
  this: JAsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]

  // NOTE(review): this alias is declared but the constructors below take
  // DecoderSqlType (defined elsewhere in the context) — presumably the same
  // underlying SqlTypes.SqlTypes; confirm and unify.
  type EncoderSqlType = SqlTypes.SqlTypes

  // Pairs a base encoder with the SQL type it targets; delegates encoding unchanged.
  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T])
    extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  // Identity encoder for values the row accepts as-is.
  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  // Encoder that converts the value with `f` before appending it to the row.
  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  // Derives an Encoder[I] from an Encoder[O] through a MappedEncoding[I, O].
  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  // Option support: None is encoded as SQL NULL, Some delegates to the base encoder.
  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  // Primitive and common JVM types; scala.BigDecimal is unwrapped to java.math.BigDecimal.
  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]((bd: BigDecimal) => bd.bigDecimal, SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  // java.time values are bridged to their Joda equivalents via MappedEncoding.
  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  // Converts via epoch millis interpreted in the system default zone.
  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
}
Example 11
Source File: EventReader.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.model.reader

import java.time.OffsetDateTime

import io.vamp.model.event.{ Aggregator, Event, EventQuery, TimeRange }
import io.vamp.model.notification.{ EventTimestampError, UnsupportedAggregatorError }
import io.vamp.model.reader.EventReader.<<?
import io.vamp.model.reader.YamlSourceReader._
import io.vamp.model.validator.EventValidator

// Reads an Event from YAML: required tags plus optional version/value/timestamp/type.
object EventReader extends YamlReader[Event] with EventValidator {

  // Allows `tags` to be given as a single scalar by expanding it to a list.
  override protected def expand(implicit source: YamlSourceReader) = {
    expandToList("tags")
    source
  }

  override protected def parse(implicit source: YamlSourceReader): Event = {
    val version = <<?[String]("version")
    val tags = <<![List[String]]("tags").toSet
    // Nested YAML values are flattened; plain scalar values pass through unchanged.
    val value = <<?[AnyRef]("value") match {
      case None                         ⇒ None
      case Some(yaml: YamlSourceReader) ⇒ yaml.flatten()
      case Some(any)                    ⇒ any
    }

    // A missing timestamp defaults to now; an unparsable one is reported as a
    // model notification instead of leaking the raw parse exception.
    val timestamp = <<?[String]("timestamp") match {
      case None ⇒ OffsetDateTime.now
      case Some(time) ⇒ try OffsetDateTime.parse(time) catch {
        case e: Exception ⇒ throwException(EventTimestampError(time))
      }
    }

    val `type` = <<?[String]("type").getOrElse(Event.defaultType)

    Event(version, tags, value, timestamp, `type`)
  }

  override def validate(event: Event): Event = validateEvent(event)
}

// Reads an EventQuery from YAML: tags, optional type, time range and aggregator.
object EventQueryReader extends YamlReader[EventQuery] with EventValidator {

  // Allows `tags` to be given as a single scalar by expanding it to a list.
  override protected def expand(implicit source: YamlSourceReader) = {
    expandToList("tags")
    source
  }

  override protected def parse(implicit source: YamlSourceReader): EventQuery = {
    // TODO: add version val version = <<?[String]("version")
    val tags = <<![List[String]]("tags").toSet
    val `type` = <<?[String]("type")

    // Any `timestamp` entry is turned into a TimeRange from its lt/lte/gt/gte keys.
    val timestamp = <<?[Any]("timestamp").flatMap { _ ⇒
      Some(TimeRange(<<?[String]("timestamp" :: "lt"), <<?[String]("timestamp" :: "lte"), <<?[String]("timestamp" :: "gt"), <<?[String]("timestamp" :: "gte")))
    }

    // The aggregator type must match one of the supported values (case-insensitive);
    // anything else is reported as UnsupportedAggregatorError.
    val aggregator = <<?[Any]("aggregator") match {
      case None ⇒ None
      case Some(_) ⇒
        val `type` = <<![String]("aggregator" :: "type").toLowerCase
        Aggregator.values.find(agg ⇒ agg.toString.toLowerCase == `type`) match {
          case None      ⇒ throwException(UnsupportedAggregatorError(`type`))
          case Some(agg) ⇒ Some(Aggregator(agg, <<?[String]("aggregator" :: "field")))
        }
    }

    EventQuery(tags, `type`, timestamp, aggregator)
  }

  override def validate(eventQuery: EventQuery): EventQuery = validateEventQuery(eventQuery)
}
Example 12
Source File: EventQuery.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.model.event

import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter

// A query over stored events: tag filter, optional type, time range and aggregation.
case class EventQuery(tags: Set[String], `type`: Option[String], timestamp: Option[TimeRange], aggregator: Option[Aggregator] = None)

// Half-open/closed time bounds as pre-formatted strings: lt/lte upper, gt/gte lower.
case class TimeRange(lt: Option[String], lte: Option[String], gt: Option[String], gte: Option[String])

object TimeRange {

  /**
   * Builds a TimeRange from optional date-time bounds, formatted with ISO_DATE_TIME.
   *
   * @param from         optional lower bound
   * @param to           optional upper bound
   * @param includeLower true fills `gte` (inclusive), false fills `gt` (strict)
   * @param includeUpper true fills `lte` (inclusive), false fills `lt` (strict)
   */
  def apply(from: Option[OffsetDateTime], to: Option[OffsetDateTime], includeLower: Boolean, includeUpper: Boolean): TimeRange = {
    // `map` replaces the original `flatMap(t ⇒ Some(...))`, which is the same
    // operation expressed directly; each bound is formatted at most once.
    val formattedTo = to.map(_.format(DateTimeFormatter.ISO_DATE_TIME))
    val formattedFrom = from.map(_.format(DateTimeFormatter.ISO_DATE_TIME))

    TimeRange(
      lt = if (includeUpper) None else formattedTo,
      lte = if (includeUpper) formattedTo else None,
      gt = if (includeLower) None else formattedFrom,
      gte = if (includeLower) formattedFrom else None
    )
  }
}
Example 13
Source File: SlaEvent.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.model.notification

import java.time.OffsetDateTime

import io.vamp.common.notification.Notification
import io.vamp.model.artifact.{ Deployment, DeploymentCluster }
import io.vamp.model.event.Event

object SlaEvent {
  /** Standard tag set identifying an SLA event for the given deployment and cluster. */
  def slaTags(deployment: Deployment, cluster: DeploymentCluster): Set[String] =
    Set(
      "sla",
      s"deployment${Event.tagDelimiter}${deployment.name}",
      s"cluster${Event.tagDelimiter}${cluster.name}"
    )
}

// Common shape of SLA notification events emitted for a deployment cluster.
trait SlaEvent {
  def deployment: Deployment

  def cluster: DeploymentCluster

  def timestamp: OffsetDateTime

  // Event payload; defaults to the Option value None used as an opaque AnyRef.
  def value: AnyRef = None

  // Tags attached to the emitted event; empty by default.
  def tags: Set[String] = Set()
}

object Escalate {
  // Tag marking an SLA escalation event.
  def tags: Set[String] = Set(s"sla${Event.tagDelimiter}escalate")
}

// SLA escalation notification; timestamp defaults to creation time.
case class Escalate(deployment: Deployment, cluster: DeploymentCluster, timestamp: OffsetDateTime = OffsetDateTime.now()) extends Notification with SlaEvent {
  // Combines the escalate marker tag with the deployment/cluster SLA tags.
  override def tags: Set[String] = Escalate.tags ++ SlaEvent.slaTags(deployment, cluster)
}

object DeEscalate {
  // Tag marking an SLA de-escalation event.
  def tags: Set[String] = Set(s"sla${Event.tagDelimiter}deescalate")
}

// SLA de-escalation notification; timestamp defaults to creation time.
case class DeEscalate(deployment: Deployment, cluster: DeploymentCluster, timestamp: OffsetDateTime = OffsetDateTime.now()) extends Notification with SlaEvent {
  // Combines the de-escalate marker tag with the deployment/cluster SLA tags.
  override def tags: Set[String] = DeEscalate.tags ++ SlaEvent.slaTags(deployment, cluster)
}
Example 14
Source File: EventReaderSpec.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.model.reader

import java.time.OffsetDateTime

import io.vamp.model.event.{ Aggregator, TimeRange }
import io.vamp.model.notification._
import org.junit.runner.RunWith
import org.scalatest._
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
// Specs for EventReader and EventQueryReader: YAML fixtures under "event/" are parsed
// and then validated field-by-field via ScalaTest's symbol-based `have` property matchers.
class EventReaderSpec extends FlatSpec with Matchers with ReaderSpec {

  "EventReader" should "read the event" in {
    EventReader.read(res("event/event1.yml")) should have(
      'tags(Set("server", "service")),
      'timestamp(OffsetDateTime.parse("2015-06-05T15:12:38.000Z")),
      'value(0),
      'type("metrics")
    )
  }

  // Nested YAML structures under `tags` are expected to be flattened/expanded by the reader.
  it should "expand tags" in {
    EventReader.read(res("event/event2.yml")) should have(
      'tags(Set("server")),
      'value(Map("response" → Map("time" → 50))),
      'type("metrics")
    )
  }

  it should "fail on no tag" in {
    expectedError[MissingPathValueError]({
      EventReader.read(res("event/event3.yml"))
    })
  }

  it should "fail on empty tags" in {
    expectedError[NoTagEventError.type]({
      EventReader.read(res("event/event4.yml"))
    })
  }

  it should "fail on invalid timestamp" in {
    expectedError[EventTimestampError]({
      EventReader.read(res("event/event5.yml"))
    })
  }

  it should "parse no value" in {
    EventReader.read(res("event/event6.yml")) should have(
      'tags(Set("server")),
      'value(None)
    )
  }

  it should "fail on unsupported type" in {
    expectedError[EventTypeError]({
      EventReader.read(res("event/event7.yml"))
    })
  }

  // Query fixtures: time ranges are kept as raw expression strings (e.g. "now() - 10m").
  "EventQueryReader" should "read the query" in {
    EventQueryReader.read(res("event/query1.yml")) should have(
      'tags(Set("server", "service")),
      'type(None),
      'timestamp(Some(TimeRange(None, None, Some("now() - 10m"), None))),
      'aggregator(Some(Aggregator(Aggregator.average, Some("response.time"))))
    )
  }

  it should "expand tags" in {
    EventQueryReader.read(res("event/query2.yml")) should have(
      'tags(Set("server")),
      'timestamp(None),
      'aggregator(None)
    )
  }

  it should "fail on invalid time range" in {
    expectedError[EventQueryTimeError.type]({
      EventQueryReader.read(res("event/query3.yml"))
    })
  }

  it should "fail on unsupported aggregator" in {
    expectedError[UnsupportedAggregatorError]({
      EventQueryReader.read(res("event/query4.yml"))
    })
  }

  it should "read the query type" in {
    EventQueryReader.read(res("event/query5.yml")) should have(
      'tags(Set("server", "service")),
      'type(Option("router")),
      'timestamp(None),
      'aggregator(None)
    )
  }
}
Example 15
Source File: JobMessage.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.message.job

import java.io.File
import java.nio.charset.Charset
import java.nio.file.Path
import java.time.OffsetDateTime
import java.util.concurrent.TimeUnit

import akka.http.scaladsl.server.directives.FileInfo
import fusion.json.CborSerializable
import helloscala.common.data.{ IntValueName, StringValueName, ValueName }
import mass.common.page.{ Page, PageResult }
import mass.core.job.JobResult
import mass.model.CommonStatus
import mass.model.job._

import scala.concurrent.duration.FiniteDuration

// Requests sent to the job scheduling service; CBOR-serializable for Akka transport.
sealed trait JobMessage extends CborSerializable
// Replies produced by the job scheduling service.
sealed trait JobResponse extends CborSerializable
final case class JobErrorResponse(status: Int, message: String) extends JobResponse

final case class ProgramVersionItem(programId: String, versions: Seq[StringValueName])
// Request for the selectable options (programs, trigger types, versions, statuses).
final case class JobGetAllOptionReq() extends JobMessage
final case class JobGetAllOptionResp(
    program: Seq[StringValueName],
    triggerType: Seq[ValueName[String]],
    programVersion: Seq[ProgramVersionItem],
    jobStatus: Seq[IntValueName])
    extends JobResponse

final case class JobScheduleReq(key: String) extends JobMessage

final case class JobCreateReq(key: Option[String], item: JobItem, trigger: JobTrigger) extends JobMessage
final case class JobCreateResp(schedule: Option[JobSchedule]) extends JobResponse

// Partial update of a job schedule: only fields set to Some(_) are applied.
final case class JobUpdateReq(
    key: String,
    program: Option[Program] = None,
    programOptions: Option[Seq[String]] = None,
    programMain: Option[String] = None,
    programArgs: Option[Seq[String]] = None,
    programVersion: Option[String] = None,
    resources: Option[Map[String, String]] = None,
    data: Option[Map[String, String]] = None,
    description: Option[String] = None,
    dependentJobKeys: Option[Seq[String]] = None,
    name: Option[String] = None,
    triggerType: Option[TriggerType] = None,
    triggerEvent: Option[String] = None,
    startTime: Option[OffsetDateTime] = None,
    endTime: Option[OffsetDateTime] = None,
    // Number of times the trigger repeats
    repeat: Option[Int] = None,
    // Interval between repeated runs
    interval: Option[FiniteDuration] = None,
    cronExpress: Option[String] = None,
    failedRetries: Option[Int] = None,
    timeout: Option[FiniteDuration] = None,
    alarmEmails: Option[Seq[String]] = None,
    status: Option[CommonStatus] = None)
    extends JobMessage

final case class JobFindReq(key: String) extends JobMessage
final case class JobSchedulerResp(schedule: Option[JobSchedule]) extends JobResponse

// Paged listing of job schedules.
final case class JobPageReq(page: Int = 1, size: Int = 20, key: Option[String] = None) extends Page with JobMessage
final case class JobPageResp(content: Seq[JobSchedule], totalElements: Long, page: Int, size: Int)
    extends PageResult[JobSchedule]
    with JobResponse

final case class JobListReq(key: String) extends JobMessage
final case class JobListResp(items: Seq[JobSchedule]) extends JobResponse

final case class SchedulerJobResult(
    start: OffsetDateTime,
    end: OffsetDateTime,
    exitValue: Int,
    outPath: String,
    errPath: String)
    extends JobResult {

  /** Wall-clock duration of the run ([[start]] to [[end]]), reduced to the coarsest unit. */
  def runDuration: FiniteDuration = {
    val elapsedNanos = java.time.Duration.between(start, end).toNanos
    FiniteDuration(elapsedNanos, TimeUnit.NANOSECONDS).toCoarsest
  }
}

// Upload of a job definition file to be parsed into one or more schedules.
final case class JobUploadJobReq(file: Path, fileName: String, charset: Charset) extends JobMessage
final case class JobUploadJobResp(resps: Seq[JobCreateResp]) extends JobResponse

// Upload of auxiliary resource files (multipart form data).
final case class JobUploadFilesReq(items: Seq[(FileInfo, File)]) extends JobMessage
final case class JobUploadFilesResp(resources: Seq[IntValueName]) extends JobResponse
Example 16
Source File: JobTrigger.scala    From fusion-data   with Apache License 2.0 5 votes vote down vote up
package mass.model.job

import java.time.OffsetDateTime

import fusion.json.CborSerializable
import helloscala.common.Configuration
import mass.core.job.JobConstants

import scala.concurrent.duration._

// #JobTrigger
// Trigger definition for a scheduled job: when it fires, how often it repeats,
// and how failures and timeouts are handled.
case class JobTrigger(
    triggerType: TriggerType,
    triggerEvent: String = "",
    startTime: Option[OffsetDateTime] = None,
    endTime: Option[OffsetDateTime] = None,
    // Number of times the trigger repeats
    repeat: Int = JobConstants.TRIGGER_REPEAT,
    // Interval between repeated runs
    interval: FiniteDuration = JobConstants.TRIGGER_INTERVAL,
    cronExpress: String = "",
    failedRetries: Int = 0,
    timeout: FiniteDuration = JobConstants.RUN_TIMEOUT,
    alarmEmails: Seq[String] = Nil)
    extends CborSerializable
// #JobTrigger

object JobTrigger {

  /** Builds a [[JobTrigger]] from a configuration section; absent optional keys
    * fall back to the defaults in `JobConstants`.
    */
  def apply(c: Configuration): JobTrigger = {
    // "trigger-type" is mandatory — getString will surface a missing key.
    val triggerType = TriggerType.fromValue(c.getString("trigger-type"))
    JobTrigger(
      triggerType,
      c.getOrElse[String]("trigger-event", ""),
      c.get[Option[OffsetDateTime]]("start-time"),
      c.get[Option[OffsetDateTime]]("end-time"),
      c.getOrElse[Int]("repeat", JobConstants.TRIGGER_REPEAT),
      // NOTE(review): this reads config key "duration" although the target field is
      // `interval` — confirm against the config schema that the key name is intentional.
      c.getOrElse[FiniteDuration]("duration", JobConstants.TRIGGER_INTERVAL),
      c.getOrElse[String]("cron-express", ""),
      c.getOrElse[Int]("failed-retries", 0),
      c.getOrElse[FiniteDuration]("timeout", JobConstants.RUN_TIMEOUT),
      c.getOrElse[Seq[String]]("alarm-emails", Nil))
  }
}
Example 17 — Source File: MemoryCacheSnapshot.scala, from the AckCord project (MIT License)
package ackcord

import java.time.temporal.ChronoUnit
import java.time.{Instant, OffsetDateTime}

import ackcord.CacheSnapshot.BotUser
import ackcord.cachehandlers.CacheSnapshotBuilder
import ackcord.data._
import ackcord.util.CoreCompat
import shapeless.tag.@@


  /** Builds a cache processor that evicts stale entries in place and then returns
    * the supplied processor unchanged (so it can be re-registered).
    *
    * @param keepMessagesFor how long cached messages are retained, in minutes
    * @param keepTypedFor    how long "last typed" timestamps are retained, in minutes
    */
  def cleanGarbage(keepMessagesFor: Int, keepTypedFor: Int): CacheProcessor = (processor, builder) => {
    val oldestMessageToKeep = OffsetDateTime.now().minusMinutes(keepMessagesFor)
    val oldestTypedToKeep   = Instant.now().minus(keepTypedFor, ChronoUnit.MINUTES)

    for ((_, messages) <- builder.messageMap)
      CoreCompat.filterInPlace(messages)((_, msg) => msg.timestamp.isAfter(oldestMessageToKeep))

    for ((_, typedTimes) <- builder.lastTypedMap)
      CoreCompat.filterInPlace(typedTimes)((_, typedAt) => typedAt.isAfter(oldestTypedToKeep))

    processor
  }
} 
Example 18
Source File: JsonSpec.scala    From kanadi   with MIT License 5 votes vote down vote up
package org.zalando.kanadi
package api

import java.util.UUID

import cats.syntax.either._
import cats.instances.either._
import org.specs2.Specification
import org.specs2.specification.core.SpecStructure
import io.circe._
import io.circe.parser._
import io.circe.syntax._
import org.zalando.kanadi.models.{EventId, SpanCtx}
import java.time.OffsetDateTime

import io.circe.CursorOp.DownField

// JSON round-trip specs for Nakadi event (de)serialization via circe.
// The span-context fixtures mirror the official Nakadi test suite (link below).
class JsonSpec extends Specification {
  override def is: SpecStructure = s2"""
    Parse business events         $businessEvent
    Parse data events             $dataEvent
    Parse undefined events        $undefinedEvent
    SpanCtx decoding example      $decodeSpnCtx
    SpanCtx encoding example      $encodeSpnCtx
    SpanCtx fail decoding example $badDecodeSpnCtx
    """

  val uuid      = UUID.randomUUID()
  val testEvent = SomeEvent("Bart", "Simpson", uuid)
  val now       = OffsetDateTime.now()
  val md        = Metadata(eid = EventId("4ae5011e-eb01-11e5-8b4a-1c6f65464fc6"), occurredAt = now)

  // Payload fragment shared by all three event categories.
  val coreEventJson = s"""
    "first_name": "Bart",
    "last_name": "Simpson",
    "uuid": "${uuid.toString}"
  """

  val metadata =
    s""""eid": "4ae5011e-eb01-11e5-8b4a-1c6f65464fc6", "occurred_at": ${now.asJson}"""

  val businessEventJson = s"""{
    "metadata": {$metadata},
    $coreEventJson
  }"""

  val dataEventJson = s"""{
    "metadata": {$metadata},
    "data_op": "C",
    "data": {$coreEventJson},
    "data_type": "blah"
  }"""

  // No metadata at all -> should decode as an undefined event.
  val undefinedEventJson = s"{$coreEventJson}"

  def businessEvent =
    decode[Event[SomeEvent]](businessEventJson) must beRight(Event.Business(testEvent, md))

  def dataEvent =
    decode[Event[SomeEvent]](dataEventJson) must beRight(Event.DataChange(testEvent, "blah", DataOperation.Create, md))

  def undefinedEvent =
    decode[Event[SomeEvent]](undefinedEventJson) must beRight(Event.Undefined(testEvent))

  // Sample data is taken from official Nakadi source at https://github.com/zalando/nakadi/blob/effb2ed7e95bd329ab73ce06b2857aa57510e539/src/test/java/org/zalando/nakadi/validation/JSONSchemaValidationTest.java

  val spanCtxJson =
    """{"eid":"5678","occurred_at":"1992-08-03T10:00:00Z","span_ctx":{"ot-tracer-spanid":"b268f901d5f2b865","ot-tracer-traceid":"e9435c17dabe8238","ot-baggage-foo":"bar"}}"""

  // Invalid: "ot-tracer-traceid" is a number where a string is required.
  val spanCtxBadJson =
    """{"eid":"5678","occurred_at":"1992-08-03T10:00:00Z","span_ctx":{"ot-tracer-spanid":"b268f901d5f2b865","ot-tracer-traceid":42,"ot-baggage-foo":"bar"}}"""

  val spanCtxEventMetadata = Metadata(
    eid = EventId("5678"),
    occurredAt = OffsetDateTime.parse("1992-08-03T10:00:00Z"),
    spanCtx = Some(
      SpanCtx(
        Map(
          "ot-tracer-spanid"  -> "b268f901d5f2b865",
          "ot-tracer-traceid" -> "e9435c17dabe8238",
          "ot-baggage-foo"    -> "bar"
        )))
  )

  def decodeSpnCtx =
    decode[Metadata](spanCtxJson) must beRight(spanCtxEventMetadata)

  // dropNullValues keeps the encoded form byte-comparable with the fixture.
  def encodeSpnCtx =
    spanCtxEventMetadata.asJson.pretty(Printer.noSpaces.copy(dropNullValues = true)) mustEqual spanCtxJson

  def badDecodeSpnCtx =
    decode[Metadata](spanCtxBadJson) must beLeft(
      DecodingFailure("String", List(DownField("ot-tracer-traceid"), DownField("span_ctx"))))
}
Example 19
Source File: WiredApiService.scala    From bay-scalajs.g8   with Apache License 2.0 5 votes vote down vote up
package services

import java.time.OffsetDateTime

import play.api.libs.Files
import play.api.mvc.MultipartFormData
import shared.models.WiredApiModel.ApiResult
import shared.models.slick.default._
import shared.services.WiredApi
import shared.utils.Implicits
import shared.utils.LoremIpsum

// Server-side implementation of the shared WiredApi, constructed per request with the
// authenticated user, the service registry and any uploaded multipart files.
class WiredApiService(user: User,
                      services: Services,
                      files: Seq[MultipartFormData.FilePart[Files.TemporaryFile]])
    extends WiredApi
    with Implicits {

  /** Health-check endpoint. */
  override def ping(): ApiResult[String] = "pong".asResult

  /** Current server time. */
  override def now(): ApiResult[OffsetDateTime] = OffsetDateTime.now.asResult

  override def createLoremIpsum(): ApiResult[List[String]] = {
    // Artificial 2 s delay; NOTE(review): this blocks the calling thread — presumably a
    // demo of slow responses; confirm it is intentional outside of template/demo code.
    Thread.sleep(2000)
    LoremIpsum.paragraphs(15).asResult
  }
}
Example 20
Source File: HomeScreen.scala    From bay-scalajs.g8   with Apache License 2.0 5 votes vote down vote up
package screens

import java.time.OffsetDateTime

import _root_.utils.HtmlTags
import autowire._
import japgolly.scalajs.react._
import japgolly.scalajs.react.extra.router.RouterCtl
import models.Locs.Loc
import services.AjaxClient
import shared.services.WiredApi
import shared.utils.{Codecs, LoremIpsum}

// scalajs-react screen: renders lorem-ipsum text and, on mount, calls the
// server's `now()` endpoint via autowire, logging the result or alerting on error.
object HomeScreen extends HtmlTags with Codecs {

  // Router control is the only prop; this screen has no state.
  case class Props(c: RouterCtl[Loc])

  case class State()

  class Backend($ : BackendScope[Props, State]) {

    // Fire-and-forget server call on mount; result is only logged/alerted.
    def mounted() = Callback {
      AjaxClient[WiredApi].now().call().foreach {
        case Right(time) => println(time.toString)
        case Left(e)     => org.scalajs.dom.window.alert(e.toString)
      }
    }

    def render(props: Props, state: State): VdomTag = {
      <.div(
        <.h2("Lorem Ipsum"),
        <.div(
          LoremIpsum.paragraphs(4).map(<.p(_)).toTagMod,
          OffsetDateTime.now.toString
        )
      )
    }
  }

  private val component = ScalaComponent
    .builder[Props]("HomeScreen")
    .initialState(State())
    .renderBackend[Backend]
    .componentDidMount(_.backend.mounted())
    .build

  def apply(c: RouterCtl[Loc]) = component(Props(c))
}
Example 21
Source File: InMemoryCommitCommitManager.scala    From reactive-nakadi   with MIT License 5 votes vote down vote up
package org.zalando.react.nakadi

import java.time.{ZoneId, OffsetDateTime}

import org.zalando.react.nakadi.commit.OffsetTracking
import org.zalando.react.nakadi.commit.handlers.BaseCommitManager

import scala.concurrent.Future


// In-memory commit manager for tests: offsets are stored as
// "checkpointId-leaseHolder-leaseCounter" strings keyed by group/eventType/partition.
// NOTE(review): "-" is a fragile delimiter — parsing breaks if checkpointId or
// leaseHolder themselves contain '-'; kept for backward compatibility with existing keys.
object InMemoryCommitCommitManager extends BaseCommitManager {

  private val store = scala.collection.concurrent.TrieMap.empty[String, String]
  private def generateKey(group: String, eventType: String, partition: String) = s"$group-$eventType-$partition"
  private def generateValue(offset: String, leaseHolder: String, leaseCounter: Long) = s"$offset-$leaseHolder-$leaseCounter"

  // First commit for a key: initialise the stored record.
  // NOTE(review): stores counter 0 but returns Option(1), as the original did — confirm
  // whether the stored and returned counters are meant to agree.
  def create(key: String, partitionId: String, checkpointId: String, leaseHolder: String) = {
    val value = generateValue(checkpointId, leaseHolder, 0)
    store.put(key, value)
    OffsetTracking(
      partitionId = partitionId,
      checkpointId = value.split("-")(0),
      leaseHolder = value.split("-")(1),
      leaseCounter = Option(1),
      leaseTimestamp = OffsetDateTime.now,
      leaseId = None
    )
  }

  // Subsequent commit: bump the lease counter and persist it.
  def update(key: String, value: String, partitionId: String) = {
    val count = value.split("-")(2).toLong
    val leaseCounter = count + 1

    val offset = OffsetTracking(
      partitionId = partitionId,
      checkpointId = value.split("-")(0),
      leaseHolder = value.split("-")(1),
      leaseCounter = Option(leaseCounter),
      leaseTimestamp = OffsetDateTime.now,
      leaseId = None
    )
    // FIX: persist the incremented counter. Previously the old `count` was written
    // back, so the stored lease counter never advanced across updates.
    store.put(key, generateValue(offset.checkpointId, offset.leaseHolder, leaseCounter))
    offset
  }

  override def put(groupId: String, eventType: String, offset: OffsetTracking): Future[OffsetTracking] = Future.successful {
    val key = generateKey(groupId, eventType, offset.partitionId)
    store.get(key)
      .fold(create(key, offset.partitionId, offset.checkpointId, offset.leaseHolder))(update(key, _, offset.partitionId))
  }

  override def get(groupId: String, eventType: String, partitionId: String): Future[Option[OffsetTracking]] = {
    Future.successful {
      val key = generateKey(groupId, eventType, partitionId)
      store.get(key).map { value =>
        OffsetTracking(
          partitionId = partitionId,
          checkpointId = value.split("-")(0),
          leaseHolder = value.split("-")(1),
          // FIX: return the counter actually stored for this key; it was hard-coded
          // to Option(1) regardless of how many updates had occurred.
          leaseCounter = Option(value.split("-")(2).toLong),
          leaseTimestamp = OffsetDateTime.now,
          leaseId = None
        )
      }
    }
  }
}
Example 22
Source File: Models.scala    From reactive-nakadi   with MIT License 5 votes vote down vote up
package org.zalando.react.nakadi.client.models

import java.time.OffsetDateTime

// Nakadi event metadata; field names use snake_case to match the wire format.
case class EventMetadata(
  eid: String,
  event_type: Option[String] = None,
  occurred_at: OffsetDateTime,
  received_at: Option[OffsetDateTime] = None,
  parent_eids: Option[Seq[String]] = None,
  flow_id: Option[String] = None
)

// Common shape of all Nakadi events: every event carries metadata.
sealed trait Event {
  def metadata: EventMetadata
}

case class BusinessEvent(
  metadata: EventMetadata,
  payload: EventPayload
) extends Event

case class DataChangeEvent(
  data_type: String,
  data_op: DataOpEnum.DataOp,
  data: EventPayload, // Raw payload for event
  metadata: EventMetadata
) extends Event

// Stream position within a partition.
case class Cursor(
  partition: String,
  offset: String
)

// One batch from the event stream; `events` is absent for keep-alive batches.
case class EventStreamBatch(
  cursor: Cursor,
  events: Option[Seq[Event]] = None
)

case class EventTypeStatistics(
  expectedWriteRate: Option[Long] = None,
  messageSize: Option[Long] = None,
  readParallelism: Option[Long] = None,
  writeParallelism: Option[Long] = None
)

// Nakadi event type registration payload.
case class EventType(
  name: String,
  statistics: Option[EventTypeStatistics] = None,
  partitionKeyFields: Seq[String],
  dataKeyFields: Option[Seq[String]] = None,
  owningApplication: String,
  validationStrategies: Option[Seq[String]] = None,
  partitionResolutionStrategy: Option[play.api.libs.json.JsValue] = None,
  schema: Option[play.api.libs.json.JsValue] = None,
  category: EventTypeCategoryEnum.Category,
  enrichmentStrategies: Seq[String]
)
Example 23
Source File: PostgresProjectionSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.cli.postgres

import java.time.OffsetDateTime

import cats.effect.{Blocker, IO}
import ch.epfl.bluebrain.nexus.cli.Console
import ch.epfl.bluebrain.nexus.cli.config.AppConfig
import ch.epfl.bluebrain.nexus.cli.modules.postgres.PostgresProjection
import ch.epfl.bluebrain.nexus.cli.modules.postgres.PostgresProjection.TimeMeta.javatime._
import ch.epfl.bluebrain.nexus.cli.sse.Offset
import doobie.util.transactor.Transactor
import fs2.io

//noinspection SqlNoDataSourceInspection
//noinspection SqlNoDataSourceInspection
// Integration spec: runs the Postgres projection against a live database (provided
// by AbstractPostgresSpec) and checks row counts, aggregates and offset persistence.
class PostgresProjectionSpec extends AbstractPostgresSpec {

  "A PostgresProjection" should {
    "project all schemas" in {
      import doobie.implicits._
      (xa: Transactor[IO], proj: PostgresProjection[IO]) =>
        for {
          _                                <- proj.run
          count                            <- sql"select count(id) from schemas;".query[Int].unique.transact(xa)
          _                                 = count shouldEqual 175
          maxImport                        <- sql"select id, count(import) from schema_imports group by id order by count desc limit 1;"
                                                .query[(String, Int)]
                                                .unique
                                                .transact(xa)
          (maxImportSchema, maxImportCount) = maxImport
          _                                 = maxImportSchema shouldEqual "https://neuroshapes.org/commons/entity"
          _                                 = maxImportCount shouldEqual 7
          lastUpdated                      <- sql"select last_updated from schemas where id = 'https://neuroshapes.org/commons/entity'"
                                                .query[OffsetDateTime]
                                                .unique
                                                .transact(xa)
          // Fixed epoch-millis value from the test fixture data.
          _                                 = lastUpdated.toInstant.toEpochMilli shouldEqual 1584615316089L
        } yield ()
    }
    // After a run, the projection must have written a non-empty offset file.
    "save offset" in { (cfg: AppConfig, blocker: Blocker, proj: PostgresProjection[IO], console: Console[IO]) =>
      implicit val b: Blocker     = blocker
      implicit val c: Console[IO] = console

      for {
        _      <- proj.run
        exists <- io.file.exists[IO](blocker, cfg.postgres.offsetFile)
        _       = exists shouldEqual true
        offset <- Offset.load(cfg.postgres.offsetFile)
        _       = offset.nonEmpty shouldEqual true
      } yield ()
    }
  }
}
Example 24
Source File: ArrayOfOffsetDateTimesReading.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8
import java.time.OffsetDateTime

import com.avsystem.commons.serialization.json._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.AVSystemCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.BorerJsonEncodersDecoders._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.DslPlatformJson._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.UPickleReaderWriters._
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.rallyhealth.weejson.v1.jackson.FromJson
import com.rallyhealth.weepickle.v1.WeePickle.ToScala
import io.circe.parser._
import org.openjdk.jmh.annotations.Benchmark
import play.api.libs.json.Json
import spray.json._

// JMH benchmark: parses the same Array[OffsetDateTime] JSON payload (jsonBytes,
// provided by the parent benchmark class) with each supported JSON library.
class ArrayOfOffsetDateTimesReading extends ArrayOfOffsetDateTimesBenchmark {
  @Benchmark
  def avSystemGenCodec(): Array[OffsetDateTime] = JsonStringInput.read[Array[OffsetDateTime]](new String(jsonBytes, UTF_8))

  @Benchmark
  def borer(): Array[OffsetDateTime] = io.bullet.borer.Json.decode(jsonBytes).to[Array[OffsetDateTime]].value

  @Benchmark
  def circe(): Array[OffsetDateTime] = decode[Array[OffsetDateTime]](new String(jsonBytes, UTF_8)).fold(throw _, identity)

  @Benchmark
  def dslJsonScala(): Array[OffsetDateTime] = dslJsonDecode[Array[OffsetDateTime]](jsonBytes)

  @Benchmark
  def jacksonScala(): Array[OffsetDateTime] = jacksonMapper.readValue[Array[OffsetDateTime]](jsonBytes)

  @Benchmark
  def jsoniterScala(): Array[OffsetDateTime] = readFromArray[Array[OffsetDateTime]](jsonBytes)

  @Benchmark
  def playJson(): Array[OffsetDateTime] = Json.parse(jsonBytes).as[Array[OffsetDateTime]]

  @Benchmark
  def sprayJson(): Array[OffsetDateTime] = JsonParser(jsonBytes).convertTo[Array[OffsetDateTime]]

  @Benchmark
  def uPickle(): Array[OffsetDateTime] = read[Array[OffsetDateTime]](jsonBytes)

  @Benchmark
  def weePickle(): Array[OffsetDateTime] = FromJson(jsonBytes).transform(ToScala[Array[OffsetDateTime]])
}
Example 25 — Source File: ShoppingCartServiceImpl.scala, from the Lagom project (Apache License 2.0)
package com.example.shoppingcart.impl

import java.time.OffsetDateTime

import akka.Done
import akka.NotUsed
import com.example.shoppingcart.api.ShoppingCartService
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.lightbend.lagom.scaladsl.api.transport.BadRequest
import com.lightbend.lagom.scaladsl.api.transport.NotFound
import com.lightbend.lagom.scaladsl.api.transport.TransportException
import com.lightbend.lagom.scaladsl.persistence.EventStreamElement

import scala.concurrent.ExecutionContext
import akka.cluster.sharding.typed.scaladsl.ClusterSharding
import scala.concurrent.duration._
import akka.util.Timeout
import akka.cluster.sharding.typed.scaladsl.EntityRef
import akka.actor.typed.ActorRef


  // Resolves the sharded cart entity for the given cart id.
  private def entityRef(id: String): EntityRef[ShoppingCartCommand] =
    clusterSharding.entityRefFor(ShoppingCart.typeKey, id)

  implicit val timeout = Timeout(5.seconds)

  /** Returns a plain-text view of the cart in "id:items:status" form. */
  override def get(id: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    entityRef(id)
      .ask { reply: ActorRef[Summary] => Get(reply) }
      .map { cart => asShoppingCartView(id, cart) }
  }
  //#akka-persistence-reffor-after

  /** Sets the quantity of `productId`; rejection by the entity becomes HTTP 400.
    * NOTE(review): the lambda parameter `update` is unused — presumably the
    * request body is intentionally ignored; confirm.
    */
  override def updateItem(id: String, productId: String, qty: Int): ServiceCall[NotUsed, String] = ServiceCall { update =>
    entityRef(id)
      .ask { replyTo: ActorRef[Confirmation] => UpdateItem(productId, qty, replyTo) }
      .map {
        case Accepted(summary)  => asShoppingCartView(id, summary)
        case Rejected(reason)   => throw BadRequest(reason)
      }
  }

  /** Checks the cart out; rejection by the entity becomes HTTP 400. */
  override def checkout(id: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    entityRef(id)
      .ask(replyTo => Checkout(replyTo))
      .map {
        case Accepted(summary)  => asShoppingCartView(id, summary)
        case Rejected(reason)   => throw BadRequest(reason)
      }
  }

  /** Renders a cart summary as "cartId:prod1=qty1:prod2=qty2:status". */
  private def asShoppingCartView(id: String, cart: Summary): String = {
    val state =
      if (cart.checkedOut) "checkedout"
      else "open"
    val renderedItems = cart.items
      .map { case (productId, quantity) => s"$productId=$quantity" }
      .mkString(":")
    s"$id:$renderedItems:$state"
  }

  /** Looks up the read-side report for a cart; 404 if no report exists yet. */
  override def getReport(cartId: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    reportRepository.findById(cartId).map {
      case Some(cart) =>
      if (cart.checkedOut) "checkedout"
      else "open"
      case None => throw NotFound(s"Couldn't find a shopping cart report for '$cartId'")
    }
  }


} 
Example 26
Source File: ShoppingCartServiceImpl.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.example.shoppingcart.impl

import java.time.OffsetDateTime

import akka.{Done, NotUsed}
import com.example.shoppingcart.api.ShoppingCartService
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.transport.{BadRequest, NotFound, TransportException}
import com.lightbend.lagom.scaladsl.persistence.{EventStreamElement, PersistentEntityRegistry}

import scala.concurrent.ExecutionContext


  // Resolves the persistent entity for the given cart id.
  private def entityRef(id: String) =
    persistentEntityRegistry.refFor[ShoppingCartEntity](id)

  /** Returns a plain-text view of the cart in "id:items:status" form. */
  override def get(id: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    entityRef(id)
      .ask(Get)
      .map(cart => asShoppingCartView(id, cart))
  }
  //#akka-persistence-reffor-before

  /** Sets the quantity of `productId`; domain failures (ShoppingCartException)
    * are translated to HTTP 400. NOTE(review): the lambda parameter `update` is
    * unused — presumably the request body is intentionally ignored; confirm.
    */
  override def updateItem(id: String, productId: String, qty: Int): ServiceCall[NotUsed, String] = ServiceCall { update =>
    entityRef(id)
      .ask(UpdateItem(productId, qty))
      .map(cart => asShoppingCartView(id, cart))
      .recover {
        case ShoppingCartException(message) => throw BadRequest(message)
      }
  }

  /** Checks the cart out; domain failures are translated to HTTP 400. */
  override def checkout(id: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    entityRef(id)
      .ask(Checkout)
      .map(cart => asShoppingCartView(id, cart))
      .recover {
        case ShoppingCartException(message) => throw BadRequest(message)
      }
  }

  /** Builds the textual cart representation "cartId:product=qty:...:status". */
  private def asShoppingCartView(id: String, cart: Summary): String = {
    val itemList = cart.items
      .map { case (product, quantity) => s"$product=$quantity" }
      .mkString(":")
    val cartState = if (cart.checkedOut) "checkedout" else "open"
    s"$id:$itemList:$cartState"
  }

  /** Looks up the read-side report for a cart; 404 if no report exists yet. */
  override def getReport(cartId: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    reportRepository.findById(cartId).map {
      case Some(cart) =>
        if (cart.checkedOut) "checkedout"
        else "open"
      case None => throw NotFound(s"Couldn't find a shopping cart report for '$cartId'")
    }
  }
} 
Example 27
Source File: CommandsTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package cqrs.commands

import java.time.{LocalDateTime, OffsetDateTime, ZoneOffset}
import java.util.UUID

import org.scalatest.BeforeAndAfterAll
import endpoints4s.play.client.{Endpoints, JsonEntitiesFromCodecs}
import endpoints4s.play.server.PlayComponents
import play.api.Mode
import play.api.libs.ws.ahc.{AhcWSClient, AhcWSClientConfig}
import play.core.server.{NettyServer, ServerConfig}

import scala.concurrent.Future
import scala.math.BigDecimal
import org.scalatest.freespec.AsyncFreeSpec

// End-to-end test of the Commands endpoints: boots a real Netty server on port 9000
// and exercises it through the generated Play WS client.
class CommandsTest extends AsyncFreeSpec with BeforeAndAfterAll {

  private val server =
    NettyServer.fromRouterWithComponents(ServerConfig(mode = Mode.Test)) { components =>
      new Commands(PlayComponents.fromBuiltInComponents(components)).routes
    }
  val app = server.applicationProvider.get.get
  import app.materializer
  private val wsClient = AhcWSClient(AhcWSClientConfig())

  // Client bound to the locally-running server above.
  object client
      extends Endpoints("http://localhost:9000", wsClient)
      with JsonEntitiesFromCodecs
      with CommandsEndpoints

  // Tear down server and HTTP client after all tests.
  override def afterAll(): Unit = {
    server.stop()
    wsClient.close()
  }

  "Commands" - {

    val arbitraryDate = OffsetDateTime
      .of(LocalDateTime.of(2017, 1, 8, 12, 34, 56), ZoneOffset.UTC)
      .toInstant
    val arbitraryValue = BigDecimal(10)

    "create a new meter" in {
      client.command(CreateMeter("electricity")).map { maybeEvent =>
        assert(maybeEvent.collect {
          case StoredEvent(_, MeterCreated(_, "electricity")) => ()
        }.nonEmpty)
      }
    }
    // Creates a meter, extracts its id from the stored event, then adds a reading
    // and verifies the RecordAdded event echoes the same id, date and value.
    "create a meter and add readings to it" in {
      for {
        maybeCreatedEvent <- client.command(CreateMeter("water"))
        id <-
          maybeCreatedEvent
            .collect { case StoredEvent(_, MeterCreated(id, _)) => id }
            .fold[Future[UUID]](Future.failed(new NoSuchElementException))(
              Future.successful
            )
        maybeAddedEvent <- client.command(
          AddRecord(id, arbitraryDate, arbitraryValue)
        )
        _ <-
          maybeAddedEvent
            .collect {
              case StoredEvent(
                    _,
                    RecordAdded(`id`, `arbitraryDate`, `arbitraryValue`)
                  ) =>
                ()
            }
            .fold[Future[Unit]](Future.failed(new NoSuchElementException))(
              Future.successful
            )
      } yield assert(true)
    }
  }

}
Example 28
Source File: package.scala    From zio   with Apache License 2.0 5 votes vote down vote up
package zio

import java.time.{ DateTimeException, Instant, OffsetDateTime, ZoneId }
import java.util.concurrent.TimeUnit

import zio.duration.Duration

package object clock {

  type Clock = Has[Clock.Service]

  object Clock extends PlatformSpecific with Serializable {
    // Capability surface of the clock service: wall-clock time, monotonic time and sleeping.
    trait Service extends Serializable {
      def currentTime(unit: TimeUnit): UIO[Long]
      def currentDateTime: IO[DateTimeException, OffsetDateTime]
      def nanoTime: UIO[Long]
      def sleep(duration: Duration): UIO[Unit]
    }

    object Service {
      // Production implementation backed by the JVM system clock and the global scheduler.
      val live: Service = new Service {
        def currentTime(unit: TimeUnit): UIO[Long] =
          IO.effectTotal(System.currentTimeMillis).map(l => unit.convert(l, TimeUnit.MILLISECONDS))

        val nanoTime: UIO[Long] = IO.effectTotal(System.nanoTime)

        // Non-blocking sleep: registers a scheduler callback and exposes a canceler
        // so interruption cancels the pending wake-up.
        def sleep(duration: Duration): UIO[Unit] =
          UIO.effectAsyncInterrupt { cb =>
            val canceler = globalScheduler.schedule(() => cb(UIO.unit), duration)
            Left(UIO.effectTotal(canceler()))
          }

        // Builds an OffsetDateTime in the system default zone; any non-DateTimeException
        // failure from the java.time calls is treated as a defect (refineToOrDie).
        def currentDateTime: IO[DateTimeException, OffsetDateTime] = {
          val dateTime =
            for {
              millis         <- currentTime(TimeUnit.MILLISECONDS)
              zone           <- ZIO(ZoneId.systemDefault)
              instant        <- ZIO(Instant.ofEpochMilli(millis))
              offsetDateTime <- ZIO(OffsetDateTime.ofInstant(instant, zone))
            } yield offsetDateTime
          dateTime.refineToOrDie[DateTimeException]
        }

      }
    }

    val any: ZLayer[Clock, Nothing, Clock] =
      ZLayer.requires[Clock]

    val live: Layer[Nothing, Clock] =
      ZLayer.succeed(Service.live)
  }

  /** Sleeps for the specified duration using the environment's clock service. */
  def sleep(duration: => Duration): URIO[Clock, Unit] =
    ZIO.accessM(_.get.sleep(duration))

}
Example 29
Source File: MockClock.scala    From zio   with Apache License 2.0 5 votes vote down vote up
package zio.test.mock

import java.time.{ DateTimeException, OffsetDateTime }
import java.util.concurrent.TimeUnit

import zio.clock.Clock
import zio.duration.Duration
import zio.{ Has, IO, UIO, URLayer, ZLayer }

object MockClock extends Mock[Clock] {

  // One capability tag per method of Clock.Service, used to record/verify calls.
  object CurrentTime     extends Effect[TimeUnit, Nothing, Long]
  object CurrentDateTime extends Effect[Unit, DateTimeException, OffsetDateTime]
  object NanoTime        extends Effect[Unit, Nothing, Long]
  object Sleep           extends Effect[Duration, Nothing, Unit]

  // Builds a Clock.Service whose every method delegates to the mock proxy.
  val compose: URLayer[Has[Proxy], Clock] =
    ZLayer.fromService { invoke =>
      new Clock.Service {
        def currentTime(unit: TimeUnit): UIO[Long] =
          invoke(CurrentTime, unit)
        def currentDateTime: IO[DateTimeException, OffsetDateTime] =
          invoke(CurrentDateTime)
        val nanoTime: UIO[Long] =
          invoke(NanoTime)
        def sleep(duration: Duration): UIO[Unit] =
          invoke(Sleep, duration)
      }
    }
}
Example 30
Source File: LogWriter.scala — from the zio-logging project (Apache License 2.0)
package zio.logging

import java.time.OffsetDateTime

import zio.{ Cause, URIO }
import zio.clock.{ currentDateTime, Clock }
import zio.console.{ putStrLn, Console }
import zio.logging.LogDatetimeFormatter.humanReadableDateTimeFormatter
import zio.logging.LogLevel._

import scala.io.AnsiColor._

/** Strategy for emitting a single rendered log line into some environment `R`. */
trait LogWriter[R] {
  // `line` is by-name so the message is only built if the writer actually emits it.
  def writeLog(context: LogContext, line: => String): URIO[R, Unit]
}

object LogWriter {

  // Platform line separator, used to place causes on their own line(s).
  private val NL = System.lineSeparator()

  // Hook for callers to decorate the raw message using the log context.
  type LineFormatter = (LogContext, => String) => String

  /**
   * Plain-text console writer: `<timestamp> <level> <logger> <message> <cause?>`.
   * Requires Console (for putStrLn) and Clock (for the timestamp).
   */
  case class SimpleConsoleLogWriter(format: LineFormatter = (_, s) => s) extends LogWriter[Console with Clock] {
    override def writeLog(context: LogContext, line: => String): URIO[Console with Clock, Unit] =
      for {
        // orDie: a failing clock is treated as a defect rather than a logging error.
        date      <- currentDateTime.orDie
        // NOTE(review): `context(...)` (apply) is used here while ColoredLogWriter
        // uses `context.get(...)` — presumably apply renders the annotation to a
        // String; confirm against LogContext before unifying the two.
        level      = context(LogAnnotation.Level)
        loggerName = context(LogAnnotation.Name)
        // Prefer an explicit Throwable annotation (wrapped as a failed Cause),
        // otherwise fall back to a Cause annotation; empty string when absent.
        maybeError = context
                       .get(LogAnnotation.Throwable)
                       .map(Cause.fail)
                       .orElse(context.get(LogAnnotation.Cause))
                       .map(cause => NL + cause.prettyPrint)
                       .getOrElse("")
        _         <- putStrLn(
                       humanReadableDateTimeFormatter
                         .format(date) + " " + level + " " + loggerName + " " + format(context, line) + " " + maybeError
                     )
      } yield ()
  }

  /**
   * ANSI-colored console writer: level-dependent colors for the tag and message,
   * blue timestamp, white logger name. Requires Console and Clock.
   */
  case class ColoredLogWriter(lineFormat: LineFormatter = (_, s) => s) extends LogWriter[Console with Clock] {
    // Wraps `s` in the given ANSI color code and resets afterwards.
    private def withColor(color: String, s: String): String = s"$color$s$RESET"

    // Maps a log level to its display color; unknown levels get no color.
    private def highlightLog(level: LogLevel, message: String): String = {
      val color = level match {
        case Error => RED
        case Warn  => YELLOW
        case Info  => CYAN
        case Debug => GREEN
        case Trace => MAGENTA
        case _     => RESET
      }
      withColor(color, message)
    }

    // Renders one full log line; the cause (if any) goes on the following line(s).
    private def format(
      line: => String,
      time: OffsetDateTime,
      level: LogLevel,
      loggerName: String,
      maybeError: Option[String]
    ): String = {
      val logTag  = highlightLog(level, level.render)
      val logTime = withColor(BLUE, humanReadableDateTimeFormatter.format(time))
      // %14s pads the (colored) level tag so messages line up.
      val logMsg  =
        f"$logTime $logTag%14s [${withColor(WHITE, loggerName)}] ${highlightLog(level, line)}"
      maybeError.fold(logMsg)(err => s"$logMsg$NL${highlightLog(level, err)}")
    }

    override def writeLog(context: LogContext, line: => String): URIO[Console with Clock, Unit] =
      for {
        date      <- currentDateTime.orDie
        level      = context.get(LogAnnotation.Level)
        loggerName = context(LogAnnotation.Name)
        // Same Throwable-then-Cause preference as SimpleConsoleLogWriter, but
        // kept as an Option so `format` can color it per level.
        maybeError = context
                       .get(LogAnnotation.Throwable)
                       .map(Cause.fail)
                       .orElse(context.get(LogAnnotation.Cause))
                       .map(_.prettyPrint)
        _         <- putStrLn(format(lineFormat(context, line), date, level, loggerName, maybeError))
      } yield ()
  }
}
Example 31
Source File: HTTPLogger.scala — from the zio-logging project (Apache License 2.0)
package zio.logging.js

import java.time.OffsetDateTime
import java.util.UUID

import org.scalajs.dom.ext.Ajax
import zio.{ ZIO, ZLayer }
import zio.clock.{ currentDateTime, Clock }
import zio.logging.Logging
import zio.logging.{ LogAnnotation, LogContext, LogLevel, Logging }

import scala.scalajs.js
import scala.scalajs.js.JSON

object HTTPLogger {

  // Fire-and-forget JSON POST of one log message to the collector endpoint.
  private def sendMessage(url: String, msg: js.Object) =
    Ajax.post(url, JSON.stringify(msg), headers = Map("Content-Type" -> "application/json"))

  /** Turns one log event (time, client id, level, logger name, message, cause) into the JSON payload. */
  type MessageFormatter = (OffsetDateTime, String, LogLevel, String, String, Throwable) => js.Object

  // Default payload shape; LogLevel.Off maps to an empty string and a null
  // cause is serialized as "".
  val defaultFormatter: MessageFormatter = (date, clientId, level, name, msg, cause) =>
    js.Dynamic.literal(
      date = date.toString,
      clientId = clientId,
      level = level match {
        case LogLevel.Fatal => "fatal"
        case LogLevel.Error => "error"
        case LogLevel.Warn  => "warn"
        case LogLevel.Info  => "info"
        case LogLevel.Debug => "debug"
        case LogLevel.Trace => "trace"
        case LogLevel.Off   => ""
      },
      name = name,
      msg = msg,
      cause = if (cause == null) "" else cause.toString
    )

  /** Like [[make]], but pre-annotates every message with the given logger name. */
  def makeWithName(
    url: String,
    clientId: String = UUID.randomUUID().toString,
    formatter: MessageFormatter = defaultFormatter
  )(name: String)(logFormat: (LogContext, => String) => String): ZLayer[Clock, Nothing, Logging] =
    make(url, clientId, formatter)((context, line) =>
      logFormat(context.annotate(LogAnnotation.Name, name :: Nil), line)
    )

  /** Builds a Logging layer that ships each formatted log line to `url` over HTTP. */
  def make(url: String, clientId: String = UUID.randomUUID().toString, formatter: MessageFormatter = defaultFormatter)(
    logFormat: (LogContext, => String) => String
  ): ZLayer[Clock, Nothing, Logging] =
    Logging.make { (context, line) =>
      for {
        date      <- currentDateTime.orDie
        level      = context.get(LogAnnotation.Level)
        loggerName = LogAnnotation.Name.render(context.get(LogAnnotation.Name))
        // Cause is always null here; only the formatter's null branch is exercised.
        msg        = formatter(date, clientId, level, loggerName, logFormat(context, line), null)
        // NOTE(review): the Future returned by Ajax.post is discarded, so HTTP
        // failures are silently dropped — confirm this best-effort behavior is intended.
        _         <- ZIO.effectTotal(sendMessage(url, msg))
      } yield ()
    }

}
Example 32
Source File: JdbcResultSpec.scala — from the daf project (BSD 3-Clause License)
package daf.dataset.query.jdbc

import java.sql.Timestamp
import java.time.{ LocalDateTime, OffsetDateTime }

import org.scalatest.{ MustMatchers, WordSpec }
import play.api.libs.json._

/**
 * Verifies that a JdbcResult renders the same fixture (see JdbcResults.flat)
 * to CSV and to JSON, including null handling (`<null>` in CSV, JsNull in JSON).
 */
class JdbcResultSpec extends WordSpec with MustMatchers {

  "A JDBC Result container" must {

    // NOTE(review): expected timestamps end in "Z" (UTC) while the fixture builds
    // them from the system's current offset — presumably toCsv/toJson normalize to
    // the same offset; confirm the test is not timezone/DST sensitive.
    "convert to CSV" in {
      JdbcResults.flat.toCsv.toList must be {
        List(
          """"int", "string", "bool", "timestamp"""",
          """1, "str1", true, "2018-06-25T09:00:00Z"""",
          """2, "str2", false, "2018-06-25T09:30:00Z"""",
          """<null>, <null>, false, <null>"""
        )
      }
    }

    // Same fixture as the CSV case; nulls map to JsNull rather than a sentinel.
    "convert to json" in {
      JdbcResults.flat.toJson.toList must be {
        Seq(
          JsObject {
            Seq(
              "int"       -> JsNumber(1),
              "string"    -> JsString("str1"),
              "bool"      -> JsBoolean(true),
              "timestamp" -> JsString("2018-06-25T09:00:00Z")
            )
          },
          JsObject {
            Seq(
              "int"       -> JsNumber(2),
              "string"    -> JsString("str2"),
              "bool"      -> JsBoolean(false),
              "timestamp" -> JsString("2018-06-25T09:30:00Z")
            )
          },
          JsObject {
            Seq(
              "int"       -> JsNull,
              "string"    -> JsNull,
              "bool"      -> JsBoolean(false),
              "timestamp" -> JsNull
            )
          }
        )
      }
    }

  }

}

object JdbcResults {

  // Current system UTC offset, captured once and used to interpret the
  // fixture's local date-times.
  private val zoneOffset = OffsetDateTime.now().getOffset

  // Converts a local date-time to a java.sql.Timestamp via the captured offset.
  private def asTimestamp(local: LocalDateTime): Timestamp =
    Timestamp.from(local.toInstant(zoneOffset))

  /** Fixture: three rows (two populated, one mostly-null) over four typed columns. */
  val flat = JdbcResult(
    header = Seq("int", "string", "bool", "timestamp"),
    rows   = Vector(
      List(Int.box(1), "str1", Boolean.box(true), asTimestamp(LocalDateTime.of(2018, 6, 25, 9, 0))),
      List(Int.box(2), "str2", Boolean.box(false), asTimestamp(LocalDateTime.of(2018, 6, 25, 9, 30))),
      List(null, null, Boolean.box(false), null)
    )
  )

}
Example 33
Source File: SeedNodesListOrderingResolverTest.scala — from the akka-tools project (MIT License)
package no.nextgentel.oss.akkatools.cluster

import java.time.OffsetDateTime

import org.scalatest.{Matchers, FunSuite}

import scala.concurrent.duration.FiniteDuration

/**
 * Checks that SeedNodesListOrderingResolver reorders the seed-node list based on
 * which cluster nodes are currently reported alive in the repository.
 */
class SeedNodesListOrderingResolverTest extends FunSuite with Matchers {

  // Builds a cluster config on the fixed test port 9999.
  private def cfg(host: String, seedNodes: List[String]): AkkaClusterConfig =
    AkkaClusterConfig(Some(host), 9999, seedNodes)

  // Resolves `input` against `repo` and asserts the result equals `expected`.
  private def expectResolved(repo: ClusterNodeRepo, input: AkkaClusterConfig, expected: AkkaClusterConfig): Unit =
    assert(expected == SeedNodesListOrderingResolver.resolveSeedNodesList(repo, input))

  test("no live nodes") {
    // With nothing alive, the configured order is normalized the same way
    // regardless of the input ordering.
    val repo = new OurClusterNodeRepo(Nil)
    expectResolved(repo, cfg("host1", List("host1:9999", "host2:9999")), cfg("host1", List("host1:9999", "host2:9999")))
    expectResolved(repo, cfg("host1", List("host2:9999", "host1:9999")), cfg("host1", List("host1:9999", "host2:9999")))
  }

  test("alive nodes found") {
    // host1 is alive and joined: it must come first in the resolved list.
    val repo = new OurClusterNodeRepo(List(NodeInfo("akka.tcp://MobilityService@host1:9999", true)))
    expectResolved(repo, cfg("host2", List("host1:9999", "host2:9999")), cfg("host2", List("host1:9999", "host2:9999")))
    expectResolved(repo, cfg("host2", List("host2:9999", "host1:9999")), cfg("host2", List("host1:9999", "host2:9999")))
  }

  test("alive node (not joined yet) found ") {
    // A node that is alive but has not joined yet still influences the ordering.
    val repo = new OurClusterNodeRepo(List(NodeInfo("akka.tcp://MobilityService@host1:9999", false)))
    expectResolved(repo, cfg("host2", List("host1:9999", "host2:9999")), cfg("host2", List("host1:9999", "host2:9999")))
    expectResolved(repo, cfg("host2", List("host2:9999", "host1:9999")), cfg("host2", List("host1:9999", "host2:9999")))
  }

  test("This node is not a seedNode - with alive Nodes") {
    val repo = new OurClusterNodeRepo(
      List(NodeInfo("akka.tcp://MobilityService@host1:9999", true), NodeInfo("akka.tcp://MobilityService@host2:9999", true))
    )
    expectResolved(repo, cfg("host3", List("host1:9999", "host2:9999")), cfg("host3", List("host1:9999", "host2:9999")))
  }

  test("This node is not a seedNode - with no alive Nodes") {
    // Non-seed node with nothing alive: the input order is kept as-is.
    val repo = new OurClusterNodeRepo(Nil)
    expectResolved(repo, cfg("host3", List("host2:9999", "host1:9999")), cfg("host3", List("host2:9999", "host1:9999")))
  }

  case class NodeInfo(host: String, joined: Boolean)

  // In-memory stand-in for the cluster-node repository, seeded with fixed liveness data.
  class OurClusterNodeRepo(aliveClusterNodes: List[NodeInfo]) extends ClusterNodeRepo {

    // Liveness writes are irrelevant to these tests, so both mutations are no-ops.
    override def writeClusterNodeAlive(nodeNameAndPort: String, timestamp: OffsetDateTime, joined: Boolean): Unit = ()

    override def removeClusterNodeAlive(nodeNameAndPort: String): Unit = ()

    // Returns hosts reported alive, optionally restricted to nodes that have joined.
    override def findAliveClusterNodes(clusterNodesAliveSinceCheck: FiniteDuration, onlyJoined: Boolean): List[String] =
      aliveClusterNodes.collect { case NodeInfo(host, joined) if joined || !onlyJoined => host }
  }
}