java.time.ZoneOffset Scala Examples

The following examples show how to use java.time.ZoneOffset. Each example is extracted from an open source project; the source file and project name are given in the heading above it.
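Before diving in, here is a minimal orientation sketch (standard java.time API only; the object name is illustrative) of the idioms that recur throughout these examples: a LocalDateTime carries no zone, so an explicit ZoneOffset is needed to reach an Instant, and an offset or zone must be attached to go back the other way.

import java.time.{Instant, LocalDateTime, ZoneOffset}

object ZoneOffsetBasics extends App {
  val utc = ZoneOffset.UTC            // the constant used in almost every example below
  val plusTwo = ZoneOffset.ofHours(2) // fixed offsets can also be built explicitly

  // LocalDateTime carries no zone; an offset must be supplied to reach an Instant.
  val ldt = LocalDateTime.of(2020, 1, 1, 12, 0)
  val instant: Instant = ldt.toInstant(utc)

  // Going the other way: attach an offset (or zone) to the Instant.
  println(instant.atOffset(plusTwo))       // 2020-01-01T14:00+02:00
  println(instant.atZone(utc).toLocalDate) // 2020-01-01
}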
Example 1
Source File: DamlDates.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding
import java.time.{LocalDate, ZoneOffset}

import com.daml.api.util.TimestampConversion
import com.daml.ledger.client.binding.{Primitive => P}
import scalaz.std.stream

object DamlDates {
  val Min: LocalDate = TimestampConversion.MIN.atZone(ZoneOffset.UTC).toLocalDate
  val Max: LocalDate = TimestampConversion.MAX.atZone(ZoneOffset.UTC).toLocalDate

  
  // The range of LocalDate values that cannot be mapped injectively to java.sql.Date:
  // the ten days dropped by the Julian-to-Gregorian calendar cutover.
  val RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate: (LocalDate, LocalDate) =
    (LocalDate.parse("1582-10-05"), LocalDate.parse("1582-10-14"))

  def localDatesWithoutInjectiveFunctionToSqlDate: Stream[LocalDate] =
    stream
      .unfold(RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._1) { a: LocalDate =>
        if (!a.isAfter(RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._2))
          Some((a, a.plusDays(1)))
        else None
      }

  def damlDatesWithoutInjectiveFunctionToSqlDate: Stream[P.Date] =
    localDatesWithoutInjectiveFunctionToSqlDate.map(pDate)

  private def pDate(d: LocalDate): P.Date =
    P.Date
      .fromLocalDate(d)
      .getOrElse(sys.error(s"expected `P.Date` friendly `LocalDate`, but got: $d"))
} 
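Why that particular 1582 range? java.sql.Date is backed by the legacy hybrid Julian/Gregorian calendar, in which October 5-14, 1582 never existed, so those LocalDate values collide with later dates when converted. A minimal sketch of the collision (the lenient rollover behaviour of the legacy calendar is an assumption here):

import java.time.LocalDate

object CutoverDemo extends App {
  val dropped = java.sql.Date.valueOf(LocalDate.parse("1582-10-05"))
  val real = java.sql.Date.valueOf(LocalDate.parse("1582-10-15"))
  // Expected: true. Both LocalDates collapse onto the same java.sql.Date,
  // so the LocalDate -> java.sql.Date conversion is not injective.
  println(dropped == real)
}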
Example 2
Source File: TestTagWriter.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.query

import java.nio.ByteBuffer
import java.time.{ LocalDateTime, ZoneOffset }
import java.util.UUID

import akka.actor.ActorSystem
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.BucketSize
import akka.persistence.cassandra.EventsByTagSettings
import akka.persistence.cassandra.PluginSettings
import akka.persistence.cassandra.formatOffset
import akka.persistence.cassandra.journal._
import akka.serialization.Serialization
import akka.serialization.Serializers
import com.datastax.oss.driver.api.core.CqlSession
import com.datastax.oss.driver.api.core.uuid.Uuids

private[akka] trait TestTagWriter {
  def system: ActorSystem
  def cluster: CqlSession
  val serialization: Serialization
  val settings: PluginSettings
  final def journalSettings: JournalSettings = settings.journalSettings
  final def eventsByTagSettings: EventsByTagSettings = settings.eventsByTagSettings

  lazy val (preparedWriteTagMessage, preparedWriteTagMessageWithMeta) = {
    val writeStatements: CassandraJournalStatements = new CassandraJournalStatements(settings)
    (cluster.prepare(writeStatements.writeTags(false)), cluster.prepare(writeStatements.writeTags(true)))
  }

  def clearAllEvents(): Unit = {
    cluster.execute(s"truncate ${journalSettings.keyspace}.${eventsByTagSettings.tagTable.name}")
  }

  def writeTaggedEvent(
      time: LocalDateTime,
      pr: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      bucketSize: BucketSize): Unit = {
    val timestamp = time.toInstant(ZoneOffset.UTC).toEpochMilli
    write(pr, tags, tagPidSequenceNr, uuid(timestamp), bucketSize)
  }

  def writeTaggedEvent(
      persistent: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      bucketSize: BucketSize): Unit = {
    val nowUuid = Uuids.timeBased()
    write(persistent, tags, tagPidSequenceNr, nowUuid, bucketSize)
  }

  def writeTaggedEvent(
      persistent: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      uuid: UUID,
      bucketSize: BucketSize): Unit =
    write(persistent, tags, tagPidSequenceNr, uuid, bucketSize)

  private def write(
      pr: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      uuid: UUID,
      bucketSize: BucketSize): Unit = {
    val event = pr.payload.asInstanceOf[AnyRef]
    val serializer = serialization.findSerializerFor(event)
    val serialized = ByteBuffer.wrap(serialization.serialize(event).get)
    val serManifest = Serializers.manifestFor(serializer, pr)
    val timeBucket = TimeBucket(Uuids.unixTimestamp(uuid), bucketSize)

    tags.foreach(tag => {
      val bs = preparedWriteTagMessage
        .bind()
        .setString("tag_name", tag)
        .setLong("timebucket", timeBucket.key)
        .setUuid("timestamp", uuid)
        .setLong("tag_pid_sequence_nr", tagPidSequenceNr)
        .setByteBuffer("event", serialized)
        .setString("event_manifest", pr.manifest)
        .setString("persistence_id", pr.persistenceId)
        .setInt("ser_id", serializer.identifier)
        .setString("ser_manifest", serManifest)
        .setString("writer_uuid", "ManualWrite")
        .setLong("sequence_nr", pr.sequenceNr)
      cluster.execute(bs)
    })

    system.log.debug(
      "Written event: {} Uuid: {} Timebucket: {} TagPidSeqNr: {}",
      pr.payload,
      formatOffset(uuid),
      timeBucket,
      tagPidSequenceNr)
  }
} 
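The uuid(timestamp) helper called by the first writeTaggedEvent overload is not included in the listing above. A plausible one-line sketch, assuming the Datastax driver's Uuids utility already imported by the example:

// Hypothetical helper, not shown in this listing: Uuids.startOf builds the
// smallest time-based UUID for the given unix-epoch-millis timestamp.
private def uuid(timestamp: Long): UUID = Uuids.startOf(timestamp)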
Example 3
Source File: License.scala    From iep-apps   with Apache License 2.0
import java.io.File
import java.io.PrintStream
import java.time.ZonedDateTime
import java.time.ZoneOffset
import scala.io.Source
import sbt._


  """.stripMargin.trim

  def findFiles(dir: File): Seq[File] = {
    (dir ** "*.scala").get ++ (dir ** "*.java").get
  }

  def checkLicenseHeaders(log: Logger, srcDir: File): Unit = {
    val badFiles = findFiles(srcDir).filterNot(checkLicenseHeader)
    if (badFiles.nonEmpty) {
      badFiles.foreach { f => log.error(s"bad license header: $f") }
      sys.error(s"${badFiles.size} files with incorrect header, run formatLicenseHeaders to fix")
    } else {
      log.info("all files have correct license header")
    }
  }

  def checkLicenseHeader(file: File): Boolean = {
    val lines = Source.fromFile(file, "UTF-8").getLines().toList
    checkLicenseHeader(lines)
  }

  def checkLicenseHeader(lines: List[String]): Boolean = {
    val header = lines.takeWhile(!_.startsWith("package ")).mkString(lineSeparator)
    header == apache2
  }

  def formatLicenseHeaders(log: Logger, srcDir: File): Unit = {
    findFiles(srcDir).foreach { f => formatLicenseHeader(log, f) }
  }

  def formatLicenseHeader(log: Logger, file: File): Unit = {
    val lines = Source.fromFile(file, "UTF-8").getLines().toList
    if (!checkLicenseHeader(lines)) {
      log.info(s"fixing license header: $file")
      writeLines(file, apache2 :: removeExistingHeader(lines))
    }
  }

  def removeExistingHeader(lines: List[String]): List[String] = {
    val res = lines.dropWhile(!_.startsWith("package "))
    if (res.isEmpty) lines else res
  }

  def writeLines(file: File, lines: List[String]): Unit = {
    val out = new PrintStream(file)
    try lines.foreach(out.println) finally out.close()
  }
} 
Example 4
Source File: EventsByTagPubsubSpec.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.query

import java.time.{ LocalDate, ZoneOffset }

import akka.cluster.Cluster
import akka.persistence.cassandra.CassandraSpec
import akka.persistence.cassandra.journal.JournalSettings
import akka.persistence.query.{ EventEnvelope, NoOffset }
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._

object EventsByTagPubsubSpec {
  val today = LocalDate.now(ZoneOffset.UTC)

  val config = ConfigFactory.parseString(s"""
    akka.actor.provider = "akka.cluster.ClusterActorRefProvider"
    akka.actor.serialize-messages = off
    akka.actor.serialize-creators = off
    akka.remote.netty.tcp.port = 0
    akka.remote.artery.canonical.port = 0
    akka.remote.netty.tcp.hostname = "127.0.0.1"
    akka.persistence.cassandra {
      
      query.refresh-interval = 10s

      events-by-tag {
        pubsub-notification = on
        flush-interval = 0ms
        eventual-consistency-delay = 0s
      }
    }
    """).withFallback(EventsByTagSpec.config)
}

class EventsByTagPubsubSpec extends CassandraSpec(EventsByTagPubsubSpec.config) {

  val journalSettings = new JournalSettings(system, system.settings.config.getConfig("akka.persistence.cassandra"))

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    Cluster(system).join(Cluster(system).selfAddress)
  }

  "Cassandra query getEventsByTag when running clustered with pubsub enabled" must {
    "present new events to an ongoing getEventsByTag stream long before polling would kick in" in {
      val actor = system.actorOf(TestActor.props("EventsByTagPubsubSpec_a"))

      val blackSrc = queries.eventsByTag(tag = "black", offset = NoOffset)
      val probe = blackSrc.runWith(TestSink.probe[Any])
      probe.request(2)
      probe.expectNoMessage(300.millis)

      actor ! "a black car"
      probe.within(5.seconds) { // long before refresh-interval, which is 10s
        probe.expectNextPF { case e @ EventEnvelope(_, _, _, "a black car") => e }
      }
    }
  }
} 
Example 5
Source File: Sessionize.scala    From ml-in-scala   with The Unlicense
package org.akozlov.chapter06

import java.io._

import java.time.ZoneOffset
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import org.apache.spark.{SparkConf,SparkContext}
import org.apache.spark.storage.StorageLevel


object Sessionize extends App {
  val sc = new SparkContext("local[8]", "Sessionize", new SparkConf())

  val checkoutPattern = ".*>checkout.*".r.pattern

  // a basic page view structure
  case class PageView(ts: String, path: String) extends Serializable with Ordered[PageView] {
    override def toString: String = {
      s"($ts #$path)"
    }
    def compare(other: PageView) = ts compare other.ts
  }

  // represent a session
  case class Session[A  <: PageView](id: String, visits: Seq[A]) extends Serializable {
    override def toString: String = {
      val vsts = visits.mkString("[", ",", "]")
      s"($id -> $vsts)"
    }
  }

  def toEpochSeconds(str: String) = { LocalDateTime.parse(str, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")).toEpochSecond(ZoneOffset.UTC) }

  val sessions = sc.textFile("data/clickstream")
    .map(line => {val parts = line.split("\t"); (parts(4), new PageView(parts(0), parts(20)))})
    .groupByKey.map(x => { new Session(x._1, x._2.toSeq.sorted) } )
    .cache

  // sessions.take(100).foreach(println)

  def findAllCheckoutSessions(s: Session[PageView]) = {
    // convert to List so the `::` patterns below can actually match the visit sequence
    s.visits.toList.tails.filter {
      case PageView(_, "mycompanycom>homepage") :: PageView(_, page) :: _ if page != "mycompanycom>homepage" => true
      case _ => false
    }
    .foldLeft(Seq[Session[PageView]]()) {
      case (r, x) => {
        x.find(y => checkoutPattern.matcher(y.path).matches) match {
          case Some(checkout) if (toEpochSeconds(checkout.ts) > toEpochSeconds(x.head.ts) + 60) => r.:+(new Session(s.id, x.slice(0, x.indexOf(checkout))))
          case _ => r
        }
      }
    }
  }

  val prodLandingSessions = sessions.flatMap(findAllCheckoutSessions)

  prodLandingSessions.collect.foreach(println)

  sc.stop()
} 
Example 6
Source File: data.scala    From scalalaz-gen   with Apache License 2.0
package ru.scalalaz.gen

import java.nio.file.Path
import java.time.format.DateTimeFormatter
import java.time.{ LocalDate, ZoneOffset }

import knockoff.DefaultDiscounter._
import _root_.knockoff._


case class EpisodeSettings(title: String,
                           description: String,
                           audio: Enclosure,
                           page: String,
                           date: LocalDate) {

  def RFCDate: String = {
    val dateTime = date.atStartOfDay().atOffset(ZoneOffset.UTC)
    dateTime.format(DateTimeFormatter.RFC_1123_DATE_TIME)
  }

  def ISODate: String = {
    date.format(DateTimeFormatter.ISO_DATE)
  }
}

case class Episode(settings: EpisodeSettings, content: String) {

  def title: String = settings.title

  def asHtml: String = {
    val blocks = knockoff(content)
    toXHTML(blocks).mkString
  }

}
case class EpisodeFile(path: Path, episode: Episode)

case class SpecialPageSettings(title: String, date: LocalDate) {
  def ISODate: String = {
    date.format(DateTimeFormatter.ISO_DATE)
  }
}

case class Page(settings: SpecialPageSettings, content: String) {

  def title: String = settings.title

  def asHtml: String = {
    val blocks = knockoff(content)
    toXHTML(blocks).mkString
  }

}
case class PageFile(path: Path, page: Page) 
Example 7
Source File: PostgresEncoders.scala    From quill   with Apache License 2.0
package io.getquill.context.ndbc

import java.time.{ LocalDate, LocalDateTime, ZoneOffset }
import java.util.{ Date, UUID }

import io.getquill.dsl.CoreDsl
import io.trane.ndbc.PostgresPreparedStatement

import scala.language.implicitConversions
import scala.reflect.ClassTag

trait LowPriorityPostgresImplicits {
  this: CoreDsl =>

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: BaseEncoder[O]): BaseEncoder[I] =
    mappedBaseEncoder(mapped, e)
}

trait PostgresEncoders extends LowPriorityPostgresImplicits with io.getquill.dsl.LowPriorityImplicits {
  this: NdbcContext[_, _, PostgresPreparedStatement, _] =>

  type Encoder[T] = BaseEncoder[T]

  protected val zoneOffset: ZoneOffset

  def encoder[T, U](f: PostgresPreparedStatement => (Int, U) => PostgresPreparedStatement)(implicit ev: T => U): Encoder[T] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v)

  def arrayEncoder[T, U: ClassTag, Col <: Seq[T]](f: PostgresPreparedStatement => (Int, Array[U]) => PostgresPreparedStatement)(ev: T => U): Encoder[Col] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v.map(ev).toArray[U])

  implicit override def anyValMappedEncoder[I <: AnyVal, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] = mappedEncoder

  implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else v match {
        case None    => ps.setNull(idx)
        case Some(v) => e(idx, v, ps)
      }

  implicit def toLocalDateTime(d: Date) = LocalDateTime.ofInstant(d.toInstant(), zoneOffset)

  implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID)
  implicit val stringEncoder: Encoder[String] = encoder(_.setString)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(_.setBigDecimal)(_.bigDecimal)
  implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean)
  implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte)
  implicit val shortEncoder: Encoder[Short] = encoder(_.setShort)
  implicit val intEncoder: Encoder[Int] = encoder(_.setInteger)
  implicit val longEncoder: Encoder[Long] = encoder(_.setLong)
  implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat)
  implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(_.setByteArray)
  implicit val dateEncoder: Encoder[Date] = encoder(_.setLocalDateTime)
  implicit val localDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate)
  implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(_.setLocalDateTime)

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayEncoder[String, String, Col](_.setStringArray)(identity)
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, java.math.BigDecimal, Col](_.setBigDecimalArray)(_.bigDecimal)
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayEncoder[Boolean, java.lang.Boolean, Col](_.setBooleanArray)(_.booleanValue)
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayEncoder[Byte, java.lang.Short, Col](_.setShortArray)(identity)
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayEncoder[Short, java.lang.Short, Col](_.setShortArray)(_.shortValue)
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayEncoder[Int, java.lang.Integer, Col](_.setIntegerArray)(_.intValue)
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayEncoder[Long, java.lang.Long, Col](_.setLongArray)(_.longValue)
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayEncoder[Float, java.lang.Float, Col](_.setFloatArray)(_.floatValue)
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayEncoder[Double, java.lang.Double, Col](_.setDoubleArray)(_.doubleValue)
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, LocalDateTime, Col](_.setLocalDateTimeArray)(identity)
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, LocalDate, Col](_.setLocalDateArray)(identity)
} 
Example 8
Source File: ScheduleBucket.scala    From aecor   with MIT License
package aecor.schedule

import java.time.{ LocalDateTime, ZoneOffset }

import aecor.encoding.WireProtocol
import aecor.macros.boopickle.BoopickleWireProtocol
import cats.tagless.{ Derive, FunctorK }

private[aecor] trait ScheduleBucket[F[_]] {
  def addScheduleEntry(entryId: String, correlationId: String, dueDate: LocalDateTime): F[Unit]

  def fireEntry(entryId: String): F[Unit]
}

private[aecor] object ScheduleBucket {
  import boopickle.Default._
  implicit val localDateTimePickler: Pickler[LocalDateTime] = transformPickler(
    (ldt: (Long, Int)) => LocalDateTime.ofEpochSecond(ldt._1, ldt._2, ZoneOffset.UTC)
  )((ldt: LocalDateTime) => (ldt.toEpochSecond(ZoneOffset.UTC), ldt.getNano))

  implicit def functorK: FunctorK[ScheduleBucket] = Derive.functorK
  implicit def wireProtocol: WireProtocol[ScheduleBucket] = BoopickleWireProtocol.derive
} 
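The pickler above encodes a LocalDateTime as an (epochSecond, nano) pair at UTC. The round trip is lossless, which a minimal check (standard library only; names illustrative) confirms:

import java.time.{LocalDateTime, ZoneOffset}

object LocalDateTimeRoundTrip extends App {
  val ldt = LocalDateTime.of(2020, 6, 1, 12, 30, 15, 123456789)
  val (seconds, nanos) = (ldt.toEpochSecond(ZoneOffset.UTC), ldt.getNano)
  val decoded = LocalDateTime.ofEpochSecond(seconds, nanos, ZoneOffset.UTC)
  assert(decoded == ldt) // no precision is lost in either direction
}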
Example 9
Source File: MistLoggingSpec.scala    From mist   with Apache License 2.0
package io.hydrosphere.mist.core.logging

import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneOffset}

import org.scalatest.{FunSpec, Matchers}

class MistLoggingSpec extends FunSpec with Matchers {

  describe("levels") {
    it("should restore level from int") {
      Level.fromInt(1) shouldBe Level.Debug
      Level.fromInt(2) shouldBe Level.Info
      Level.fromInt(3) shouldBe Level.Warn
      Level.fromInt(4) shouldBe Level.Error
    }
  }

  describe("log event") {

    it("should have correct format") {
      val ts = LocalDateTime.now(ZoneOffset.UTC)
      val e = LogEvent.mkInfo("job-id", "Message", ts.toInstant(ZoneOffset.UTC).toEpochMilli)

      val expectedDate = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ts)
      val expected = s"INFO $expectedDate [job-id] Message"

      e.mkString shouldBe expected
    }

    it("should have stack traces") {
      val ts = LocalDateTime.now(ZoneOffset.UTC)
      val error = new RuntimeException("Test error")
      val e = LogEvent.mkError("job-id", "Error", Some(error), ts.toInstant(ZoneOffset.UTC).toEpochMilli)

      val expectedDate = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ts)
      val expected =
        s"""ERROR $expectedDate [job-id] Error
            |java.lang.RuntimeException: Test error""".stripMargin

      e.mkString should startWith(expected)
    }
  }
} 
Example 10
Source File: CustomScalars.scala    From graphql-gateway   with Apache License 2.0
package sangria.gateway.schema

import java.time.format.DateTimeFormatter
import java.time.{Instant, OffsetDateTime, ZoneOffset, ZonedDateTime}

import sangria.schema._
import sangria.ast
import sangria.validation.ValueCoercionViolation

import scala.util.{Failure, Success, Try}

object CustomScalars {
  implicit val DateTimeType = ScalarType[ZonedDateTime]("DateTime",
    description = Some("DateTime is a scalar value that represents an ISO8601 formatted date and time."),
    coerceOutput = (date, _) ⇒ DateTimeFormatter.ISO_INSTANT.format(date),
    coerceUserInput = {
      case s: String ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    },
    coerceInput = {
      case ast.StringValue(s, _, _, _, _) ⇒ parseDateTime(s) match {
        case Success(date) ⇒ Right(date)
        case Failure(_) ⇒ Left(DateCoercionViolation)
      }
      case _ ⇒ Left(DateCoercionViolation)
    })

  def parseDateTime(s: String) = Try(ZonedDateTime.parse(s, DateTimeFormatter.ISO_ZONED_DATE_TIME))

  case object DateCoercionViolation extends ValueCoercionViolation("Date value expected")
} 
Example 11
Source File: JobRunMarshallerTest.scala    From metronome   with Apache License 2.0
package dcos.metronome
package repository.impl.kv.marshaller

import java.time.{LocalDateTime, ZoneOffset}

import dcos.metronome.model._
import org.scalatest.{FunSuite, Matchers}

import scala.concurrent.duration._

class JobRunMarshallerTest extends FunSuite with Matchers {
  test("round-trip of a JobRun") {
    val f = new Fixture
    JobRunMarshaller.fromBytes(JobRunMarshaller.toBytes(f.jobRun)) should be(Some(f.jobRun))
  }

  test("unmarshal with invalid proto data should return None") {
    val invalidBytes = "foobar".getBytes
    JobRunMarshaller.fromBytes(invalidBytes.to[IndexedSeq]) should be(None)
  }

  class Fixture {
    val jobSpec = JobSpec(JobId("job.id"))

    val jobRun = JobRun(
      JobRunId(jobSpec.id, "run.id"),
      jobSpec,
      JobRunStatus.Active,
      LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      Some(LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC)),
      Some(1 minute),
      Map.empty
    )
  }
} 
Example 12
Source File: JobHistoryMarshallerTest.scala    From metronome   with Apache License 2.0
package dcos.metronome
package repository.impl.kv.marshaller

import java.time.{LocalDateTime, ZoneOffset}

import dcos.metronome.model._
import mesosphere.marathon.core.task.Task
import org.scalatest.{FunSuite, Matchers}

class JobHistoryMarshallerTest extends FunSuite with Matchers {
  test("round-trip of a JobHistory") {
    val f = new Fixture
    JobHistoryMarshaller.fromBytes(JobHistoryMarshaller.toBytes(f.jobHistory)) should be(Some(f.jobHistory))
  }

  test("unmarshal with invalid proto data should return None") {
    val invalidBytes = "foobar".getBytes
    JobHistoryMarshaller.fromBytes(invalidBytes.to[IndexedSeq]) should be(None)
  }

  class Fixture {
    val successfulJobRunInfo = JobRunInfo(
      JobRunId(JobId("/test"), "successful"),
      LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      LocalDateTime.parse("2014-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      tasks = Seq(Task.Id("test_finished.77a7bc7d-4429-11e9-969f-3a74960279c0"))
    )

    val finishedJobRunInfo = JobRunInfo(
      JobRunId(JobId("/test"), "finished"),
      LocalDateTime.parse("1984-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      LocalDateTime.parse("1994-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      tasks = Seq(Task.Id("test_finished.77a7bc7d-4429-11e9-969f-3a74960279c0"))
    )

    val jobHistory = JobHistory(
      JobId("/my/wonderful/job"),
      successCount = 1337,
      failureCount = 31337,
      lastSuccessAt = Some(LocalDateTime.parse("2014-09-06T08:50:12.000").toInstant(ZoneOffset.UTC)),
      lastFailureAt = Some(LocalDateTime.parse("2014-09-06T07:50:12.000").toInstant(ZoneOffset.UTC)),
      successfulRuns = Seq(successfulJobRunInfo),
      failedRuns = Seq(finishedJobRunInfo)
    )
  }
} 
Example 13
Source File: SettableClock.scala    From metronome   with Apache License 2.0
package dcos.metronome

import java.time.{Clock, Instant, LocalDateTime, ZoneOffset, ZoneId, Duration}

import scala.concurrent.duration.FiniteDuration

object SettableClock {
  private val defaultJavaClock =
    Clock.fixed(LocalDateTime.of(2015, 4, 9, 12, 30, 0).toInstant(ZoneOffset.UTC), ZoneOffset.UTC)

  def ofNow() = new SettableClock(Clock.fixed(Instant.now(), ZoneOffset.UTC))
}

class SettableClock(private[this] var clock: Clock = SettableClock.defaultJavaClock) extends Clock {
  private[this] var subscribers: List[() => Unit] = Nil
  def onChange(fn: () => Unit): Unit =
    synchronized {
      subscribers = fn :: subscribers
    }

  override def getZone: ZoneId = clock.getZone

  override def instant(): Instant = clock.instant()

  override def withZone(zoneId: ZoneId): Clock = new SettableClock(clock.withZone(zoneId))

  def +=(duration: FiniteDuration): Unit = plus(duration)

  def plus(duration: FiniteDuration): this.type =
    plus(Duration.ofMillis(duration.toMillis))

  def plus(duration: Duration): this.type = {
    clock = Clock.offset(clock, duration)
    subscribers.foreach(_())
    this
  }

  def at(instant: Instant): this.type = {
    clock = Clock.fixed(instant, clock.getZone)
    subscribers.foreach(_())
    this
  }
} 
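A usage sketch (illustrative, based only on the class above): advancing the clock shifts instant() by the given amount and notifies subscribers.

import scala.concurrent.duration._

object SettableClockDemo extends App {
  val clock = new SettableClock() // fixed at 2015-04-09T12:30:00Z by default
  val before = clock.instant()
  clock.onChange(() => println("clock moved"))
  clock += 5.minutes // prints "clock moved"
  assert(clock.instant() == before.plusSeconds(5 * 60))
}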
Example 14
Source File: Executor.scala    From neo4j-spark-connector   with Apache License 2.0
package org.neo4j.spark

import java.time.{LocalDate, LocalDateTime, OffsetTime, ZoneOffset, ZonedDateTime}
import java.util
import java.sql.Timestamp

import org.apache.spark.SparkContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types.StructType
import org.neo4j.spark.dataframe.CypherTypes
import org.neo4j.spark.utils.{Neo4jSessionAwareIterator, Neo4jUtils}

import scala.collection.JavaConverters._


object Executor {

  def convert(value: AnyRef): Any = value match {
    case it: util.Collection[_] => it.toArray()
    case m: java.util.Map[_,_] => m.asScala
    case _ => Neo4jUtils.convert(value)
  }

  def toJava(parameters: Map[String, Any]): java.util.Map[String, Object] = {
    parameters.mapValues(toJava).asJava
  }

  private def toJava(x: Any): AnyRef = x match {
    case y: Seq[_] => y.asJava
    case _ => x.asInstanceOf[AnyRef]
  }

  val EMPTY = Array.empty[Any]

  val EMPTY_RESULT = new CypherResult(new StructType(), Iterator.empty)

  class CypherResult(val schema: StructType, val rows: Iterator[Array[Any]]) {
    def sparkRows: Iterator[Row] = rows.map(row => new GenericRowWithSchema(row, schema))

    def fields = schema.fieldNames
  }

  def execute(sc: SparkContext, query: String, parameters: Map[String, AnyRef]): CypherResult = {
    execute(Neo4jConfig(sc.getConf), query, parameters)
  }

  private def rows(result: Iterator[_]) = {
    var i = 0
    while (result.hasNext) {
      result.next() // advance the iterator, otherwise this loop never terminates
      i = i + 1
    }
    i
  }

  def execute(config: Neo4jConfig, query: String, parameters: Map[String, Any], write: Boolean = false): CypherResult = {
    val result = new Neo4jSessionAwareIterator(config, query, toJava(parameters), write)
    if (!result.hasNext) {
      return EMPTY_RESULT
    }
    val peek = result.peek()
    val keyCount = peek.size()
    if (keyCount == 0) {
      return new CypherResult(new StructType(), Array.fill[Array[Any]](rows(result))(EMPTY).toIterator)
    }
    val keys = peek.keys().asScala
    val fields = keys.map(k => (k, peek.get(k).`type`())).map(keyType => CypherTypes.field(keyType))
    val schema = StructType(fields)
    val it = result.map(record => {
      val row = new Array[Any](keyCount)
      var i = 0
      while (i < keyCount) {
        val value = convert(record.get(i).asObject())
        row.update(i, value)
        i = i + 1
      }
      row
    })
    new CypherResult(schema, it)
  }
} 
Example 15
Source File: SparkPredictionTrainer.scala    From smart-meter   with MIT License
package com.logimethods.nats.connector.spark.app

import java.util.Properties;
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming._

import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}
import java.time.DayOfWeek._

import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel

object SparkPredictionTrainer extends App with SparkPredictionProcessor {
  log.setLevel(Level.WARN)

  val (properties, targets, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId, outputNatsStreaming, natsUrl) = setup(args)

  val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
  println("STREAMING_DURATION = " + streamingDuration)

  new Thread(new Runnable {
              def run() {
                 while( true ){
                   try {
                     val data = SparkPredictionProcessor.getData(sc, THRESHOLD)
                     val model = trainer.fit(data)
                     model.write.overwrite.save(PREDICTION_MODEL_PATH)
                     println("New model of size " + data.count() + " trained: " + model.uid)
                     Thread.sleep(streamingDuration)
                   } catch {
                     case e: Throwable => log.error(e)
                   }
                 }
              }
             }).start()
} 
Example 16
Source File: SparkProcessor.scala    From smart-meter   with MIT License
package com.logimethods.nats.connector.spark.app

import java.util.Properties;
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming._

import io.nats.client.Nats._
import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}

trait SparkProcessor {
  def setup(args: Array[String]) = {
    val inputSubject = args(0)
//    val inputNatsStreaming = inputSubject.toUpperCase.contains("STREAMING")
    val outputSubject = args(1)
//    val outputNatsStreaming = outputSubject.toUpperCase.contains("STREAMING")
    println("Will process messages from '" + inputSubject + "' to '" + outputSubject + "'")

    val logLevel = scala.util.Properties.envOrElse("LOG_LEVEL", "INFO")
    println("LOG_LEVEL = " + logLevel)

    val targets = scala.util.Properties.envOrElse("TARGETS", "ALL")
    println("TARGETS = " + targets)

    val cassandraUrl = System.getenv("CASSANDRA_URL")
    println("CASSANDRA_URL = " + cassandraUrl)

    val sparkMasterUrl = System.getenv("SPARK_MASTER_URL")
    println("SPARK_MASTER_URL = " + sparkMasterUrl)

    val sparkCoresMax = System.getenv("SPARK_CORES_MAX")
    println("SPARK_CORES_MAX = " + sparkCoresMax)

    val conf = new SparkConf()
                  .setAppName(args(2))
                  .setMaster(sparkMasterUrl)
                  .set("spark.cores.max", sparkCoresMax)
                  .set("spark.cassandra.connection.host", cassandraUrl);
    val sc = new SparkContext(conf);

//    val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
//    val ssc = new StreamingContext(sc, new Duration(streamingDuration));
///    ssc.checkpoint("/spark/storage")

    val properties = new Properties();
    val natsUrl = System.getenv("NATS_URI")
    println("NATS_URI = " + natsUrl)
    properties.put("servers", natsUrl)
    properties.put(PROP_URL, natsUrl)

    val clusterId = System.getenv("NATS_CLUSTER_ID")

    val inputNatsStreaming = inputSubject.toUpperCase.contains("STREAMING")
    val outputNatsStreaming = outputSubject.toUpperCase.contains("STREAMING")

    (properties, targets, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId, outputNatsStreaming, natsUrl)
  }

  def dataDecoder: Array[Byte] => Tuple2[Long,Float] = bytes => {
        val buffer = ByteBuffer.wrap(bytes);
        val epoch = buffer.getLong()
        val value = buffer.getFloat()
        (epoch, value)
      }
}


trait SparkStreamingProcessor extends SparkProcessor {
  def setupStreaming(args: Array[String]) = {
    val (properties, target, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId, outputNatsStreaming, natsUrl) = setup(args)

    val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
    println("STREAMING_DURATION = " + streamingDuration)

    val ssc = new StreamingContext(sc, new Duration(streamingDuration));
//    ssc.checkpoint("/spark/storage")

    (properties, target, logLevel, sc, ssc, inputNatsStreaming, inputSubject, outputSubject, clusterId, outputNatsStreaming, natsUrl, streamingDuration)
  }
} 
Example 17
Source File: SparkTemperatureProcessor.scala    From smart-meter   with MIT License
package com.logimethods.nats.connector.spark.app

import java.util.Properties;
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming._
import com.datastax.spark.connector.streaming._
import com.datastax.spark.connector.SomeColumns

import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}

object SparkTemperatureProcessor extends App with SparkStreamingProcessor {
  val log = LogManager.getRootLogger
  log.setLevel(Level.WARN)

  val (properties, target, logLevel, sc, ssc, inputNatsStreaming, inputSubject, outputSubject, clusterId, outputNatsStreaming, natsUrl, streamingDuration) =
    setupStreaming(args)

  // Temperatures //

  val temperatures =
    if (inputNatsStreaming) {
      NatsToSparkConnector
        .receiveFromNatsStreaming(classOf[Tuple2[Long,Float]], StorageLevel.MEMORY_ONLY, clusterId)
        .withNatsURL(natsUrl)
        .withSubjects(inputSubject)
        .withDataDecoder(dataDecoder)
        .asStreamOf(ssc)
    } else {
      NatsToSparkConnector
        .receiveFromNats(classOf[Tuple2[Long,Float]], StorageLevel.MEMORY_ONLY)
        .withProperties(properties)
        .withSubjects(inputSubject)
        .withDataDecoder(dataDecoder)
        .asStreamOf(ssc)
    }

  // Ideally, should be the AVG
  val singleTemperature = temperatures.reduceByKey(Math.max(_,_))

  if (logLevel.contains("TEMPERATURE")) {
    singleTemperature.print()
  }

  singleTemperature.saveToCassandra("smartmeter", "temperature")

  val temperatureReport = singleTemperature.map({case (epoch, temperature) => (s"""{"epoch": $epoch, "temperature": $temperature}""") })
  SparkToNatsConnectorPool.newPool()
                      .withProperties(properties)
                      .withSubjects(outputSubject) // "smartmeter.extract.temperature"
                      .publishToNats(temperatureReport)

  // Start //
  ssc.start();

  ssc.awaitTermination()
} 
Example 18
Source File: BloombergFieldValueFn.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.bloomberg

import java.time.format.DateTimeFormatter
import java.time.{LocalDate, OffsetTime, ZoneOffset}

import com.bloomberglp.blpapi.{Datetime, Element, Schema}

import scala.collection.JavaConverters._

object BloombergFieldValueFn {
  // The opening of this object was truncated in the original listing; the formatter
  // and the match scrutinee below are reconstructed assumptions.
  val datetimeFormatter: DateTimeFormatter = DateTimeFormatter.ISO_LOCAL_DATE

  def apply(element: Element): Any = {
    element.datatype() match {
      case Schema.Datatype.SEQUENCE =>
        // For a SEQUENCE, fold over the child elements; the binding must be `case (map, e)`,
        // not the outer `element`.
        //element.elementIterator().asScala.foldLeft(new java.util.LinkedHashMap[String, Any]) { case (map, `element`) =>
        element.elementIterator().asScala.foldLeft(new java.util.LinkedHashMap[String, Any]) { case (map, e) =>
          map.put(e.name().toString, BloombergFieldValueFn(e))
          map
        } //needs to be a java map because of json serialization

      case _ =>
        if (element.isArray) {
          (0 until element.numValues()).map { i =>
            BloombergFieldValueFn(element.getValueAsElement(i))
          }.asJava
        }
        else {
          element.toString
        }
    }
  }

  def offsetDateTime(dt: Datetime): String = {
    val offsetSeconds = if (dt.hasParts(Datetime.TIME_ZONE_OFFSET)) {
      dt.timezoneOffsetMinutes() * 60
    } else {
      0
    }
    val offset = ZoneOffset.ofTotalSeconds(offsetSeconds)
    OffsetTime.of(dt.hour(), dt.minute(), dt.second(), dt.nanosecond(), offset).toString
  }

  def localDate(dt: Datetime): String = {
    LocalDate.of(dt.year(), dt.month(), dt.dayOfMonth()).format(datetimeFormatter)
  }
} 
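ZoneOffset.ofTotalSeconds does the real work in offsetDateTime above, turning Bloomberg's minute-based offset into a java.time offset. A standalone check:

import java.time.{OffsetTime, ZoneOffset}

object OffsetDemo extends App {
  val offset = ZoneOffset.ofTotalSeconds(120 * 60) // 120 minutes east of UTC
  println(OffsetTime.of(15, 30, 0, 0, offset))     // 15:30+02:00
}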
Example 19
Source File: Rfc3339UtilTest.scala    From api-first-hand   with MIT License
package de.zalando.play.controllers

import java.time.{ LocalDateTime, ZoneId, ZoneOffset, ZonedDateTime }

import org.scalatest.{ FunSpec, MustMatchers }


class Rfc3339UtilTest extends FunSpec with MustMatchers {

  val dtz = ZoneId.of("UTC")
  val offset = ZoneOffset.UTC
  //noinspection ScalaStyle
  val date = ZonedDateTime.of(LocalDateTime.ofEpochSecond(1451911387L, 0, offset), dtz)

  describe("Rfc3339UtilTest") {

    it("should parse RFC3339 DateTime") {
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26-00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452-01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T16:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T14:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26.345200Z[UTC]"
    }
    it("should parse RFC3339 Date") {
      Rfc3339Util.parseDate("2007-05-01").toString mustBe "2007-05-01"
      Rfc3339Util.parseDate("2008-05-01").toString mustBe "2008-05-01"
      Rfc3339Util.parseDate("2007-08-01").toString mustBe "2007-08-01"
      Rfc3339Util.parseDate("2007-05-08").toString mustBe "2007-05-08"
    }
    it("should write DateTime") {
      Rfc3339Util.writeDateTime(date) mustBe "2016-01-04T12:43:07.0000+0000"
    }
    it("should write Date") {
      Rfc3339Util.writeDate(date.toLocalDate) mustBe "2016-01-04"
    }
  }
} 
Example 20
Source File: CronExpression.scala    From cuttle   with Apache License 2.0
package com.criteo.cuttle.cron

import cron4s.Cron
import cron4s.lib.javatime._
import java.time.{Duration, Instant, ZoneId, ZoneOffset}
import java.time.temporal.ChronoUnit

import io.circe.{Encoder, Json}
import io.circe.syntax._

import java.time.ZoneOffset
import scala.concurrent.duration._


case class CronExpression(cronExpression: String, tz: ZoneId = ZoneOffset.UTC) {

  // https://www.baeldung.com/cron-expressions
  // https://www.freeformatter.com/cron-expression-generator-quartz.html
  private val cronExpr = Cron.unsafeParse(cronExpression)

  private def toZonedDateTime(instant: Instant) =
    instant.atZone(tz)

  def nextEvent(): Option[ScheduledAt] = {
    val instant = Instant.now()
    cronExpr.next(toZonedDateTime(instant)).map { next =>
      // add 1 second as between doesn't include the end of the interval
      val delay = Duration.between(instant, next).get(ChronoUnit.SECONDS).seconds.plus(1.second)
      ScheduledAt(next.toInstant, delay)
    }
  }
}

object CronExpression {
  implicit val encodeUser: Encoder[CronExpression] = new Encoder[CronExpression] {
    override def apply(cronExpression: CronExpression) =
      Json.obj("expression" -> cronExpression.cronExpression.asJson, "tz" -> cronExpression.tz.getId.asJson)
  }
} 
Example 21
Source File: DatabaseITest.scala    From cuttle   with Apache License 2.0
package com.criteo.cuttle

import java.time.{Instant, LocalDateTime, ZoneOffset}
import java.time.temporal.ChronoUnit

import scala.concurrent.Future

import cats.effect.IO
import doobie.implicits._
import doobie.scalatest.IOChecker

import com.criteo.cuttle.Auth.User

class DatabaseITest extends DatabaseSuite with IOChecker with TestScheduling {
  val dbConfig = DatabaseConfig(
    Seq(DBLocation("localhost", 3388)),
    dbName,
    "root",
    ""
  )

  // IOChecker needs a transactor for performing its queries
  override val transactor: doobie.Transactor[IO] = Database.newHikariTransactor(dbConfig).allocated.unsafeRunSync()._1

  test("should establish the connection and instanciate a trasactor") {
    assert(Database.connect(dbConfig).isInstanceOf[doobie.Transactor[IO]])
  }

  test("should validate getPausedJobIdsQuery") {
    Database.reset()
    Database.connect(dbConfig)
    check(queries.getPausedJobIdsQuery)
  }

  test("should validate paused jobs queries") {
    Database.reset()
    val xa = Database.connect(dbConfig)
    val id = "id1"
    val job = Job(id, testScheduling) { _ =>
      Future.successful(Completed)
    }

    val pausedJob = PausedJob(job.id, User("user1"), Instant.now().truncatedTo(ChronoUnit.SECONDS))

    assert(queries.pauseJob(pausedJob).transact(xa).unsafeRunSync() == 1)
    assert(queries.getPausedJobs.transact(xa).unsafeRunSync() == Seq(pausedJob))
  }

  test("paused_jobs migration(1) should set default values for old pauses") {
    Database.reset()

    Database.schemaEvolutions.head.transact(transactor).unsafeRunSync()
    sql"INSERT INTO paused_jobs VALUES ('1')".update.run.transact(transactor).unsafeRunSync()
    val id = sql"SELECT * FROM paused_jobs".query[String].unique.transact(transactor).unsafeRunSync()
    assert(id == "1")

    Database.schemaEvolutions(1).transact(transactor).unsafeRunSync()

    val pausedJob = sql"SELECT * FROM paused_jobs".query[PausedJob].unique.transact(transactor).unsafeRunSync()
    assert(pausedJob.id == "1")
    assert(pausedJob.user == User("not defined user"))
    assert(pausedJob.date == LocalDateTime.parse("1991-11-01T15:42:00").toInstant(ZoneOffset.UTC))
  }

  test("we should be able to retrieve finished executions") {
    Database.reset()

    Database.doSchemaUpdates.transact(transactor).unsafeRunSync()
    val ctx = TestContext()
    val date = Some(Instant.now())
    val el = ExecutionLog("id", "hello", date, date, ctx.asJson, ExecutionStatus.ExecutionSuccessful, None, 10)
    (0 to 100).foreach { i =>
      queries.logExecution(el.copy(s"${el.id}-$i"), ctx.logIntoDatabase).transact(transactor).unsafeRunSync()
      val executionLogSize = queries.getExecutionLogSize(Set("hello")).transact(transactor).unsafeRunSync()
      assert(executionLogSize == i + 1)
    }
  }
} 
Example 22
Source File: SimpleTimeServiceBackendSpec.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.apiserver

import java.time.{Instant, ZoneOffset, ZonedDateTime}

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}

class SimpleTimeServiceBackendSpec extends WordSpec with Matchers with ScalaFutures {
  "a simple time service backend" should {
    "return the time it started with" in {
      val timeService = TimeServiceBackend.simple(instantAt(month = 1))
      timeService.getCurrentTime should be(instantAt(month = 1))
    }

    "update the time to a new time" in {
      val timeService = TimeServiceBackend.simple(instantAt(month = 1))
      timeService.setCurrentTime(instantAt(month = 1), instantAt(month = 2))
      timeService.getCurrentTime should be(instantAt(month = 2))
    }

    "not allow the time to be updated without a correct expected time" in {
      val timeService = TimeServiceBackend.simple(instantAt(month = 1))
      whenReady(timeService.setCurrentTime(instantAt(month = 1), instantAt(month = 2))) {
        _ should be(true)
      }
      whenReady(timeService.setCurrentTime(instantAt(month = 1), instantAt(month = 3))) {
        _ should be(false)
      }
      timeService.getCurrentTime should be(instantAt(month = 2))
    }
  }

  // always construct new instants to avoid sharing references, which would allow us to cheat when
  // comparing them inside the SimpleTimeServiceBackend
  private def instantAt(month: Int): Instant =
    ZonedDateTime.of(2020, month, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant
} 
Example 23
Source File: InstantModule.scala    From milan   with Apache License 2.0
package com.amazon.milan.dataformats

import java.time.format.{DateTimeFormatter, DateTimeParseException}
import java.time.temporal.{TemporalAccessor, TemporalQuery}
import java.time.{Instant, LocalDateTime, ZoneOffset}

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}



class InstantModule extends SimpleModule {
  this.addDeserializer[Instant](classOf[Instant], new MilanInstantDeserializer)
}


class MilanInstantDeserializer extends JsonDeserializer[Instant] {
  private val formatsToTry = List(
    DateTimeFormatter.ISO_INSTANT,
    DateTimeFormatter.ISO_DATE_TIME,
    DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"),
    DateTimeFormatter.ISO_DATE)

  override def deserialize(parser: JsonParser, context: DeserializationContext): Instant = {
    val textValue = parser.getText
    this.parseInstant(textValue)
  }

  private val createInstant = new TemporalQuery[Instant] {
    override def queryFrom(temporal: TemporalAccessor): Instant = LocalDateTime.from(temporal).toInstant(ZoneOffset.UTC)
  }


  private def parseInstant(dateTimeString: String): Instant = {
    // Try a bunch of formats.
    // TODO: This is awful but will do for now.
    formatsToTry.map(formatter => this.tryParseFormat(dateTimeString, formatter))
      .filter(_.isDefined)
      .map(_.get)
      .headOption match {
      case Some(instant) =>
        instant

      case None =>
        throw new DateTimeParseException(s"Unable to parse datetime string '$dateTimeString'.", dateTimeString, 0)
    }
  }

  private def tryParseFormat(dateTimeString: String,
                             formatter: DateTimeFormatter): Option[Instant] = {
    try {
      Some(formatter.parse(dateTimeString, this.createInstant))
    }
    catch {
      case _: DateTimeParseException =>
        None
    }
  }
} 
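A usage sketch, assuming a plain Jackson ObjectMapper (the sample value exercises the non-ISO "yyyy-MM-dd HH:mm:ss" fallback format):

import com.fasterxml.jackson.databind.ObjectMapper
import java.time.Instant

object InstantModuleDemo extends App {
  val mapper = new ObjectMapper()
  mapper.registerModule(new InstantModule)

  // The deserializer tries each formatter in turn until one succeeds.
  val instant = mapper.readValue("\"2021-06-01 08:30:00\"", classOf[Instant])
  println(instant) // 2021-06-01T08:30:00Z
}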
Example 24
Source File: Transaction.scala    From Scala-Programming-Projects   with MIT License
package coinyser

import java.sql.{Date, Timestamp}
import java.time.ZoneOffset

case class Transaction(timestamp: Timestamp,
                       date: Date,
                       tid: Int,
                       price: Double,
                       sell: Boolean,
                       amount: Double)


object Transaction {
  def apply(timestamp: Timestamp,
            tid: Int,
            price: Double,
            sell: Boolean,
            amount: Double) =
    new Transaction(
      timestamp = timestamp,
      date = Date.valueOf(
        timestamp.toInstant.atOffset(ZoneOffset.UTC).toLocalDate),
      tid = tid,
      price = price,
      sell = sell,
      amount = amount)
} 
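The companion's apply derives the UTC calendar date once, from the timestamp, so callers cannot supply an inconsistent pair. An illustrative use:

import java.sql.Timestamp
import java.time.Instant

object TransactionDemo extends App {
  val tx = Transaction(
    timestamp = Timestamp.from(Instant.parse("2018-07-01T23:59:59Z")),
    tid = 1, price = 6000.0, sell = false, amount = 0.5)
  println(tx.date) // 2018-07-01: the date at UTC, regardless of the JVM's default zone
}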
Example 25
Source File: WriteAndReadGenericApp.scala    From parquet4s   with MIT License
package com.github.mjakubowski84.parquet4s.core

import java.time.{LocalDate, ZoneOffset}
import java.util.TimeZone

import com.github.mjakubowski84.parquet4s.{ParquetReader, ParquetWriter, RowParquetRecord, ValueCodecConfiguration}
import com.google.common.io.Files
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.{BINARY, INT32, INT64}
import org.apache.parquet.schema.Type.Repetition.{OPTIONAL, REQUIRED}
import org.apache.parquet.schema.{MessageType, OriginalType, Types}

object WriteAndReadGenericApp extends App {

  val ID = "id"
  val Name = "name"
  val Birthday = "birthday"
  val Schema = "user_schema"

  val path = Files.createTempDir().getAbsolutePath
  val vcc = ValueCodecConfiguration(TimeZone.getTimeZone(ZoneOffset.UTC))

  val users = List(
    (1L, "Alice", LocalDate.of(2000, 1, 1)),
    (2L, "Bob", LocalDate.of(1980, 2, 28)),
    (3L, "Cecilia", LocalDate.of(1977, 3, 15))
  ).map { case (id, name, birthday) =>
    RowParquetRecord.empty
      .add(ID, id, vcc)
      .add(Name, name, vcc)
      .add(Birthday, birthday, vcc)
  }

  // write
  implicit val schema: MessageType = Types.buildMessage()
    .addField(Types.primitive(INT64, REQUIRED).as(OriginalType.INT_64).named(ID))
    .addField(Types.primitive(BINARY, OPTIONAL).as(OriginalType.UTF8).named(Name))
    .addField(Types.primitive(INT32, OPTIONAL).as(OriginalType.DATE).named(Birthday))
    .named(Schema)

  ParquetWriter.writeAndClose(s"$path/users.parquet", users)

  //read
  val readData = ParquetReader.read[RowParquetRecord](path)
  try {
    readData.foreach { record =>
      val id = record.get[Long](ID, vcc)
      val name = record.get[String](Name, vcc)
      val birthday = record.get[LocalDate](Birthday, vcc)
      println(s"User[$ID=$id,$Name=$name,$Birthday=$birthday]")
    }
  } finally readData.close()

} 
Example 26
Source File: AddABTestDefinition.scala    From algoliasearch-client-scala   with MIT License
package algolia.definitions

import java.time.ZoneOffset

import algolia.http.{HttpPayload, POST}
import algolia.inputs.ABTest
import algolia.objects.RequestOptions
import org.json4s.Formats
import org.json4s.native.Serialization._

case class AddABTestDefinition(abtest: ABTest)(implicit val formats: Formats)
    extends Definition {

  type T = AddABTestDefinition

  override def options(requestOptions: RequestOptions): AddABTestDefinition =
    this

  override private[algolia] def build(): HttpPayload = {
    val body = Map(
      "name" -> abtest.name,
      "variants" -> abtest.variants,
      "endAt" -> abtest.endAt.atOffset(ZoneOffset.UTC).toString
    )

    HttpPayload(
      POST,
      Seq("2", "abtests"),
      body = Some(write(body)),
      isSearch = false,
      isAnalytics = true,
      requestOptions = None
    )
  }
} 
Example 27
Source File: LocalDateTimeTests.scala    From dtc   with Apache License 2.0
package dtc.tests

import java.time.{Duration, LocalDateTime, ZoneOffset}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import com.fortysevendeg.scalacheck.datetime.jdk8.ArbitraryJdk8.genZonedDateTime
import dtc.instances.localDateTime._
import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Cogen}
import dtc.instances.providers.realLocalDateTimeProvider

class JVMLocalDateTimeTests extends DTCSuiteJVM {

  implicit val arbT: Arbitrary[LocalDateTime] = Arbitrary(genZonedDateTime.map(_.toLocalDateTime))
  implicit val cogenT: Cogen[LocalDateTime] = Cogen(_.toEpochSecond(ZoneOffset.UTC))

  val overflowSafePairGen = for {
    dt <- arbitrary[LocalDateTime]
    dur <- arbitrary[Duration]
  } yield (dt, dur)

  val ldtTests = LocalDateTimeTests[LocalDateTime](overflowSafePairGen, genYear)
  checkAll("java.time.LocalDateTime", DateTimeTests[LocalDateTime](overflowSafePairGen).dateTime)
  checkAll("java.time.LocalDateTime", ldtTests.localDateTime)
  checkAll("java.time.LocalDateTime", ldtTests.monthUntilFractionHandling)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].order)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].partialOrder)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].eqv)

  checkAll("java.time.LocalDateTime", ProviderTests[LocalDateTime](genTimeZone).provider)
} 
Example 28
Source File: SplitAfter.scala    From akka_streams_tutorial   with MIT License
package sample.stream_shared_state

import java.time.{Instant, LocalDateTime, ZoneOffset}

import akka.Done
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.immutable._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}


object SplitAfter extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SplitAfter")
  implicit val executionContext = system.dispatcher

  private def hasSecondChanged: () => Seq[(Int, Instant)] => Iterable[(Instant, Boolean)] = {
    () => {
      slidingElements => {
        if (slidingElements.size == 2) {
          val current = slidingElements.head
          val next = slidingElements.tail.head
          val currentBucket = LocalDateTime.ofInstant(current._2, ZoneOffset.UTC).withNano(0)
          val nextBucket = LocalDateTime.ofInstant(next._2, ZoneOffset.UTC).withNano(0)
          List((current._2, currentBucket != nextBucket))
        } else {
          val current = slidingElements.head
          List((current._2, false))
        }
      }
    }
  }

  val done: Future[Done] = Source(1 to 100)
    .throttle(1, 100.millis)
    .map(elem => (elem, Instant.now()))
    .sliding(2)                           // allows to compare this element with the next element
    .statefulMapConcat(hasSecondChanged)  // stateful decision
    .splitAfter(_._2)                     // split when second has changed
    .map(_._1)                            // proceed with payload
    .fold(0)((acc, _) => acc + 1)   // sum
    .mergeSubstreams
    .runWith(Sink.foreach(each => println(s"Elements in group: $each")))

  terminateWhen(done)


  def terminateWhen(done: Future[_]) = {
    done.onComplete {
      case Success(_) =>
        println("Flow Success. About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
} 
Example 29
Source File: Parameters.scala    From flint   with Apache License 2.0
package com.twosigma.flint.timeseries.io.read

import java.time.{ Instant, ZonedDateTime, ZoneOffset }
import javax.annotation.Nullable

import scala.collection.mutable

import com.twosigma.flint.annotation.PythonApi

private[read] class Parameters private (
  val extraOptions: mutable.Map[String, String],
  var range: BeginEndRange = BeginEndRange(None, None, None, None)
) extends Serializable {

  def this(defaultOptions: Map[String, String]) =
    this(mutable.HashMap[String, String](defaultOptions.toSeq: _*))

  def option(key: String, valueOpt: Option[String]): Unit = valueOpt match {
    case Some(v) => extraOptions += key -> v
    case None => extraOptions -= key
  }

  
  @PythonApi
  private[read] def extraOptionsAsJavaMap: java.util.Map[String, String] = {
    import scala.collection.JavaConverters._
    extraOptions.asJava
  }

}

private[read] case class BeginEndRange(
  rawBeginNanosOpt: Option[Long] = None,
  rawEndNanosOpt: Option[Long] = None,
  expandBeginNanosOpt: Option[Long] = None,
  expandEndNanosOpt: Option[Long] = None
) {

  def beginNanos: Long = beginNanosOpt.getOrElse(
    throw new IllegalArgumentException("'begin' range must be set")
  )

  def endNanos: Long = endNanosOpt.getOrElse(
    throw new IllegalArgumentException("'end' range must be set")
  )

  def beginNanosOpt: Option[Long] = {
    rawBeginNanosOpt.map(_ - expandBeginNanosOpt.getOrElse(0L))
  }

  def endNanosOpt: Option[Long] = {
    rawEndNanosOpt.map(_ + expandEndNanosOpt.getOrElse(0L))
  }

  @PythonApi
  private[read] def beginNanosOrNull: java.lang.Long = beginNanosOpt.map(Long.box).orNull

  @PythonApi
  private[read] def endNanosOrNull: java.lang.Long = endNanosOpt.map(Long.box).orNull
} 
Example 30
Source File: AccessTokenSpec.scala    From tsec   with MIT License
package tsec.oauth2.provider

import java.time.{Instant, ZoneOffset, ZonedDateTime}

import cats.effect.IO

import scala.concurrent.duration._
import org.scalatest.Matchers._
import org.scalatest._

class AccessTokenSpec extends flatspec.AnyFlatSpec {

  it should "say a token is active that is not yet expired" in {
    val token = AccessToken("token", None, None, lifeTime = Some(15 seconds), createdAt = Instant.now())
    token.isExpired[IO].unsafeRunSync() shouldBe false
  }

  it should "expire tokens that have a lifespan that has passed" in {
    val token = AccessToken(
      "token",
      None,
      None,
      lifeTime = Some(1798 seconds),
      createdAt = ZonedDateTime.now(ZoneOffset.UTC).minusSeconds(1800).toInstant
    )
    token.isExpired[IO].unsafeRunSync() shouldBe true
  }

  it should "not expire tokens that have no lifespan" in {
    val token = AccessToken(
      "token",
      None,
      None,
      lifeTime = None,
      createdAt = ZonedDateTime.now(ZoneOffset.UTC).minusSeconds(1800).toInstant
    )
    token.isExpired[IO].unsafeRunSync() shouldBe false
  }
} 
Example 31
Source File: Sessionize.scala    From Mastering-Scala-Machine-Learning   with MIT License 5 votes vote down vote up
package org.akozlov.chapter06

import java.io._

import java.time.ZoneOffset
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import org.apache.spark.{SparkConf,SparkContext}
import org.apache.spark.storage.StorageLevel


object Sessionize extends App {
  val sc = new SparkContext("local[8]", "Sessionize", new SparkConf())

  val checkoutPattern = ".*>checkout.*".r.pattern

  // a basic page view structure
  case class PageView(ts: String, path: String) extends Serializable with Ordered[PageView] {
    override def toString: String = {
      s"($ts #$path)"
    }
    def compare(other: PageView) = ts compare other.ts
  }

  // represent a session
  case class Session[A <: PageView](id: String, visits: Seq[A]) extends Serializable {
    override def toString: String = {
      val vsts = visits.mkString("[", ",", "]")
      s"($id -> $vsts)"
    }
  }

  def toEpochSeconds(str: String): Long =
    LocalDateTime.parse(str, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")).toEpochSecond(ZoneOffset.UTC)
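  // e.g. toEpochSeconds("2015-05-01 00:00:00") == 1430438400L (the string is interpreted as UTC)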

  val sessions = sc.textFile("data/clickstream")
    .map(line => {val parts = line.split("\t"); (parts(4), new PageView(parts(0), parts(20)))})
    .groupByKey.map(x => new Session(x._1, x._2.toList.sorted)) // toList so the cons patterns in findAllCheckoutSessions can match
    .cache

  // sessions.take(100).foreach(println)

  def findAllCheckoutSessions(s: Session[PageView]) = {
    s.visits.tails.filter {
      case PageView(_, "mycompanycom>homepage") :: PageView(_, page) :: _ if page != "mycompanycom>homepage" => true
      case _ => false
    }
    .foldLeft(Seq[Session[PageView]]()) { (r, x) =>
      x.find(y => checkoutPattern.matcher(y.path).matches) match {
        case Some(checkout) if toEpochSeconds(checkout.ts) > toEpochSeconds(x.head.ts) + 60 =>
          r :+ new Session(s.id, x.slice(0, x.indexOf(checkout)))
        case _ => r
      }
    }
  }

  val prodLandingSessions = sessions.flatMap(findAllCheckoutSessions)

  prodLandingSessions.collect.foreach(println)

  sc.stop()
} 
Example 32
Source File: TimeStampFormatter.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.metrics

import java.time.format.{DateTimeFormatterBuilder, SignStyle}
import java.time.{Instant, ZoneOffset, ZonedDateTime}
import java.util.Locale


object TimeStampFormatter {
  import java.time.temporal.ChronoField._

  // `systemTimeZone` is defined elsewhere in wvlet.airframe.metrics; a stand-in is added
  // here (an assumption) so this excerpt compiles on its own:
  val systemTimeZone: ZoneOffset = ZonedDateTime.now().getOffset

  val noSpaceTimestampFormat = new DateTimeFormatterBuilder()
    .parseCaseInsensitive()
    .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
    .appendLiteral('-')
    .appendValue(MONTH_OF_YEAR, 2)
    .appendLiteral('-')
    .appendValue(DAY_OF_MONTH, 2)
    .appendLiteral('T')
    .appendValue(HOUR_OF_DAY, 2)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 2)
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 2)
    .appendLiteral('.')
    .appendValue(MILLI_OF_SECOND, 3)
    .appendOffset("+HHMM", "Z")
    .toFormatter(Locale.US)

  val humanReadableTimestampFormatter = new DateTimeFormatterBuilder()
    .parseCaseInsensitive()
    .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
    .appendLiteral('-')
    .appendValue(MONTH_OF_YEAR, 2)
    .appendLiteral('-')
    .appendValue(DAY_OF_MONTH, 2)
    .appendLiteral(' ')
    .appendValue(HOUR_OF_DAY, 2)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 2)
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 2)
    .appendOffset("+HHMM", "Z")
    .toFormatter(Locale.US)

  def formatTimestamp(time: ZonedDateTime): String = {
    humanReadableTimestampFormatter.format(time)
  }

  def formatTimestamp(timeMillis: Long, zone: ZoneOffset = systemTimeZone): String = {
    val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(timeMillis), zone)
    humanReadableTimestampFormatter.format(timestamp)
  }

  def formatTimestampWithNoSpace(timeMillis: Long): String = {
    val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(timeMillis), systemTimeZone)
    noSpaceTimestampFormat.format(timestamp)
  }
} 
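A quick check of both formats (a sketch against the code above; the printed offset depends on the zone):

  TimeStampFormatter.formatTimestamp(0L, ZoneOffset.UTC) // "1970-01-01 00:00:00Z"
  TimeStampFormatter.formatTimestampWithNoSpace(0L)      // "1970-01-01T00:00:00.000Z" if the system zone is UTC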
Example 33
Source File: package.scala    From akka-persistence-cassandra   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.cassandra

import java.time.format.DateTimeFormatter
import java.time.{ LocalDateTime, ZoneId, ZoneOffset }
import java.util.UUID

import akka.annotation.InternalApi
import com.datastax.oss.driver.api.core.cql.AsyncResultSet
import com.datastax.oss.driver.api.core.uuid.Uuids

package object query {

  
  @InternalApi private[akka] def uuid(timestamp: Long): UUID = {
    def makeMsb(time: Long): Long = {
      // copied from Uuids.makeMsb
      // UUID v1 timestamp must be in 100-nanoseconds interval since 00:00:00.000 15 Oct 1582.
      val uuidEpoch = LocalDateTime.of(1582, 10, 15, 0, 0).atZone(ZoneId.of("GMT-0")).toInstant.toEpochMilli
      val timestamp = (time - uuidEpoch) * 10000

      var msb = 0L
      msb |= (0X00000000FFFFFFFFL & timestamp) << 32
      msb |= (0X0000FFFF00000000L & timestamp) >>> 16
      msb |= (0X0FFF000000000000L & timestamp) >>> 48
      msb |= 0X0000000000001000L // sets the version to 1.
      msb
    }

    val now = Uuids.timeBased()
    new UUID(makeMsb(timestamp), now.getLeastSignificantBits)
  }
} 
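The generated UUID keeps the node and clock bits of a freshly generated time-based UUID and only swaps in the given timestamp, so the timestamp can be read back. A small sketch (assuming the same DataStax driver on the classpath):

  val u = uuid(1575000000000L) // millis since the unix epoch
  Uuids.unixTimestamp(u)       // == 1575000000000L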
Example 34
Source File: CommandsTest.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package cqrs.commands

import java.time.{LocalDateTime, OffsetDateTime, ZoneOffset}
import java.util.UUID

import org.scalatest.BeforeAndAfterAll
import endpoints4s.play.client.{Endpoints, JsonEntitiesFromCodecs}
import endpoints4s.play.server.PlayComponents
import play.api.Mode
import play.api.libs.ws.ahc.{AhcWSClient, AhcWSClientConfig}
import play.core.server.{NettyServer, ServerConfig}

import scala.concurrent.Future
import scala.math.BigDecimal
import org.scalatest.freespec.AsyncFreeSpec

class CommandsTest extends AsyncFreeSpec with BeforeAndAfterAll {

  private val server =
    NettyServer.fromRouterWithComponents(ServerConfig(mode = Mode.Test)) { components =>
      new Commands(PlayComponents.fromBuiltInComponents(components)).routes
    }
  val app = server.applicationProvider.get.get
  import app.materializer
  private val wsClient = AhcWSClient(AhcWSClientConfig())

  object client
      extends Endpoints("http://localhost:9000", wsClient)
      with JsonEntitiesFromCodecs
      with CommandsEndpoints

  override def afterAll(): Unit = {
    server.stop()
    wsClient.close()
  }

  "Commands" - {

    val arbitraryDate = OffsetDateTime
      .of(LocalDateTime.of(2017, 1, 8, 12, 34, 56), ZoneOffset.UTC)
      .toInstant
    val arbitraryValue = BigDecimal(10)

    "create a new meter" in {
      client.command(CreateMeter("electricity")).map { maybeEvent =>
        assert(maybeEvent.collect {
          case StoredEvent(_, MeterCreated(_, "electricity")) => ()
        }.nonEmpty)
      }
    }
    "create a meter and add readings to it" in {
      for {
        maybeCreatedEvent <- client.command(CreateMeter("water"))
        id <-
          maybeCreatedEvent
            .collect { case StoredEvent(_, MeterCreated(id, _)) => id }
            .fold[Future[UUID]](Future.failed(new NoSuchElementException))(
              Future.successful
            )
        maybeAddedEvent <- client.command(
          AddRecord(id, arbitraryDate, arbitraryValue)
        )
        _ <-
          maybeAddedEvent
            .collect {
              case StoredEvent(
                    _,
                    RecordAdded(`id`, `arbitraryDate`, `arbitraryValue`)
                  ) =>
                ()
            }
            .fold[Future[Unit]](Future.failed(new NoSuchElementException))(
              Future.successful
            )
      } yield assert(true)
    }
  }

} 
Example 35
Source File: Reads.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.cli

import com.krux.hyperion.Schedule
import com.krux.hyperion.expression.Duration
import java.time.{DayOfWeek, ZonedDateTime, ZoneOffset}
import scopt.Read._

object Reads {

  private lazy val daysOfWeek = Map(
    "monday" -> DayOfWeek.MONDAY,
    "tuesday" -> DayOfWeek.TUESDAY,
    "wednesday" -> DayOfWeek.WEDNESDAY,
    "thursday" -> DayOfWeek.THURSDAY,
    "friday" -> DayOfWeek.FRIDAY,
    "saturday" -> DayOfWeek.SATURDAY,
    "sunday" -> DayOfWeek.SUNDAY
  )

  private lazy val daysOfMonth = (1 to 31).flatMap { dom =>
    Seq(dom.toString -> dom, dom match {
      case d if d % 10 == 1 && d != 11 => s"${d}st" -> d
      case d if d % 10 == 2 && d != 12 => s"${d}nd" -> d
      case d if d % 10 == 3 && d != 13 => s"${d}rd" -> d
      case d => s"${d}th" -> d // 11th, 12th, 13th land here
    })
  }.toMap

  implicit val durationRead: scopt.Read[Duration] = reads { x => Duration(x) }

  implicit val dateTimeRead: scopt.Read[ZonedDateTime] = reads { x =>
    val dt = x.toLowerCase match {
      case "now" | "today" => ZonedDateTime.now
      case "yesterday" => ZonedDateTime.now.minusDays(1)
      case "tomorrow" => ZonedDateTime.now.plusDays(1)
      case dow if daysOfWeek.keySet contains dow => ZonedDateTime.now.`with`(daysOfWeek(dow))
      case dom if daysOfMonth.keySet contains dom => ZonedDateTime.now.withDayOfMonth(daysOfMonth(dom))
      case d => ZonedDateTime.parse(d)
    }

    dt.withZoneSameInstant(ZoneOffset.UTC)
  }

  implicit val scheduleRead: scopt.Read[Schedule] = reads { x =>
    Schedule.cron.startDateTime(dateTimeRead.reads(x))
  }

} 
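A sketch of the inputs dateTimeRead accepts (strings invented for illustration); every result is normalized to UTC by withZoneSameInstant:

  dateTimeRead.reads("yesterday")             // now minus one day
  dateTimeRead.reads("friday")                // the Friday of the current week
  dateTimeRead.reads("21st")                  // the 21st of the current month
  dateTimeRead.reads("2020-01-08T12:34:56Z")  // falls through to ZonedDateTime.parse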
Example 36
Source File: BoxPathTest.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore.box

import java.time.ZoneOffset
import java.time.format.DateTimeFormatter

import blobstore.AbstractPathTest
import com.box.sdk.BoxFile
import cats.syntax.either._

class BoxPathTest extends AbstractPathTest[BoxPath] {
  @SuppressWarnings(Array("scalafix:DisableSyntax.null"))
  val file = new BoxFile(null, "")
  val dtf  = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssX")
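  // e.g. dtf.format(Instant.EPOCH.atZone(ZoneOffset.UTC)) == "1970-01-01T00:00:00Z" (pattern X prints "Z" at zero offset)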
  val json =
    s"""{
       |  "id": "fileId",
       |  "type": "file",
       |  "name": "$fileName",
       |  "size": $fileSize,
       |  "modified_at": "${dtf.format(lastModified.atZone(ZoneOffset.UTC))}",
       |  "path_collection": {
       |    "total_count": 2,
       |    "entries": [
       |      {
       |        "id": "rootId",
       |        "name": "$root"
       |      },
       |      {
       |        "id": "folderId",
       |        "name": "$dir"
       |      }
       |    ]
       |  }
       |}""".stripMargin
  val fileInfo: BoxFile#Info = new file.Info(json)
  val concretePath           = new BoxPath(Some(root), Some("rootId"), fileInfo.asLeft)
} 
Example 37
Source File: Timestamp.scala    From scala-steward   with Apache License 2.0 5 votes vote down vote up
package org.scalasteward.core.util

import cats.Order
import cats.implicits._
import io.circe.Codec
import io.circe.generic.extras.semiauto.deriveUnwrappedCodec
import java.time.{Instant, LocalDateTime, ZoneOffset}
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.FiniteDuration

final case class Timestamp(millis: Long) {
  def +(finiteDuration: FiniteDuration): Timestamp =
    Timestamp(millis + finiteDuration.toMillis)

  def toLocalDateTime: LocalDateTime =
    LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)

  def until(that: Timestamp): FiniteDuration =
    FiniteDuration(that.millis - millis, TimeUnit.MILLISECONDS)
}

object Timestamp {
  def fromLocalDateTime(ldt: LocalDateTime): Timestamp =
    Timestamp(ldt.toInstant(ZoneOffset.UTC).toEpochMilli)

  implicit val timestampCodec: Codec[Timestamp] =
    deriveUnwrappedCodec

  implicit val timestampOrder: Order[Timestamp] =
    Order.by(_.millis)
} 
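A round-trip sketch (assuming scala.concurrent.duration._ is in scope for the 1.hour literal):

  val ts = Timestamp.fromLocalDateTime(LocalDateTime.of(2020, 1, 1, 0, 0))
  ts.toLocalDateTime    // 2020-01-01T00:00
  (ts + 1.hour).millis  // ts.millis + 3600000
  ts.until(ts + 1.hour) // one hour, as a FiniteDuration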
Example 38
Source File: ExpiryServiceTest.scala    From kafka-journal   with MIT License 5 votes vote down vote up
package com.evolutiongaming.kafka.journal.eventual.cassandra

import java.time.{Instant, LocalDate, ZoneOffset}

import cats.effect.ExitCase
import cats.implicits._
import cats.{Id, catsInstancesForId}
import com.evolutiongaming.kafka.journal.ExpireAfter
import com.evolutiongaming.kafka.journal.ExpireAfter.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpireOn.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpiryService.Action
import com.evolutiongaming.kafka.journal.util.BracketFromMonad
import org.scalatest.FunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._
import scala.util.control.NonFatal

class ExpiryServiceTest extends FunSuite with Matchers {
  import ExpiryServiceTest._

  test("expireOn") {
    val expireAfter = 1.day.toExpireAfter
    val expected = LocalDate.of(2019, 12, 12).toExpireOn
    expireService.expireOn(expireAfter, timestamp) shouldEqual expected
  }

  for {
    (expiry, expireAfter, action) <- List(
      (
        none[Expiry],
        1.minute.toExpireAfter.some,
        Action.update(Expiry(
          1.minute.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn))),
      (
        none[Expiry],
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.ignore),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        none[ExpireAfter],
        Action.remove))
  } yield {
    test(s"action expiry: $expiry, expireAfter: $expireAfter, action: $action") {
      expireService.action(expiry, expireAfter, timestamp) shouldEqual action
    }
  }
}

object ExpiryServiceTest {

  implicit val bracketId: BracketFromMonad[Id, Throwable] = new BracketFromMonad[Id, Throwable] {

    def F = catsInstancesForId

    def bracketCase[A, B](acquire: Id[A])(use: A => Id[B])(release: (A, ExitCase[Throwable]) => Id[Unit]) = {
      flatMap(acquire) { a =>
        try {
          val b = use(a)
          try release(a, ExitCase.Completed) catch { case NonFatal(_) => }
          b
        } catch {
          case NonFatal(e) =>
            release(a, ExitCase.Error(e))
            raiseError(e)
        }
      }
    }

    def raiseError[A](a: Throwable) = throw a

    def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]) = fa
  }

  val timestamp: Instant = Instant.parse("2019-12-11T10:10:10.00Z")

  val zoneId: ZoneOffset = ZoneOffset.UTC

  val expireService: ExpiryService[Id] = ExpiryService[Id](zoneId)
} 
Example 39
Source File: CassandraReadSideSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.scaladsl.persistence.cassandra

import java.time.LocalDateTime
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter

import akka.persistence.query.TimeBasedUUID
import scala.concurrent.Future
import scala.concurrent.duration._

import com.typesafe.config.ConfigFactory
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.internal.persistence.cassandra.CassandraReadSideSettings
import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.CassandraPersistentEntityRegistry
import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.CassandraReadSideImpl
import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.ScaladslCassandraOffsetStore
import com.lightbend.lagom.scaladsl.persistence.TestEntity.Evt
import com.lightbend.lagom.scaladsl.persistence._

object CassandraReadSideSpec {
  def firstTimeBucket: String = {
    val today                                = LocalDateTime.now(ZoneOffset.UTC)
    val firstBucketFormat: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HH:mm")
    today.minusHours(3).format(firstBucketFormat)
  }
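  // e.g. "20191211T07:10", i.e. three hours before the current UTC time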

  val readSideConfig = ConfigFactory.parseString(s"""
    # speed up read-side queries
    cassandra-query-journal {
      first-time-bucket = "$firstTimeBucket"
      refresh-interval = 1s
      events-by-tag.eventual-consistency-delay = 1s
    }
    """)

  val defaultConfig =
    ConfigFactory
      .parseString("akka.loglevel = INFO")
      .withFallback(readSideConfig)

  val noAutoCreateConfig =
    ConfigFactory
      .parseString("lagom.persistence.read-side.cassandra.tables-autocreate = false")
      .withFallback(defaultConfig)
}

class CassandraReadSideSpec
    extends CassandraPersistenceSpec(CassandraReadSideSpec.defaultConfig, TestEntitySerializerRegistry)
    with AbstractReadSideSpec {
  import system.dispatcher

  protected override lazy val persistentEntityRegistry = new CassandraPersistentEntityRegistry(system)

  private lazy val testCasReadSideSettings: CassandraReadSideSettings = new CassandraReadSideSettings(system)
  private lazy val testSession: CassandraSession                      = new CassandraSession(system)
  private lazy val offsetStore =
    new ScaladslCassandraOffsetStore(system, testSession, testCasReadSideSettings, ReadSideConfig())
  private lazy val cassandraReadSide = new CassandraReadSideImpl(system, testSession, offsetStore)

  override def processorFactory(): ReadSideProcessor[Evt] =
    new TestEntityReadSide.TestEntityReadSideProcessor(system, cassandraReadSide, testSession)

  private lazy val readSide = new TestEntityReadSide(system, testSession)

  override def getAppendCount(id: String): Future[Long] = readSide.getAppendCount(id)

  override def afterAll(): Unit = {
    super.afterAll()
  }
}

class CassandraReadSideAutoCreateSpec
    extends CassandraPersistenceSpec(CassandraReadSideSpec.noAutoCreateConfig, TestEntitySerializerRegistry) {
  import system.dispatcher

  private lazy val testSession: CassandraSession                      = new CassandraSession(system)
  private lazy val testCasReadSideSettings: CassandraReadSideSettings = new CassandraReadSideSettings(system)
  private lazy val offsetStore =
    new ScaladslCassandraOffsetStore(system, testSession, testCasReadSideSettings, ReadSideConfig())

  "A Cassandra Read-Side" must {
    "not send ClusterStartupTask message, so startupTask must return None" +
      "when 'lagom.persistence.read-side.cassandra.tables-autocreate' flag is 'false'" in {
      offsetStore.startupTask shouldBe None
    }
  }
} 
Example 40
Source File: PubSubMessage.scala    From akka-cloudpubsub   with Apache License 2.0 5 votes vote down vote up
package com.qubit.pubsub.client

import java.time.{Instant, ZoneOffset, ZonedDateTime}

import com.google.protobuf.{ByteString, Timestamp}
import com.google.pubsub.v1.{
  PubsubMessage => PubSubMessageProto,
  ReceivedMessage => ReceivedPubSubMessageProto
}

import scala.collection.JavaConversions._

final case class PubSubMessage(
    payload: Array[Byte],
    msgId: Option[String] = None,
    publishTs: Option[ZonedDateTime] = None,
    attributes: Option[Map[String, String]] = None) {
  def toProto: PubSubMessageProto = {
    val builder = PubSubMessageProto.newBuilder()
    builder.setData(ByteString.copyFrom(payload))
    publishTs.foreach(
      ts =>
        builder.setPublishTime(
          Timestamp.newBuilder().setSeconds(ts.toEpochSecond).build()))
    msgId.foreach(id => builder.setMessageId(id))
    attributes.foreach(attr => builder.putAllAttributes(attr))
    builder.build()
  }
}

object PubSubMessage {
  def fromProto(proto: PubSubMessageProto): PubSubMessage = {
    val payload = proto.getData.toByteArray
    val msgId = Some(proto.getMessageId)
    val attributes = if (proto.getAttributesMap.isEmpty) { None } else {
      Some(proto.getAttributesMap.toMap)
    }
    val publishTs = if (proto.hasPublishTime) {
      Some(
        ZonedDateTime.ofInstant(
          Instant.ofEpochSecond(proto.getPublishTime.getSeconds),
          ZoneOffset.UTC))
    } else {
      None
    }

    PubSubMessage(payload,
                  msgId = msgId,
                  publishTs = publishTs,
                  attributes = attributes)
  }
}

final case class ReceivedPubSubMessage(ackId: String, payload: PubSubMessage)

object ReceivedPubSubMessage {
  def fromProto(proto: ReceivedPubSubMessageProto): ReceivedPubSubMessage = {
    val ackId = proto.getAckId
    val payload = PubSubMessage.fromProto(proto.getMessage)
    ReceivedPubSubMessage(ackId, payload)
  }
} 
Example 41
Source File: ArrayOfZoneOffsetsBenchmark.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8
import java.time.ZoneOffset

import org.openjdk.jmh.annotations.{Param, Setup}

abstract class ArrayOfZoneOffsetsBenchmark extends CommonParams {
  @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000"))
  var size: Int = 1000
  var obj: Array[ZoneOffset] = _
  var jsonString: String = _
  var jsonBytes: Array[Byte] = _
  var preallocatedBuf: Array[Byte] = _

  @Setup
  def setup(): Unit = {
    obj = (1 to size).map(i => ZoneOffset.ofHoursMinutesSeconds(i % 17, (i % 4) * 15, 0)).toArray
    jsonString = obj.mkString("[\"", "\",\"", "\"]")
    jsonBytes = jsonString.getBytes(UTF_8)
    preallocatedBuf = new Array[Byte](jsonBytes.length + 100)
  }
} 
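For size = 3 the fixture generated by setup() looks like this (derived from the code above):

  // obj        = Array(+01:15, +02:30, +03:45)
  // jsonString = ["+01:15","+02:30","+03:45"]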
Example 42
Source File: ArrayOfZoneOffsetsReading.scala    From jsoniter-scala   with MIT License 5 votes vote down vote up
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8
import java.time.ZoneOffset

import com.avsystem.commons.serialization.json._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.AVSystemCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.BorerJsonEncodersDecoders._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.PlayJsonFormats._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.UPickleReaderWriters._
import com.github.plokhotnyuk.jsoniter_scala.core._
import io.circe.parser._
import org.openjdk.jmh.annotations.Benchmark
import play.api.libs.json.Json
import spray.json._

class ArrayOfZoneOffsetsReading extends ArrayOfZoneOffsetsBenchmark {
  @Benchmark
  def avSystemGenCodec(): Array[ZoneOffset] = JsonStringInput.read[Array[ZoneOffset]](new String(jsonBytes, UTF_8))

  @Benchmark
  def borer(): Array[ZoneOffset] = io.bullet.borer.Json.decode(jsonBytes).to[Array[ZoneOffset]].value

  @Benchmark
  def circe(): Array[ZoneOffset] = decode[Array[ZoneOffset]](new String(jsonBytes, UTF_8)).fold(throw _, identity)

  @Benchmark
  def jacksonScala(): Array[ZoneOffset] = jacksonMapper.readValue[Array[ZoneOffset]](jsonBytes)

  @Benchmark
  def jsoniterScala(): Array[ZoneOffset] = readFromArray[Array[ZoneOffset]](jsonBytes)

  @Benchmark
  def playJson(): Array[ZoneOffset] = Json.parse(jsonBytes).as[Array[ZoneOffset]]

  @Benchmark
  def sprayJson(): Array[ZoneOffset] = JsonParser(jsonBytes).convertTo[Array[ZoneOffset]]

  @Benchmark
  def uPickle(): Array[ZoneOffset] = read[Array[ZoneOffset]](jsonBytes)
}
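These are JMH benchmarks; with the project's sbt-jmh setup they are typically launched with a regex filter (invocation sketch, the module prefix may differ):

  sbt "jmh:run .*ArrayOfZoneOffsetsReading.*"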