java.time.LocalDateTime Scala Examples
The following examples show how to use java.time.LocalDateTime in Scala.
Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
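Before the project examples, here is a minimal sketch of the core LocalDateTime operations they build on — construction, parsing, arithmetic, formatting, and conversion to an Instant. (This snippet is illustrative and not taken from any of the projects below.)

import java.time.{LocalDateTime, ZoneOffset}
import java.time.format.DateTimeFormatter

val now = LocalDateTime.now()                            // current date-time, no zone or offset attached
val dt = LocalDateTime.of(2018, 6, 25, 9, 30)            // 2018-06-25T09:30
val parsed = LocalDateTime.parse("2018-06-25T09:30:00")  // ISO-8601 is the default parse format
val later = dt.plusDays(1).minusHours(2)                 // instances are immutable; arithmetic returns new values
val text = dt.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) // "2018-06-25 09:30:00"
val instant = dt.toInstant(ZoneOffset.UTC)               // supply an offset to get an absolute point in time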
Example 1
Source File: TagInputAssociation.scala From smui with Apache License 2.0
package models

import java.sql.Connection
import java.time.LocalDateTime

import anorm.SqlParser.get
import anorm._

case class TagInputAssociation(tagId: InputTagId,
                               searchInputId: SearchInputId,
                               lastUpdate: LocalDateTime = LocalDateTime.now()) {

  import TagInputAssociation._

  def toNamedParameters: Seq[NamedParameter] = Seq(
    TAG_ID -> tagId,
    INPUT_ID -> searchInputId,
    LAST_UPDATE -> lastUpdate
  )
}

object TagInputAssociation {

  val TABLE_NAME = "tag_2_input"
  val TAG_ID = "tag_id"
  val INPUT_ID = "input_id"
  val LAST_UPDATE = "last_update"

  def insert(associations: TagInputAssociation*)(implicit connection: Connection): Unit = {
    if (associations.nonEmpty) {
      BatchSql(s"insert into $TABLE_NAME ($TAG_ID, $INPUT_ID, $LAST_UPDATE) " +
        s"values ({$TAG_ID}, {$INPUT_ID}, {$LAST_UPDATE})",
        associations.head.toNamedParameters,
        associations.tail.map(_.toNamedParameters): _*
      ).execute()
    }
  }

  def updateTagsForSearchInput(searchInputId: SearchInputId, tagIds: Seq[InputTagId])(implicit connection: Connection): Unit = {
    deleteBySearchInputId(searchInputId)
    insert(tagIds.map(tagId => TagInputAssociation(tagId, searchInputId)): _*)
  }

  def loadTagsBySearchInputId(id: SearchInputId)(implicit connection: Connection): Seq[InputTag] = {
    SQL(s"select * from $TABLE_NAME a, ${InputTag.TABLE_NAME} t where a.$INPUT_ID = {inputId} " +
      s"and a.$TAG_ID = t.${InputTag.ID} order by t.${InputTag.PROPERTY} asc, t.${InputTag.VALUE} asc").
      on("inputId" -> id).as(InputTag.sqlParser.*)
  }

  def loadTagsBySearchInputIds(ids: Seq[SearchInputId])(implicit connection: Connection): Map[SearchInputId, Seq[InputTag]] = {
    ids.grouped(100).toSeq.flatMap { idGroup =>
      SQL(s"select * from $TABLE_NAME a, ${InputTag.TABLE_NAME} t where a.$INPUT_ID in ({inputIds}) " +
        s"and a.$TAG_ID = t.${InputTag.ID} order by t.${InputTag.PROPERTY} asc, t.${InputTag.VALUE} asc").
        on("inputIds" -> idGroup).as((InputTag.sqlParser ~ get[SearchInputId](s"$TABLE_NAME.$INPUT_ID")).*).
        map { case tag ~ inputId => inputId -> tag }
    }.groupBy(_._1).mapValues(_.map(_._2))
  }

  def deleteBySearchInputId(id: SearchInputId)(implicit connection: Connection): Int = {
    SQL"delete from #$TABLE_NAME where #$INPUT_ID = $id".executeUpdate()
  }
}
Example 2
Source File: ABTest.scala From algoliasearch-client-scala with MIT License
package algolia.inputs

import java.time.LocalDateTime

import algolia.objects.Query

case class ABTest(
    name: String,
    variants: Seq[ABTestVariant],
    endAt: LocalDateTime
)

case class ABTestVariant(
    index: String,
    trafficPercentage: Int,
    description: Option[String] = None,
    customSearchParameters: Option[Query] = None
)
Example 3
Source File: Response.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.engine.trigger

import akka.http.scaladsl.model._
import spray.json.DefaultJsonProtocol._
import spray.json._

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

// The HTTP service can `complete` using one of these functions to construct a
// response with a JSON object and status code matching the one in the body.
object Response {

  def successResponse[A: JsonWriter](a: A): (StatusCode, JsObject) = {
    (StatusCodes.OK, resultJsObject(a))
  }

  def errorResponse(status: StatusCode, es: String*): (StatusCode, JsObject) = {
    (status, errorsJsObject(status, es))
  }

  // These functions are borrowed from the HTTP JSON ledger API but I haven't
  // factored them out for now as they are fairly small.
  def errorsJsObject(status: StatusCode, es: Seq[String]): JsObject = {
    val errors = es.toJson
    JsObject(statusField(status), ("errors", errors))
  }

  def resultJsObject[A: JsonWriter](a: A): JsObject = {
    resultJsObject(a.toJson)
  }

  def resultJsObject(a: JsValue): JsObject = {
    JsObject(statusField(StatusCodes.OK), ("result", a))
  }

  def statusField(status: StatusCode): (String, JsNumber) =
    ("status", JsNumber(status.intValue()))

  // Trigger status messages have timestamps for which this is the
  // formatter.
  object LocalDateTimeJsonFormat extends RootJsonFormat[LocalDateTime] {
    override def write(dt: LocalDateTime) =
      JsString(dt.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME))
    override def read(json: JsValue): LocalDateTime = json match {
      case JsString(s) => LocalDateTime.parse(s, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
      case _ => throw new DeserializationException("Decode local datetime failed")
    }
  }
}
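As an illustration (not part of the daml sources), a round trip through LocalDateTimeJsonFormat looks like this:

import java.time.LocalDateTime
import com.daml.lf.engine.trigger.Response.LocalDateTimeJsonFormat

val dt = LocalDateTime.of(2020, 1, 15, 10, 30, 45)
val js = LocalDateTimeJsonFormat.write(dt)   // JsString("2020-01-15T10:30:45")
val back = LocalDateTimeJsonFormat.read(js)  // parses the ISO string back
assert(back == dt)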
Example 4
Source File: Common.scala From chymyst-core with Apache License 2.0
package io.chymyst.benchmark

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit

object Common {
  val warmupTimeMs = 50L

  def elapsed(initTime: LocalDateTime): Long = initTime.until(LocalDateTime.now, ChronoUnit.MILLIS)

  def elapsed(initTime: Long): Long = System.currentTimeMillis() - initTime

  def timeThis(task: => Unit): Long = {
    val initTime = LocalDateTime.now
    task
    elapsed(initTime)
  }

  def timeWithPriming(task: => Unit): Long = {
    task // this is just priming, no measurement
    val result1 = timeThis { task }
    val result2 = timeThis { task }
    (result1 + result2 + 1) / 2
  }

  def waitSome(): Unit = Thread.sleep(warmupTimeMs)
}
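A hypothetical call site for these helpers (illustrative only): timeThis measures a single run via LocalDateTime.until(..., ChronoUnit.MILLIS), while timeWithPriming runs the task once unmeasured and then averages two measured runs.

import io.chymyst.benchmark.Common._

val millis = timeWithPriming {
  Thread.sleep(100) // stand-in for the workload being benchmarked
}
// millis is roughly 100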
Example 5
Source File: Benchmarks7.scala From chymyst-core with Apache License 2.0
package io.chymyst.benchmark

import java.time.LocalDateTime

import io.chymyst.benchmark.Common._
import io.chymyst.jc._

import code.jiansen.scalajoin._ // Use precompiled classes from Jiansen's Join.scala, which are in that package.

object Benchmarks7 {

  /// Concurrent decrement of `n` counters, each going from `count` to 0 concurrently.
  /// create `n` asynchronous counters, initialize each to `count`, then decrement `count*n` times, until all counters are zero.
  /// collect the zero-counter events, make sure there are `n` of them, then fire an `all_done` event that yields the benchmark time.
  val numberOfCounters = 5

  def benchmark7(count: Int, tp: Pool): Long = {

    val done = m[Unit]
    val all_done = m[Int]
    val f = b[LocalDateTime, Long]

    site(tp)(
      go { case all_done(0) + f(tInit, r) => r(elapsed(tInit)) },
      go { case all_done(x) + done(_) if x > 0 => all_done(x - 1) }
    )
    val initialTime = LocalDateTime.now
    all_done(numberOfCounters)

    val d = make_counters(done, numberOfCounters, count, tp)
    (1 to (count * numberOfCounters)).foreach { _ => d() }

    f(initialTime)
  }

  // this deadlocks whenever `count` * `counters` becomes large.
  def benchmark8(count: Int, tp: Pool): Long = {
    println(s"Creating $numberOfCounters concurrent counters, each going from $count to 0")

    object j8 extends Join {
      object done extends AsyName[Unit]
      object all_done extends AsyName[Int]
      object f extends SynName[LocalDateTime, Long]

      join {
        case all_done(0) and f(tInit) => f.reply(elapsed(tInit))
        case all_done(x) and done(_) if x > 0 => all_done(x - 1)
      }
    }

    val initialTime = LocalDateTime.now
    j8.all_done(count)

    val d = make_counters8a(j8.done, numberOfCounters, count)
    (1 to (count * numberOfCounters)).foreach { _ => d(()) }

    j8.f(initialTime)
  }

  private def make_counters(done: M[Unit], counters: Int, init: Int, tp: Pool) = {
    val c = m[Int]
    val d = m[Unit]

    site(tp)(
      go { case c(0) => done() },
      go { case c(n) + d(_) if n > 0 => c(n - 1) }
    )
    (1 to counters).foreach(_ => c(init))
    // We return just one molecule.
    d
  }

  private def make_counters8a(done: AsyName[Unit], counters: Int, init: Int): AsyName[Unit] = {
    object j8a extends Join {
      object c extends AsyName[Int]
      object d extends AsyName[Unit]

      join {
        case c(0) => done(())
        case c(n) and d(_) if n > 0 => c(n - 1)
      }
    }
    (1 to counters).foreach(_ => j8a.c(init))
    // We return just one molecule.
    j8a.d
  }
}
Example 6
Source File: package.scala From ArchiveSpark with MIT License
package org.archive

import java.time.LocalDateTime

import org.apache.spark.rdd.RDD
import org.archive.archivespark.implicits.{EnrichableRDD, GenericHelpersRDD, JsonConvertibleRDD, SimplifiedGetterEnrichRoot, StringRDD}
import org.archive.archivespark.model.EnrichRoot
import org.archive.archivespark.util.JsonConvertible

import scala.reflect.ClassTag

package object archivespark {
  implicit class ImplicitStringRDD(rdd: RDD[String]) extends StringRDD(rdd)
  implicit class ImplicitGenericHelpersRDD[A : ClassTag](rdd: RDD[A]) extends GenericHelpersRDD[A](rdd)
  implicit class ImplicitEnrichableRDD[Root <: EnrichRoot : ClassTag](rdd: RDD[Root]) extends EnrichableRDD[Root](rdd)
  implicit class ImplicitJsonConvertibleRDD[Record <: JsonConvertible : ClassTag](rdd: RDD[Record]) extends JsonConvertibleRDD[Record](rdd)
  implicit class ImplicitSimplifiedGetterEnrichRoot[Root <: EnrichRoot](root: Root) extends SimplifiedGetterEnrichRoot[Root](root)
  implicit class OrderedLocalDateTime(time: LocalDateTime) extends Ordered[LocalDateTime] {
    override def compare(that: LocalDateTime): Int = time.compareTo(that)
  }
}
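With the OrderedLocalDateTime implicit in scope, LocalDateTime values gain comparison operators directly — for example (illustrative, not part of ArchiveSpark):

import java.time.LocalDateTime
import org.archive.archivespark._

val a = LocalDateTime.of(2020, 1, 1, 0, 0)
val b = LocalDateTime.of(2020, 6, 1, 0, 0)
a < b // true: `a` is wrapped in OrderedLocalDateTime, which delegates to compareTo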
Example 7
Source File: ItTestPlugin.scala From matcher with MIT License
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import sbt.Keys._
import sbt.Tests.Group
import sbt._

// Separate projects for integration tests because of IDEA: https://youtrack.jetbrains.com/issue/SCL-14363#focus=streamItem-27-3061842.0-0
object ItTestPlugin extends AutoPlugin {

  object autoImport extends ItKeys
  import autoImport._

  override def projectSettings: Seq[Def.Setting[_]] = inConfig(Test)(
    Seq(
      logDirectory := {
        val runId = Option(System.getenv("RUN_ID")).getOrElse {
          val formatter = DateTimeFormatter.ofPattern("MM-dd--HH_mm_ss")
          formatter.format(LocalDateTime.now()) // git branch?
        }
        val r = target.value / "logs" / runId
        IO.createDirectory(r)
        r
      },
      // Example: SCALATEST_EXCLUDE_TAGS="package1.Tag1 package2.Tag2 package3.Tag3"
      testOptions += {
        val excludeTags = sys.env.get("SCALATEST_EXCLUDE_TAGS").fold(Seq.empty[String])(Seq("-l", _))
        val includeTags = sys.env.get("SCALATEST_INCLUDE_TAGS").fold(Seq.empty[String])(Seq("-n", _))
        val args = Seq("-fFWD", (logDirectory.value / "summary.log").toString) ++ excludeTags ++ includeTags
        Tests.Argument(TestFrameworks.ScalaTest, args: _*)
      },
      parallelExecution in Test := true,
      tags in test += Tags.ForkedTestGroup -> 1,
      tags in testOnly += Tags.ForkedTestGroup -> 1,
      tags in testQuick += Tags.ForkedTestGroup -> 1,
      testGrouping := {
        // ffs, sbt!
        // https://github.com/sbt/sbt/issues/3266
        val javaHomeValue = javaHome.value
        val logDirectoryValue = logDirectory.value
        val envVarsValue = envVars.value
        val javaOptionsValue = javaOptions.value
        val resourceDirectoryValue = resourceDirectory.value

        for {
          group <- testGrouping.value
          suite <- group.tests
        } yield Group(
          suite.name,
          Seq(suite),
          Tests.SubProcess(
            ForkOptions(
              javaHome = javaHomeValue,
              outputStrategy = outputStrategy.value,
              bootJars = Vector.empty[java.io.File],
              workingDirectory = Option(baseDirectory.value),
              runJVMOptions = Vector(
                s"-Djava.util.logging.config.file=${resourceDirectoryValue / "jul.properties"}",
                s"-Dlogback.configurationFile=${resourceDirectoryValue / "logback-test.xml"}",
                "-Dwaves.it.logging.appender=FILE",
                s"-Dwaves.it.logging.dir=${logDirectoryValue / suite.name.replaceAll("""(\w)\w*\.""", "$1.")}" // foo.bar.Baz -> f.b.Baz
              ) ++ javaOptionsValue,
              connectInput = false,
              envVars = envVarsValue
            ))
        )
      }
    ))
}

trait ItKeys {
  val logDirectory = taskKey[File]("The directory where logs of integration tests are written")
}
Example 8
Source File: DBRecords.scala From matcher with MIT License
package com.wavesplatform.dex.history

import java.time.LocalDateTime

object DBRecords {

  sealed trait Record

  case class OrderRecord(id: String,
                         tpe: Byte,
                         senderAddress: String,
                         senderPublicKey: String,
                         amountAssetId: String,
                         priceAssetId: String,
                         feeAssetId: String,
                         side: Byte,
                         price: BigDecimal,
                         amount: BigDecimal,
                         timestamp: LocalDateTime,
                         expiration: LocalDateTime,
                         fee: BigDecimal,
                         created: LocalDateTime)
      extends Record

  case class EventRecord(orderId: String,
                         eventType: Byte,
                         timestamp: LocalDateTime,
                         price: BigDecimal,
                         filled: BigDecimal,
                         totalFilled: BigDecimal,
                         feeFilled: BigDecimal,
                         feeTotalFilled: BigDecimal,
                         status: Byte)
      extends Record
}
Example 9
Source File: InformativeTestStart.scala From matcher with MIT License
package com.wavesplatform.dex.it.test

import java.time.{LocalDateTime, ZoneId}

import com.wavesplatform.dex.it.api.BaseContainersKit
import mouse.any._
import org.scalatest.{Args, Status, Suite}

import scala.util.{Failure, Success}

trait InformativeTestStart extends Suite { self: BaseContainersKit =>

  override protected def runTest(testName: String, args: Args): Status = {

    def print(text: String): Unit = writeGlobalLog(s"---------- [${LocalDateTime.now(ZoneId.of("UTC"))}] $text ----------")

    print(s"Test '$testName' started")

    super.runTest(testName, args) unsafeTap {
      _.whenCompleted {
        case Success(r) => print(s"Test '$testName' ${if (r) "succeeded" else "failed"}")
        case Failure(e) => print(s"Test '$testName' failed with exception '${e.getClass.getSimpleName}'")
      }
    }
  }

  protected def writeGlobalLog(x: String): Unit = {
    log.debug(x)
    knownContainers.get().foreach { _.printDebugMessage(x) }
  }
}
Example 10
Source File: Status.scala From daf-semantics with Apache License 2.0
package it.almawave.kb.http.endpoints

import java.time.LocalTime

import io.swagger.annotations.Api
import javax.ws.rs.Path
import javax.ws.rs.GET
import javax.ws.rs.Produces
import io.swagger.annotations.ApiOperation
import javax.ws.rs.core.MediaType
import org.slf4j.LoggerFactory
import javax.ws.rs.core.Context
import javax.ws.rs.core.UriInfo
import javax.ws.rs.core.Request
import it.almawave.linkeddata.kb.utils.JSONHelper
import java.time.LocalDateTime
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
import java.util.Locale
import java.time.ZoneId

@Api(tags = Array("catalog"))
@Path("/status")
class Status {

  private val logger = LoggerFactory.getLogger(this.getClass)

  @Context
  var uriInfo: UriInfo = null

  @GET
  @Produces(Array(MediaType.APPLICATION_JSON))
  @ApiOperation(nickname = "status", value = "endpoint status")
  def status() = {

    val base_uri = uriInfo.getBaseUri
    val msg = s"the service is running at ${base_uri}"
    logger.info(msg)

    val _now = now()
    StatusMsg(_now._1, _now._2, msg)
  }

  def now() = {
    val zdt = ZonedDateTime.now(ZoneId.of("+1"))
    val dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSZ")
    (zdt.format(dtf), zdt)
  }
}

case class StatusMsg(
    now: String,
    dateTime: ZonedDateTime,
    msg: String
)
Example 11
Source File: JdbcResultSpec.scala From daf with BSD 3-Clause "New" or "Revised" License
package daf.dataset.query.jdbc

import java.sql.Timestamp
import java.time.{ LocalDateTime, OffsetDateTime }

import org.scalatest.{ MustMatchers, WordSpec }
import play.api.libs.json._

class JdbcResultSpec extends WordSpec with MustMatchers {

  "A JDBC Result container" must {

    "convert to CSV" in {
      JdbcResults.flat.toCsv.toList must be {
        List(
          """"int", "string", "bool", "timestamp"""",
          """1, "str1", true, "2018-06-25T09:00:00Z"""",
          """2, "str2", false, "2018-06-25T09:30:00Z"""",
          """<null>, <null>, false, <null>"""
        )
      }
    }

    "convert to json" in {
      JdbcResults.flat.toJson.toList must be {
        Seq(
          JsObject {
            Seq(
              "int" -> JsNumber(1),
              "string" -> JsString("str1"),
              "bool" -> JsBoolean(true),
              "timestamp" -> JsString("2018-06-25T09:00:00Z")
            )
          },
          JsObject {
            Seq(
              "int" -> JsNumber(2),
              "string" -> JsString("str2"),
              "bool" -> JsBoolean(false),
              "timestamp" -> JsString("2018-06-25T09:30:00Z")
            )
          },
          JsObject {
            Seq(
              "int" -> JsNull,
              "string" -> JsNull,
              "bool" -> JsBoolean(false),
              "timestamp" -> JsNull
            )
          }
        )
      }
    }
  }
}

object JdbcResults {

  private val offset = OffsetDateTime.now().getOffset

  private def timestamp(dateTime: LocalDateTime) = Timestamp.from {
    dateTime.toInstant(offset)
  }

  val flat = JdbcResult(
    header = Seq("int", "string", "bool", "timestamp"),
    rows = Vector(
      List(
        Int.box(1),
        "str1",
        Boolean.box(true),
        timestamp { LocalDateTime.of(2018, 6, 25, 9, 0) }
      ),
      List(
        Int.box(2),
        "str2",
        Boolean.box(false),
        timestamp { LocalDateTime.of(2018, 6, 25, 9, 30) }
      ),
      List(
        null,
        null,
        Boolean.box(false),
        null
      )
    )
  )
}
Example 12
Source File: ABTest.scala From algoliasearch-client-scala with MIT License
package algolia.responses

import java.time.LocalDateTime

import algolia.objects.Query

case class ABTestsResponse(abtests: Seq[ABTestResponse], count: Int, total: Int)

case class ABTestResponse(
    abTestID: Int,
    clickSignificance: Option[Int],
    conversionSignificance: Option[Float],
    createdAt: LocalDateTime,
    endAt: LocalDateTime,
    name: String,
    status: String,
    variants: Seq[VariantResponse]
)

case class VariantResponse(
    averageClickPosition: Option[Int],
    clickCount: Option[Int],
    clickThroughRate: Option[Float],
    conversionCount: Option[Int],
    conversionRate: Option[Float],
    description: String,
    index: String,
    noResultCount: Option[Int],
    searchCount: Option[Int],
    trafficPercentage: Int,
    userCount: Option[Int],
    customSearchParameters: Option[Query]
)
Example 13
Source File: Main.scala From dtc with Apache License 2.0
package dtc.examples

import java.time.{LocalDate, LocalDateTime, LocalTime, Month}

import dtc.instances.localDateTime._

// scalastyle:off
object Main extends App {

  val calendar = Calendar(List(
    CalendarEvent(
      LocalDateTime.of(LocalDate.now(), LocalTime.of(10, 0)),
      LocalDateTime.of(LocalDate.now(), LocalTime.of(11, 0)),
      "Breakfast"
    ),
    CalendarEvent(
      LocalDateTime.of(LocalDate.now().minusDays(2), LocalTime.of(12, 0)),
      LocalDateTime.of(LocalDate.now().minusDays(2), LocalTime.of(14, 0)),
      "Meeting"
    ),
    CalendarEvent(
      LocalDateTime.of(2016, Month.OCTOBER, 9, 11, 0),
      LocalDateTime.of(2016, Month.OCTOBER, 9, 11, 0),
      "Birthday party"
    )
  ))

  println(calendar.eventsAfter(LocalDateTime.now().minusDays(1L)).mkString(", "))
  println(calendar.onlyWorkDays.mkString(", "))

  val period = Period(LocalDateTime.now(), LocalDateTime.now().plusDays(1L))
  println(period.durationInMinutes)
  println(period.durationInSeconds)
  println(period.hours.mkString("\n"))
}
Example 14
Source File: LocalDateTimeTests.scala From dtc with Apache License 2.0
package dtc.tests

import java.time.{Duration, LocalDateTime, ZoneOffset}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import com.fortysevendeg.scalacheck.datetime.jdk8.ArbitraryJdk8.genZonedDateTime
import dtc.instances.localDateTime._
import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Cogen}
import dtc.instances.providers.realLocalDateTimeProvider

class JVMLocalDateTimeTests extends DTCSuiteJVM {

  implicit val arbT: Arbitrary[LocalDateTime] = Arbitrary(genZonedDateTime.map(_.toLocalDateTime))
  implicit val cogenT: Cogen[LocalDateTime] = Cogen(_.toEpochSecond(ZoneOffset.UTC))

  val overflowSafePairGen = for {
    dt <- arbitrary[LocalDateTime]
    dur <- arbitrary[Duration]
  } yield (dt, dur)

  val ldtTests = LocalDateTimeTests[LocalDateTime](overflowSafePairGen, genYear)
  checkAll("java.time.LocalDateTime", DateTimeTests[LocalDateTime](overflowSafePairGen).dateTime)
  checkAll("java.time.LocalDateTime", ldtTests.localDateTime)
  checkAll("java.time.LocalDateTime", ldtTests.monthUntilFractionHandling)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].order)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].partialOrder)
  checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].eqv)
  checkAll("java.time.LocalDateTime", ProviderTests[LocalDateTime](genTimeZone).provider)
}
Example 15
Source File: Request.scala From reliable-http-client with Apache License 2.0
package rhttpc.client.protocol

import java.time.{Instant, LocalDateTime}

import scala.concurrent.duration.FiniteDuration

case class Request[+T](correlated: Correlated[T],
                       attempt: Int,
                       lastPlannedDelay: Option[FiniteDuration],
                       firstAttemptTimestamp: Instant) {

  def msg = correlated.msg

  def correlationId = correlated.correlationId

  def isFirstAttempt: Boolean = attempt == 1

  def nextAttempt: Request[T] = copy(attempt = attempt + 1)
}

object Request {

  def apply[T](correlated: Correlated[T], attempt: Int, lastPlannedDelay: FiniteDuration, firstAttemptTimestamp: Instant): Request[T] = {
    Request(
      correlated = correlated,
      attempt = attempt,
      lastPlannedDelay = Some(lastPlannedDelay),
      firstAttemptTimestamp = firstAttemptTimestamp
    )
  }

  def firstAttempt[T](correlated: Correlated[T], firstAttemptTimestamp: Instant): Request[T] = {
    Request(
      correlated = correlated,
      attempt = 1,
      lastPlannedDelay = None,
      firstAttemptTimestamp = firstAttemptTimestamp
    )
  }
}
Example 16
Source File: SplitAfter.scala From akka_streams_tutorial with MIT License
package sample.stream_shared_state

import java.time.{Instant, LocalDateTime, ZoneOffset}

import akka.Done
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.immutable._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}

object SplitAfter extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SplitAfter")
  implicit val executionContext = system.dispatcher

  private def hasSecondChanged: () => Seq[(Int, Instant)] => Iterable[(Instant, Boolean)] = {
    () => { slidingElements =>
      if (slidingElements.size == 2) {
        val current = slidingElements.head
        val next = slidingElements.tail.head
        val currentBucket = LocalDateTime.ofInstant(current._2, ZoneOffset.UTC).withNano(0)
        val nextBucket = LocalDateTime.ofInstant(next._2, ZoneOffset.UTC).withNano(0)
        List((current._2, currentBucket != nextBucket))
      } else {
        val current = slidingElements.head
        List((current._2, false))
      }
    }
  }

  val done: Future[Done] = Source(1 to 100)
    .throttle(1, 100.millis)
    .map(elem => (elem, Instant.now()))
    .sliding(2)                          // allows to compare this element with the next element
    .statefulMapConcat(hasSecondChanged) // stateful decision
    .splitAfter(_._2)                    // split when second has changed
    .map(_._1)                           // proceed with payload
    .fold(0)((acc, _) => acc + 1)        // sum
    .mergeSubstreams
    .runWith(Sink.foreach(each => println(s"Elements in group: $each")))

  terminateWhen(done)

  def terminateWhen(done: Future[_]) = {
    done.onComplete {
      case Success(_) =>
        println("Flow Success. About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
}
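The per-second grouping above works by truncating nanoseconds before comparing; a small illustration of the bucketing (not part of the original file):

import java.time.{Instant, LocalDateTime, ZoneOffset}

def bucket(i: Instant): LocalDateTime = LocalDateTime.ofInstant(i, ZoneOffset.UTC).withNano(0)

val t1 = Instant.parse("2020-01-15T10:30:45.120Z")
val t2 = Instant.parse("2020-01-15T10:30:45.980Z")
val t3 = Instant.parse("2020-01-15T10:30:46.010Z")

bucket(t1) == bucket(t2) // true: same second, no split
bucket(t2) == bucket(t3) // false: the second changed, so the stream splits here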
Example 17
Source File: SuggestedSolrField.scala From smui with Apache License 2.0
package models

import java.sql.Connection
import java.time.LocalDateTime

import anorm.SqlParser.get
import anorm._
import play.api.libs.json.{Json, OFormat}

class SuggestedSolrFieldId(id: String) extends Id(id)
object SuggestedSolrFieldId extends IdObject[SuggestedSolrFieldId](new SuggestedSolrFieldId(_))

case class SuggestedSolrField(id: SuggestedSolrFieldId = SuggestedSolrFieldId(), name: String)

object SuggestedSolrField {

  implicit val jsonFormat: OFormat[SuggestedSolrField] = Json.format[SuggestedSolrField]

  val TABLE_NAME = "suggested_solr_field"
  val ID = "id"
  val NAME = "name"
  val SOLR_INDEX_ID = "solr_index_id"
  val LAST_UPDATE = "last_update"

  val sqlParser: RowParser[SuggestedSolrField] = {
    get[SuggestedSolrFieldId](s"$TABLE_NAME.$ID") ~
      get[String](s"$TABLE_NAME.$NAME") map { case id ~ name =>
        SuggestedSolrField(id, name)
      }
  }

  def listAll(solrIndexId: SolrIndexId)(implicit connection: Connection): List[SuggestedSolrField] = {
    SQL"select * from #$TABLE_NAME where #$SOLR_INDEX_ID = $solrIndexId order by #$NAME asc".as(sqlParser.*)
  }

  def insert(solrIndexId: SolrIndexId, fieldName: String)(implicit connection: Connection): SuggestedSolrField = {
    val field = SuggestedSolrField(SuggestedSolrFieldId(), fieldName)
    SQL(s"insert into $TABLE_NAME($ID, $NAME, $SOLR_INDEX_ID, $LAST_UPDATE) values ({$ID}, {$NAME}, {$SOLR_INDEX_ID}, {$LAST_UPDATE})")
      .on(
        ID -> field.id,
        NAME -> fieldName,
        SOLR_INDEX_ID -> solrIndexId,
        LAST_UPDATE -> LocalDateTime.now()
      )
      .execute()
    field
  }
}
Example 18
Source File: SearchManagementRepository.scala From smui with Apache License 2.0
package models

import java.io.FileInputStream
import java.time.LocalDateTime
import java.util.UUID
import java.util.Date

import anorm.SqlParser.get
import javax.inject.Inject
import anorm._
import models.FeatureToggleModel.FeatureToggleService
import models.SearchInput.ID
import play.api.db.DBApi

@javax.inject.Singleton
class SearchManagementRepository @Inject()(dbapi: DBApi, toggleService: FeatureToggleService)(implicit ec: DatabaseExecutionContext) {

  private val db = dbapi.database("default")

  // On startup, always sync predefined tags with the DB
  syncPredefinedTagsWithDB()

  private def syncPredefinedTagsWithDB(): Unit = {
    db.withTransaction { implicit connection =>
      if (toggleService.isRuleTaggingActive) {
        for (fileName <- toggleService.predefinedTagsFileName) {
          val tags = PredefinedTag.fromStream(new FileInputStream(fileName))
          PredefinedTag.updateInDB(tags)
        }
      }
    }
  }

  def addNewSearchInput(solrIndexId: SolrIndexId, searchInputTerm: String, tags: Seq[InputTagId]): SearchInputId = db.withConnection { implicit connection =>
    val id = SearchInput.insert(solrIndexId, searchInputTerm).id
    if (tags.nonEmpty) {
      TagInputAssociation.updateTagsForSearchInput(id, tags)
    }
    id
  }

  def getDetailedSearchInput(searchInputId: SearchInputId): Option[SearchInputWithRules] = db.withConnection { implicit connection =>
    SearchInputWithRules.loadById(searchInputId)
  }

  def updateSearchInput(searchInput: SearchInputWithRules): Unit = db.withTransaction { implicit connection =>
    SearchInputWithRules.update(searchInput)
  }

  def deleteSearchInput(searchInputId: String): Int = db.withTransaction { implicit connection =>
    SearchInputWithRules.delete(SearchInputId(searchInputId))
  }

  def listAllSuggestedSolrFields(solrIndexId: String): List[SuggestedSolrField] = db.withConnection { implicit connection =>
    SuggestedSolrField.listAll(SolrIndexId(solrIndexId))
  }

  def addNewSuggestedSolrField(solrIndexId: SolrIndexId, suggestedSolrFieldName: String): SuggestedSolrField = db.withConnection { implicit connection =>
    SuggestedSolrField.insert(solrIndexId, suggestedSolrFieldName)
  }

  def addNewDeploymentLogOk(solrIndexId: String, targetPlatform: String): Boolean = db.withConnection { implicit connection =>
    SQL("insert into deployment_log(id, solr_index_id, target_platform, last_update, result) values ({id}, {solr_index_id}, {target_platform}, {last_update}, {result})")
      .on(
        'id -> UUID.randomUUID().toString,
        'solr_index_id -> solrIndexId,
        'target_platform -> targetPlatform,
        'last_update -> new Date(),
        'result -> 0
      )
      .execute()
  }

  case class DeploymentLogDetail(id: String, lastUpdate: LocalDateTime, result: Int)

  val sqlParserDeploymentLogDetail: RowParser[DeploymentLogDetail] = {
    get[String](s"deployment_log.id") ~
      get[LocalDateTime](s"deployment_log.last_update") ~
      get[Int](s"deployment_log.result") map { case id ~ lastUpdate ~ result =>
        DeploymentLogDetail(id, lastUpdate, result)
      }
  }

  def lastDeploymentLogDetail(solrIndexId: String, targetPlatform: String): Option[DeploymentLogDetail] = db.withConnection { implicit connection =>
    SQL"select * from deployment_log where solr_index_id = $solrIndexId and target_platform = $targetPlatform order by last_update desc".as(sqlParserDeploymentLogDetail.*).headOption
  }
}
Example 19
Source File: SearchInput.scala From smui with Apache License 2.0
package models

import java.sql.Connection
import java.time.LocalDateTime

import anorm.SqlParser.get
import anorm._

class SearchInputId(id: String) extends Id(id)
object SearchInputId extends IdObject[SearchInputId](new SearchInputId(_))

case class SearchInput(id: SearchInputId = SearchInputId(),
                       solrIndexId: SolrIndexId,
                       term: String,
                       lastUpdate: LocalDateTime,
                       isActive: Boolean,
                       comment: String) {

  import SearchInput._

  def status: Int = statusFromIsActive(isActive)

  def toNamedParameters: Seq[NamedParameter] = Seq(
    ID -> id,
    SOLR_INDEX_ID -> solrIndexId,
    TERM -> term,
    LAST_UPDATE -> lastUpdate,
    STATUS -> status,
    COMMENT -> comment
  )
}

object SearchInput {

  val TABLE_NAME = "search_input"
  val ID = "id"
  val TERM = "term"
  val SOLR_INDEX_ID = "solr_index_id"
  val LAST_UPDATE = "last_update"
  val STATUS = "status"
  val COMMENT = "comment"

  def isActiveFromStatus(status: Int): Boolean = {
    (status & 0x01) == 0x01
  }

  def statusFromIsActive(isActive: Boolean) = {
    if (isActive) 0x01 else 0x00
  }

  val sqlParser: RowParser[SearchInput] = {
    get[SearchInputId](s"$TABLE_NAME.$ID") ~
      get[String](s"$TABLE_NAME.$TERM") ~
      get[SolrIndexId](s"$TABLE_NAME.$SOLR_INDEX_ID") ~
      get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") ~
      get[Int](s"$TABLE_NAME.$STATUS") ~
      get[String](s"$TABLE_NAME.$COMMENT") map {
        case id ~ term ~ indexId ~ lastUpdate ~ status ~ comment =>
          SearchInput(id, indexId, term, lastUpdate, isActiveFromStatus(status), comment)
      }
  }

  def insert(solrIndexId: SolrIndexId, term: String)(implicit connection: Connection): SearchInput = {
    val input = SearchInput(SearchInputId(), solrIndexId, term, LocalDateTime.now(), true, "")
    SQL(s"insert into $TABLE_NAME ($ID, $TERM, $SOLR_INDEX_ID, $LAST_UPDATE, $STATUS, $COMMENT) values ({$ID}, {$TERM}, {$SOLR_INDEX_ID}, {$LAST_UPDATE}, {$STATUS}, {$COMMENT})")
      .on(input.toNamedParameters: _*).execute()
    input
  }

  def loadAllForIndex(solrIndexId: SolrIndexId)(implicit connection: Connection): List[SearchInput] = {
    SQL"select * from #$TABLE_NAME where #$SOLR_INDEX_ID = $solrIndexId order by #$TERM asc".as(sqlParser.*)
  }

  def loadAllIdsForIndex(solrIndexId: SolrIndexId)(implicit connection: Connection): List[SearchInputId] = {
    SQL"select #$ID from #$TABLE_NAME where #$SOLR_INDEX_ID = $solrIndexId order by #$TERM asc".as(get[SearchInputId](ID).*)
  }

  def loadById(id: SearchInputId)(implicit connection: Connection): Option[SearchInput] = {
    SQL"select * from #$TABLE_NAME where #$ID = $id".as(sqlParser.*).headOption
  }

  def update(id: SearchInputId, term: String, isActive: Boolean, comment: String)(implicit connection: Connection): Unit = {
    SQL"update #$TABLE_NAME set #$TERM = $term, #$LAST_UPDATE = ${LocalDateTime.now()}, #$STATUS = ${statusFromIsActive(isActive)}, #$COMMENT = $comment where #$ID = $id".executeUpdate()
  }

  def delete(id: SearchInputId)(implicit connection: Connection): Int = {
    SQL"delete from #$TABLE_NAME where #$ID = $id".executeUpdate()
  }
}
Example 20
Source File: InputTag.scala From smui with Apache License 2.0
package models

import java.sql.Connection
import java.time.LocalDateTime

import anorm._
import anorm.SqlParser.get
import play.api.libs.json._

class InputTagId(id: String) extends Id(id)
object InputTagId extends IdObject[InputTagId](new InputTagId(_))

case class InputTag(id: InputTagId,
                    solrIndexId: Option[SolrIndexId],
                    property: Option[String],
                    value: String,
                    exported: Boolean,
                    predefined: Boolean,
                    lastUpdate: LocalDateTime) {

  import InputTag._

  def toNamedParameters: Seq[NamedParameter] = Seq(
    ID -> id,
    SOLR_INDEX_ID -> solrIndexId,
    PROPERTY -> property,
    VALUE -> value,
    EXPORTED -> (if (exported) 1 else 0),
    PREDEFINED -> (if (predefined) 1 else 0),
    LAST_UPDATE -> lastUpdate
  )

  def tagContent = TagContent(solrIndexId, property, value)

  def displayValue: String = property.map(p => s"$p:").getOrElse("") + value
}

object InputTag {

  val TABLE_NAME = "input_tag"
  val ID = "id"
  val SOLR_INDEX_ID = "solr_index_id"
  val PROPERTY = "property"
  val VALUE = "tag_value"
  val EXPORTED = "exported"
  val PREDEFINED = "predefined"
  val LAST_UPDATE = "last_update"

  implicit val jsonReads: Reads[InputTag] = Json.reads[InputTag]

  private val defaultWrites: OWrites[InputTag] = Json.writes[InputTag]
  implicit val jsonWrites: OWrites[InputTag] = OWrites[InputTag] { tag =>
    Json.obj("displayValue" -> tag.displayValue) ++ defaultWrites.writes(tag)
  }

  def create(solrIndexId: Option[SolrIndexId],
             property: Option[String],
             value: String,
             exported: Boolean,
             predefined: Boolean = false): InputTag = {
    InputTag(InputTagId(), solrIndexId, property, value, exported, predefined, LocalDateTime.now())
  }

  val sqlParser: RowParser[InputTag] =
    get[InputTagId](s"$TABLE_NAME.$ID") ~
      get[Option[SolrIndexId]](s"$TABLE_NAME.$SOLR_INDEX_ID") ~
      get[Option[String]](s"$TABLE_NAME.$PROPERTY") ~
      get[String](s"$TABLE_NAME.$VALUE") ~
      get[Int](s"$TABLE_NAME.$EXPORTED") ~
      get[Int](s"$TABLE_NAME.$PREDEFINED") ~
      get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map {
        case id ~ solrIndexId ~ property ~ value ~ exported ~ predefined ~ lastUpdate =>
          InputTag(id, solrIndexId, property, value, exported > 0, predefined > 0, lastUpdate)
      }

  def insert(tags: InputTag*)(implicit connection: Connection): Unit = {
    if (tags.nonEmpty) {
      BatchSql(s"insert into $TABLE_NAME ($ID, $SOLR_INDEX_ID, $PROPERTY, $VALUE, $EXPORTED, $PREDEFINED, $LAST_UPDATE) " +
        s"values ({$ID}, {$SOLR_INDEX_ID}, {$PROPERTY}, {$VALUE}, {$EXPORTED}, {$PREDEFINED}, {$LAST_UPDATE})",
        tags.head.toNamedParameters,
        tags.tail.map(_.toNamedParameters): _*
      ).execute()
    }
  }

  def loadAll()(implicit connection: Connection): Seq[InputTag] = {
    SQL(s"select * from $TABLE_NAME order by $PROPERTY asc, $VALUE asc")
      .as(sqlParser.*)
  }

  def deleteByIds(ids: Seq[InputTagId])(implicit connection: Connection): Unit = {
    for (idGroup <- ids.grouped(100)) {
      SQL"delete from #$TABLE_NAME where #$ID in ($idGroup)".executeUpdate()
    }
  }
}
Example 21
Source File: DBCompatibilitySpec.scala From smui with Apache License 2.0
package models

import java.time.LocalDateTime

import models.rules._
import org.scalatest.{FlatSpec, Matchers}
import play.api.db.Database

abstract class DBCompatibilitySpec extends FlatSpec with Matchers with TestData {

  protected def db: Database

  // Set millis/nanos of second to 0 since MySQL does not save them
  // and so comparisons would fail if they were set
  private val now = LocalDateTime.now().withNano(0)

  "Most important DB queries" should "work using this database" in {
    db.withConnection { implicit conn =>
      SolrIndex.insert(indexDe)
      SolrIndex.loadNameById(indexDe.id) shouldBe indexDe.name
      SolrIndex.listAll shouldBe Seq(indexDe)

      val tag = InputTag(InputTagId(), Some(indexDe.id), Some("testProperty"), "testValue",
        exported = true, predefined = false, now)
      InputTag.insert(tag)
      InputTag.loadAll() shouldBe Seq(tag)

      val input = SearchInput.insert(indexDe.id, "test")
      val inputWithRules = SearchInputWithRules(input.id, input.term,
        List(SynonymRule(SynonymRuleId(), SynonymRule.TYPE_UNDIRECTED, "testSynonym", isActive = true)),
        List(UpDownRule(UpDownRuleId(), UpDownRule.TYPE_UP, 5, "upDownTerm", isActive = true)),
        List(FilterRule(FilterRuleId(), "filterTerm", isActive = true)),
        List(DeleteRule(DeleteRuleId(), "deleteTerm", isActive = true)),
        List(RedirectRule(RedirectRuleId(), "/testTarget", isActive = true)),
        List(tag),
        true,
        "Some search input comment."
      )
      SearchInputWithRules.update(inputWithRules)
      SearchInputWithRules.loadById(input.id) shouldBe Some(inputWithRules)

      SearchInputWithRules.loadWithUndirectedSynonymsAndTagsForSolrIndexId(indexDe.id) shouldBe Seq(
        inputWithRules.copy(upDownRules = Nil, filterRules = Nil, deleteRules = Nil, redirectRules = Nil)
      )

      SearchInputWithRules.delete(input.id)
      SearchInputWithRules.loadById(input.id) shouldBe None

      val field1 = SuggestedSolrField.insert(indexDe.id, "title")
      val field2 = SuggestedSolrField.insert(indexDe.id, "description")
      SuggestedSolrField.listAll(indexDe.id).toSet shouldBe Set(field1, field2)

      InputTag.deleteByIds(Seq(tag.id))
      InputTag.loadAll() shouldBe Nil
    }
  }
}
Example 22
Source File: DBSpec.scala From dr-cla with BSD 3-Clause "New" or "Revised" License
package utils

import java.time.LocalDateTime

import models.{ClaSignature, Contact}
import modules.Database
import org.flywaydb.play.PlayInitializer
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.Helpers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try

class DBSpec extends PlaySpec with GuiceOneAppPerSuite {

  val dbUrl = sys.env.getOrElse("DATABASE_URL", "postgres://salesforcecla:password@localhost:5432/salesforcecla-test")

  val testConfig = Map("db.default.url" -> dbUrl)

  implicit override def fakeApplication() = new GuiceApplicationBuilder().configure(testConfig).build()

  lazy val database = app.injector.instanceOf[Database]
  lazy val db = app.injector.instanceOf[DB]
  lazy val playIntializer = app.injector.instanceOf[PlayInitializer]

  Try(await(database.ctx.executeQuery("drop schema salesforce cascade")))
  Try(await(database.ctx.executeQuery("drop table schema_version")))

  playIntializer.onStart()

  "Contact" must {
    "be creatable" in {
      val contact = await(db.createContact(Contact(-1, Some("foo"), "bar", "[email protected]", "foobar")))
      contact.id must not equal -1
    }
    "be creatable with null firstname" in {
      val contact = await(db.createContact(Contact(-1, None, "blah", "[email protected]", "blah")))
      contact.id must not equal -1
    }
    "be able to get one that exists by the gitHubId" in {
      val contact = await(db.findContactByGitHubId("foobar"))
      contact mustBe 'defined
    }
    "fail to get one that doesn't exist by a gitHubId" in {
      val contact = await(db.findContactByGitHubId("asdf"))
      contact mustBe None
    }
    "work with null firstname" in {
      val contact = await(db.findContactByGitHubId("blah"))
      contact mustBe 'defined
      contact.get.firstName mustBe empty
    }
  }

  "ClaSignature" must {
    "be creatable" in {
      val contact = Contact(-1, Some("foo"), "bar", "[email protected]", "foobar")
      val claSignature = await(db.createClaSignature(ClaSignature(-1, contact.gitHubId, LocalDateTime.now(), "0.0.0")))
      claSignature.id must not equal -1
    }
    "be queryable with one github id" in {
      val claSignatures = await(db.findClaSignaturesByGitHubIds(Set(GitHub.User("foobar"))))
      claSignatures.size mustEqual 1
      claSignatures.head.contactGitHubId mustEqual "foobar"
    }
    "be queryable with a set of github ids" in {
      val claSignatures = await(db.findClaSignaturesByGitHubIds(Set(GitHub.User("foobar"), GitHub.User("jondoe"))))
      claSignatures.size mustEqual 1
      claSignatures.head.contactGitHubId mustEqual "foobar"
    }
  }

  "Contact.fullNameToFirstAndLast" must {
    "work with no names" in {
      Contact.fullNameToFirstAndLast("") must equal (None, None)
    }
    "work with one name" in {
      Contact.fullNameToFirstAndLast("Foo") must equal (None, Some("Foo"))
    }
    "work with two names" in {
      Contact.fullNameToFirstAndLast("Foo Bar") must equal (Some("Foo"), Some("Bar"))
    }
    "work with three names" in {
      Contact.fullNameToFirstAndLast("Foo Baz Bar") must equal (Some("Foo Baz"), Some("Bar"))
    }
  }
}
Example 23
Source File: CassandraStorage.scala From graphsense-transformation with MIT License
package at.ac.ait.storage

import com.datastax.spark.connector.rdd.ValidRDDType
import com.datastax.spark.connector.rdd.reader.RowReaderFactory
import com.datastax.spark.connector.writer.{RowWriterFactory}
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import org.apache.spark.sql.{Dataset, Encoder, SparkSession}
import scala.reflect.ClassTag

import at.ac.ait.Util._

class CassandraStorage(spark: SparkSession) {

  import spark.implicits._
  import com.datastax.spark.connector._

  def load[T <: Product: ClassTag: RowReaderFactory: ValidRDDType: Encoder](
      keyspace: String,
      tableName: String,
      columns: ColumnRef*
  ) = {
    spark.sparkContext.setJobDescription(s"Loading table ${tableName}")
    val table = spark.sparkContext.cassandraTable[T](keyspace, tableName)
    if (columns.isEmpty)
      table.toDS().as[T]
    else
      table.select(columns: _*).toDS().as[T]
  }

  def store[T <: Product: RowWriterFactory](
      keyspace: String,
      tableName: String,
      df: Dataset[T]
  ) = {
    spark.sparkContext.setJobDescription(s"Writing table ${tableName}")
    val dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
    val timestamp = LocalDateTime.now().format(dtf)
    println(s"[$timestamp] Writing table ${tableName}")
    time { df.rdd.saveToCassandra(keyspace, tableName) }
  }
}
Example 24
Source File: Sessionize.scala From Mastering-Scala-Machine-Learning with MIT License
package org.akozlov.chapter06

import java.io._
import java.time.ZoneOffset
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel

object Sessionize extends App {

  val sc = new SparkContext("local[8]", "Sessionize", new SparkConf())

  val checkoutPattern = ".*>checkout.*".r.pattern

  // a basic page view structure
  case class PageView(ts: String, path: String) extends Serializable with Ordered[PageView] {
    override def toString: String = {
      s"($ts #$path)"
    }
    def compare(other: PageView) = ts compare other.ts
  }

  // represent a session
  case class Session[A <: PageView](id: String, visits: Seq[A]) extends Serializable {
    override def toString: String = {
      val vsts = visits.mkString("[", ",", "]")
      s"($id -> $vsts)"
    }
  }

  def toEpochSeconds(str: String) = {
    LocalDateTime.parse(str, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")).toEpochSecond(ZoneOffset.UTC)
  }

  val sessions = sc.textFile("data/clickstream")
    .map(line => { val parts = line.split("\t"); (parts(4), new PageView(parts(0), parts(20))) })
    .groupByKey.map(x => { new Session(x._1, x._2.toSeq.sorted) })
    .cache

  // sessions.take(100).foreach(println)

  def findAllCheckoutSessions(s: Session[PageView]) = {
    s.visits.tails.filter {
      _ match {
        case PageView(ts1, "mycompanycom>homepage") :: PageView(ts2, page) :: tail if (page != "mycompanycom>homepage") => true;
        case _ => false
      }
    }
    .foldLeft(Seq[Session[PageView]]()) {
      case (r, x) => {
        x.find(y => checkoutPattern.matcher(y.path).matches) match {
          case Some(checkout) if (toEpochSeconds(checkout.ts) > toEpochSeconds(x.head.ts) + 60) =>
            r.:+(new Session(s.id, x.slice(0, x.indexOf(checkout))))
          case _ => r
        }
      }
    }
  }

  val prodLandingSessions = sessions.flatMap(findAllCheckoutSessions)

  prodLandingSessions.collect.foreach(println)

  sc.stop()
}
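As a quick sanity check on toEpochSeconds (illustrative): the string is parsed with the "yyyy-MM-dd HH:mm:ss" pattern and the resulting LocalDateTime is anchored to UTC.

toEpochSeconds("2020-01-15 10:30:00") // 1579084200L, i.e. 2020-01-15T10:30:00Z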
Example 25
Source File: RootController.scala From spring-boot-scala-example with Apache License 2.0
package spring.boot.scala.example.controller

import java.time.LocalDateTime

import io.micrometer.core.annotation.Timed
import org.springframework.beans.factory.annotation.Value
import org.springframework.web.bind.annotation.RequestMethod.GET
import org.springframework.web.bind.annotation.{RequestMapping, RestController}

@RestController
class RootController {

  @Value("${application.name}")
  val appName: String = null

  @RequestMapping(path = Array("/"), method = Array(GET))
  @Timed
  def root(): Map[String, Any] = {
    Map("name" -> appName,
        "java.version" -> System.getProperty("java.version"),
        "now" -> LocalDateTime.now())
  }
}
Example 26
Source File: InstantModule.scala From milan with Apache License 2.0
package com.amazon.milan.dataformats

import java.time.format.{DateTimeFormatter, DateTimeParseException}
import java.time.temporal.{TemporalAccessor, TemporalQuery}
import java.time.{Instant, LocalDateTime, ZoneOffset}

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer}

class InstantModule extends SimpleModule {
  this.addDeserializer[Instant](classOf[Instant], new MilanInstantDeserializer)
}

class MilanInstantDeserializer extends JsonDeserializer[Instant] {
  private val formatsToTry = List(
    DateTimeFormatter.ISO_INSTANT,
    DateTimeFormatter.ISO_DATE_TIME,
    DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"),
    DateTimeFormatter.ISO_DATE)

  override def deserialize(parser: JsonParser, context: DeserializationContext): Instant = {
    val textValue = parser.getText
    this.parseInstant(textValue)
  }

  private val createInstant = new TemporalQuery[Instant] {
    override def queryFrom(temporal: TemporalAccessor): Instant =
      LocalDateTime.from(temporal).toInstant(ZoneOffset.UTC)
  }

  private def parseInstant(dateTimeString: String): Instant = {
    // Try a bunch of formats.
    // TODO: This is awful but will do for now.
    formatsToTry.map(formatter => this.tryParseFormat(dateTimeString, formatter))
      .filter(_.isDefined)
      .map(_.get)
      .headOption match {
      case Some(instant) => instant
      case None => throw new DateTimeParseException(s"Unable to parse datetime string '$dateTimeString'.", dateTimeString, 0)
    }
  }

  private def tryParseFormat(dateTimeString: String, formatter: DateTimeFormatter): Option[Instant] = {
    try {
      Some(formatter.parse(dateTimeString, this.createInstant))
    }
    catch {
      case _: DateTimeParseException => None
    }
  }
}
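A hedged usage sketch (this call site is assumed, not from the Milan sources): registering InstantModule lets Jackson deserialize an Instant from any of the listed formats, with non-ISO strings interpreted as UTC.

import com.fasterxml.jackson.databind.ObjectMapper
import java.time.Instant

val mapper = new ObjectMapper().registerModule(new InstantModule)

// "yyyy-MM-dd HH:mm:ss" is not ISO-8601, so the first two formatters fail to parse it
// and the custom pattern applies; the LocalDateTime is converted to an Instant at UTC.
val parsed = mapper.readValue("\"2020-01-15 10:30:00\"", classOf[Instant])
// parsed == Instant.parse("2020-01-15T10:30:00Z")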
Example 27
Source File: CommandsTest.scala From endpoints4s with MIT License
package cqrs.commands

import java.time.{LocalDateTime, OffsetDateTime, ZoneOffset}
import java.util.UUID

import org.scalatest.BeforeAndAfterAll
import endpoints4s.play.client.{Endpoints, JsonEntitiesFromCodecs}
import endpoints4s.play.server.PlayComponents
import play.api.Mode
import play.api.libs.ws.ahc.{AhcWSClient, AhcWSClientConfig}
import play.core.server.{NettyServer, ServerConfig}

import scala.concurrent.Future
import scala.math.BigDecimal
import org.scalatest.freespec.AsyncFreeSpec

class CommandsTest extends AsyncFreeSpec with BeforeAndAfterAll {

  private val server = NettyServer.fromRouterWithComponents(ServerConfig(mode = Mode.Test)) { components =>
    new Commands(PlayComponents.fromBuiltInComponents(components)).routes
  }
  val app = server.applicationProvider.get.get
  import app.materializer
  private val wsClient = AhcWSClient(AhcWSClientConfig())

  object client
      extends Endpoints("http://localhost:9000", wsClient)
      with JsonEntitiesFromCodecs
      with CommandsEndpoints

  override def afterAll(): Unit = {
    server.stop()
    wsClient.close()
  }

  "Commands" - {

    val arbitraryDate = OffsetDateTime
      .of(LocalDateTime.of(2017, 1, 8, 12, 34, 56), ZoneOffset.UTC)
      .toInstant
    val arbitraryValue = BigDecimal(10)

    "create a new meter" in {
      client.command(CreateMeter("electricity")).map { maybeEvent =>
        assert(maybeEvent.collect { case StoredEvent(_, MeterCreated(_, "electricity")) => () }.nonEmpty)
      }
    }
    "create a meter and add readings to it" in {
      for {
        maybeCreatedEvent <- client.command(CreateMeter("water"))
        id <- maybeCreatedEvent
          .collect { case StoredEvent(_, MeterCreated(id, _)) => id }
          .fold[Future[UUID]](Future.failed(new NoSuchElementException))(Future.successful)
        maybeAddedEvent <- client.command(AddRecord(id, arbitraryDate, arbitraryValue))
        _ <- maybeAddedEvent
          .collect { case StoredEvent(_, RecordAdded(`id`, `arbitraryDate`, `arbitraryValue`)) => () }
          .fold[Future[Unit]](Future.failed(new NoSuchElementException))(Future.successful)
      } yield assert(true)
    }
  }
}
Example 28
Source File: JsonJacksonMarshallerTest.scala From wix-http-testkit with MIT License
package com.wix.e2e.http.json

import java.time.LocalDateTime
import java.util.Optional

import com.fasterxml.jackson.databind.ObjectMapper
import com.wix.e2e.http.api.Marshaller
import com.wix.e2e.http.json.MarshallingTestObjects.SomeCaseClass
import com.wix.test.random._
import org.joda.time.DateTimeZone.UTC
import org.joda.time.{DateTime, DateTimeZone}
import org.specs2.mutable.Spec
import org.spec2.specification.Scope

class JsonJacksonMarshallerTest extends Spec {

  trait ctx extends Scope {
    val someStr = randomStr
    val javaDateTime = LocalDateTime.now()
    val someCaseClass = SomeCaseClass(randomStr, randomInt)
    val dateTime = new DateTime
    val dateTimeUTC = new DateTime(UTC)

    val marshaller: Marshaller = new JsonJacksonMarshaller
  }

  "JsonJacksonMarshaller" should {

    "marshall scala option properly" in new ctx {
      marshaller.unmarshall[Option[String]]( marshaller.marshall( Some(someStr) ) ) must beSome(someStr)
    }

    "marshall scala case classes properly" in new ctx {
      marshaller.unmarshall[SomeCaseClass]( marshaller.marshall( someCaseClass ) ) must_=== someCaseClass
    }

    "marshall datetime without zone" in new ctx {
      marshaller.unmarshall[DateTime]( marshaller.marshall( dateTime.withZone(DateTimeZone.getDefault) ) ) must_=== dateTime.withZone(UTC)
    }

    "marshall date time to textual format in UTC" in new ctx {
      marshaller.marshall( dateTime ) must contain(dateTime.withZone(UTC).toString)
    }

    "marshall java.time objects" in new ctx {
      marshaller.unmarshall[LocalDateTime]( marshaller.marshall( javaDateTime ) ) must_=== javaDateTime
    }

    "marshall java 8 Optional" in new ctx {
      marshaller.unmarshall[Optional[DateTime]]( marshaller.marshall( dateTimeUTC ) ) must_=== Optional.of(dateTimeUTC)
      marshaller.unmarshall[Optional[SomeCaseClass]]( marshaller.marshall( someCaseClass ) ) must_=== Optional.of(someCaseClass)
    }

    "expose jackson object mapper to allow external configuration" in new ctx {
      marshaller.asInstanceOf[JsonJacksonMarshaller].configure must beAnInstanceOf[ObjectMapper]
    }
  }
}

object MarshallingTestObjects {
  case class SomeCaseClass(s: String, i: Int)
}
Example 29
Source File: VersionGeneratorTest.scala From slick-repo with MIT License
package com.byteslounge.slickrepo.version

import java.time.{Instant, LocalDateTime}

import com.byteslounge.slickrepo.datetime.{DateTimeHelper, MockDateTimeHelper}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

class VersionGeneratorTest extends FlatSpec with Matchers with BeforeAndAfter {

  before {
    MockDateTimeHelper.start()
    MockDateTimeHelper.mock(
      Instant.parse("2016-01-03T01:01:02Z")
    )
  }

  "The Integer Version Generator" should "generate the integer initial value" in {
    VersionGenerator.intVersionGenerator.initialVersion() should equal(1)
  }

  it should "generate the next integer value" in {
    VersionGenerator.intVersionGenerator.nextVersion(1) should equal(2)
  }

  "The Long Version Generator" should "generate the long initial value" in {
    VersionGenerator.longVersionGenerator.initialVersion() should equal(1L)
  }

  it should "generate the next long value" in {
    VersionGenerator.longVersionGenerator.nextVersion(1L) should equal(2L)
  }

  "The Instant Version Generator" should "generate the instant initial value" in {
    VersionGenerator.instantVersionGenerator.initialVersion() should equal(InstantVersion(Instant.parse("2016-01-03T01:01:02Z")))
  }

  it should "generate the next instant value" in {
    VersionGenerator.instantVersionGenerator.nextVersion(InstantVersion(Instant.parse("2016-01-01T01:00:02.112Z"))) should equal(InstantVersion(Instant.parse("2016-01-03T01:01:02Z")))
  }

  "The LongInstant Version Generator" should "generate the LongInstant initial value" in {
    VersionGenerator.longInstantVersionGenerator.initialVersion() should equal(LongInstantVersion(Instant.parse("2016-01-03T01:01:02Z")))
  }

  it should "generate the next LongInstant value" in {
    VersionGenerator.longInstantVersionGenerator.nextVersion(LongInstantVersion(Instant.parse("2016-01-01T01:00:02.112Z"))) should equal(LongInstantVersion(Instant.parse("2016-01-03T01:01:02Z")))
  }

  "The LocalDateTime Version Generator" should "generate the LocalDateTime initial value" in {
    VersionGenerator.localDateTimeVersionGenerator.initialVersion() should equal(LocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z"))))
  }

  it should "generate the next LocalDateTime value" in {
    VersionGenerator.localDateTimeVersionGenerator.nextVersion(LocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-01T01:00:02.112Z")))) should equal(LocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z"))))
  }

  "The LongLocalDateTime Version Generator" should "generate the LongLocalDateTime initial value" in {
    VersionGenerator.longLocalDateTimeVersionGenerator.initialVersion() should equal(LongLocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z"))))
  }

  it should "generate the next LocalDateTime value" in {
    VersionGenerator.longLocalDateTimeVersionGenerator.nextVersion(LongLocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-01T01:00:02.112Z")))) should equal(LongLocalDateTimeVersion(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z"))))
  }

  private def instantToLocalDateTime(instant: Instant): LocalDateTime = {
    LocalDateTime.ofInstant(instant, DateTimeHelper.localDateTimeZone)
  }
}
Example 30
Source File: LongLocalDateTimeVersionedRepositoryTest.scala From slick-repo with MIT License
package com.byteslounge.slickrepo.test

import java.time.{Instant, LocalDateTime}

import com.byteslounge.slickrepo.datetime.{DateTimeHelper, MockDateTimeHelper}
import com.byteslounge.slickrepo.exception.OptimisticLockException
import com.byteslounge.slickrepo.repository.TestLongLocalDateTimeVersionedEntity

abstract class LongLocalDateTimeVersionedRepositoryTest(override val config: Config) extends AbstractRepositoryTest(config) {

  override def prepareTest() {
    MockDateTimeHelper.start()
    MockDateTimeHelper.mock(
      Instant.parse("2016-01-03T01:01:02Z"),
      Instant.parse("2016-01-04T01:01:05Z"),
      Instant.parse("2016-01-05T01:01:07Z")
    )
  }

  "The LongLocalDateTime Versioned Repository" should "save an entity (manual pk) with an initial LongLocalDateTime version field value" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val entity: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.save(TestLongLocalDateTimeVersionedEntity(Option(1), 2, None)))
    entity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    val readEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
  }

  it should "update an entity (manual pk) incrementing the LongLocalDateTime version field value" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val entity: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.save(TestLongLocalDateTimeVersionedEntity(Option(1), 2, None)))
    val readEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    val updatedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 3)))
    updatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
    val readUpdatedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readUpdatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
  }

  it should "updating an LongLocalDateTime versioned entity (manual pk) that was meanwhile updated by other process throws exception" in {
    val exception = intercept[OptimisticLockException] {
      import scala.concurrent.ExecutionContext.Implicits.global
      val entity: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.save(TestLongLocalDateTimeVersionedEntity(Option(1), 2, None)))
      val readEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
      readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
      val updatedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 3)))
      updatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
      executeAction(testLongLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 4)))
    }
    exception.getMessage should equal("Failed to update entity of type com.byteslounge.slickrepo.repository.TestLongLocalDateTimeVersionedEntity. Expected version was not found: 2016-01-03T01:01:02")
  }

  it should "perform a batch insert of LongLocalDateTime versioned entities" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val batchInsertAction = testLongLocalDateTimeVersionedEntityRepository.batchInsert(
      Seq(TestLongLocalDateTimeVersionedEntity(Option(1), 2.2, None), TestLongLocalDateTimeVersionedEntity(Option(2), 3.3, None), TestLongLocalDateTimeVersionedEntity(Option(3), 4.4, None))
    )
    batchInsertAction.getClass.getName.contains("MultiInsertAction") should equal(true)
    val rowCount = executeAction(batchInsertAction)
    assertBatchInsertResult(rowCount)
    val entity1: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(1)).get
    val entity2: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(2)).get
    val entity3: TestLongLocalDateTimeVersionedEntity = executeAction(testLongLocalDateTimeVersionedEntityRepository.findOne(3)).get
    entity1.price should equal(2.2)
    entity1.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    entity2.price should equal(3.3)
    entity2.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
    entity3.price should equal(4.4)
    entity3.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-05T01:01:07Z")))
  }

  private def instantToLocalDateTime(instant: Instant): LocalDateTime = {
    LocalDateTime.ofInstant(instant, DateTimeHelper.localDateTimeZone)
  }
}
Example 31
Source File: LocalDateTimeVersionedRepositoryTest.scala From slick-repo with MIT License | 5 votes |
package com.byteslounge.slickrepo.test

import java.time.{Instant, LocalDateTime}

import com.byteslounge.slickrepo.datetime.{DateTimeHelper, MockDateTimeHelper}
import com.byteslounge.slickrepo.exception.OptimisticLockException
import com.byteslounge.slickrepo.repository.TestLocalDateTimeVersionedEntity

abstract class LocalDateTimeVersionedRepositoryTest(override val config: Config) extends AbstractRepositoryTest(config) {

  override def prepareTest() {
    MockDateTimeHelper.start()
    MockDateTimeHelper.mock(
      Instant.parse("2016-01-03T01:01:02Z"),
      Instant.parse("2016-01-04T01:01:05Z"),
      Instant.parse("2016-01-05T01:01:07Z")
    )
  }

  "The LocalDateTime Versioned Repository" should "save an entity (manual pk) with an initial LocalDateTime version field value" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val entity: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.save(TestLocalDateTimeVersionedEntity(Option(1), 2, None)))
    entity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    val readEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
  }

  it should "update an entity (manual pk) incrementing the LocalDateTime version field value" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val entity: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.save(TestLocalDateTimeVersionedEntity(Option(1), 2, None)))
    val readEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    val updatedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 3)))
    updatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
    val readUpdatedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
    readUpdatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
  }

  it should "updating an LocalDateTime versioned entity (manual pk) that was meanwhile updated by other process throws exception" in {
    val exception = intercept[OptimisticLockException] {
      import scala.concurrent.ExecutionContext.Implicits.global
      val entity: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.save(TestLocalDateTimeVersionedEntity(Option(1), 2, None)))
      val readEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(entity.id.get)).get
      readEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
      val updatedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 3)))
      updatedEntity.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
      executeAction(testLocalDateTimeVersionedEntityRepository.update(readEntity.copy(price = 4)))
    }
    exception.getMessage should equal("Failed to update entity of type com.byteslounge.slickrepo.repository.TestLocalDateTimeVersionedEntity. Expected version was not found: 2016-01-03T01:01:02")
  }

  it should "perform a batch insert of LocalDateTime versioned entities" in {
    import scala.concurrent.ExecutionContext.Implicits.global
    val batchInsertAction = testLocalDateTimeVersionedEntityRepository.batchInsert(
      Seq(TestLocalDateTimeVersionedEntity(Option(1), 2.2, None), TestLocalDateTimeVersionedEntity(Option(2), 3.3, None), TestLocalDateTimeVersionedEntity(Option(3), 4.4, None))
    )
    batchInsertAction.getClass.getName.contains("MultiInsertAction") should equal(true)
    val rowCount = executeAction(batchInsertAction)
    assertBatchInsertResult(rowCount)
    val entity1: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(1)).get
    val entity2: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(2)).get
    val entity3: TestLocalDateTimeVersionedEntity = executeAction(testLocalDateTimeVersionedEntityRepository.findOne(3)).get
    entity1.price should equal(2.2)
    entity1.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-03T01:01:02Z")))
    entity2.price should equal(3.3)
    entity2.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-04T01:01:05Z")))
    entity3.price should equal(4.4)
    entity3.version.get.localDateTime should equal(instantToLocalDateTime(Instant.parse("2016-01-05T01:01:07Z")))
  }

  private def instantToLocalDateTime(instant: Instant): LocalDateTime = {
    LocalDateTime.ofInstant(instant, DateTimeHelper.localDateTimeZone)
  }
}
Example 32
Source File: DateTimeHelperTest.scala From slick-repo with MIT License | 5 votes |
package com.byteslounge.slickrepo.datetime import java.time.{Instant, LocalDateTime} import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} class DateTimeHelperTest extends FlatSpec with Matchers with BeforeAndAfter { before { MockDateTimeHelper.restore() } "The DateTimeHelper" should "return the current instant" in { val now: Instant = Instant.now() val currentInstant: Instant = DateTimeHelper.currentInstant currentInstant.toEpochMilli should be >= now.toEpochMilli } it should "return the current LocalDateTime" in { val now: Instant = Instant.now() val currentLocalDateTime: LocalDateTime = DateTimeHelper.currentLocalDateTime currentLocalDateTime.atZone(DateTimeHelper.localDateTimeZone).toInstant.toEpochMilli should be >= now.toEpochMilli } }
Example 33
Source File: MockDateTimeHelper.scala From slick-repo with MIT License | 5 votes |
package com.byteslounge.slickrepo.datetime import java.time.{Instant, LocalDateTime} object MockDateTimeHelper { def start(): Unit = { MockDateTimeProvider.reset() DateTimeHelper.setDateTimeProvider(MockDateTimeProvider) } def mock(instants: Instant*): Unit = { MockDateTimeProvider.mock(instants) } def restore(): Unit = { DateTimeHelper.restore() } } private object MockDateTimeProvider extends DateTimeProvider { var instants: Seq[Instant] = _ def reset(): Unit = { instants = Seq() } override def currentInstant: Instant = { val result: Instant = instants.head instants = instants.tail result } override def currentLocalDateTime: LocalDateTime = { LocalDateTime.ofInstant(currentInstant, DateTimeHelper.localDateTimeZone) } def mock(instants: Seq[Instant]): Unit = { instants.foreach(instant => this.instants = this.instants :+ instant) } }
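MockDateTimeHelper works by swapping the provider behind DateTimeHelper; java.time offers the same testing seam natively through Clock. A minimal sketch of deterministic "now" values with a fixed clock (object name illustrative):

import java.time.{Clock, Instant, LocalDateTime, ZoneOffset}

object FixedClockSketch extends App {
  // A clock frozen at a known instant makes LocalDateTime.now deterministic in tests.
  val fixed: Clock = Clock.fixed(Instant.parse("2016-01-03T01:01:02Z"), ZoneOffset.UTC)
  println(LocalDateTime.now(fixed)) // always 2016-01-03T01:01:02
}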
Example 34
Source File: JsonFormatterTests.scala From play-json-extra with Apache License 2.0 | 5 votes |
package play.json.extra import java.time.LocalDateTime import org.scalatest.FunSuite import play.api.libs.json.{JsObject, Json} class JsonFormatterTests extends FunSuite { @JsonFormat final case class TestDataTime(dt: LocalDateTime, children: List[TestDataTime] = Nil) @JsonFormat final case class DefaultTest(@key("ok1") a1: Int = 1, @key("ok2") a2: Int = 2, @key("ok3") a3: Int = 3, @key("ok4") a4: Int = 4, @key("ok5") a5: Int = 5, @key("ok6") a6: Int = 6, @key("ok7") a7: Int = 7, @key("ok8") a8: Int = 8, @key("ok9") a9: Int = 9, @key("ok10") a10: Int = 10, @key("ok11") a11: Int = 11, @key("ok12") a12: Int = 12, @key("ok13") a13: Int = 13, @key("ok14") a14: Int = 14, @key("ok15") a15: Int = 15, @key("ok16") a16: Int = 16, @key("ok17") a17: Int = 17, @key("ok18") a18: Int = 18, @key("ok19") a19: Int = 19, @key("ok20") a20: Int = 20, @key("ok21") a21: Int = 21, @key("ok22") a22: Int = 22, @key("ok23") a23: Int = 23, @key("ok24") a24: Int = 24, @key("ok25") a25: Int = 25, @key("ok26") a26: Int = 26, @key("ok27") a27: Int = 27, @key("ok28") a28: Int = 28, @key("ok29") a29: Int = 29 ) test("basic datetime test") { val dt = LocalDateTime.of(2015, 8, 11, 12, 1, 2, 3) val ts = TestDataTime(dt) val json = Json.toJson(ts) // println(Json.stringify(json)) assert(Json.stringify(json) === """{"dt":"2015-08-11T12:01:02.000000003","children":[]}""") assert(json.as[TestDataTime].dt === dt) } test("default test key + >21 fields + des/ser") { val ts = DefaultTest() val json = Json.toJson(ts).as[JsObject] // println(Json.stringify(json)) assert(json.fields.length === 29) val fieldNames=List( "ok1", "ok2", "ok3", "ok4", "ok5", "ok6", "ok7", "ok8", "ok9", "ok10", "ok10", "ok11", "ok12", "ok13", "ok14", "ok15", "ok16", "ok17", "ok18", "ok19", "ok20", "ok21", "ok22", "ok23", "ok24", "ok25", "ok26", "ok27", "ok28", "ok29" ) fieldNames.foreach(value => assert(json.fields.map(_._1).contains(value) === true)) val json2=Json.parse("""{"ok27":27,"ok16":16,"ok4":4,"ok25":25,"ok14":14,"ok10":10,"ok17":17,"ok21":21,"ok24":24,"ok28":28,"ok3":3,"ok20":20,"ok15":15,"ok7":7,"ok11":11,"ok1":1,"ok23":23,"ok12":12,"ok8":8,"ok29":29,"ok19":19,"ok2":2,"ok5":5,"ok26":26,"ok13":13,"ok18":18,"ok9":9,"ok6":6,"ok22":22}""") val ts2 = json2.as[DefaultTest] assert(ts2.a29 === 29) } }
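The "basic datetime test" above relies on LocalDateTime's default ISO-8601 rendering, nanoseconds included. A small round-trip sketch of just that behaviour, independent of the play-json-extra macros (object name illustrative):

import java.time.LocalDateTime

object IsoRoundTripSketch extends App {
  val dt = LocalDateTime.of(2015, 8, 11, 12, 1, 2, 3) // year, month, day, hour, minute, second, nanosecond
  val rendered = dt.toString                 // "2015-08-11T12:01:02.000000003"
  val parsed = LocalDateTime.parse(rendered) // the default parser accepts the ISO form back
  assert(parsed == dt)
  println(rendered)
}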
Example 35
Source File: Timestamp.scala From scala-steward with Apache License 2.0 | 5 votes |
package org.scalasteward.core.util import cats.Order import cats.implicits._ import io.circe.Codec import io.circe.generic.extras.semiauto.deriveUnwrappedCodec import java.time.{Instant, LocalDateTime, ZoneOffset} import java.util.concurrent.TimeUnit import scala.concurrent.duration.FiniteDuration final case class Timestamp(millis: Long) { def +(finiteDuration: FiniteDuration): Timestamp = Timestamp(millis + finiteDuration.toMillis) def toLocalDateTime: LocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC) def until(that: Timestamp): FiniteDuration = FiniteDuration(that.millis - millis, TimeUnit.MILLISECONDS) } object Timestamp { def fromLocalDateTime(ldt: LocalDateTime): Timestamp = Timestamp(ldt.toInstant(ZoneOffset.UTC).toEpochMilli) implicit val timestampCodec: Codec[Timestamp] = deriveUnwrappedCodec implicit val timestampOrder: Order[Timestamp] = Order.by(_.millis) }
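Pinning both directions of the conversion to UTC is what makes Timestamp's round trip lossless at millisecond precision. A sketch of that round trip (object name illustrative):

import java.time.{Instant, LocalDateTime, ZoneOffset}

object EpochMillisSketch extends App {
  val millis = 1234567890123L
  val ldt: LocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)
  val back: Long = ldt.toInstant(ZoneOffset.UTC).toEpochMilli
  assert(back == millis) // lossless because the same fixed offset is used both ways
  println(ldt) // 2009-02-13T23:31:30.123
}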
Example 36
Source File: HelloWorldEntity.scala From lagom with Apache License 2.0 | 5 votes |
package com.example.helloworld.impl import java.time.LocalDateTime import akka.Done import com.lightbend.lagom.scaladsl.persistence.AggregateEventShards import com.lightbend.lagom.scaladsl.persistence.{ PersistentEntity, AggregateEventTag, AggregateEvent } import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.ReplyType import com.lightbend.lagom.scaladsl.playjson.{ JsonSerializer, JsonSerializerRegistry } import play.api.libs.json.{ Format, Json } import scala.collection.immutable.Seq class HelloWorldEntity extends PersistentEntity { override type Command = HelloWorldCommand[_] override type Event = HelloWorldEvent override type State = HelloWorldState override def initialState: HelloWorldState = HelloWorldState("Hello", LocalDateTime.now.toString) override def behavior: Behavior = { case HelloWorldState(message, _) => Actions().onCommand[UseGreetingMessage, Done] { case (UseGreetingMessage(newMessage), ctx, state) => ctx.thenPersist( GreetingMessageChanged(entityId, newMessage) ) { _ => ctx.reply(Done) } }.onReadOnlyCommand[Hello, String] { case (Hello(name), ctx, state) => ctx.reply(s"$message, $name!") }.onEvent { case (GreetingMessageChanged(_, newMessage), state) => HelloWorldState(newMessage, LocalDateTime.now().toString) } } } case class HelloWorldState(message: String, timestamp: String) object HelloWorldState { implicit val format: Format[HelloWorldState] = Json.format } sealed trait HelloWorldEvent extends AggregateEvent[HelloWorldEvent] { override def aggregateTag: AggregateEventShards[HelloWorldEvent] = HelloWorldEvent.Tag } object HelloWorldEvent { val NumShards = 4 val Tag = AggregateEventTag.sharded[HelloWorldEvent](NumShards) } case class GreetingMessageChanged(id:String, message: String) extends HelloWorldEvent object GreetingMessageChanged { implicit val format: Format[GreetingMessageChanged] = Json.format } sealed trait HelloWorldCommand[R] extends ReplyType[R] case class UseGreetingMessage(message: String) extends HelloWorldCommand[Done] object UseGreetingMessage { implicit val format: Format[UseGreetingMessage] = Json.format } case class Hello(name: String) extends HelloWorldCommand[String] object Hello { implicit val format: Format[Hello] = Json.format } object HelloWorldSerializerRegistry extends JsonSerializerRegistry { override def serializers: Seq[JsonSerializer[_]] = Seq( JsonSerializer[UseGreetingMessage], JsonSerializer[Hello], JsonSerializer[GreetingMessageChanged], JsonSerializer[HelloWorldState] ) }
Example 37
Source File: HelloEntity.scala From lagom with Apache License 2.0 | 5 votes |
package docs.scaladsl.mb import java.time.LocalDateTime import akka.Done import com.lightbend.lagom.scaladsl.persistence.AggregateEvent import com.lightbend.lagom.scaladsl.persistence.AggregateEventTag import com.lightbend.lagom.scaladsl.persistence.AggregateEventTagger import com.lightbend.lagom.scaladsl.persistence.PersistentEntity import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.ReplyType import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry import com.lightbend.lagom.scaladsl.playjson.JsonSerializer import play.api.libs.json.Format import play.api.libs.json.Json import scala.collection.immutable.Seq class HelloEntity extends PersistentEntity { override type Command = HelloCommand[_] override type Event = HelloEvent override type State = HelloState override def initialState: HelloState = HelloState("Hello", LocalDateTime.now.toString) override def behavior: Behavior = { case HelloState(message, _) => Actions() .onCommand[UseGreetingMessage, Done] { // Command handler for the UseGreetingMessage command case (UseGreetingMessage(newMessage), ctx, state) => // In response to this command, we want to first persist it as a // GreetingMessageChanged event ctx.thenPersist(GreetingMessageChanged(newMessage)) { // Then once the event is successfully persisted, we respond with done. _ => ctx.reply(Done) } } .onReadOnlyCommand[Hello, String] { // Command handler for the Hello command case (Hello(name, organization), ctx, state) => // Reply with a message built from the current message, and the name of // the person we're meant to say hello to. ctx.reply(s"$message, $name!") } .onEvent { // Event handler for the GreetingMessageChanged event case (GreetingMessageChanged(newMessage), state) => // We simply update the current state to use the greeting message from // the event. HelloState(newMessage, LocalDateTime.now().toString) } } } case class HelloState(message: String, timestamp: String) object HelloState { implicit val format: Format[HelloState] = Json.format } object HelloEventTag { val INSTANCE: AggregateEventTag[HelloEvent] = AggregateEventTag[HelloEvent]() } sealed trait HelloEvent extends AggregateEvent[HelloEvent] { override def aggregateTag: AggregateEventTagger[HelloEvent] = HelloEventTag.INSTANCE } case class GreetingMessageChanged(message: String) extends HelloEvent object GreetingMessageChanged { implicit val format: Format[GreetingMessageChanged] = Json.format } sealed trait HelloCommand[R] extends ReplyType[R] case class UseGreetingMessage(message: String) extends HelloCommand[Done] object UseGreetingMessage { implicit val format: Format[UseGreetingMessage] = Json.format } case class Hello(name: String, organization: Option[String]) extends HelloCommand[String] object Hello { implicit val format: Format[Hello] = Json.format } object HelloSerializerRegistry extends JsonSerializerRegistry { override def serializers: Seq[JsonSerializer[_]] = Seq( JsonSerializer[UseGreetingMessage], JsonSerializer[Hello], JsonSerializer[GreetingMessageChanged], JsonSerializer[HelloState] ) }
Example 38
Source File: CassandraReadSideSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.persistence.cassandra import java.time.LocalDateTime import java.time.ZoneOffset import java.time.format.DateTimeFormatter import akka.persistence.query.TimeBasedUUID import scala.concurrent.Future import scala.concurrent.duration._ import com.typesafe.config.ConfigFactory import com.lightbend.lagom.internal.persistence.ReadSideConfig import com.lightbend.lagom.internal.persistence.cassandra.CassandraReadSideSettings import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.CassandraPersistentEntityRegistry import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.CassandraReadSideImpl import com.lightbend.lagom.internal.scaladsl.persistence.cassandra.ScaladslCassandraOffsetStore import com.lightbend.lagom.scaladsl.persistence.TestEntity.Evt import com.lightbend.lagom.scaladsl.persistence._ object CassandraReadSideSpec { def firstTimeBucket: String = { val today = LocalDateTime.now(ZoneOffset.UTC) val firstBucketFormat: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HH:mm") today.minusHours(3).format(firstBucketFormat) } val readSideConfig = ConfigFactory.parseString(s""" # speed up read-side queries cassandra-query-journal { first-time-bucket = "$firstTimeBucket" refresh-interval = 1s events-by-tag.eventual-consistency-delay = 1s } """) val defaultConfig = ConfigFactory .parseString("akka.loglevel = INFO") .withFallback(readSideConfig) val noAutoCreateConfig = ConfigFactory .parseString("lagom.persistence.read-side.cassandra.tables-autocreate = false") .withFallback(defaultConfig) } class CassandraReadSideSpec extends CassandraPersistenceSpec(CassandraReadSideSpec.defaultConfig, TestEntitySerializerRegistry) with AbstractReadSideSpec { import system.dispatcher protected override lazy val persistentEntityRegistry = new CassandraPersistentEntityRegistry(system) private lazy val testCasReadSideSettings: CassandraReadSideSettings = new CassandraReadSideSettings(system) private lazy val testSession: CassandraSession = new CassandraSession(system) private lazy val offsetStore = new ScaladslCassandraOffsetStore(system, testSession, testCasReadSideSettings, ReadSideConfig()) private lazy val cassandraReadSide = new CassandraReadSideImpl(system, testSession, offsetStore) override def processorFactory(): ReadSideProcessor[Evt] = new TestEntityReadSide.TestEntityReadSideProcessor(system, cassandraReadSide, testSession) private lazy val readSide = new TestEntityReadSide(system, testSession) override def getAppendCount(id: String): Future[Long] = readSide.getAppendCount(id) override def afterAll(): Unit = { super.afterAll() } } class CassandraReadSideAutoCreateSpec extends CassandraPersistenceSpec(CassandraReadSideSpec.noAutoCreateConfig, TestEntitySerializerRegistry) { import system.dispatcher private lazy val testSession: CassandraSession = new CassandraSession(system) private lazy val testCasReadSideSettings: CassandraReadSideSettings = new CassandraReadSideSettings(system) private lazy val offsetStore = new ScaladslCassandraOffsetStore(system, testSession, testCasReadSideSettings, ReadSideConfig()) "A Cassandra Read-Side" must { "not send ClusterStartupTask message, so startupTask must return None" + "when 'lagom.persistence.read-side.cassandra.tables-autocreate' flag is 'false'" in { offsetStore.startupTask shouldBe None } } }
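firstTimeBucket formats "now in UTC, minus three hours" with a custom pattern. The formatting step in isolation (object name and printed value illustrative):

import java.time.{LocalDateTime, ZoneOffset}
import java.time.format.DateTimeFormatter

object TimeBucketSketch extends App {
  val firstBucketFormat = DateTimeFormatter.ofPattern("yyyyMMdd'T'HH:mm")
  val bucket = LocalDateTime.now(ZoneOffset.UTC).minusHours(3).format(firstBucketFormat)
  println(bucket) // e.g. "20240101T09:30"
}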
Example 39
Source File: Global.scala From wowchat with GNU General Public License v3.0 | 5 votes |
package wowchat.common import java.time.LocalDateTime import java.time.format.DateTimeFormatter import io.netty.channel.EventLoopGroup import net.dv8tion.jda.core.entities.TextChannel import wowchat.discord.Discord import wowchat.game.GameCommandHandler import scala.collection.mutable object Global { var group: EventLoopGroup = _ var config: WowChatConfig = _ var discord: Discord = _ var game: Option[GameCommandHandler] = None val discordToWow = new mutable.HashMap[String, mutable.Set[WowChannelConfig]] with mutable.MultiMap[String, WowChannelConfig] val wowToDiscord = new mutable.HashMap[(Byte, Option[String]), mutable.Set[(TextChannel, DiscordChannelConfig)]] with mutable.MultiMap[(Byte, Option[String]), (TextChannel, DiscordChannelConfig)] val guildEventsToDiscord = new mutable.HashMap[String, mutable.Set[TextChannel]] with mutable.MultiMap[String, TextChannel] def getTime: String = { LocalDateTime.now.format(DateTimeFormatter.ofPattern("HH:mm:ss")) } }
Example 40
Source File: Master.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.scheduler.actors

import java.time.LocalDateTime
import java.util.concurrent.TimeUnit

import akka.actor.{Props, Cancellable, Actor}
import akka.routing.RoundRobinPool
import com.ivan.nikolov.scheduler.actors.messages.{Work, Schedule, Done}
import com.ivan.nikolov.scheduler.config.job.{Daily, Hourly}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration.Duration
import scala.collection.mutable.ListBuffer
import scala.concurrent.ExecutionContext.Implicits.global

class Master(numWorkers: Int, actorFactory: ActorFactory) extends Actor with LazyLogging {
  val cancelables = ListBuffer[Cancellable]()

  val router = context.actorOf(
    Props(actorFactory.createWorkerActor()).withRouter(RoundRobinPool(numWorkers)),
    "scheduler-master-worker-router"
  )

  override def receive: Receive = {
    case Done(name, command, jobType, success) =>
      if (success) {
        logger.info("Successfully completed {} ({}).", name, command)
      } else {
        logger.error("Failure! Command {} ({}) returned a non-zero result code.", name, command)
      }
    case Schedule(configs) => configs.foreach {
      case config =>
        val cancellable = this.context.system.scheduler.schedule(
          config.timeOptions.getInitialDelay(LocalDateTime.now(), config.frequency),
          config.frequency match {
            case Hourly => Duration.create(1, TimeUnit.HOURS)
            case Daily => Duration.create(1, TimeUnit.DAYS)
          },
          router,
          Work(config.name, config.command, config.jobType)
        )
        // Append to the buffer; the original `cancellable +: cancelables` discarded
        // its result, so postStop never cancelled anything.
        cancelables += cancellable
        logger.info("Scheduled: {}", config)
    }
  }

  override def postStop(): Unit = {
    cancelables.foreach(_.cancel())
  }
}
Example 41
Source File: TimeOptions.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.scheduler.config.job import java.time.LocalDateTime import java.time.temporal.ChronoUnit import java.util.concurrent.TimeUnit import scala.concurrent.duration.{FiniteDuration, Duration} case class TimeOptions(hours: Int, minutes: Int) { if (hours < 0 || hours > 23) { throw new IllegalArgumentException("Hours must be between 0 and 23: " + hours) } else if (minutes < 0 || minutes > 59) { throw new IllegalArgumentException("Minutes must be between 0 and 59: " + minutes) } def getInitialDelay(now: LocalDateTime, frequency: JobFrequency): FiniteDuration = { val firstRun = now.withHour(hours).withMinute(minutes) val isBefore = firstRun.isBefore(now) val actualFirstRun = frequency match { case Hourly => var tmp = firstRun Iterator.continually({tmp = tmp.plusHours(1); tmp}).takeWhile(d => d.isBefore(now)).toList.lastOption.getOrElse(if (isBefore) firstRun else firstRun.minusHours(1)).plusHours(1) case Daily => var tmp = firstRun Iterator.continually({tmp = tmp.plusDays(1); tmp}).takeWhile(d => d.isBefore(now)).toList.lastOption.getOrElse(if (isBefore) firstRun else firstRun.minusDays(1)).plusDays(1) } val secondsUntilRun = now.until(actualFirstRun, ChronoUnit.SECONDS) Duration.create(secondsUntilRun, TimeUnit.SECONDS) } }
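getInitialDelay anchors the run time on the current day with withHour/withMinute and then measures the distance with until. A simplified sketch of the daily case, leaving out the iterator-based catch-up logic above (values chosen to match the tests; note withMinute keeps the seconds of `now`):

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit

object InitialDelaySketch extends App {
  val now = LocalDateTime.of(2018, 3, 20, 14, 43, 10)
  val target = now.withHour(14).withMinute(18) // 2018-03-20T14:18:10, already in the past
  // If the scheduled time has already passed today, the next daily run is tomorrow.
  val firstRun = if (target.isBefore(now)) target.plusDays(1) else target
  println(now.until(firstRun, ChronoUnit.SECONDS)) // 84900 s = 24 h minus the 25 min already elapsed
}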
Example 42
Source File: TimeOptionsTest.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.scheduler.config.job import java.time.LocalDateTime import org.scalatest.{FlatSpec, Matchers} class TimeOptionsTest extends FlatSpec with Matchers { "getInitialDelay" should "get the right initial delay for hourly less than an hour after now." in { val now = LocalDateTime.of(2018, 3, 20, 12, 43, 10) val later = now.plusMinutes(20) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(20) } it should "get the right initial delay for hourly more than an hour after now." in { val now = LocalDateTime.of(2018, 3, 20, 18, 51, 17) val later = now.plusHours(3) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toHours should equal(3) } it should "get the right initial delay for hourly less than an hour before now." in { val now = LocalDateTime.of(2018, 3, 20, 11, 18, 55) val earlier = now.minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(35) } } it should "get the right initial delay for hourly more than an hour before now." in { val now = LocalDateTime.of(2018, 3, 20, 12, 43, 59) val earlier = now.minusHours(1).minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(35) } } it should "get the right initial delay for daily before now." in { val now = LocalDateTime.of(2018, 3, 20, 14, 43, 10) val earlier = now.minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Daily) result.toMinutes should equal(24 * 60 - 25) } } it should "get the right initial delay for daily after now." in { val now = LocalDateTime.of(2018, 3, 20, 16, 21, 6) val later = now.plusMinutes(20) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Daily) result.toMinutes should equal(20) } }
Example 43
Source File: TimeOptionsTest.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.scheduler.config.job import java.time.LocalDateTime import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{FlatSpec, Matchers} @RunWith(classOf[JUnitRunner]) class TimeOptionsTest extends FlatSpec with Matchers { "getInitialDelay" should "get the right initial delay for hourly less than an hour after now." in { val now = LocalDateTime.of(2018, 3, 20, 12, 43, 10) val later = now.plusMinutes(20) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(20) } it should "get the right initial delay for hourly more than an hour after now." in { val now = LocalDateTime.of(2018, 3, 20, 18, 51, 17) val later = now.plusHours(3) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toHours should equal(3) } it should "get the right initial delay for hourly less than an hour before now." in { val now = LocalDateTime.of(2018, 3, 20, 11, 18, 55) val earlier = now.minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(35) } } it should "get the right initial delay for hourly more than an hour before now." in { val now = LocalDateTime.of(2018, 3, 20, 12, 43, 59) val earlier = now.minusHours(1).minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Hourly) result.toMinutes should equal(35) } } it should "get the right initial delay for daily before now." in { val now = LocalDateTime.of(2018, 3, 20, 14, 43, 10) val earlier = now.minusMinutes(25) // because of the logic and it will fail otherwise. if (earlier.getDayOfWeek == now.getDayOfWeek) { val timeOptions = TimeOptions(earlier.getHour, earlier.getMinute) val result = timeOptions.getInitialDelay(now, Daily) result.toMinutes should equal(24 * 60 - 25) } } it should "get the right initial delay for daily after now." in { val now = LocalDateTime.of(2018, 3, 20, 16, 21, 6) val later = now.plusMinutes(20) val timeOptions = TimeOptions(later.getHour, later.getMinute) val result = timeOptions.getInitialDelay(now, Daily) result.toMinutes should equal(20) } }
Example 44
Source File: AuthorizeStore.scala From OAuth2-mock-play with MIT License | 5 votes |
package models import java.time.LocalDateTime import play.api.cache.CacheApi case class AuthorizeStoreCache(value: CacheApi) extends AnyVal sealed abstract class AuthorizeStore object AuthorizeStore { case class Token(accessToken: String, expirationDate: LocalDateTime, tokenType: TokenType, grantType: GrantType, uid: String, realm: String, scope: List[String]) extends AuthorizeStore case class Code(state: String, clientId: String, redirectUri: String, username: String, scope: List[String]) extends AuthorizeStore }
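A Token carrying expirationDate: LocalDateTime is typically validated against the current time. A minimal expiry check (the helper is illustrative, not part of the mock server):

import java.time.LocalDateTime

object ExpirySketch extends App {
  def isExpired(expirationDate: LocalDateTime): Boolean =
    expirationDate.isBefore(LocalDateTime.now())

  println(isExpired(LocalDateTime.now().plusHours(1)))  // false: still valid
  println(isExpired(LocalDateTime.now().minusHours(1))) // true: already past
}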
Example 45
Source File: ArrayOfLocalDateTimesBenchmark.scala From jsoniter-scala with MIT License | 5 votes |
package com.github.plokhotnyuk.jsoniter_scala.benchmark import java.nio.charset.StandardCharsets.UTF_8 import java.time.{LocalDate, LocalDateTime, LocalTime} import org.openjdk.jmh.annotations.{Param, Setup} abstract class ArrayOfLocalDateTimesBenchmark extends CommonParams { @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000")) var size: Int = 1000 var obj: Array[LocalDateTime] = _ var jsonString: String = _ var jsonBytes: Array[Byte] = _ var preallocatedBuf: Array[Byte] = _ @Setup def setup(): Unit = { obj = (1 to size).map { i => val n = Math.abs(i * 1498724053) LocalDateTime.of(LocalDate.ofEpochDay(i), LocalTime.ofNanoOfDay(((n % 86000) | 0x1) * 1000000000L + (i % 4 match { case 0 => 0 case 1 => ((n % 1000) | 0x1) * 1000000 case 2 => ((n % 1000000) | 0x1) * 1000 case 3 => (n | 0x1) % 1000000000 }))) }.toArray jsonString = obj.mkString("[\"", "\",\"", "\"]") jsonBytes = jsonString.getBytes(UTF_8) preallocatedBuf = new Array[Byte](jsonBytes.length + 100) } }
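Each benchmark value is assembled from a separate date part and time part. That composition for a single element, with the arithmetic simplified (object name illustrative):

import java.time.{LocalDate, LocalDateTime, LocalTime}

object ComposeSketch extends App {
  val date = LocalDate.ofEpochDay(1)                    // 1970-01-02
  val time = LocalTime.ofNanoOfDay(3661L * 1000000000L) // 3661 s into the day = 01:01:01
  println(LocalDateTime.of(date, time))                 // 1970-01-02T01:01:01
}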
Example 46
Source File: ArrayOfLocalDateTimesReading.scala From jsoniter-scala with MIT License | 5 votes |
package com.github.plokhotnyuk.jsoniter_scala.benchmark import java.nio.charset.StandardCharsets.UTF_8 import java.time.LocalDateTime import com.avsystem.commons.serialization.json._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.AVSystemCodecs._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.BorerJsonEncodersDecoders._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.DslPlatformJson._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.UPickleReaderWriters._ import com.github.plokhotnyuk.jsoniter_scala.core._ import com.rallyhealth.weejson.v1.jackson.FromJson import com.rallyhealth.weepickle.v1.WeePickle.ToScala import io.circe.parser._ import org.openjdk.jmh.annotations.Benchmark import play.api.libs.json.Json import spray.json._ class ArrayOfLocalDateTimesReading extends ArrayOfLocalDateTimesBenchmark { @Benchmark def avSystemGenCodec(): Array[LocalDateTime] = JsonStringInput.read[Array[LocalDateTime]](new String(jsonBytes, UTF_8)) @Benchmark def borer(): Array[LocalDateTime] = io.bullet.borer.Json.decode(jsonBytes).to[Array[LocalDateTime]].value @Benchmark def circe(): Array[LocalDateTime] = decode[Array[LocalDateTime]](new String(jsonBytes, UTF_8)).fold(throw _, identity) @Benchmark def dslJsonScala(): Array[LocalDateTime] = dslJsonDecode[Array[LocalDateTime]](jsonBytes) @Benchmark def jacksonScala(): Array[LocalDateTime] = jacksonMapper.readValue[Array[LocalDateTime]](jsonBytes) @Benchmark def jsoniterScala(): Array[LocalDateTime] = readFromArray[Array[LocalDateTime]](jsonBytes) @Benchmark def playJson(): Array[LocalDateTime] = Json.parse(jsonBytes).as[Array[LocalDateTime]] @Benchmark def sprayJson(): Array[LocalDateTime] = JsonParser(jsonBytes).convertTo[Array[LocalDateTime]] @Benchmark def uPickle(): Array[LocalDateTime] = read[Array[LocalDateTime]](jsonBytes) @Benchmark def weePickle(): Array[LocalDateTime] = FromJson(jsonBytes).transform(ToScala[Array[LocalDateTime]]) }
Example 47
Source File: projects.scala From renku with Apache License 2.0 | 5 votes |
package ch.renku.acceptancetests.model import java.net.URL import java.time.LocalDateTime import java.time.format.DateTimeFormatter import ch.renku.acceptancetests.generators.Generators.Implicits._ import ch.renku.acceptancetests.generators.Generators._ import ch.renku.acceptancetests.model.users.UserCredentials import eu.timepit.refined.api.Refined import eu.timepit.refined.collection.NonEmpty object projects { final case class ProjectIdentifier( namespace: String Refined NonEmpty, slug: String Refined NonEmpty ) final case class ProjectDetails( title: String Refined NonEmpty, description: String Refined NonEmpty, readmeTitle: String ) final case class ProjectUrl(value: String) { override lazy val toString: String = value } object ProjectUrl { implicit class ProjectUrlOps(projectUrl: ProjectUrl)(implicit userCredentials: UserCredentials) { import ch.renku.acceptancetests.tooling.UrlEncoder.urlEncode lazy val addGitCredentials: String = { val protocol = new URL(projectUrl.value).getProtocol projectUrl.value .replace( s"$protocol://", s"$protocol://${urlEncode(userCredentials.username.value)}:${urlEncode(userCredentials.password.value)}@" ) } } } object ProjectDetails { def generate: ProjectDetails = { val now = LocalDateTime.now() val desc = prefixParagraph("An automatically generated project for testing: ").generateOne val readmeTitle = s"test${now.format(DateTimeFormatter.ofPattern("yyyyMMddHHmm_ss"))}" ProjectDetails(Refined.unsafeApply(s"test_${now.format(DateTimeFormatter.ofPattern("yyyy_MM_dd_HHmm_ss"))}"), desc, readmeTitle) } def generateHandsOnProject(captureScreenshots: Boolean): ProjectDetails = if (captureScreenshots) { val readmeTitle = "flights tutorial" ProjectDetails(Refined.unsafeApply(readmeTitle), Refined.unsafeApply("A renku tutorial project."), readmeTitle) } else generate implicit class TitleOps(title: String Refined NonEmpty) { lazy val toPathSegment: String = title.value.replace(" ", "-") } } }
Example 48
Source File: JavaTimeCronDateTimeRegressionSpec.scala From cron4s with Apache License 2.0 | 5 votes |
package cron4s.lib.javatime

import java.time.LocalDateTime
import java.time.temporal.{ChronoField, ChronoUnit}

import cron4s._
import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec

class JavaTimeCronDateTimeRegressionSpec extends AnyFlatSpec with Matchers {

  // The opening of the first test (its description, the `from` value and the start
  // of the cron expression) is missing from the source listing; only this tail survives:
  //   ... 10 * * * ?")
  //   val next = cron.next(from).get
  //   from.until(next, ChronoUnit.SECONDS) shouldBe 17L
  // }

  // https://github.com/alonsodomin/cron4s/issues/59
  "Cron with day of week" should "yield a date in the future" in {
    val cron = Cron.unsafeParse("0 0 0 ? * 1-3")
    for (dayOfMonth <- 1 to 30) {
      val from = LocalDateTime.of(2017, 3, dayOfMonth, 2, 0, 0)
      cron.next(from).forall(_.isAfter(from)) shouldBe true
    }
  }
}
Example 49
Source File: DefaultDatabaseOperationsTest.scala From Conseil with Apache License 2.0 | 5 votes |
package tech.cryptonomic.conseil.api.sql import java.sql.Timestamp import java.time.LocalDateTime import org.scalatest.concurrent.ScalaFutures import org.scalatest.{Matchers, WordSpec} import slick.jdbc.PostgresProfile.api._ import tech.cryptonomic.conseil.api.TezosInMemoryDatabaseSetup import tech.cryptonomic.conseil.api.sql.DefaultDatabaseOperations._ import tech.cryptonomic.conseil.common.testkit.InMemoryDatabase import tech.cryptonomic.conseil.common.tezos.Tables import tech.cryptonomic.conseil.common.tezos.Tables.FeesRow import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.language.postfixOps class DefaultDatabaseOperationsTest extends WordSpec with Matchers with InMemoryDatabase with TezosInMemoryDatabaseSetup with ScalaFutures { "The default database operations" should { val fees: List[FeesRow] = List.tabulate(5) { i => FeesRow( 1 + i, 3 + i, 5 + i, Timestamp.valueOf(LocalDateTime.of(2018, 11, 22, 12, 30)), s"$i-example", None, None ) } "count distinct elements in column properly" in { dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true dbHandler.run(countDistinct("tezos", "fees", "timestamp")).futureValue shouldBe 1 dbHandler.run(countDistinct("tezos", "fees", "low")).futureValue shouldBe 5 } "select distinct elements from column properly" in { dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true dbHandler.run(selectDistinct("tezos", "fees", "timestamp")).futureValue shouldBe List( "2018-11-22 12:30:00" ) dbHandler.run(selectDistinct("tezos", "fees", "low")).futureValue should contain theSameElementsAs List( "1", "2", "3", "4", "5" ) } "select distinct elements from column with 'like' properly" in { dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true dbHandler.run(selectDistinctLike("tezos", "fees", "kind", "1-")).futureValue shouldBe List( "1-example" ) } } }
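The fixture bridges into JDBC by turning a LocalDateTime into a java.sql.Timestamp. Since Java 8 the bridge works in both directions; a minimal sketch (object name illustrative):

import java.sql.Timestamp
import java.time.LocalDateTime

object SqlTimestampSketch extends App {
  val ldt = LocalDateTime.of(2018, 11, 22, 12, 30)
  val ts: Timestamp = Timestamp.valueOf(ldt)   // LocalDateTime -> java.sql.Timestamp
  val back: LocalDateTime = ts.toLocalDateTime // and back, losslessly
  assert(back == ldt)
  println(ts) // 2018-11-22 12:30:00.0
}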
Example 50
Source File: DefaultDatabaseOperationsTest.scala From Conseil with Apache License 2.0 | 5 votes |
package tech.cryptonomic.conseil.indexer.sql import java.sql.Timestamp import java.time.LocalDateTime import org.scalatest.concurrent.ScalaFutures import org.scalatest.{Matchers, WordSpec} import slick.jdbc.PostgresProfile.api._ import tech.cryptonomic.conseil.common.testkit.InMemoryDatabase import tech.cryptonomic.conseil.common.tezos.Tables import tech.cryptonomic.conseil.common.tezos.Tables.{Fees, FeesRow} import tech.cryptonomic.conseil.indexer.sql.DefaultDatabaseOperations._ import tech.cryptonomic.conseil.indexer.tezos.TezosInMemoryDatabaseSetup import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.language.postfixOps class DefaultDatabaseOperationsTest extends WordSpec with Matchers with InMemoryDatabase with TezosInMemoryDatabaseSetup with ScalaFutures { "The default database operations" should { val fees: List[FeesRow] = List.tabulate(5) { i => FeesRow( 1 + i, 3 + i, 5 + i, Timestamp.valueOf(LocalDateTime.of(2018, 11, 22, 12, 30)), s"$i-example", None, None ) } "insert data when table is empty" in { dbHandler.run(insertWhenEmpty[Fees](Tables.Fees, fees)).futureValue shouldBe Some(5) } "do not insert data when table is not empty" in { dbHandler.run(Tables.Fees ++= fees).isReadyWithin(5 seconds) shouldBe true dbHandler.run(insertWhenEmpty[Fees](Tables.Fees, fees)).futureValue.value shouldBe Some(0) } } }
Example 51
Source File: BfsStrategyStopWatchDecorator.scala From apalache with Apache License 2.0 | 5 votes |
package at.forsyte.apalache.tla.bmcmt.search import java.io.{FileWriter, PrintWriter, Writer} import java.time.{Duration, LocalDateTime} import at.forsyte.apalache.tla.bmcmt.search.SearchStrategy.{Finish, FinishOnDeadlock, NextStep} class BfsStrategyStopWatchDecorator(strategy: SearchStrategy, filename: String) extends SearchStrategy { private var currentStep: Int = 0 private var printWriter: Option[PrintWriter] = None private var startTime: LocalDateTime = LocalDateTime.now() override def getCommand: SearchStrategy.Command = { val command = strategy.getCommand command match { case NextStep(stepNo, _, _) => if (stepNo == 0) { currentStep = 0 // create a log file and add a header printWriter = Some(new PrintWriter(new FileWriter(filename, false))) printWriter.get.println("step,total_sec,nanosec_adjustment") // start the timer startTime = LocalDateTime.now() } else { appendCsvEntry() currentStep = stepNo } case Finish() | FinishOnDeadlock() => appendCsvEntry() printWriter.get.close() } command } private def appendCsvEntry(): Unit = { val currentTime = LocalDateTime.now() val duration = Duration.between(startTime, currentTime) printWriter.get.println("%d,%d,%d".format(currentStep, duration.getSeconds, duration.getNano)) printWriter.get.flush() // get the results as soon as possible } override def registerResponse(response: SearchStrategy.Response): Unit = { strategy.registerResponse(response) } }
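The decorator times each BFS step with Duration.between over two LocalDateTime readings, then splits the result into whole seconds plus a nanosecond adjustment for the CSV. The stopwatch pattern in isolation (object name illustrative):

import java.time.{Duration, LocalDateTime}

object StopWatchSketch extends App {
  val start = LocalDateTime.now()
  Thread.sleep(50) // stand-in for the work being timed
  val elapsed = Duration.between(start, LocalDateTime.now())
  println(s"${elapsed.getSeconds}s + ${elapsed.getNano}ns") // same split the decorator writes out
}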
Example 52
Source File: Cookie.scala From zio-web with Apache License 2.0 | 5 votes |
package zio.web.http.model import java.time.LocalDateTime // https://tools.ietf.org/html/rfc6265 final case class Cookie( value: String, domain: Option[String], path: Option[String], expires: Option[LocalDateTime], maxAge: Option[Long], secure: Boolean, httpOnly: Boolean )
Example 53
Source File: Model.scala From scaldy with Apache License 2.0 | 5 votes |
package sample.posts import java.time.LocalDateTime // from http://stackoverflow.com/questions/23832136/scala-domain-object-modeling sealed trait Id { def strVal: String } case class UserId(strVal: String) extends Id case class PostId(strVal: String) extends Id trait Model { def id: Id def creationDate: java.time.LocalDateTime } case class User( id: UserId, creationDate: LocalDateTime, name: String, email: String ) extends Model trait Post extends Model { def id: PostId def user: User def title: String def body: String } trait Moderated { def isApproved: Boolean } case class UnModeratedPost( id: PostId, creationDate: LocalDateTime, user: User, title: String, body: String ) extends Post case class ModeratedPost( id: PostId, creationDate: LocalDateTime, user: User, title: String, body: String, isApproved: Boolean ) extends Post with Moderated
Example 54
Source File: Model.scala From scaldy with Apache License 2.0 | 5 votes |
package sample.beanedposts import java.time.LocalDateTime import scala.beans.BeanProperty // from http://stackoverflow.com/questions/23832136/scala-domain-object-modeling sealed trait Id { @BeanProperty var strVal: String } case class UserId(strVal: String) extends Id case class PostId(strVal: String) extends Id trait Model { @BeanProperty var id: Id @BeanProperty var creationDate: java.time.LocalDateTime } case class User( id: UserId, creationDate: LocalDateTime, name: String, email: String ) extends Model trait Post extends Model { @BeanProperty var id: PostId @BeanProperty var user: User @BeanProperty var title: String @BeanProperty var body: String } trait Moderated { @BeanProperty var isApproved: Boolean } case class UnModeratedPost( id: PostId, creationDate: LocalDateTime, user: User, title: String, body: String ) extends Post case class ModeratedPost( id: PostId, creationDate: LocalDateTime, user: User, title: String, body: String, isApproved: Boolean ) extends Post with Moderated
Example 55
Source File: SXSpout.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.examples.stackex import java.time.LocalDateTime import com.raphtory.core.components.Spout.SpoutTrait import scala.concurrent.duration._ import scala.io import scala.language.postfixOps class SXSpout extends SpoutTrait { val directory = System.getenv().getOrDefault("SX_DIRECTORY", "/app").trim val file_name = System.getenv().getOrDefault("SX_FILE_NAME", "sx_reordered.txt").trim val fileLines = io.Source.fromFile(directory + "/" + file_name).getLines.drop(1).toArray // upstream/master var position = 0 var linesNumber = fileLines.length println("Start: " + LocalDateTime.now()) protected def ProcessSpoutTask(message: Any): Unit = message match { case StartSpout => AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") case "newLine" => try { if (position < linesNumber) { for (i <- 1 to 100) { var line = fileLines(position) sendTuple(line) position += 1 } AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") } else { println("Finished ingestion") } }catch {case e:Exception => println("Finished ingestion")} case _ => println("message not recognized!") } }
Example 56
Source File: LDBCSpout.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.examples.ldbc.spouts import java.time.LocalDateTime import com.raphtory.core.components.Spout.SpoutTrait import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.io import scala.language.postfixOps import scala.concurrent.duration.Duration import scala.concurrent.duration.NANOSECONDS class LDBCSpout extends SpoutTrait { val directory = System .getenv() .getOrDefault("LDBC_DIRECTORY", "/Users/mirate/Documents/phd/ldbc_snb_datagen/social_network/dynamic") .trim val peopleFile = io.Source.fromFile(directory + "/" + "person_0_0.csv").getLines.drop(1).toArray val friendFile = io.Source.fromFile(directory + "/" + "person_knows_person_0_0.csv").getLines.drop(1).toArray // upstream/master var position = 1 var people = peopleFile.length var friends = friendFile.length println("Start: " + LocalDateTime.now()) protected def ProcessSpoutTask(message: Any): Unit = message match { case StartSpout => AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") case "newLine" => for (i <- 1 to 100) { if (position < people) sendTuple("person|" + peopleFile(position)) if (position < friends) sendTuple("person_knows_person|" + friendFile(position)) position += 1 } if (position > friends) println("ingestion Finished") else AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") case _ => println("message not recognized!") } }
Example 57
Source File: TrackAndTraceSpout.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.examples.trackAndTrace.spouts import java.time.LocalDateTime import com.raphtory.core.components.Spout.SpoutTrait import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.io import scala.language.postfixOps class TrackAndTraceSpout extends SpoutTrait { val file_name = System.getenv().getOrDefault("TRACK_AND_TRACE_FILENAME", "/Users/mirate/Documents/phd/locationdataexample.csv").trim val fileLines = io.Source.fromFile(file_name).getLines.drop(3).toArray // upstream/master var position = 0 var linesNumber = fileLines.length println("Starting File ingestion: " + LocalDateTime.now()) println("Lines :" + linesNumber) protected def ProcessSpoutTask(message: Any): Unit = message match { case StartSpout => AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") case "newLine" => if (position < linesNumber) { sendTuple(fileLines(position)) position += 1 AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") } case _ => println("message not recognized!") } }
Example 58
Source File: ChainalysisABSpout.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.examples.blockchain.spouts import java.time.LocalDateTime import com.raphtory.core.components.Spout.SpoutTrait import scala.concurrent.duration._ import scala.io.Source import scala.language.postfixOps class ChainalysisABSpout extends SpoutTrait { val file_name = System.getenv().getOrDefault("CHAINALYSIS_FILENAME", "/home/tsunade/qmul/datasets/chainalysis/abmshort.csv").trim val fl = Source.fromFile(file_name) val fileLines = fl.getLines.drop(1)//.toArray // upstream/master // var position = 0 // var linesNumber = fl.length println("Starting File ingestion: " + LocalDateTime.now()) // println("Lines :" + linesNumber) override def ProcessSpoutTask(message: Any): Unit = message match { case StartSpout => AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") case "newLine" => // if (position < linesNumber) { //val line = if(fileLines.hasNext) fileLines.next() else "" try { sendTuple(fileLines.next()) AllocateSpoutTask(Duration(1, NANOSECONDS), "newLine") }catch{ case e: NoSuchElementException => println("End of file!") } // position += 1 // } case _ => println("message not recognized!") } }
Example 59
Source File: package.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra import java.time.format.DateTimeFormatter import java.time.{ LocalDateTime, ZoneId, ZoneOffset } import java.util.UUID import akka.annotation.InternalApi import com.datastax.oss.driver.api.core.cql.AsyncResultSet import com.datastax.oss.driver.api.core.uuid.Uuids package object query { @InternalApi private[akka] def uuid(timestamp: Long): UUID = { def makeMsb(time: Long): Long = { // copied from Uuids.makeMsb // UUID v1 timestamp must be in 100-nanoseconds interval since 00:00:00.000 15 Oct 1582. val uuidEpoch = LocalDateTime.of(1582, 10, 15, 0, 0).atZone(ZoneId.of("GMT-0")).toInstant.toEpochMilli val timestamp = (time - uuidEpoch) * 10000 var msb = 0L msb |= (0X00000000FFFFFFFFL & timestamp) << 32 msb |= (0X0000FFFF00000000L & timestamp) >>> 16 msb |= (0X0FFF000000000000L & timestamp) >>> 48 msb |= 0X0000000000001000L // sets the version to 1. msb } val now = Uuids.timeBased() new UUID(makeMsb(timestamp), now.getLeastSignificantBits) } }
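makeMsb rebases the millisecond timestamp onto the version-1 UUID epoch (00:00:00, 15 October 1582), counted in 100-nanosecond units. A sketch of just that epoch-offset arithmetic (object name illustrative):

import java.time.{LocalDateTime, ZoneId}

object UuidEpochSketch extends App {
  // The UUID v1 epoch, computed exactly as in the code above.
  val uuidEpochMillis =
    LocalDateTime.of(1582, 10, 15, 0, 0).atZone(ZoneId.of("GMT-0")).toInstant.toEpochMilli
  println(uuidEpochMillis) // -12219292800000
  // Milliseconds since that epoch, scaled to the 100 ns resolution UUIDs use.
  println((System.currentTimeMillis() - uuidEpochMillis) * 10000)
}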
Example 60
Source File: TestTagWriter.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.query import java.nio.ByteBuffer import java.time.{ LocalDateTime, ZoneOffset } import java.util.UUID import akka.actor.ActorSystem import akka.persistence.PersistentRepr import akka.persistence.cassandra.BucketSize import akka.persistence.cassandra.EventsByTagSettings import akka.persistence.cassandra.PluginSettings import akka.persistence.cassandra.formatOffset import akka.persistence.cassandra.journal._ import akka.serialization.Serialization import akka.serialization.Serializers import com.datastax.oss.driver.api.core.CqlSession import com.datastax.oss.driver.api.core.uuid.Uuids private[akka] trait TestTagWriter { def system: ActorSystem def cluster: CqlSession val serialization: Serialization val settings: PluginSettings final def journalSettings: JournalSettings = settings.journalSettings final def eventsByTagSettings: EventsByTagSettings = settings.eventsByTagSettings lazy val (preparedWriteTagMessage, preparedWriteTagMessageWithMeta) = { val writeStatements: CassandraJournalStatements = new CassandraJournalStatements(settings) (cluster.prepare(writeStatements.writeTags(false)), cluster.prepare(writeStatements.writeTags(true))) } def clearAllEvents(): Unit = { cluster.execute(s"truncate ${journalSettings.keyspace}.${eventsByTagSettings.tagTable.name}") } def writeTaggedEvent( time: LocalDateTime, pr: PersistentRepr, tags: Set[String], tagPidSequenceNr: Long, bucketSize: BucketSize): Unit = { val timestamp = time.toInstant(ZoneOffset.UTC).toEpochMilli write(pr, tags, tagPidSequenceNr, uuid(timestamp), bucketSize) } def writeTaggedEvent( persistent: PersistentRepr, tags: Set[String], tagPidSequenceNr: Long, bucketSize: BucketSize): Unit = { val nowUuid = Uuids.timeBased() write(persistent, tags, tagPidSequenceNr, nowUuid, bucketSize) } def writeTaggedEvent( persistent: PersistentRepr, tags: Set[String], tagPidSequenceNr: Long, uuid: UUID, bucketSize: BucketSize): Unit = write(persistent, tags, tagPidSequenceNr, uuid, bucketSize) private def write( pr: PersistentRepr, tags: Set[String], tagPidSequenceNr: Long, uuid: UUID, bucketSize: BucketSize): Unit = { val event = pr.payload.asInstanceOf[AnyRef] val serializer = serialization.findSerializerFor(event) val serialized = ByteBuffer.wrap(serialization.serialize(event).get) val serManifest = Serializers.manifestFor(serializer, pr) val timeBucket = TimeBucket(Uuids.unixTimestamp(uuid), bucketSize) tags.foreach(tag => { val bs = preparedWriteTagMessage .bind() .setString("tag_name", tag) .setLong("timebucket", timeBucket.key) .setUuid("timestamp", uuid) .setLong("tag_pid_sequence_nr", tagPidSequenceNr) .setByteBuffer("event", serialized) .setString("event_manifest", pr.manifest) .setString("persistence_id", pr.persistenceId) .setInt("ser_id", serializer.identifier) .setString("ser_manifest", serManifest) .setString("writer_uuid", "ManualWrite") .setLong("sequence_nr", pr.sequenceNr) cluster.execute(bs) }) system.log.debug( "Written event: {} Uuid: {} Timebucket: {} TagPidSeqNr: {}", pr.payload, formatOffset(uuid), timeBucket, tagPidSequenceNr) } }
Example 61
Source File: Sessionize.scala From ml-in-scala with The Unlicense | 5 votes |
package org.akozlov.chapter06 import java.io._ import java.time.ZoneOffset import java.time.LocalDateTime import java.time.format.DateTimeFormatter import org.apache.spark.{SparkConf,SparkContext} import org.apache.spark.storage.StorageLevel object Sessionize extends App { val sc = new SparkContext("local[8]", "Sessionize", new SparkConf()) val checkoutPattern = ".*>checkout.*".r.pattern // a basic page view structure case class PageView(ts: String, path: String) extends Serializable with Ordered[PageView] { override def toString: String = { s"($ts #$path)" } def compare(other: PageView) = ts compare other.ts } // represent a session case class Session[A <: PageView](id: String, visits: Seq[A]) extends Serializable { override def toString: String = { val vsts = visits.mkString("[", ",", "]") s"($id -> $vsts)" } } def toEpochSeconds(str: String) = { LocalDateTime.parse(str, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")).toEpochSecond(ZoneOffset.UTC) } val sessions = sc.textFile("data/clickstream") .map(line => {val parts = line.split("\t"); (parts(4), new PageView(parts(0), parts(20)))}) .groupByKey.map(x => { new Session(x._1, x._2.toSeq.sorted) } ) .cache // sessions.take(100).foreach(println) def findAllCheckoutSessions(s: Session[PageView]) = { s.visits.tails.filter { _ match { case PageView(ts1, "mycompanycom>homepage") :: PageView(ts2, page) :: tail if (page != "mycompanycom>homepage" ) => true; case _ => false } } .foldLeft(Seq[Session[PageView]]()) { case (r, x) => { x.find(y => checkoutPattern.matcher(y.path).matches) match { case Some(checkout) if (toEpochSeconds(checkout.ts) > toEpochSeconds(x.head.ts) + 60) => r.:+(new Session(s.id, x.slice(0, x.indexOf(checkout)))) case _ => r } } } } val prodLandingSessions = sessions.flatMap(findAllCheckoutSessions) prodLandingSessions.collect.foreach(println) sc.stop() }
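toEpochSeconds parses the raw log timestamp with an explicit pattern and pins it to UTC. The parsing step in isolation (object name illustrative):

import java.time.{LocalDateTime, ZoneOffset}
import java.time.format.DateTimeFormatter

object ParseToEpochSketch extends App {
  val fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
  val epochSeconds = LocalDateTime.parse("2009-02-13 23:31:30", fmt).toEpochSecond(ZoneOffset.UTC)
  println(epochSeconds) // 1234567890
}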
Example 64
Source File: FileNamePatternSyntax.scala From odin with Apache License 2.0 | 5 votes |
package io.odin.config import java.time.LocalDateTime trait FileNamePattern { def extract(dateTime: LocalDateTime): String } trait FileNamePatternSyntax { case class Value(value: String) extends FileNamePattern { def extract(dateTime: LocalDateTime): String = value } case object year extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getYear) } case object month extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getMonthValue) } case object day extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getDayOfMonth) } case object hour extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getHour) } case object minute extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getMinute) } case object second extends FileNamePattern { def extract(dateTime: LocalDateTime): String = padWithZero(dateTime.getSecond) } private[odin] def padWithZero(value: Int): String = f"$value%02d" }
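A minimal sketch of the extractors above; they become available through the io.odin.config package object shown in the next example:

import java.time.LocalDateTime
import io.odin.config._

val dt = LocalDateTime.of(2020, 1, 5, 9, 3, 7)
year.extract(dt)   // "2020" (f"$value%02d" only pads values below two digits)
month.extract(dt)  // "01"
second.extract(dt) // "07"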
Example 65
Source File: package.scala From odin with Apache License 2.0 | 5 votes |
package io.odin import java.time.LocalDateTime import cats.Monad import cats.effect.Timer import cats.instances.list._ import cats.syntax.all._ import io.odin.internal.StringContextLength import io.odin.loggers.DefaultLogger import scala.annotation.tailrec package object config extends FileNamePatternSyntax { def levelRouting[F[_]: Timer: Monad](router: Map[Level, Logger[F]]): DefaultBuilder[F] = new DefaultBuilder[F]({ default: Logger[F] => new DefaultLogger[F]() { def log(msg: LoggerMessage): F[Unit] = router.getOrElse(msg.level, default).log(msg) override def log(msgs: List[LoggerMessage]): F[Unit] = { msgs.groupBy(_.level).toList.traverse_ { case (level, msgs) => router.getOrElse(level, default).log(msgs) } } } }) implicit class FileNamePatternInterpolator(private val sc: StringContext) extends AnyVal { def file(ps: FileNamePattern*): LocalDateTime => String = { StringContextLength.checkLength(sc, ps) dt => { @tailrec def rec(args: List[FileNamePattern], parts: List[String], acc: StringBuilder): String = { args match { case Nil => acc.append(parts.head).toString() case head :: tail => rec(tail, parts.tail, acc.append(parts.head).append(head.extract(dt))) } } rec(ps.toList, sc.parts.toList, new StringBuilder()) } } } implicit def str2fileNamePattern(str: String): FileNamePattern = { Value(str) } }
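Putting the interpolator above to work: file builds a LocalDateTime => String, which is the behaviour the spec in Example 67 verifies:

import java.time.LocalDateTime
import io.odin.config._

val logName: LocalDateTime => String = file"app-$year-$month-$day.log"
logName(LocalDateTime.of(2020, 1, 5, 9, 3, 7)) // "app-2020-01-05.log"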
Example 66
Source File: OdinSpec.scala From odin with Apache License 2.0 | 5 votes |
package io.odin import java.time.LocalDateTime import cats.effect.{Clock, Timer} import cats.{Applicative, Eval} import io.odin.formatter.Formatter import io.odin.meta.Position import org.scalacheck.{Arbitrary, Cogen, Gen} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatestplus.scalacheck.{Checkers, ScalaCheckDrivenPropertyChecks} import org.typelevel.discipline.Laws import scala.concurrent.duration.{FiniteDuration, TimeUnit} trait OdinSpec extends AnyFlatSpec with Matchers with Checkers with ScalaCheckDrivenPropertyChecks with EqInstances { def checkAll(name: String, ruleSet: Laws#RuleSet): Unit = { for ((id, prop) <- ruleSet.all.properties) it should (name + "." + id) in { check(prop) } } def zeroTimer[F[_]](implicit F: Applicative[F]): Timer[F] = new Timer[F] { def clock: Clock[F] = new Clock[F] { def realTime(unit: TimeUnit): F[Long] = F.pure(0L) def monotonic(unit: TimeUnit): F[Long] = F.pure(0L) } def sleep(duration: FiniteDuration): F[Unit] = ??? } val lineSeparator: String = System.lineSeparator() val nonEmptyStringGen: Gen[String] = Gen.nonEmptyListOf(Gen.alphaNumChar).map(_.mkString) val levelGen: Gen[Level] = Gen.oneOf(Level.Trace, Level.Debug, Level.Info, Level.Warn, Level.Error) implicit val levelArbitrary: Arbitrary[Level] = Arbitrary(levelGen) val positionGen: Gen[Position] = for { fileName <- nonEmptyStringGen enclosureName <- Gen.uuid.map(_.toString) packageName <- nonEmptyStringGen line <- Gen.posNum[Int] } yield { Position(fileName, enclosureName, packageName, line) } implicit val positionArbitrary: Arbitrary[Position] = Arbitrary(positionGen) val loggerMessageGen: Gen[LoggerMessage] = { val startTime = System.currentTimeMillis() for { level <- levelGen msg <- Gen.alphaNumStr context <- Gen.mapOfN(20, nonEmptyStringGen.flatMap(key => nonEmptyStringGen.map(key -> _))) exception <- Gen.option(Arbitrary.arbitrary[Throwable]) position <- positionGen threadName <- nonEmptyStringGen timestamp <- Gen.choose(0, startTime) } yield { LoggerMessage( level = level, message = Eval.now(msg), context = context, exception = exception, position = position, threadName = threadName, timestamp = timestamp ) } } implicit val loggerMessageArbitrary: Arbitrary[LoggerMessage] = Arbitrary(loggerMessageGen) implicit val cogenLoggerMessage: Cogen[LoggerMessage] = Cogen[LoggerMessage]((msg: LoggerMessage) => msg.level.hashCode().toLong + msg.message.value.hashCode().toLong) val formatterGen: Gen[Formatter] = Gen.oneOf(Formatter.default, Formatter.colorful) implicit val formatterArbitrary: Arbitrary[Formatter] = Arbitrary(formatterGen) val localDateTimeGen: Gen[LocalDateTime] = for { year <- Gen.choose(0, LocalDateTime.now().getYear) month <- Gen.choose(1, 12) day <- Gen.choose(1, 28) hour <- Gen.choose(0, 23) minute <- Gen.choose(0, 59) second <- Gen.choose(0, 59) } yield { LocalDateTime.of(year, month, day, hour, minute, second) } implicit val localDateTimeArbitrary: Arbitrary[LocalDateTime] = Arbitrary(localDateTimeGen) }
Example 67
Source File: FileNamePatternSyntaxSpec.scala From odin with Apache License 2.0 | 5 votes |
package io.odin.config import java.time.LocalDateTime import io.odin.OdinSpec class FileNamePatternSyntaxSpec extends OdinSpec { def checkPadding(value: Int): String = { if (value < 10) s"0$value" else value.toString } "year" should "extract current year" in { forAll { dt: LocalDateTime => year.extract(dt) shouldBe checkPadding(dt.getYear) } } "month" should "extract current month" in { forAll { dt: LocalDateTime => month.extract(dt) shouldBe checkPadding(dt.getMonthValue) } } "day" should "extract current day" in { forAll { dt: LocalDateTime => day.extract(dt) shouldBe checkPadding(dt.getDayOfMonth) } } "hour" should "extract current hour" in { forAll { dt: LocalDateTime => hour.extract(dt) shouldBe checkPadding(dt.getHour) } } "minute" should "extract current minute" in { forAll { dt: LocalDateTime => minute.extract(dt) shouldBe checkPadding(dt.getMinute) } } "second" should "extract current second" in { forAll { dt: LocalDateTime => second.extract(dt) shouldBe checkPadding(dt.getSecond) } } "file" should "process a single argument" in { forAll { dt: LocalDateTime => file"$year".apply(dt) shouldBe checkPadding(dt.getYear) } } "file" should "process an argument in the beginning" in { forAll { dt: LocalDateTime => file"$year year".apply(dt) shouldBe s"${checkPadding(dt.getYear)} year" } } "file" should "process an argument in the end" in { forAll { dt: LocalDateTime => file"Year $year".apply(dt) shouldBe s"Year ${checkPadding(dt.getYear)}" } } "file" should "process argument in the middle" in { forAll { dt: LocalDateTime => file"It's $year year".apply(dt) shouldBe s"It's ${checkPadding(dt.getYear)} year" } } "file" should "process multiple arguments" in { forAll { dt: LocalDateTime => file"$year-$month-$day".apply(dt) shouldBe s"${checkPadding(dt.getYear)}-${checkPadding(dt.getMonthValue)}-${checkPadding(dt.getDayOfMonth)}" } } "file" should "process a string variable" in { forAll { (dt: LocalDateTime, str: String) => file"$str".apply(dt) shouldBe str } } }
Example 68
Source File: WFEntity.scala From Scala-Reactive-Programming with MIT License | 5 votes |
package com.packt.publishing.wf.consumer.impl import java.time.LocalDateTime import akka.Done import com.lightbend.lagom.scaladsl.persistence.PersistentEntity import com.packt.publishing.wf.consumer.api.models.WeatherForcasting class WFEntity extends PersistentEntity { override type Command = WFCommand[_] override type Event = WFEvent override type State = WFState override def initialState = WFState("None","0", LocalDateTime.now().toString) override def behavior: Behavior = { case WFState(city, temperature, _) => Actions().onCommand[SaveNewWF, Done] { case (SaveNewWF(city, temperature), ctx, state) => println(s"New WF message came to Lagom Kafka server: ${city} ${temperature}") val wf = WeatherForcasting(city,temperature) ctx.thenPersist(WFSaved(wf)) { msgSaved: WFSaved => ctx.reply(Done) } }.onEvent { case (WFSaved(wf), state) => println(s"FYI, New WF change event fired, which is converted into a Message.") WFState(city, temperature, LocalDateTime.now().toString) } } }
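A hypothetical caller for the entity above; it assumes a Lagom PersistentEntityRegistry (here persistentEntityRegistry) with this entity registered:

// Hypothetical: address the entity by city and send it a command.
val ref = persistentEntityRegistry.refFor[WFEntity]("London")
ref.ask(SaveNewWF("London", "20")) // Future[Done]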
Example 70
Source File: SparkEnv.scala From comet-data-pipeline with Apache License 2.0 | 5 votes |
package com.ebiznext.comet.config import java.time.LocalDateTime import java.time.format.DateTimeFormatter import com.typesafe.scalalogging.StrictLogging import org.apache.spark.SparkConf import org.apache.spark.sql.SparkSession /* The enclosing class declaration was truncated in this listing; a minimal reconstruction is assumed here, with `settings` and `config` as assumed constructor members. */ class SparkEnv(settings: Settings, config: SparkConf) extends StrictLogging { lazy val session: SparkSession = { val session = if (settings.comet.hive) SparkSession.builder.config(config).enableHiveSupport().getOrCreate() else SparkSession.builder.config(config).getOrCreate() logger.info("Spark Version -> " + session.version) logger.info(session.conf.getAll.mkString("\n")) session } }
Example 71
Source File: AccountRepository.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package repository import java.time.LocalDateTime import cats._ import cats.data._ import cats.instances.all._ import cats.effect.IO import common._ import model.{ Account, Balance } trait AccountRepository { def query(no: String): IO[ErrorOr[Option[Account]]] def store(a: Account): IO[ErrorOr[Account]] def query(openedOn: LocalDateTime): IO[ErrorOr[Seq[Account]]] def all: IO[ErrorOr[Seq[Account]]] def balance(no: String): IO[ErrorOr[Balance]] = query(no).map { case Right(Some(a)) => Right(a.balance) case Right(None) => Left(NonEmptyChain(s"No account exists with no $no")) case Left(x) => Left(x) } }
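A short sketch of the derived balance combinator above, run against the in-memory interpreter from Example 72; the account number is a placeholder:

import cats.effect.IO

val repo: AccountRepository = AccountRepositoryInMemory
// Left("No account exists with no a-123") until the account is stored.
val b: IO[ErrorOr[Balance]] = repo.balance("a-123")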
Example 72
Source File: AccountRepository.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package repository package interpreter import java.time.LocalDateTime import scala.collection.mutable.{ Map => MMap } import cats._ import cats.data._ import cats.instances.all._ import cats.effect.IO import common._ import model.{ Account, Balance } class AccountRepositoryInMemory extends AccountRepository { lazy val repo = MMap.empty[String, Account] def query(no: String): IO[ErrorOr[Option[Account]]] = IO(Right(repo.get(no))) def store(a: Account): IO[ErrorOr[Account]] = IO { val _ = repo += ((a.no, a)) Right(a) } def query(openedOn: LocalDateTime): IO[ErrorOr[Seq[Account]]] = IO { Right(repo.values.filter(_.dateOfOpen.getOrElse(today) == openedOn).toSeq) } def all: IO[ErrorOr[Seq[Account]]] = IO(Right(repo.values.toSeq)) } object AccountRepositoryInMemory extends AccountRepositoryInMemory
Example 73
Source File: AccountRepositoryInMemory.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package repository package interpreter import java.time.LocalDateTime import scala.collection.immutable.Map import cats._ import cats.data._ import cats.implicits._ import cats.instances.all._ import cats.effect.concurrent.Ref import cats.effect.Sync import common._ import model.account._ // Constructor private for the interpreter to prevent the Ref from leaking // access through smart constructor below final class AccountRepositoryInMemory[M[_]: Monad] private (repo: Ref[M, Map[AccountNo, Account]]) extends AccountRepository[M] { def query(no: AccountNo): M[Option[Account]] = repo.get.map(_.get(no)) def store(a: Account): M[Account] = repo.update(_ + ((a.no, a))).map(_ => a) def query(openedOn: LocalDateTime): M[List[Account]] = repo.get.map(_.values.filter(_.dateOfOpen.getOrElse(today) == openedOn).toList) def all: M[List[Account]] = repo.get.map(_.values.toList) def balance(no: AccountNo): M[Option[Balance]] = query(no).map(_.map(_.balance)) } // Smart constructor object AccountRepositoryInMemory { def make[M[_]: Sync]: M[AccountRepositoryInMemory[M]] = Ref.of[M, Map[AccountNo, Account]](Map.empty).map(new AccountRepositoryInMemory(_)) }
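Wiring the Ref-based interpreter above could look like this, with IO standing in for M; `account` is an assumed, already-constructed Account:

import cats.effect.IO

val program: IO[Option[Account]] = for {
  repo  <- AccountRepositoryInMemory.make[IO]
  _     <- repo.store(account) // `account` assumed built elsewhere
  found <- repo.query(account.no)
} yield found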
Example 74
Source File: AccountServiceInterpreter.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package service package interpreter import java.time.LocalDateTime import cats._ import cats.implicits._ import cats.mtl._ import squants.market._ import common._ import model.account._ import AccountType._ import repository.AccountRepository class AccountServiceInterpreter[M[_]: MonadAppException] (implicit A: ApplicativeAsk[M, AccountRepository[M]]) extends AccountService[M, Account, Amount, Balance] { val E = implicitly[MonadAppException[M]] def open(no: AccountNo, name: AccountName, rate: Option[BigDecimal], openingDate: Option[LocalDateTime], accountType: AccountType): M[Account] = { for { repo <- A.ask _ <- repo.query(no).ensureOr(_ => AlreadyExistingAccount(no))(!_.isDefined) account <- createOrUpdate(repo, no, name, rate, openingDate, accountType) } yield account } private def createOrUpdate(repo: AccountRepository[M], no: AccountNo, name: AccountName, rate: Option[BigDecimal], openingDate: Option[LocalDateTime], accountType: AccountType): M[Account] = accountType match { case Checking => createOrUpdate(repo, Account.checkingAccount(no, name, openingDate, None, Balance())) case Savings => rate.map(r => createOrUpdate(repo, Account.savingsAccount(no, name, r, openingDate, None, Balance()))) .getOrElse(E.raiseError(RateMissingForSavingsAccount)) } private def createOrUpdate(repo: AccountRepository[M], errorOrAccount: ErrorOr[Account]): M[Account] = errorOrAccount match { case Left(errs) => E.raiseError(MiscellaneousDomainExceptions(errs)) case Right(a) => repo.store(a) } def close(no: AccountNo, closeDate: Option[LocalDateTime]): M[Account] = { for { repo <- A.ask maybeAccount <- repo.query(no).ensureOr(_ => NonExistingAccount(no))(_.isDefined) account <- createOrUpdate(repo, Account.close(maybeAccount.get, closeDate.getOrElse(today))) } yield account } def balance(no: AccountNo): M[Option[Balance]] = for { repo <- A.ask b <- repo.balance(no) } yield b private trait DC private object D extends DC private object C extends DC def debit(no: AccountNo, amount: Money): M[Account] = update(no, amount, D) def credit(no: AccountNo, amount: Money): M[Account] = update(no, amount, C) private def update(no: AccountNo, amount: Money, debitCredit: DC): M[Account] = for { repo <- A.ask maybeAccount <- repo.query(no).ensureOr(_ => NonExistingAccount(no))(_.isDefined) multiplier = if (debitCredit == D) (-1) else 1 account <- createOrUpdate(repo, Account.updateBalance(maybeAccount.get, multiplier * amount)) } yield account def transfer(from: AccountNo, to: AccountNo, amount: Money): M[(Account, Account)] = for { a <- debit(from, amount) b <- credit(to, amount) } yield ((a, b)) }
Example 75
Source File: AccountRepository.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package repository package interpreter import java.time.LocalDateTime import scala.collection.mutable.{ Map => MMap } import cats._ import cats.data._ import cats.implicits._ import cats.instances.all._ import common._ import model.{ Account, Balance } class AccountRepositoryInMemory[M[+_]](implicit me: MonadError[M, Throwable]) extends AccountRepository[M] { lazy val repo = MMap.empty[String, Account] def query(no: String): M[Option[Account]] = repo.get(no).pure[M] def store(a: Account): M[Account] = { val _ = repo += ((a.no, a)) a.pure[M] } def query(openedOn: LocalDateTime): M[Seq[Account]] = { repo.values.filter(_.dateOfOpen.getOrElse(today) == openedOn).toSeq.pure[M] } def all: M[Seq[Account]] = repo.values.toSeq.pure[M] def balance(no: String): M[Balance] = repo.get(no).map(_.balance) match { case Some(b) => b.pure[M] case _ => me.raiseError[Balance](new IllegalArgumentException(s"Non existing account $no")) } }
Example 76
Source File: AccountService.scala From frdomain-extras with Apache License 2.0 | 5 votes |
package frdomain.ch6 package domain package service package interpreter import java.time.LocalDateTime import cats._ import cats.data._ import cats.implicits._ import cats.instances.all._ import model.{ Account, Balance } import common._ import repository.AccountRepository class AccountServiceInterpreter[M[+_]](implicit me: MonadError[M, Throwable]) extends AccountService[M, Account, Amount, Balance] { def open(no: String, name: String, rate: Option[BigDecimal], openingDate: Option[LocalDateTime], accountType: AccountType) = Kleisli[M, AccountRepository[M], Account] { (repo: AccountRepository[M]) => repo.query(no).flatMap(maybeAccount => doOpenAccount(repo, maybeAccount, no, name, rate, openingDate, accountType )) } private def doOpenAccount(repo: AccountRepository[M], maybeAccount: Option[Account], no: String, name: String, rate: Option[BigDecimal], openingDate: Option[LocalDateTime], accountType: AccountType): M[Account] = { maybeAccount.map(_ => me.raiseError(new IllegalArgumentException(s"Account no $no already exists"))) .getOrElse(createOrUpdate(repo, no, name, rate, openingDate, accountType)) } private def createOrUpdate(repo: AccountRepository[M], no: String, name: String, rate: Option[BigDecimal], openingDate: Option[LocalDateTime], accountType: AccountType): M[Account] = accountType match { case Checking => createOrUpdate(repo, Account.checkingAccount(no, name, openingDate, None, Balance())) case Savings => rate.map(r => createOrUpdate(repo, Account.savingsAccount(no, name, r, openingDate, None, Balance()))) .getOrElse(me.raiseError(new IllegalArgumentException("Rate missing for savings account"))) } private def createOrUpdate(repo: AccountRepository[M], errorOrAccount: ErrorOr[Account]): M[Account] = errorOrAccount match { case Left(errs) => me.raiseError(new IllegalArgumentException(s"${errs.toList}")) case Right(a) => repo.store(a) } def close(no: String, closeDate: Option[LocalDateTime]) = Kleisli[M, AccountRepository[M], Account] { (repo: AccountRepository[M]) => for { maybeAccount <- repo.query(no) account <- maybeAccount.map(a => createOrUpdate(repo, Account.close(a, closeDate.getOrElse(today)))) .getOrElse(me.raiseError(new IllegalArgumentException(s"Account no $no does not exist"))) } yield account } def debit(no: String, amount: Amount) = update(no, amount, D) def credit(no: String, amount: Amount) = update(no, amount, C) private trait DC private case object D extends DC private case object C extends DC private def update(no: String, amount: Amount, debitCredit: DC) = Kleisli[M, AccountRepository[M], Account] { (repo: AccountRepository[M]) => for { maybeAccount <- repo.query(no) multiplier = if (debitCredit == D) (-1) else 1 account <- maybeAccount.map(a => createOrUpdate(repo, Account.updateBalance(a, multiplier * amount))) .getOrElse(me.raiseError(new IllegalArgumentException(s"Account no $no does not exist"))) } yield account } def balance(no: String) = Kleisli[M, AccountRepository[M], Balance] { (repo: AccountRepository[M]) => repo.balance(no) } def transfer(from: String, to: String, amount: Amount): Kleisli[M, AccountRepository[M], (Account, Account)] = for { a <- debit(from, amount) b <- credit(to, amount) } yield ((a, b)) }
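Since every operation above is a Kleisli over AccountRepository[M], callers inject the interpreter with run; a sketch using IO (which has a MonadError[IO, Throwable] instance) and the in-memory repository from Example 75, with Checking taken from the example's AccountType ADT:

import cats.effect.IO

val service = new AccountServiceInterpreter[IO]
val repo    = new AccountRepositoryInMemory[IO]
val opened: IO[Account] =
  service.open("a-123", "John", None, None, Checking).run(repo)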
Example 77
Source File: FinaglePostgresDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.finagle.postgres import java.nio.charset.Charset import java.time.{ LocalDate, LocalDateTime, ZoneId } import java.util.{ Date, UUID } import com.twitter.finagle.postgres.values.ValueDecoder import com.twitter.util.Return import com.twitter.util.Throw import com.twitter.util.Try import io.getquill.FinaglePostgresContext import io.getquill.util.Messages.fail import io.netty.buffer.ByteBuf trait FinaglePostgresDecoders { this: FinaglePostgresContext[_] => import ValueDecoder._ type Decoder[T] = FinaglePostgresDecoder[T] case class FinaglePostgresDecoder[T]( vd: ValueDecoder[T], default: Throwable => T = (e: Throwable) => fail(e.getMessage) ) extends BaseDecoder[T] { override def apply(index: Index, row: ResultRow): T = row.getTry[T](index)(vd) match { case Return(r) => r case Throw(e) => default(e) } def orElse[U](f: U => T)(implicit vdu: ValueDecoder[U]): FinaglePostgresDecoder[T] = { val mappedVd = vdu.map[T](f) FinaglePostgresDecoder[T]( new ValueDecoder[T] { def decodeText(recv: String, text: String): Try[T] = { val t = vd.decodeText(recv, text) if (t.isReturn) t else mappedVd.decodeText(recv, text) } def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[T] = { val t = vd.decodeBinary(recv, bytes, charset) if (t.isReturn) t else mappedVd.decodeBinary(recv, bytes, charset) } } ) } } implicit def decoderDirectly[T](implicit vd: ValueDecoder[T]): Decoder[T] = FinaglePostgresDecoder(vd) def decoderMapped[U, T](f: U => T)(implicit vd: ValueDecoder[U]): Decoder[T] = FinaglePostgresDecoder(vd.map[T](f)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = FinaglePostgresDecoder[Option[T]]( new ValueDecoder[Option[T]] { def decodeText(recv: String, text: String): Try[Option[T]] = Return(d.vd.decodeText(recv, text).toOption) def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[Option[T]] = Return(d.vd.decodeBinary(recv, bytes, charset).toOption) }, _ => None ) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = decoderMapped[I, O](mapped.f)(d.vd) implicit val stringDecoder: Decoder[String] = decoderDirectly[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoderDirectly[BigDecimal] implicit val booleanDecoder: Decoder[Boolean] = decoderDirectly[Boolean] implicit val shortDecoder: Decoder[Short] = decoderDirectly[Short] implicit val byteDecoder: Decoder[Byte] = decoderMapped[Short, Byte](_.toByte) implicit val intDecoder: Decoder[Int] = decoderDirectly[Int].orElse[Long](_.toInt) implicit val longDecoder: Decoder[Long] = decoderDirectly[Long].orElse[Int](_.toLong) implicit val floatDecoder: Decoder[Float] = decoderDirectly[Float].orElse[Double](_.toFloat) implicit val doubleDecoder: Decoder[Double] = decoderDirectly[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoderDirectly[Array[Byte]] implicit val dateDecoder: Decoder[Date] = decoderMapped[LocalDateTime, Date](d => Date.from(d.atZone(ZoneId.systemDefault()).toInstant)) implicit val localDateDecoder: Decoder[LocalDate] = decoderDirectly[LocalDate].orElse[LocalDateTime](_.toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoderDirectly[LocalDateTime].orElse[LocalDate](_.atStartOfDay) implicit val uuidDecoder: Decoder[UUID] = decoderDirectly[UUID] }
Example 78
Source File: PostgresEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.ndbc import java.time.{ LocalDate, LocalDateTime, ZoneOffset } import java.util.{ Date, UUID } import io.getquill.dsl.CoreDsl import io.trane.ndbc.PostgresPreparedStatement import scala.language.implicitConversions import scala.reflect.ClassTag trait LowPriorityPostgresImplicits { this: CoreDsl => implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: BaseEncoder[O]): BaseEncoder[I] = mappedBaseEncoder(mapped, e) } trait PostgresEncoders extends LowPriorityPostgresImplicits with io.getquill.dsl.LowPriorityImplicits { this: NdbcContext[_, _, PostgresPreparedStatement, _] => type Encoder[T] = BaseEncoder[T] protected val zoneOffset: ZoneOffset def encoder[T, U](f: PostgresPreparedStatement => (Int, U) => PostgresPreparedStatement)(implicit ev: T => U): Encoder[T] = (idx, v, ps) => if (v == null) ps.setNull(idx) else f(ps)(idx, v) def arrayEncoder[T, U: ClassTag, Col <: Seq[T]](f: PostgresPreparedStatement => (Int, Array[U]) => PostgresPreparedStatement)(ev: T => U): Encoder[Col] = (idx, v, ps) => if (v == null) ps.setNull(idx) else f(ps)(idx, v.map(ev).toArray[U]) implicit override def anyValMappedEncoder[I <: AnyVal, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] = mappedEncoder implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] = (idx, v, ps) => if (v == null) ps.setNull(idx) else v match { case None => ps.setNull(idx) case Some(v) => e(idx, v, ps) } implicit def toLocalDateTime(d: Date) = LocalDateTime.ofInstant(d.toInstant(), zoneOffset) implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID) implicit val stringEncoder: Encoder[String] = encoder(_.setString) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(_.setBigDecimal)(_.bigDecimal) implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean) implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte) implicit val shortEncoder: Encoder[Short] = encoder(_.setShort) implicit val intEncoder: Encoder[Int] = encoder(_.setInteger) implicit val longEncoder: Encoder[Long] = encoder(_.setLong) implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat) implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(_.setByteArray) implicit val dateEncoder: Encoder[Date] = encoder(_.setLocalDateTime) implicit val localDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(_.setLocalDateTime) implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayEncoder[String, String, Col](_.setStringArray)(identity) implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, java.math.BigDecimal, Col](_.setBigDecimalArray)(_.bigDecimal) implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayEncoder[Boolean, java.lang.Boolean, Col](_.setBooleanArray)(_.booleanValue) implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayEncoder[Byte, java.lang.Short, Col](_.setShortArray)(identity) implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayEncoder[Short, java.lang.Short, Col](_.setShortArray)(_.shortValue) implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayEncoder[Int, java.lang.Integer, Col](_.setIntegerArray)(_.intValue) implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayEncoder[Long, java.lang.Long, Col](_.setLongArray)(_.longValue) implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayEncoder[Float, java.lang.Float, Col](_.setFloatArray)(_.floatValue) implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayEncoder[Double, java.lang.Double, Col](_.setDoubleArray)(_.doubleValue) implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, LocalDateTime, Col](_.setLocalDateTimeArray)(identity) implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, LocalDate, Col](_.setLocalDateArray)(identity) }
Example 79
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime } import java.util.Date import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } trait Encoders { this: JAsyncContext[_, _, _] => type Encoder[T] = AsyncEncoder[T] type EncoderSqlType = SqlTypes.SqlTypes case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder.apply(index, value, row) } def encoder[T](sqlType: DecoderSqlType): Encoder[T] = encoder(identity[T], sqlType) def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] = AsyncEncoder[T](sqlType)(new BaseEncoder[T] { def apply(index: Index, value: T, row: PrepareRow) = row :+ f(value) }) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = AsyncEncoder(e.sqlType)(new BaseEncoder[I] { def apply(index: Index, value: I, row: PrepareRow) = e(index, mapped.f(value), row) }) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] { def apply(index: Index, value: Option[T], row: PrepareRow) = { value match { case None => nullEncoder(index, null, row) case Some(v) => d(index, v, row) } } }) private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL) implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]((bd: BigDecimal) => bd.bigDecimal, SqlTypes.REAL) implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN) implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT) implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT) implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER) implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT) implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT) implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY) implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP) implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE) implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP) implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP) implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] = MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId))) implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] = MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId))) implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] = MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth)) implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] = MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond)) implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] = MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli)) implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder) }
Example 80
Source File: DagTest.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.codegen import java.time.LocalDateTime import io.getquill.codegen.dag.CatalogBasedAncestry import org.scalatest.BeforeAndAfter import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers._ import scala.reflect.{ ClassTag, classTag } // I.e. something the type-ancestry does not know about class UnknownClass class CodeGeneratorRunnerDagTest extends AnyFunSuite with BeforeAndAfter { case class TestCase[O](one: ClassTag[_], twos: Seq[ClassTag[_]], result: ClassTag[_]) val cases = Seq( TestCase(classTag[Int], Seq(classTag[Long]), classTag[Long]), TestCase(classTag[Long], Seq(classTag[Boolean], classTag[Int], classTag[Byte], classTag[Long]), classTag[Long]), TestCase(classTag[Int], Seq(classTag[Boolean], classTag[Int], classTag[Byte]), classTag[Int]), TestCase( classTag[BigDecimal], Seq( classTag[Boolean], classTag[Int], classTag[Byte], classTag[Long], classTag[BigDecimal] ), classTag[BigDecimal] ), TestCase( classTag[String], Seq( classTag[Boolean], classTag[Int], classTag[Long], classTag[Byte], classTag[BigDecimal], classTag[java.time.LocalDate], classTag[java.time.LocalDateTime] ), classTag[String] ), TestCase(classTag[java.time.LocalDate], Seq(classTag[LocalDateTime]), classTag[LocalDateTime]), TestCase(classTag[Short], Seq(classTag[Boolean], classTag[Byte]), classTag[Short]), TestCase(classTag[Short], Seq(classTag[Int]), classTag[Int]), TestCase(classTag[Int], Seq(classTag[Short]), classTag[Int]), TestCase(classTag[UnknownClass], Seq(classTag[String]), classTag[String]), TestCase(classTag[UnknownClass], Seq(classTag[UnknownClass]), classTag[UnknownClass]), // Don't know ancestry of unknown class to an Int (or any kind) so go directly to root of the ancestry i.e. String. TestCase(classTag[UnknownClass], Seq(classTag[Int]), classTag[String]) ) val casesIter = for { cas <- cases two <- cas.twos } yield (cas.one, two, cas.result) casesIter.foreach({ case (one, two, expected) => test(s"Common Ancestry between ${one} and ${two} should be ${expected}") { new CatalogBasedAncestry().apply(one, two) should equal(expected) } }) }
Example 81
Source File: FinagleMysqlEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.finagle.mysql import java.sql.Timestamp import java.time.{ LocalDate, LocalDateTime } import java.util.{ Date, UUID } import com.twitter.finagle.mysql.CanBeParameter._ import com.twitter.finagle.mysql.Parameter.wrap import com.twitter.finagle.mysql._ import io.getquill.FinagleMysqlContext trait FinagleMysqlEncoders { this: FinagleMysqlContext[_] => type Encoder[T] = FinagleMySqlEncoder[T] case class FinagleMySqlEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index, value, row) } def encoder[T](f: T => Parameter): Encoder[T] = FinagleMySqlEncoder((index, value, row) => row :+ f(value)) def encoder[T](implicit cbp: CanBeParameter[T]): Encoder[T] = encoder[T]((v: T) => v: Parameter) private[this] val nullEncoder = encoder((_: Null) => Parameter.NullParameter) implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] = FinagleMySqlEncoder { (index, value, row) => value match { case None => nullEncoder.encoder(index, null, row) case Some(v) => e.encoder(index, v, row) } } implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = FinagleMySqlEncoder(mappedBaseEncoder(mapped, e.encoder)) implicit val stringEncoder: Encoder[String] = encoder[String] implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal] { (value: BigDecimal) => BigDecimalValue(value): Parameter } implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean] implicit val byteEncoder: Encoder[Byte] = encoder[Byte] implicit val shortEncoder: Encoder[Short] = encoder[Short] implicit val intEncoder: Encoder[Int] = encoder[Int] implicit val longEncoder: Encoder[Long] = encoder[Long] implicit val floatEncoder: Encoder[Float] = encoder[Float] implicit val doubleEncoder: Encoder[Double] = encoder[Double] implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]] implicit val dateEncoder: Encoder[Date] = encoder[Date] { (value: Date) => timestampValue(new Timestamp(value.getTime)): Parameter } implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate] { (d: LocalDate) => DateValue(java.sql.Date.valueOf(d)): Parameter } implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder[LocalDateTime] { (d: LocalDateTime) => timestampValue(new Timestamp(d.atZone(injectionTimeZone.toZoneId).toInstant.toEpochMilli)): Parameter } implicit val uuidEncoder: Encoder[UUID] = mappedEncoder(MappedEncoding(_.toString), stringEncoder) }
Example 82
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime } import java.util.Date import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } trait Encoders { this: AsyncContext[_, _, _] => type Encoder[T] = AsyncEncoder[T] type EncoderSqlType = SqlTypes.SqlTypes case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder.apply(index, value, row) } def encoder[T](sqlType: DecoderSqlType): Encoder[T] = encoder(identity[T], sqlType) def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] = AsyncEncoder[T](sqlType)(new BaseEncoder[T] { def apply(index: Index, value: T, row: PrepareRow) = row :+ f(value) }) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = AsyncEncoder(e.sqlType)(new BaseEncoder[I] { def apply(index: Index, value: I, row: PrepareRow) = e(index, mapped.f(value), row) }) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] { def apply(index: Index, value: Option[T], row: PrepareRow) = { value match { case None => nullEncoder(index, null, row) case Some(v) => d(index, v, row) } } }) private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL) implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal](SqlTypes.REAL) implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN) implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT) implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT) implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER) implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT) implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT) implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY) implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP) implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE) implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP) implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP) implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] = MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId))) implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] = MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId))) implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] = MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth)) implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] = MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond)) implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] = MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli)) implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder) }
Example 83
Source File: Decoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.time.{ LocalDate, LocalDateTime } import java.util import java.util.Calendar import scala.math.BigDecimal.javaBigDecimal2bigDecimal trait Decoders { this: JdbcContextBase[_, _] => type Decoder[T] = JdbcDecoder[T] case class JdbcDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] { def apply(index: Index, row: ResultRow) = decoder(index + 1, row) } def decoder[T](d: BaseDecoder[T]): Decoder[T] = JdbcDecoder(d) def decoder[T](f: ResultRow => Index => T): Decoder[T] = decoder((index, row) => f(row)(index)) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = JdbcDecoder(mappedBaseDecoder(mapped, d.decoder)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = JdbcDecoder( (index, row) => { try { // According to the JDBC spec, we first need to read the object before `row.wasNull` works row.getObject(index) if (row.wasNull()) { None } else { Some(d.decoder(index, row)) } } catch { case _: NullPointerException if row.wasNull() => None } } ) implicit val stringDecoder: Decoder[String] = decoder(_.getString) implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder((index, row) => row.getBigDecimal(index)) implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) implicit val intDecoder: Decoder[Int] = decoder(_.getInt) implicit val longDecoder: Decoder[Long] = decoder(_.getLong) implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getBytes) implicit val dateDecoder: Decoder[util.Date] = decoder((index, row) => new util.Date(row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).getTime)) implicit val localDateDecoder: Decoder[LocalDate] = decoder((index, row) => row.getDate(index, Calendar.getInstance(dateTimeZone)).toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder((index, row) => row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).toLocalDateTime) }
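The decoders above are resolved implicitly when a quill context runs a query; a sketch assuming a configured synchronous JDBC context named ctx and a matching table:

// Hypothetical context and schema.
import java.time.LocalDateTime
import ctx._

case class Event(id: Int, at: LocalDateTime)
val events: List[Event] = ctx.run(query[Event].filter(_.id > 0))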
Example 84
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.sql.{ Date, Timestamp, Types } import java.time.{ LocalDate, LocalDateTime } import java.util.{ Calendar, TimeZone } import java.{ sql, util } trait Encoders { this: JdbcContextBase[_, _] => type Encoder[T] = JdbcEncoder[T] protected val dateTimeZone = TimeZone.getDefault case class JdbcEncoder[T](sqlType: Int, encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index + 1, value, row) } def encoder[T](sqlType: Int, f: (Index, T, PrepareRow) => Unit): Encoder[T] = JdbcEncoder(sqlType, (index: Index, value: T, row: PrepareRow) => { f(index, value, row) row }) def encoder[T](sqlType: Int, f: PrepareRow => (Index, T) => Unit): Encoder[T] = encoder(sqlType, (index: Index, value: T, row: PrepareRow) => f(row)(index, value)) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = JdbcEncoder(e.sqlType, mappedBaseEncoder(mapped, e.encoder)) private[this] val nullEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setNull) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = JdbcEncoder( d.sqlType, (index, value, row) => value match { case Some(v) => d.encoder(index, v, row) case None => nullEncoder.encoder(index, d.sqlType, row) } ) implicit val stringEncoder: Encoder[String] = encoder(Types.VARCHAR, _.setString) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(Types.NUMERIC, (index, value, row) => row.setBigDecimal(index, value.bigDecimal)) implicit val byteEncoder: Encoder[Byte] = encoder(Types.TINYINT, _.setByte) implicit val shortEncoder: Encoder[Short] = encoder(Types.SMALLINT, _.setShort) implicit val intEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setInt) implicit val longEncoder: Encoder[Long] = encoder(Types.BIGINT, _.setLong) implicit val floatEncoder: Encoder[Float] = encoder(Types.FLOAT, _.setFloat) implicit val doubleEncoder: Encoder[Double] = encoder(Types.DOUBLE, _.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(Types.VARBINARY, _.setBytes) implicit val dateEncoder: Encoder[util.Date] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, new sql.Timestamp(value.getTime), Calendar.getInstance(dateTimeZone))) implicit val localDateEncoder: Encoder[LocalDate] = encoder(Types.DATE, (index, value, row) => row.setDate(index, Date.valueOf(value), Calendar.getInstance(dateTimeZone))) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, Timestamp.valueOf(value), Calendar.getInstance(dateTimeZone))) }
Example 85
Source File: JdbcEncodingSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc.postgres import java.time.LocalDateTime import io.getquill.context.sql.EncodingSpec import io.getquill.Query class JdbcEncodingSpec extends EncodingSpec { val context = testContext import testContext._ "encodes and decodes types" in { testContext.run(delete) testContext.run(liftQuery(insertValues).foreach(e => insert(e))) verify(testContext.run(query[EncodingTestEntity])) } "encodes sets" in { testContext.run(query[EncodingTestEntity].delete) testContext.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insert(e))) val q = quote { (set: Query[Int]) => query[EncodingTestEntity].filter(t => set.contains(t.v6)) } verify(testContext.run(q(liftQuery(insertValues.map(_.v6))))) } "returning custom type" in { val uuid = testContext.run(insertBarCode(lift(barCodeEntry))).get val (barCode :: Nil) = testContext.run(findBarCodeByUuid(uuid)) verifyBarcode(barCode) } "LocalDateTime" in { case class EncodingTestEntity(v11: Option[LocalDateTime]) val now = LocalDateTime.now() val e1 = EncodingTestEntity(Some(now)) val e2 = EncodingTestEntity(None) val res: (List[EncodingTestEntity], List[EncodingTestEntity]) = performIO { val steps = for { _ <- testContext.runIO(query[EncodingTestEntity].delete) _ <- testContext.runIO(query[EncodingTestEntity].insert(lift(e1))) withoutNull <- testContext.runIO(query[EncodingTestEntity]) _ <- testContext.runIO(query[EncodingTestEntity].delete) _ <- testContext.runIO(query[EncodingTestEntity].insert(lift(e2))) withNull <- testContext.runIO(query[EncodingTestEntity]) } yield (withoutNull, withNull) steps } res._1 must contain theSameElementsAs List(EncodingTestEntity(Some(now))) res._2 must contain theSameElementsAs List(EncodingTestEntity(None)) } }
Example 86
Source File: TimePeriod.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.stages.impl.feature import java.time.temporal.WeekFields import java.time.{Instant, LocalDateTime, ZoneId} import com.salesforce.op.utils.date.DateTimeUtils import enumeratum.{Enum, EnumEntry} case class TimePeriodVal(value: Int, min: Int, max: Int) sealed abstract class TimePeriod(extractFn: LocalDateTime => TimePeriodVal) extends EnumEntry with Serializable { def extractTimePeriodVal(millis: Long): TimePeriodVal = extractFn( Instant .ofEpochMilli(millis) .atZone(ZoneId.of(DateTimeUtils.DefaultTimeZone.toString)).toLocalDateTime) def extractIntFromMillis(millis: Long): Int = extractTimePeriodVal(millis).value } object TimePeriod extends Enum[TimePeriod] { @transient val weekFields = WeekFields.of(java.time.DayOfWeek.MONDAY, 1) val values: Seq[TimePeriod] = findValues case object DayOfMonth extends TimePeriod(dt => TimePeriodVal(dt.getDayOfMonth, 1, 31)) case object DayOfWeek extends TimePeriod(dt => TimePeriodVal(dt.getDayOfWeek.getValue, 1, 7)) case object DayOfYear extends TimePeriod(dt => TimePeriodVal(dt.getDayOfYear, 1, 366)) case object HourOfDay extends TimePeriod(dt => TimePeriodVal(dt.getHour, 0, 24)) case object MonthOfYear extends TimePeriod(dt => TimePeriodVal(dt.getMonthValue, 1, 12)) case object WeekOfMonth extends TimePeriod(dt => TimePeriodVal(dt.get(weekFields.weekOfMonth()), 1, 6)) case object WeekOfYear extends TimePeriod(dt => TimePeriodVal(dt.get(weekFields.weekOfYear()), 1, 53)) }
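A small sketch of the extraction above; DateTimeUtils.DefaultTimeZone decides the zone, assumed to be UTC here:

val millis = 1568937600000L // 2019-09-20T00:00:00Z
TimePeriod.DayOfMonth.extractIntFromMillis(millis)  // 20, assuming a UTC default zone
TimePeriod.MonthOfYear.extractIntFromMillis(millis) // 9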
Example 87
Source File: actors.scala From akka-viz with MIT License | 5 votes |
package postoffice import java.time.LocalDateTime import akka.actor._ import scala.util.Random class PostOfficeActor(val postOffice: PostOffice) extends Actor with ActorLogging { import PostOffice._ val myClient = context.actorOf(Props(classOf[PostOfficeClientActor]), "client") myClient ! postOffice.city override def receive: Receive = { case p @ Parcel(src, dest, weight) if src == postOffice.city => Thread.sleep(randomDelay) if (weight > WeightLimit) sender() ! Rejected(LocalDateTime.now(), p) else { sender() ! Pickup(LocalDateTime.now(), p) nextOffice(route(src -> dest)) ! p } case p @ Parcel(src, dest, _) if dest == postOffice.city => myClient ! Delivery(LocalDateTime.now(), p) case p @ Parcel(_, dest, _) => Thread.sleep(randomDelay) if (!lostPackage) nextOffice(route(postOffice.city -> dest)) ! p } def nextOffice(route: List[City]): ActorSelection = { val nextCity = route.dropWhile(_ != postOffice.city).drop(1).head val selection: ActorSelection = context.system.actorSelection(s"/user/$nextCity") selection } def lostPackage = Random.nextGaussian() < 0.002 } class PostOfficeClientActor extends Actor with ActorLogging { import PostOffice._ import scala.concurrent.duration._ var city: Option[City] = None override def receive: Actor.Receive = { case c: City => city = Some(c) sendPackage context.become(packageReply) } def packageReply: Actor.Receive = { case Pickup(_, p) => log.debug(s"Sent parcel $p") case Rejected(_, p) => log.debug(s"$p rejected, trying again") sender() ! p.copy(weight = p.weight - 0.02) case d: Delivery => log.debug(s"received $d") sendPackage } def sendPackage = { import context.dispatcher context.system.scheduler.scheduleOnce( randomDelay.milliseconds, context.parent, Parcel(city.get, Random.shuffle(Cities.filterNot(city.contains(_))).head, Random.nextDouble() * (WeightLimit + 0.10)) ) } }
Example 88
Source File: AWSSigningJestClientFactory.scala From haystack-traces with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trace.commons.clients.es import java.time.{LocalDateTime, ZoneId} import com.expedia.www.haystack.trace.commons.config.entities.AWSRequestSigningConfiguration import com.google.common.base.Supplier import io.searchbox.client.JestClientFactory import org.apache.http.impl.client.HttpClientBuilder import org.apache.http.impl.nio.client.HttpAsyncClientBuilder import org.slf4j.LoggerFactory import vc.inreach.aws.request.{AWSSigner, AWSSigningRequestInterceptor} import com.amazonaws.auth.AWSCredentialsProvider import com.amazonaws.auth.BasicAWSCredentials import com.amazonaws.auth.DefaultAWSCredentialsProviderChain import com.amazonaws.internal.StaticCredentialsProvider class AWSSigningJestClientFactory(awsRequestSigningConfig: AWSRequestSigningConfiguration) extends JestClientFactory { private val LOGGER = LoggerFactory.getLogger(classOf[AWSSigningJestClientFactory]) val awsSigner = new AWSSigner(getCredentialProvider, awsRequestSigningConfig.region, awsRequestSigningConfig.awsServiceName, new ClockSupplier) val requestInterceptor = new AWSSigningRequestInterceptor(awsSigner) override def configureHttpClient(builder: HttpClientBuilder): HttpClientBuilder = { builder.addInterceptorLast(requestInterceptor) } override def configureHttpClient(builder: HttpAsyncClientBuilder): HttpAsyncClientBuilder = { builder.addInterceptorLast(requestInterceptor) } def getCredentialProvider: AWSCredentialsProvider = { if (awsRequestSigningConfig.accessKey.isDefined) { LOGGER.info("using static aws credential provider with access and secret key for ES") new StaticCredentialsProvider( new BasicAWSCredentials(awsRequestSigningConfig.accessKey.get, awsRequestSigningConfig.secretKey.get)) } else { LOGGER.info("using default credential provider chain for ES") new DefaultAWSCredentialsProviderChain } } } class ClockSupplier extends Supplier[LocalDateTime] { override def get(): LocalDateTime = { LocalDateTime.now(ZoneId.of("UTC")) } }
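A hypothetical wiring of the factory above through Jest's standard builder; the endpoint and the awsSigningConfig instance are placeholders:

import io.searchbox.client.config.HttpClientConfig

val factory = new AWSSigningJestClientFactory(awsSigningConfig) // assumed AWSRequestSigningConfiguration
factory.setHttpClientConfig(new HttpClientConfig.Builder("https://my-es-endpoint:443").build())
val client = factory.getObject // JestClient whose requests are AWS-signed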
Example 89
Source File: ScheduleEntryRepository.scala From aecor with MIT License | 5 votes |
package aecor.schedule import java.time.LocalDateTime import aecor.schedule.ScheduleEntryRepository.ScheduleEntry trait ScheduleEntryRepository[F[_]] { def insertScheduleEntry(scheduleBucketId: ScheduleBucketId, entryId: String, dueDate: LocalDateTime): F[Unit] def markScheduleEntryAsFired(scheduleBucketId: ScheduleBucketId, entryId: String): F[Unit] def processEntries(from: LocalDateTime, to: LocalDateTime, parallelism: Int)( f: ScheduleEntry => F[Unit] ): F[Option[ScheduleEntry]] } object ScheduleEntryRepository { final case class ScheduleEntry(bucketId: ScheduleBucketId, entryId: String, dueDate: LocalDateTime, timeBucket: String, fired: Boolean) }
Example 90
Source File: ScheduleBucket.scala From aecor with MIT License | 5 votes |
package aecor.schedule import java.time.{ LocalDateTime, ZoneOffset } import aecor.encoding.WireProtocol import aecor.macros.boopickle.BoopickleWireProtocol import cats.tagless.{ Derive, FunctorK } private[aecor] trait ScheduleBucket[F[_]] { def addScheduleEntry(entryId: String, correlationId: String, dueDate: LocalDateTime): F[Unit] def fireEntry(entryId: String): F[Unit] } private[aecor] object ScheduleBucket { import boopickle.Default._ implicit val localDateTimePickler: Pickler[LocalDateTime] = transformPickler( (ldt: (Long, Int)) => LocalDateTime.ofEpochSecond(ldt._1, ldt._2, ZoneOffset.UTC) )((ldt: LocalDateTime) => (ldt.toEpochSecond(ZoneOffset.UTC), ldt.getNano)) implicit def functorK: FunctorK[ScheduleBucket] = Derive.functorK implicit def wireProtocol: WireProtocol[ScheduleBucket] = BoopickleWireProtocol.derive }
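The custom pickler above encodes a LocalDateTime as an (epochSecond, nano) pair; a round-trip sketch, assuming the pickler is brought into implicit scope:

import boopickle.Default._
import java.time.LocalDateTime

val dt    = LocalDateTime.of(2020, 1, 5, 9, 3, 7)
val bytes = Pickle.intoBytes(dt)               // ByteBuffer
Unpickle[LocalDateTime].fromBytes(bytes) == dt // true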
Example 91
Source File: DefaultSchedule.scala From aecor with MIT License | 5 votes |
package aecor.schedule import java.time.LocalDateTime import java.util.UUID import aecor.data._ import aecor.runtime.akkapersistence.readside.{ CommittableEventJournalQuery, JournalEntry } import aecor.util.Clock import akka.NotUsed import akka.stream.scaladsl.Source import cats.effect.Effect import cats.implicits._ import aecor.util.effect._ import scala.concurrent.duration.FiniteDuration private[schedule] class DefaultSchedule[F[_]: Effect]( clock: Clock[F], buckets: ScheduleBucketId => ScheduleBucket[F], bucketLength: FiniteDuration, aggregateJournal: CommittableEventJournalQuery[F, UUID, ScheduleBucketId, ScheduleEvent], eventTag: EventTag ) extends Schedule[F] { override def addScheduleEntry(scheduleName: String, entryId: String, correlationId: String, dueDate: LocalDateTime): F[Unit] = for { zone <- clock.zone scheduleBucket = dueDate.atZone(zone).toEpochSecond / bucketLength.toSeconds _ <- buckets(ScheduleBucketId(scheduleName, scheduleBucket.toString)) .addScheduleEntry(entryId, correlationId, dueDate) } yield () override def committableScheduleEvents( scheduleName: String, consumerId: ConsumerId ): Source[Committable[F, JournalEntry[UUID, ScheduleBucketId, ScheduleEvent]], NotUsed] = aggregateJournal .eventsByTag(eventTag, ConsumerId(scheduleName + consumerId.value)) .flatMapConcat { case m if m.value.event.entityKey.scheduleName == scheduleName => Source.single(m) case other => Source .fromFuture(other.commit.unsafeToFuture()) .flatMapConcat( _ => Source.empty[Committable[F, JournalEntry[UUID, ScheduleBucketId, ScheduleEvent]]] ) } }
Example 92
Source File: TestScheduleEntryRepository.scala From aecor with MIT License | 5 votes |
package aecor.tests.e2e import java.time.LocalDateTime import aecor.schedule.CassandraScheduleEntryRepository.TimeBucket import aecor.schedule.{ ScheduleBucketId, ScheduleEntryRepository } import aecor.schedule.ScheduleEntryRepository.ScheduleEntry import aecor.testkit._ import monocle.Lens import cats.mtl.MonadState import cats.implicits._ import monocle.Lens object TestScheduleEntryRepository { def apply[F[_]: MonadState[*[_], S], S]( lens: Lens[S, Vector[ScheduleEntry]] ): ScheduleEntryRepository[F] = new TestScheduleEntryRepository(lens) } class TestScheduleEntryRepository[F[_]: MonadState[*[_], S], S]( lens: Lens[S, Vector[ScheduleEntry]] ) extends ScheduleEntryRepository[F] { val F = lens.transformMonadState(MonadState[F, S]) implicit val monad = F.monad override def insertScheduleEntry(scheduleBucketId: ScheduleBucketId, entryId: String, dueDate: LocalDateTime): F[Unit] = F.modify { scheduleEntries => scheduleEntries :+ ScheduleEntry( scheduleBucketId, entryId, dueDate, TimeBucket(dueDate.toLocalDate).key, false ) } override def markScheduleEntryAsFired(bucketId: ScheduleBucketId, entryId: String): F[Unit] = F.modify { scheduleEntries => scheduleEntries.map { e => if (e.bucketId == bucketId && e.entryId == entryId) { e.copy(fired = true) } else { e } } } override def processEntries(from: LocalDateTime, to: LocalDateTime, parallelism: Int)( f: (ScheduleEntryRepository.ScheduleEntry) => F[Unit] ): F[Option[ScheduleEntryRepository.ScheduleEntry]] = F.get.flatMap { entries => entries.foldLeft(none[ScheduleEntry].pure[F]) { (acc, entry) => if (entry.dueDate.isAfter(from) && (entry.dueDate.isBefore(to) || entry.dueDate == to)) { acc.flatMap(_ => f(entry)).map(_ => entry.some) } else { acc } } } }
Example 93
Source File: ScheduleEventCodecSpec.scala From aecor with MIT License | 5 votes |
package aecor.tests

import java.time.temporal.{ ChronoField, Temporal }
import java.time.{ Instant, LocalDateTime }

import aecor.runtime.akkapersistence.serialization.{ PersistentDecoder, PersistentEncoder }
import aecor.schedule.ScheduleEvent
import org.scalacheck.{ Arbitrary, Gen, Properties, ScalacheckShapeless }
import org.scalacheck.Prop.forAll

class ScheduleEventCodecSpec extends Properties("ScheduleEventCodec") with ScalacheckShapeless {
  val encoder = PersistentEncoder[ScheduleEvent]
  val decoder = PersistentDecoder[ScheduleEvent]

  // OpenJDK 9+ offers a system clock more precise than a millisecond.
  // https://bugs.openjdk.java.net/browse/JDK-8068730
  def dropBelowMillis[A <: Temporal](t: A): A =
    t.`with`(ChronoField.MICRO_OF_SECOND, t.getLong(ChronoField.MILLI_OF_SECOND) * 1000L)
      .asInstanceOf[A]

  implicit val arbitraryLocalDateTime = Arbitrary(
    Gen.lzy(Gen.const(dropBelowMillis(LocalDateTime.now())))
  )
  implicit val arbitraryInstant = Arbitrary(Gen.lzy(Gen.const(dropBelowMillis(Instant.now()))))

  property("encode/decode") = forAll { e: ScheduleEvent =>
    val repr = encoder.encode(e)
    val decoded = decoder.decode(repr)
    decoded == Right(e)
  }
}
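For reference, the same millisecond truncation can be expressed with truncatedTo, which zeroes every field smaller than the given unit; a small sketch (not from the aecor test):

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit

object TruncationSketch extends App {
  val now = LocalDateTime.now()
  val millisOnly = now.truncatedTo(ChronoUnit.MILLIS)
  assert(millisOnly.getNano % 1000000 == 0) // no sub-millisecond precision left
}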
Example 94
Source File: DateEncoderTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.record.encoder

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.{AvroSchema, DefaultFieldMapper, Encoder, ImmutableRecord}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

//noinspection ScalaDeprecation
class DateEncoderTest extends AnyFunSuite with Matchers {

  test("encode LocalTime as TIME-MILLIS") {
    case class Foo(s: LocalTime)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalTime.of(12, 50, 45))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(46245000000L)))
  }

  test("encode LocalDate as DATE") {
    case class Foo(s: LocalDate)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalDate.of(2018, 9, 10))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784)))
  }

  test("encode java.sql.Date as DATE") {
    case class Foo(s: Date)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Date.valueOf(LocalDate.of(2018, 9, 10)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784)))
  }

  test("encode LocalDateTime as timestamp-nanos") {
    case class Foo(s: LocalDateTime)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000000123L)))
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123009))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000123009L)))
    Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 328187943))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739328187943L)))
  }

  test("encode Timestamp as TIMESTAMP-MILLIS") {
    case class Foo(s: Timestamp)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Timestamp.from(Instant.ofEpochMilli(1538312231000L)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L)))
  }

  test("encode Instant as TIMESTAMP-MILLIS") {
    case class Foo(s: Instant)
    val schema = AvroSchema[Foo]
    Encoder[Foo].encode(Foo(Instant.ofEpochMilli(1538312231000L))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L)))
  }
}
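The magic numbers in these assertions follow directly from java.time: Avro's DATE type counts days since the epoch, the first test's time value equals the time of day in microseconds, and the timestamp-nanos values are epoch seconds at UTC scaled to nanoseconds. A quick derivation sketch (not part of avro4s):

import java.time.{ LocalDate, LocalDateTime, LocalTime, ZoneOffset }

object AvroLogicalValueSketch extends App {
  println(LocalDate.of(2018, 9, 10).toEpochDay)              // 17784 days since 1970-01-01
  println(LocalTime.of(12, 50, 45).toSecondOfDay * 1000000L) // 46245000000, time of day in micros
  val ldt = LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123)
  println(ldt.toEpochSecond(ZoneOffset.UTC) * 1000000000L + ldt.getNano) // 1536580739000000123
}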
Example 95
Source File: DateDecoderTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.record.decoder

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.SchemaFor.TimestampNanosLogicalType
import com.sksamuel.avro4s.{AvroSchema, Decoder, SchemaFor}
import org.apache.avro.generic.GenericData
import org.apache.avro.{LogicalTypes, SchemaBuilder}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

//noinspection ScalaDeprecation
class DateDecoderTest extends AnyFunSuite with Matchers {

  case class WithLocalTime(z: LocalTime)
  case class WithLocalDate(z: LocalDate)
  case class WithDate(z: Date)
  case class WithLocalDateTime(z: LocalDateTime)
  case class WithTimestamp(z: Timestamp)
  case class WithInstant(z: Instant)

  test("decode int to LocalTime") {
    val schema = AvroSchema[WithLocalTime]
    val record = new GenericData.Record(schema)
    record.put("z", 46245000000L)
    Decoder[WithLocalTime].decode(record) shouldBe WithLocalTime(LocalTime.of(12, 50, 45))
  }

  test("decode int to LocalDate") {
    val schema = AvroSchema[WithLocalDate]
    val record = new GenericData.Record(schema)
    record.put("z", 17784)
    Decoder[WithLocalDate].decode(record) shouldBe WithLocalDate(LocalDate.of(2018, 9, 10))
  }

  test("decode int to java.sql.Date") {
    val schema = AvroSchema[WithDate]
    val record = new GenericData.Record(schema)
    record.put("z", 17784)
    Decoder[WithDate].decode(record) shouldBe WithDate(Date.valueOf(LocalDate.of(2018, 9, 10)))
  }

  test("decode timestamp-millis to LocalDateTime") {
    val dateSchema = LogicalTypes.timestampMillis().addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376L)
    Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000000))
  }

  test("decode timestamp-micros to LocalDateTime") {
    val dateSchema = LogicalTypes.timestampMicros().addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376001L)
    Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376001000))
  }

  test("decode timestamp-nanos to LocalDateTime") {
    val dateSchema = TimestampNanosLogicalType.addToSchema(SchemaBuilder.builder.longType)
    val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord()
    val record = new GenericData.Record(schema)
    record.put("z", 1572707106376000002L)
    Decoder[WithLocalDateTime].decode(record) shouldBe WithLocalDateTime(
      LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000002))
  }

  test("decode long to Timestamp") {
    val schema = AvroSchema[WithTimestamp]
    val record = new GenericData.Record(schema)
    record.put("z", 1538312231000L)
    Decoder[WithTimestamp].decode(record) shouldBe WithTimestamp(new Timestamp(1538312231000L))
  }

  test("decode long to Instant") {
    val schema = AvroSchema[WithInstant]
    val record = new GenericData.Record(schema)
    record.put("z", 1538312231000L)
    Decoder[WithInstant].decode(record) shouldBe WithInstant(Instant.ofEpochMilli(1538312231000L))
  }
}
Example 96
Source File: DateSchemaTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.schema

import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import com.sksamuel.avro4s.AvroSchema
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class DateSchemaTest extends AnyFunSuite with Matchers {

  test("generate date logical type for LocalDate") {
    case class LocalDateTest(date: LocalDate)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdate.json"))
    val schema = AvroSchema[LocalDateTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate date logical type for Date") {
    case class DateTest(date: Date)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/date.json"))
    val schema = AvroSchema[DateTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate time logical type for LocalTime") {
    case class LocalTimeTest(time: LocalTime)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localtime.json"))
    val schema = AvroSchema[LocalTimeTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-nanos for LocalDateTime") {
    case class LocalDateTimeTest(time: LocalDateTime)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdatetime.json"))
    val schema = AvroSchema[LocalDateTimeTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-millis logical type for Instant") {
    case class InstantTest(instant: Instant)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/instant.json"))
    val schema = AvroSchema[InstantTest]
    schema.toString(true) shouldBe expected.toString(true)
  }

  test("generate timestamp-millis logical type for Timestamp") {
    case class TimestampTest(ts: Timestamp)
    val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/timestamp.json"))
    val schema = AvroSchema[TimestampTest]
    schema.toString(true) shouldBe expected.toString(true)
  }
}
Example 97
Source File: MistLoggingSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.core.logging

import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneOffset}

import org.scalatest.{FunSpec, Matchers}

class MistLoggingSpec extends FunSpec with Matchers {
  describe("levels") {
    it("should restore level from int") {
      Level.fromInt(1) shouldBe Level.Debug
      Level.fromInt(2) shouldBe Level.Info
      Level.fromInt(3) shouldBe Level.Warn
      Level.fromInt(4) shouldBe Level.Error
    }
  }

  describe("log event") {
    it("should have correct format") {
      val ts = LocalDateTime.now(ZoneOffset.UTC)
      val e = LogEvent.mkInfo("job-id", "Message", ts.toInstant(ZoneOffset.UTC).toEpochMilli)

      val expectedDate = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ts)
      val expected = s"INFO $expectedDate [job-id] Message"
      e.mkString shouldBe expected
    }

    it("should have stack traces") {
      val ts = LocalDateTime.now(ZoneOffset.UTC)
      val error = new RuntimeException("Test error")
      val e = LogEvent.mkError("job-id", "Error", Some(error), ts.toInstant(ZoneOffset.UTC).toEpochMilli)

      val expectedDate = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ts)
      val expected =
        s"""ERROR $expectedDate [job-id] Error
           |java.lang.RuntimeException: Test error""".stripMargin
      e.mkString should startWith(expected)
    }
  }
}
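The spec above relies on a LocalDateTime/epoch-millis round trip that only works because the offset is pinned to UTC on both sides; a minimal sketch of that invariant (not mist code):

import java.time.{ Instant, LocalDateTime, ZoneOffset }

object EpochMillisSketch extends App {
  val ts = LocalDateTime.now(ZoneOffset.UTC)
  val millis = ts.toInstant(ZoneOffset.UTC).toEpochMilli
  val back = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)
  // equal up to millisecond precision; nanos below a millisecond are dropped
  assert(back == ts.withNano(ts.getNano / 1000000 * 1000000))
}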
Example 98
Source File: localdatetime.scala From cats-time with MIT License | 5 votes |
package io.chrisdavenport.cats.time.instances

import cats._
import cats.implicits._
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME

trait localdatetime {
  final def showLocalDateTime(formatter: DateTimeFormatter): Show[LocalDateTime] =
    Show[String].contramap(_.format(formatter))

  implicit final val localdatetimeInstances =
    new Show[LocalDateTime] with Order[LocalDateTime] with Hash[LocalDateTime] {
      override def hash(x: LocalDateTime): Int = x.hashCode
      override def compare(x: LocalDateTime, y: LocalDateTime): Int = x.compareTo(y)
      override def show(x: LocalDateTime): String = x.format(ISO_LOCAL_DATE_TIME)
    }
}

object localdatetime extends localdatetime
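A hedged usage sketch for the instances above, assuming they are imported from the companion object shown (the cats syntax import is standard; everything else follows from the code above):

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import cats.{Order, implicits => _}
import cats.implicits._
import io.chrisdavenport.cats.time.instances.localdatetime._

object CatsTimeUsage extends App {
  val a = LocalDateTime.of(2020, 1, 1, 9, 0)
  val b = LocalDateTime.of(2020, 1, 1, 10, 0)
  println(a.show)                                // "2020-01-01T09:00" via ISO_LOCAL_DATE_TIME
  println(Order[LocalDateTime].compare(a, b) < 0) // true, via the Order instance
  println(showLocalDateTime(DateTimeFormatter.ISO_LOCAL_DATE).show(a)) // "2020-01-01"
}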
Example 99
Source File: Merge.scala From tofu with Apache License 2.0 | 5 votes |
package tofu.data
package derived

import java.time.{Instant, LocalDate, LocalDateTime, ZonedDateTime}

import cats.kernel.Semigroup
import magnolia.{CaseClass, Magnolia, SealedTrait}
import simulacrum.typeclass
import derevo.Derivation

@typeclass trait Merge[A] {
  def merge(a: A, b: A): A
}

trait MergeInstances1 {
  type Typeclass[A] = Merge[A]

  def combine[T](caseClass: CaseClass[Typeclass, T]): Typeclass[T] =
    (a, b) => caseClass.construct(p => p.typeclass.merge(p.dereference(a), p.dereference(b)))

  def dispatch[T](sealedTrait: SealedTrait[Typeclass, T]): Typeclass[T] =
    (a, b) =>
      sealedTrait.dispatch(a) { h =>
        if (h.cast.isDefinedAt(b)) h.typeclass.merge(h.cast(a), h.cast(b)) else a
      }

  implicit def instance[A]: Merge[A] = macro Magnolia.gen[A]
}

object Merge extends Derivation[Merge] with MergeInstances1 {
  implicit def optionInstance[A](implicit m: Merge[A]): Merge[Option[A]] =
    (ao, bo) => ao.fold(bo)(a => bo.fold(ao)(b => Some(m.merge(a, b))))

  implicit def primitiveInstance[A: Primitive]: Merge[A] = (a: A, _: A) => a

  sealed class Primitive[A]
  final implicit object primitiveByte          extends Primitive[Byte]
  final implicit object primitiveShort         extends Primitive[Short]
  final implicit object primitiveInt           extends Primitive[Int]
  final implicit object primitiveLong          extends Primitive[Long]
  final implicit object primitiveChar          extends Primitive[Char]
  final implicit object primitiveFloat         extends Primitive[Float]
  final implicit object primitiveDouble        extends Primitive[Double]
  final implicit object primitiveUnit          extends Primitive[Unit]
  final implicit object primitiveBigDecimal    extends Primitive[BigDecimal]
  final implicit object primitiveBigInt        extends Primitive[BigInt]
  final implicit object primitiveLocalDateTime extends Primitive[LocalDateTime]
  final implicit object primitiveZonedDateTime extends Primitive[ZonedDateTime]
  final implicit object primitiveLocalDate     extends Primitive[LocalDate]
  final implicit object primitiveInstant       extends Primitive[Instant]
  final implicit object primitiveString        extends Primitive[String]
}

object Merged {
  trait OpaqueTag extends Any
  type Base = Any { type MergedOpaque }

  type Mer[A] <: Base with OpaqueTag

  def apply[A](value: A): Mer[A] = value.asInstanceOf[Mer[A]]

  implicit final class MergedOps[A](private val mer: Mer[A]) extends AnyVal {
    def value: A = mer.asInstanceOf[A]
  }

  implicit def mergedSemigroup[A: Merge]: Semigroup[Merged[A]] =
    (x, y) => apply(Merge[A].merge(x.value, y.value))
}
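A hedged usage sketch (the Draft case class and its fields are hypothetical, not tofu code): Option fields merge pairwise, fields with Primitive instances keep the left value, and case classes recurse through the magnolia derivation above:

import java.time.LocalDateTime
import tofu.data.derived.Merge

case class Draft(title: String, publishedAt: Option[LocalDateTime], views: Option[Int])

object MergeUsage extends App {
  val a = Draft("hello", None, Some(1))
  val b = Draft("ignored", Some(LocalDateTime.of(2020, 1, 1, 0, 0)), None)
  // title: left wins (String is Primitive); options fill in whichever side is missing
  val merged = Merge[Draft].merge(a, b)
  println(merged) // Draft(hello,Some(2020-01-01T00:00),Some(1))
}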
Example 100
Source File: ApiCfp.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.web.api.domain

import java.time.LocalDateTime

import gospeak.core.domain.utils.{BasicCtx, OrgaCtx}
import gospeak.core.domain.{Cfp, CommonCfp, Group, User}
import gospeak.web.api.domain.utils.{ApiInfo, ApiPlace}
import play.api.libs.json.{Json, Writes}

object ApiCfp {

  // data to display for orgas (everything)
  final case class Orga(slug: String,
                        name: String,
                        begin: Option[LocalDateTime],
                        close: Option[LocalDateTime],
                        description: String,
                        tags: Seq[String],
                        info: ApiInfo)

  object Orga {
    implicit val writes: Writes[Orga] = Json.writes[Orga]
  }

  def orga(cfp: Cfp, users: Seq[User])(implicit ctx: OrgaCtx): Orga =
    new Orga(
      slug = cfp.slug.value,
      name = cfp.name.value,
      begin = cfp.begin,
      close = cfp.close,
      description = cfp.description.value,
      tags = cfp.tags.map(_.value),
      info = ApiInfo.from(cfp.info, users))

  // data to display publicly
  final case class Published(kind: String,
                             ref: String,
                             name: String,
                             logo: Option[String],
                             url: Option[String],
                             begin: Option[LocalDateTime],
                             close: Option[LocalDateTime],
                             location: Option[ApiPlace],
                             description: String,
                             eventStart: Option[LocalDateTime],
                             eventFinish: Option[LocalDateTime],
                             eventUrl: Option[String],
                             eventTickets: Option[String],
                             eventVideos: Option[String],
                             twitterAccount: Option[String],
                             twitterHashtag: Option[String],
                             tags: Seq[String],
                             group: Option[ApiGroup.Embed])

  object Published {
    implicit val writes: Writes[Published] = Json.writes[Published]
  }

  def published(cfp: CommonCfp, groups: Seq[Group])(implicit ctx: BasicCtx): Published =
    new Published(
      kind = cfp.fold(_ => "external")(_ => "internal"),
      ref = cfp.fold(_.id.value)(_.slug.value),
      name = cfp.name,
      logo = cfp.logo.map(_.value),
      url = cfp.external.map(_.url.value),
      begin = cfp.begin,
      close = cfp.close,
      location = cfp.location.map(ApiPlace.from),
      description = cfp.description.value,
      eventStart = cfp.external.flatMap(_.event.start),
      eventFinish = cfp.external.flatMap(_.event.finish),
      tags = cfp.tags.map(_.value),
      eventUrl = cfp.external.flatMap(_.event.url).map(_.value),
      eventTickets = cfp.external.flatMap(_.event.tickets).map(_.value),
      eventVideos = cfp.external.flatMap(_.event.videos).map(_.value),
      twitterAccount = cfp.external.flatMap(_.event.twitterAccount).map(_.url.value),
      twitterHashtag = cfp.external.flatMap(_.event.twitterHashtag).map(_.value),
      group = cfp.internal.flatMap(i => groups.find(_.id == i.group.id)).map(ApiGroup.embed))

  // embedded data in other models, should be public
  final case class Embed(slug: String,
                         name: String,
                         begin: Option[LocalDateTime],
                         close: Option[LocalDateTime])

  object Embed {
    implicit val writes: Writes[Embed] = Json.writes[Embed]
  }

  def embed(cfp: Cfp)(implicit ctx: BasicCtx): Embed =
    new Embed(
      slug = cfp.slug.value,
      name = cfp.name.value,
      begin = cfp.begin,
      close = cfp.close)
}
Example 101
Source File: ApiExternalCfp.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.web.api.domain

import java.time.LocalDateTime

import gospeak.core.domain.ExternalCfp
import gospeak.core.domain.utils.BasicCtx
import play.api.libs.json.{Json, Writes}

object ApiExternalCfp {

  // data to display publicly
  final case class Published(id: String,
                             url: String,
                             name: String,
                             description: String,
                             logo: Option[String],
                             begin: Option[LocalDateTime],
                             close: Option[LocalDateTime],
                             eventUrl: Option[String],
                             eventStart: Option[LocalDateTime],
                             eventFinish: Option[LocalDateTime],
                             eventLocation: Option[String],
                             eventTwitterAccount: Option[String],
                             eventTwitterHashtag: Option[String],
                             tags: Seq[String])

  object Published {
    implicit val writes: Writes[Published] = Json.writes[Published]
  }

  def published(cfp: ExternalCfp.Full)(implicit ctx: BasicCtx): Published =
    new Published(
      id = cfp.id.value,
      url = cfp.url.value,
      name = cfp.event.name.value,
      description = cfp.description.value,
      logo = cfp.event.logo.map(_.url.value),
      begin = cfp.begin,
      close = cfp.close,
      eventUrl = cfp.event.url.map(_.value),
      eventStart = cfp.event.start,
      eventFinish = cfp.event.finish,
      eventLocation = cfp.event.location.map(_.value),
      eventTwitterAccount = cfp.event.twitterAccount.map(_.url.value),
      eventTwitterHashtag = cfp.event.twitterHashtag.map(_.url),
      tags = cfp.event.tags.map(_.value))
}
Example 102
Source File: MessageHandler.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.web.services

import java.time.LocalDateTime

import cats.data.OptionT
import cats.effect.IO
import gospeak.core.ApplicationConf
import gospeak.core.domain.Group
import gospeak.core.domain.Group.Settings.Action
import gospeak.core.domain.Group.Settings.Action.Trigger
import gospeak.core.domain.messages.Message
import gospeak.core.domain.utils.Constants
import gospeak.core.services.email.EmailSrv
import gospeak.core.services.slack.SlackSrv
import gospeak.core.services.storage.{OrgaGroupRepo, OrgaGroupSettingsRepo}
import gospeak.core.services.twitter.{Tweets, TwitterSrv}
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain.{CustomException, EmailAddress}
import gospeak.web.services.MessageSrv._
import io.circe.Json
import org.slf4j.LoggerFactory

import scala.util.control.NonFatal

class MessageHandler(appConf: ApplicationConf,
                     groupRepo: OrgaGroupRepo,
                     groupSettingsRepo: OrgaGroupSettingsRepo,
                     emailSrv: EmailSrv,
                     slackSrv: SlackSrv,
                     twitterSrv: TwitterSrv) {
  private val logger = LoggerFactory.getLogger(this.getClass)

  def groupActionHandler(msg: Message): IO[Unit] = (msg match {
    case m: Message.GroupMessage => handleGroupAction(m.group.slug, m, eMessage(m))
    case _ => IO.pure(0)
  }).map(_ => ()).recover { case NonFatal(_) => () }

  def gospeakHandler(msg: Message): IO[Unit] = (msg match {
    case m: Message.ExternalCfpCreated => gospeakTwitt(m)
    case _ => IO.pure(0)
  }).map(_ => ()).recover { case NonFatal(_) => () }

  def logHandler(msg: Message): IO[Unit] = IO.pure(logger.info(s"Message sent: $msg"))

  private def handleGroupAction(group: Group.Slug, msg: Message.GroupMessage, data: Json): IO[Int] = (for {
    groupElt <- OptionT(groupRepo.find(group))
    actions <- OptionT.liftF(groupSettingsRepo.findActions(groupElt.id))
    accounts <- OptionT.liftF(groupSettingsRepo.findAccounts(groupElt.id))
    actionsToExec = Trigger.all.filter(_.message == msg.ref).flatMap(actions.getOrElse(_, Seq()))
    results <- OptionT.liftF(actionsToExec.map(execGroupAction(accounts, _, data)).sequence)
  } yield results.length).value.map(_.getOrElse(0))

  private def execGroupAction(accounts: Group.Settings.Accounts, action: Action, data: Json): IO[Unit] = action match {
    case email: Action.Email =>
      (for {
        to <- email.to.render(data).left.map(e => CustomException(e.message)).flatMap(EmailAddress.from).map(EmailAddress.Contact(_))
        subject <- email.subject.render(data).left.map(e => CustomException(e.message))
        content <- email.content.render(data).map(_.toHtml).leftMap(e => CustomException(e.message))
      } yield emailSrv.send(EmailSrv.Email(
        from = Constants.Gospeak.noreplyEmail,
        to = Seq(to),
        subject = subject,
        content = EmailSrv.HtmlContent(content.value)
      ))).toIO.flatMap(identity).map(_ => ())
    case Action.Slack(slack) =>
      accounts.slack.map(slackSrv.exec(slack, data, _, appConf.aesKey)).getOrElse(IO.raiseError(CustomException("No credentials for Slack")))
  }

  private def gospeakTwitt(msg: Message.ExternalCfpCreated): IO[Int] = {
    if (msg.cfp.isActive(LocalDateTime.now())) {
      twitterSrv.tweet(Tweets.externalCfpCreated(msg)).map(_ => 1)
    } else {
      IO.pure(0)
    }
  }
}
Example 103
Source File: QueryStringBindablesSpec.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.web.utils

import java.net.URLEncoder
import java.time.{LocalDate, LocalDateTime}

import gospeak.core.domain.UserRequest
import gospeak.libs.scala.domain.{Page, Url}
import gospeak.web.testingutils.BaseSpec
import gospeak.web.utils.QueryStringBindables._

class QueryStringBindablesSpec extends BaseSpec {
  describe("QueryStringBindables") {
    describe("LocalDateTime") {
      it("should parse and format dates") {
        val ldt = LocalDateTime.of(2019, 9, 21, 19, 12)
        val date = "21/09/2019"
        val dateTime = s"$date 19:12"
        val dateEncoded = URLEncoder.encode(date, "UTF-8")
        val dateTimeEncoded = URLEncoder.encode(dateTime, "UTF-8")
        LocalDateTime.parse(dateTime, dtf1) shouldBe ldt
        LocalDateTime.parse(dateTimeEncoded, dtf2) shouldBe ldt
        LocalDate.parse(date, df1).atTime(19, 12) shouldBe ldt
        LocalDate.parse(dateEncoded, df2).atTime(19, 12) shouldBe ldt
        ldt.format(df1) shouldBe date
      }
      it("should bind & unbind a LocalDateTime when no params") {
        val ldt = LocalDateTime.of(2019, 9, 21, 0, 0)
        val date = "21/09/2019"
        val dateTimeEncoded = URLEncoder.encode(date + " 00:00", "UTF-8")
        localDateTimeQueryStringBindable.bind("key", Map("key" -> Seq(date))) shouldBe Some(Right(ldt))
        localDateTimeQueryStringBindable.unbind("key", ldt) shouldBe s"key=$dateTimeEncoded"
      }
    }
    describe("Page.Params") {
      it("should bind & unbind a Page.Params when no params") {
        val params = Page.Params()
        pageParamsQueryStringBindable.bind("", Map()) shouldBe Some(Right(params))
        pageParamsQueryStringBindable.unbind("", params) shouldBe ""
      }
      it("should bind & unbind a Page.Params when all params") {
        val params = buildParams(2, 30, "test", "name")
        pageParamsQueryStringBindable.bind("", Map(
          Page.No.key -> Seq("2"),
          Page.Size.key -> Seq("30"),
          Page.Search.key -> Seq("test"),
          Page.OrderBy.key -> Seq("name"))) shouldBe Some(Right(params))
        pageParamsQueryStringBindable.unbind("", params) shouldBe s"${Page.No.key}=2&${Page.Size.key}=30&${Page.Search.key}=test&${Page.OrderBy.key}=name"
      }
      it("should bind & unbind filters") {
        val params = Page.Params.defaults.toggleFilter("f1").withFilter("f2", "v2")
        pageParamsQueryStringBindable.bind("", Map(
          "f1" -> Seq("true"),
          "f2" -> Seq("v2"))) shouldBe Some(Right(params))
        pageParamsQueryStringBindable.unbind("", params) shouldBe s"f1=true&f2=v2"
      }
    }
    it("should bind & unbind a Url") {
      val url = Url.from("http://youtube.com").right.get
      urlQueryStringBindable.bind("key", Map("key" -> Seq("http://youtube.com"))) shouldBe Some(Right(url))
      urlQueryStringBindable.unbind("key", url) shouldBe s"key=http%3A%2F%2Fyoutube.com"
    }
    it("should bind & unbind a UserRequest.Id") {
      val id = UserRequest.Id.generate()
      userRequestIdQueryStringBindable.bind("key", Map("key" -> Seq(id.value))) shouldBe Some(Right(id))
      userRequestIdQueryStringBindable.unbind("key", id) shouldBe s"key=${id.value}"
    }
  }

  private def buildParams(no: Int, size: Int, search: String, order: String) =
    Page.Params(Page.No(no), Page.Size(size), Some(Page.Search(search)), Some(Page.OrderBy(order)))
}
Example 104
Source File: MsgEvent.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain.messages

import java.time.LocalDateTime

import gospeak.core.domain.Event
import gospeak.libs.scala.domain.{Mustache, Tag}

final case class MsgEvent(slug: Event.Slug,
                          name: Event.Name,
                          kind: Event.Kind,
                          start: LocalDateTime,
                          description: Mustache.Markdown[Message.EventInfo],
                          cfp: Option[MsgCfp.Embed],
                          venue: Option[MsgVenue.Embed],
                          proposals: Seq[MsgProposal.Embed],
                          tags: Seq[Tag],
                          published: Boolean,
                          links: Map[String, String],
                          publicLink: String,
                          orgaLink: String,
                          meetupLink: Option[String]) {
  def embed: MsgEvent.Embed = MsgEvent.Embed(slug, name, kind, start, description, venue, tags, published, links, publicLink, orgaLink, meetupLink)
}

object MsgEvent {

  final case class Embed(slug: Event.Slug,
                         name: Event.Name,
                         kind: Event.Kind,
                         start: LocalDateTime,
                         description: Mustache.Markdown[Message.EventInfo],
                         venue: Option[MsgVenue.Embed],
                         tags: Seq[Tag],
                         published: Boolean,
                         links: Map[String, String],
                         publicLink: String,
                         orgaLink: String,
                         meetupLink: Option[String])

}
Example 105
Source File: CommonEvent.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.LocalDateTime

import gospeak.core.domain.messages.Message
import gospeak.core.domain.utils.Info
import gospeak.core.domain.utils.SocialAccounts.SocialAccount.TwitterAccount
import gospeak.libs.scala.domain._

final case class CommonEvent(name: Event.Name,
                             kind: Event.Kind,
                             start: Option[LocalDateTime],
                             location: Option[GMapPlace],
                             twitterAccount: Option[TwitterAccount],
                             twitterHashtag: Option[TwitterHashtag],
                             tags: Seq[Tag],
                             extra: Either[CommonEvent.External, CommonEvent.Internal],
                             info: Info) {
  def logo: Option[Logo] = extra.fold(_.logo, i => i.group.logo.orElse(i.venue.map(_.logo)))

  def users: List[User.Id] = info.users

  def fold[A](f: CommonEvent.External => A)(g: CommonEvent.Internal => A): A = extra.fold(f, g)

  def internal: Option[CommonEvent.Internal] = extra.right.toOption

  def external: Option[CommonEvent.External] = extra.left.toOption
}

object CommonEvent {

  final case class InternalGroup(id: Group.Id, slug: Group.Slug, name: Group.Name, logo: Option[Logo])

  final case class InternalCfp(id: Cfp.Id, slug: Cfp.Slug, name: Cfp.Name)

  final case class InternalVenue(id: Venue.Id, name: Partner.Name, logo: Logo)

  final case class Internal(id: Event.Id,
                            slug: Event.Slug,
                            description: Mustache.Markdown[Message.EventInfo],
                            group: InternalGroup,
                            cfp: Option[InternalCfp],
                            venue: Option[InternalVenue])

  final case class External(id: ExternalEvent.Id,
                            logo: Option[Logo],
                            description: Markdown,
                            url: Option[Url],
                            tickets: Option[Url],
                            videos: Option[Url])

}
Example 106
Source File: Partner.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.{LocalDate, LocalDateTime}

import gospeak.core.domain.utils.{Info, SocialAccounts}
import gospeak.libs.scala.domain._

final case class Partner(id: Partner.Id,
                         group: Group.Id,
                         slug: Partner.Slug,
                         name: Partner.Name,
                         notes: Markdown, // private infos for the group
                         description: Option[Markdown], // public description
                         logo: Logo,
                         social: SocialAccounts,
                         info: Info) {
  def data: Partner.Data = Partner.Data(this)

  def users: List[User.Id] = info.users
}

object Partner {
  def apply(group: Group.Id, data: Data, info: Info): Partner =
    new Partner(Id.generate(), group, data.slug, data.name, data.notes, data.description, data.logo, data.social, info)

  final class Id private(value: String) extends DataClass(value) with IId

  object Id extends UuidIdBuilder[Id]("Partner.Id", new Id(_)) {
    val empty = new Id("00000000-0000-0000-0000-000000000000")
  }

  final class Slug private(value: String) extends DataClass(value) with ISlug

  object Slug extends SlugBuilder[Slug]("Partner.Slug", new Slug(_))

  final case class Name(value: String) extends AnyVal

  final case class Full(partner: Partner,
                        venueCount: Long,
                        sponsorCount: Long,
                        lastSponsorDate: Option[LocalDate],
                        contactCount: Long,
                        eventCount: Long,
                        lastEventDate: Option[LocalDateTime]) {
    def slug: Slug = partner.slug

    def name: Name = partner.name

    def logo: Logo = partner.logo

    def social: SocialAccounts = partner.social
  }

  final case class Data(slug: Partner.Slug,
                        name: Partner.Name,
                        notes: Markdown,
                        description: Option[Markdown],
                        logo: Logo,
                        social: SocialAccounts)

  object Data {
    def apply(p: Partner): Data = new Data(p.slug, p.name, p.notes, p.description, p.logo, p.social)
  }
}
Example 107
Source File: ExternalEvent.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.LocalDateTime

import gospeak.core.domain.utils.Info
import gospeak.core.domain.utils.SocialAccounts.SocialAccount.TwitterAccount
import gospeak.libs.scala.domain._

final case class ExternalEvent(id: ExternalEvent.Id,
                               name: Event.Name,
                               kind: Event.Kind,
                               logo: Option[Logo],
                               description: Markdown,
                               start: Option[LocalDateTime],
                               finish: Option[LocalDateTime],
                               location: Option[GMapPlace],
                               url: Option[Url],
                               tickets: Option[Url],
                               videos: Option[Url.Videos],
                               twitterAccount: Option[TwitterAccount],
                               twitterHashtag: Option[TwitterHashtag],
                               tags: Seq[Tag],
                               info: Info) {
  def data: ExternalEvent.Data = ExternalEvent.Data(this)

  def users: List[User.Id] = info.users
}

object ExternalEvent {
  def apply(d: Data, info: Info): ExternalEvent =
    new ExternalEvent(Id.generate(), d.name, d.kind, d.logo, d.description, d.start, d.finish, d.location, d.url, d.tickets, d.videos, d.twitterAccount, d.twitterHashtag, d.tags, info)

  final class Id private(value: String) extends DataClass(value) with IId

  object Id extends UuidIdBuilder[Id]("ExternalEvent.Id", new Id(_))

  final case class Data(name: Event.Name,
                        kind: Event.Kind,
                        logo: Option[Logo],
                        description: Markdown,
                        start: Option[LocalDateTime],
                        finish: Option[LocalDateTime],
                        location: Option[GMapPlace],
                        url: Option[Url],
                        tickets: Option[Url],
                        videos: Option[Url.Videos],
                        twitterAccount: Option[TwitterAccount],
                        twitterHashtag: Option[TwitterHashtag],
                        tags: Seq[Tag])

  object Data {
    def apply(e: ExternalEvent): Data =
      new Data(e.name, e.kind, e.logo, e.description, e.start, e.finish, e.location, e.url, e.tickets, e.videos, e.twitterAccount, e.twitterHashtag, e.tags)
  }
}
Example 108
Source File: CommonCfp.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.temporal.ChronoUnit
import java.time.{Instant, LocalDateTime}

import gospeak.core.domain.utils.Constants
import gospeak.core.domain.utils.SocialAccounts.SocialAccount.TwitterAccount
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain._

final case class CommonCfp(name: String,
                           logo: Option[Logo],
                           begin: Option[LocalDateTime],
                           close: Option[LocalDateTime],
                           location: Option[GMapPlace],
                           description: Markdown,
                           tags: Seq[Tag],
                           extra: Either[CommonCfp.External, CommonCfp.Internal]) {
  def closesInDays(nb: Int, now: Instant): Boolean =
    close.exists(_.toInstant(Constants.defaultZoneId).isBefore(now.minus(nb, ChronoUnit.DAYS)))

  def fold[A](f: CommonCfp.External => A)(g: CommonCfp.Internal => A): A = extra.fold(f, g)

  def internal: Option[CommonCfp.Internal] = extra.right.toOption

  def external: Option[CommonCfp.External] = extra.left.toOption
}

object CommonCfp {
  def apply(group: Group, cfp: Cfp): CommonCfp = new CommonCfp(
    name = cfp.name.value,
    logo = group.logo,
    begin = cfp.begin,
    close = cfp.close,
    location = group.location,
    description = cfp.description,
    tags = cfp.tags,
    extra = Right(Internal(
      slug = cfp.slug,
      group = InternalGroup(
        id = group.id,
        slug = group.slug))))

  def apply(cfp: ExternalCfp.Full): CommonCfp = new CommonCfp(
    name = cfp.event.name.value,
    logo = cfp.event.logo,
    begin = cfp.begin,
    close = cfp.close,
    location = cfp.event.location,
    description = cfp.description,
    tags = cfp.event.tags,
    extra = Left(External(
      id = cfp.id,
      url = cfp.url,
      event = ExternalExternalEvent(
        start = cfp.event.start,
        finish = cfp.event.finish,
        url = cfp.event.url,
        tickets = cfp.event.tickets,
        videos = cfp.event.videos,
        twitterAccount = cfp.event.twitterAccount,
        twitterHashtag = cfp.event.twitterHashtag))))

  final case class InternalGroup(id: Group.Id, slug: Group.Slug)

  final case class Internal(slug: Cfp.Slug, group: InternalGroup)

  final case class ExternalExternalEvent(start: Option[LocalDateTime],
                                         finish: Option[LocalDateTime],
                                         url: Option[Url],
                                         tickets: Option[Url],
                                         videos: Option[Url],
                                         twitterAccount: Option[TwitterAccount],
                                         twitterHashtag: Option[TwitterHashtag])

  final case class External(id: ExternalCfp.Id, url: Url, event: ExternalExternalEvent)
}
Example 109
Source File: TwittsSpec.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.services.twitter

import java.time.LocalDateTime

import com.danielasfregola.randomdatagenerator.RandomDataGenerator
import gospeak.core.domain.messages.{Message, MsgExternalCfp, MsgExternalEvent, MsgUser}
import gospeak.core.domain.utils.SocialAccounts.SocialAccount.TwitterAccount
import gospeak.core.domain.utils.{Constants, SocialAccounts}
import gospeak.core.domain.{Event, User}
import gospeak.core.testingutils.BaseSpec
import gospeak.core.testingutils.Generators._
import gospeak.libs.scala.Extensions._
import gospeak.libs.scala.domain._

class TwittsSpec extends BaseSpec with RandomDataGenerator {
  protected val place: GMapPlace = random[GMapPlace]
  private val event = MsgExternalEvent(
    name = Event.Name("Devoxx"),
    start = None,
    location = None,
    twitterAccount = None,
    twitterHashtag = None,
    tags = Seq(),
    publicLink = "https://gospeak.io/events/ext/1a887b22-ebcf-41eb-a3ab-fd7ca1a53689")
  private val cfp = MsgExternalCfp(
    begin = None,
    close = None,
    publicLink = "https://gospeak.io/cfps/ext/f02d5dc9-a93b-4861-a8ff-66962187d850")
  private val user = MsgUser.Embed(
    slug = User.Slug.from("loicknuchel").get,
    name = User.Name("Loïc Knuchel"),
    avatar = Avatar(Url.from("https://avatars0.githubusercontent.com/u/653009").get),
    title = None,
    company = None,
    website = None,
    links = SocialAccounts.fromStrings(twitter = Some("https://twitter.com/loicknuchel")).get,
    public = true)
  private val ldt = LocalDateTime.of(2020, 2, 10, 19, 0)
  private val now = ldt.toInstant(Constants.defaultZoneId)

  describe("Twitts") {
    it("should format externalCfpCreated when cfp is mostly empty") {
      val msg = Message.ExternalCfpCreated(
        event = event.copy(
          start = None,
          location = None,
          twitterAccount = None,
          twitterHashtag = None,
          tags = Seq()),
        cfp = cfp.copy(
          close = None),
        by = user.copy(
          links = SocialAccounts.fromUrls()),
        at = now)
      Tweets.externalCfpCreated(msg) shouldBe
        s"""!!Speaker announcement!!
           |Devoxx is happening
           |Submit your proposals at https://gospeak.io/cfps/ext/f02d5dc9-a93b-4861-a8ff-66962187d850
           |#speaking
           |Thanks Loïc Knuchel for the post!
           |""".stripMargin.trim
    }
    it("should format externalCfpCreated when cfp is full") {
      val msg = Message.ExternalCfpCreated(
        event = event.copy(
          start = Some(ldt),
          location = Some(place.copy(country = "France", locality = Some("Paris"))),
          twitterAccount = Some(TwitterAccount(Url.Twitter.from("https://twitter.com/devoxx").get)),
          twitterHashtag = Some(TwitterHashtag.from("#Devoxx").get),
          tags = Seq(Tag("tech"), Tag("big data"))),
        cfp = cfp.copy(
          close = Some(ldt)),
        by = user.copy(
          links = SocialAccounts.fromUrls(twitter = Some(Url.Twitter.from("https://twitter.com/jack").get))),
        at = now)
      Tweets.externalCfpCreated(msg) shouldBe
        s"""!!Speaker announcement!!
           |@devoxx is happening on February 10 in Paris, France
           |Submit your proposals before February 10 at https://gospeak.io/cfps/ext/f02d5dc9-a93b-4861-a8ff-66962187d850
           |#speaking #Devoxx #tech #bigdata
           |Thanks @jack for the post!
           |""".stripMargin.trim
    }
  }
}
Example 110
Source File: ExternalCfpSpec.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.LocalDateTime

import com.danielasfregola.randomdatagenerator.RandomDataGenerator
import gospeak.core.testingutils.BaseSpec
import gospeak.core.testingutils.Generators._

class ExternalCfpSpec extends BaseSpec with RandomDataGenerator {
  private val now = LocalDateTime.now()
  private val yesterday = now.minusDays(1)
  private val tomorrow = now.plusDays(1)
  private val lastWeek = now.minusDays(7)
  private val nextWeek = now.plusDays(7)
  private val cfp = random[ExternalCfp]

  describe("ExternalCfp") {
    it("should be future when begin is in the future") {
      cfp.copy(begin = None, close = None).isFuture(now) shouldBe false
      cfp.copy(begin = Some(nextWeek), close = None).isFuture(now) shouldBe true
      cfp.copy(begin = Some(lastWeek), close = None).isFuture(now) shouldBe false
    }
    it("should be past when close is in the past") {
      cfp.copy(begin = None, close = None).isPast(now) shouldBe false
      cfp.copy(begin = None, close = Some(nextWeek)).isPast(now) shouldBe false
      cfp.copy(begin = None, close = Some(lastWeek)).isPast(now) shouldBe true
    }
    it("should be active when after begin, if defined and before close, if defined") {
      cfp.copy(begin = None, close = None).isActive(now) shouldBe true
      cfp.copy(begin = Some(nextWeek), close = None).isActive(now) shouldBe false
      cfp.copy(begin = Some(lastWeek), close = None).isActive(now) shouldBe true
      cfp.copy(begin = None, close = Some(nextWeek)).isActive(now) shouldBe true
      cfp.copy(begin = None, close = Some(lastWeek)).isActive(now) shouldBe false
      cfp.copy(begin = Some(lastWeek), close = Some(yesterday)).isActive(now) shouldBe false
      cfp.copy(begin = Some(lastWeek), close = Some(nextWeek)).isActive(now) shouldBe true
      cfp.copy(begin = Some(tomorrow), close = Some(nextWeek)).isActive(now) shouldBe false
    }
  }
}
Example 111
Source File: JobRunMarshallerTest.scala From metronome with Apache License 2.0 | 5 votes |
package dcos.metronome
package repository.impl.kv.marshaller

import java.time.{LocalDateTime, ZoneOffset}

import dcos.metronome.model._
import org.scalatest.{FunSuite, Matchers}

import scala.concurrent.duration._

class JobRunMarshallerTest extends FunSuite with Matchers {
  test("round-trip of a JobRun") {
    val f = new Fixture
    JobRunMarshaller.fromBytes(JobRunMarshaller.toBytes(f.jobRun)) should be(Some(f.jobRun))
  }

  test("unmarshal with invalid proto data should return None") {
    val invalidBytes = "foobar".getBytes
    JobRunMarshaller.fromBytes(invalidBytes.to[IndexedSeq]) should be(None)
  }

  class Fixture {
    val jobSpec = JobSpec(JobId("job.id"))
    val jobRun = JobRun(
      JobRunId(jobSpec.id, "run.id"),
      jobSpec,
      JobRunStatus.Active,
      LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      Some(LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC)),
      Some(1 minute),
      Map.empty
    )
  }
}
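The fixture builds Instants by parsing an ISO-8601 local timestamp and pinning it to UTC, since a LocalDateTime carries no zone of its own; a one-line sketch of that conversion (object name illustrative):

import java.time.{ LocalDateTime, ZoneOffset }

object ParseToInstant extends App {
  // default parsing uses ISO_LOCAL_DATE_TIME, which accepts fractional seconds
  val instant = LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC)
  println(instant) // 2004-09-06T08:50:12Z
}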
Example 112
Source File: JobHistoryMarshallerTest.scala From metronome with Apache License 2.0 | 5 votes |
package dcos.metronome
package repository.impl.kv.marshaller

import java.time.{LocalDateTime, ZoneOffset}

import dcos.metronome.model._
import mesosphere.marathon.core.task.Task
import org.scalatest.{FunSuite, Matchers}

class JobHistoryMarshallerTest extends FunSuite with Matchers {
  test("round-trip of a JobHistory") {
    val f = new Fixture
    JobHistoryMarshaller.fromBytes(JobHistoryMarshaller.toBytes(f.jobHistory)) should be(Some(f.jobHistory))
  }

  test("unmarshal with invalid proto data should return None") {
    val invalidBytes = "foobar".getBytes
    JobHistoryMarshaller.fromBytes(invalidBytes.to[IndexedSeq]) should be(None)
  }

  class Fixture {
    val successfulJobRunInfo = JobRunInfo(
      JobRunId(JobId("/test"), "successful"),
      LocalDateTime.parse("2004-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      LocalDateTime.parse("2014-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      tasks = Seq(Task.Id("test_finished.77a7bc7d-4429-11e9-969f-3a74960279c0"))
    )

    val finishedJobRunInfo = JobRunInfo(
      JobRunId(JobId("/test"), "finished"),
      LocalDateTime.parse("1984-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      LocalDateTime.parse("1994-09-06T08:50:12.000").toInstant(ZoneOffset.UTC),
      tasks = Seq(Task.Id("test_finished.77a7bc7d-4429-11e9-969f-3a74960279c0"))
    )

    val jobHistory = JobHistory(
      JobId("/my/wonderful/job"),
      successCount = 1337,
      failureCount = 31337,
      lastSuccessAt = Some(LocalDateTime.parse("2014-09-06T08:50:12.000").toInstant(ZoneOffset.UTC)),
      lastFailureAt = Some(LocalDateTime.parse("2014-09-06T07:50:12.000").toInstant(ZoneOffset.UTC)),
      successfulRuns = Seq(successfulJobRunInfo),
      failedRuns = Seq(finishedJobRunInfo)
    )
  }
}
Example 113
Source File: SettableClock.scala From metronome with Apache License 2.0 | 5 votes |
package dcos.metronome

import java.time.{Clock, Instant, LocalDateTime, ZoneOffset, ZoneId, Duration}

import scala.concurrent.duration.FiniteDuration

object SettableClock {
  private val defaultJavaClock =
    Clock.fixed(LocalDateTime.of(2015, 4, 9, 12, 30, 0).toInstant(ZoneOffset.UTC), ZoneOffset.UTC)

  def ofNow() = new SettableClock(Clock.fixed(Instant.now(), ZoneOffset.UTC))
}

class SettableClock(private[this] var clock: Clock = SettableClock.defaultJavaClock) extends Clock {
  private[this] var subscribers: List[() => Unit] = Nil

  def onChange(fn: () => Unit): Unit = synchronized {
    subscribers = fn :: subscribers
  }

  override def getZone: ZoneId = clock.getZone

  override def instant(): Instant = clock.instant()

  override def withZone(zoneId: ZoneId): Clock = new SettableClock(clock.withZone(zoneId))

  def +=(duration: FiniteDuration): Unit = plus(duration)

  def plus(duration: FiniteDuration): this.type = plus(Duration.ofMillis(duration.toMillis))

  def plus(duration: Duration): this.type = {
    clock = Clock.offset(clock, duration)
    subscribers.foreach(_())
    this
  }

  def at(instant: Instant): this.type = {
    clock = Clock.fixed(instant, clock.getZone)
    subscribers.foreach(_())
    this
  }
}
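A usage sketch for this test clock (everything beyond the class above is illustrative): time is frozen until explicitly advanced, which makes time-dependent code deterministic in tests.

import java.time.LocalDateTime
import scala.concurrent.duration._
import dcos.metronome.SettableClock

object SettableClockUsage extends App {
  val clock = new SettableClock() // fixed at 2015-04-09T12:30:00Z by default
  val before = LocalDateTime.now(clock)
  clock += 90.minutes             // advance the clock; onChange subscribers fire
  val after = LocalDateTime.now(clock)
  assert(after == before.plusMinutes(90))
}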
Example 114
Source File: BatchExecution.scala From marvin-engine-executor with Apache License 2.0 | 5 votes |
package org.marvin.model

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

sealed abstract class ExecutionStatus(val name: String, val code: Int) {
  override def toString: String = name
}

case object Working extends ExecutionStatus(name = "working", code = 0)
case object Finished extends ExecutionStatus(name = "finished", code = 1)
case object Failed extends ExecutionStatus(name = "failed", code = (-1))

case class BatchExecution(actionName: String, protocol: String, datetime: LocalDateTime, status: ExecutionStatus) {
  override def toString: String = s"$actionName | $protocol | $status"

  override def equals(obj: scala.Any): Boolean = this.toString == obj.toString

  val formattedDatetime: String = datetime.format(DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss"))
}
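A quick sketch of what the model above renders (the action and protocol values are made up, not marvin defaults):

import java.time.LocalDateTime
import org.marvin.model.{BatchExecution, Working}

object BatchExecutionFormat extends App {
  val e = BatchExecution("acquisitor", "protocol-123", LocalDateTime.of(2020, 1, 2, 13, 45, 30), Working)
  println(e)                   // acquisitor | protocol-123 | working
  println(e.formattedDatetime) // 01/02/2020 13:45:30, per the MM/dd/yyyy HH:mm:ss pattern
}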
Example 115
Source File: DynamoOrderRepository.scala From orders-aws with Apache License 2.0 | 5 votes |
package works.weave.socks.aws.orders.dataaccess.dynamo

import com.amazonaws.services.dynamodbv2.model.AttributeValue
import com.amazonaws.services.dynamodbv2.model.GetItemResult
import com.amazonaws.services.dynamodbv2.model.QueryRequest
import java.time.LocalDateTime
import java.util.UUID
import org.springframework.stereotype.Component
import scala.collection.JavaConverters._
import works.weave.socks.aws.orders.domain.repository.OrderRepository
import works.weave.socks.aws.orders.domain.repository.OrderRepository.Customer
import works.weave.socks.aws.orders.domain.repository.OrderRepository.Order
import DynamoOrderRepository._
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator
import com.amazonaws.services.dynamodbv2.model.Condition
import com.amazonaws.services.dynamodbv2.model.ScanRequest
import works.weave.socks.spring.aws.DynamoConfiguration

@Component
class DynamoOrderRepository(dynamoConnection : DynamoConfiguration) extends OrderRepository {
  override def find(key : UUID) : Option[Order] = {
    val r : Option[GetItemResult] = Option(
      dynamoConnection.client.getItem(OrdersTableName, Map("id" -> new AttributeValue(key.toString)).asJava))
    r.map(rr => fromDB(rr.getItem))
  }

  def customerFromJSON(json : String) : OrderRepository.Customer = {
    Customer()
  }

  def customerToJSON(customer : OrderRepository.Customer) : String = {
    "{}"
  }

  private def fromDB(map : java.util.Map[String, AttributeValue]) : Order = {
    Order(
      id = UUID.fromString(map.get("id").getS),
      customerId = map.get("id").getS,
      customer = customerFromJSON(map.get("customer").getS),
      date = LocalDateTime.parse(map.get("date").getS),
      total = map.get("total").getS.toFloat)
  }

  override def save(order : OrderRepository.Order) : Unit = {
    require(order.customerId != null)
    val _ = dynamoConnection.client.putItem(OrdersTableName, Map(
      "id" -> new AttributeValue(order.id.toString),
      "date" -> new AttributeValue(order.date.toString),
      "customerId" -> new AttributeValue(order.customerId),
      "customer" -> new AttributeValue(customerToJSON(order.customer)),
      "total" -> new AttributeValue(order.total.toString)).asJava)
  }

  override def searchByCustomerId(customerId : String) : List[Order] = {
    // FIXME: performance
    val r = dynamoConnection.client.scan(
      new ScanRequest(OrdersTableName).addScanFilterEntry(
        "customerId",
        new Condition()
          .withComparisonOperator(ComparisonOperator.EQ)
          .withAttributeValueList(List(new AttributeValue(customerId)).asJava)))
    r.getItems.asScala.toList map fromDB
  }
}

object DynamoOrderRepository {
  val OrdersTableName = "orders"
}
Example 116
Source File: SQLQuerySpec.scala From scruid with Apache License 2.0 | 5 votes |
package ing.wbaa.druid

import java.time.{ LocalDateTime, ZonedDateTime }

import akka.stream.scaladsl.Sink
import ing.wbaa.druid.SQL._
import ing.wbaa.druid.client.CirceDecoders
import io.circe.generic.auto._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.wordspec.AnyWordSpec

class SQLQuerySpec extends AnyWordSpec with Matchers with ScalaFutures with CirceDecoders {
  implicit override val patienceConfig =
    PatienceConfig(timeout = Span(20, Seconds), interval = Span(5, Millis))
  private val totalNumberOfEntries  = 39244
  private val usOnlyNumberOfEntries = 528

  implicit val config = DruidConfig()
  implicit val mat    = config.client.actorMaterializer

  case class Result(hourTime: ZonedDateTime, count: Int)

  "SQL query" should {
    val sqlQuery: SQLQuery = dsql"""
      |SELECT FLOOR(__time to HOUR) AS hourTime, count(*) AS "count"
      |FROM wikipedia
      |WHERE "__time" BETWEEN TIMESTAMP '2015-09-12 00:00:00' AND TIMESTAMP '2015-09-13 00:00:00'
      |GROUP BY 1
      |""".stripMargin

    "successfully be interpreted by Druid" in {
      val resultsF = sqlQuery.execute()
      whenReady(resultsF) { response =>
        response.list[Result].map(_.count).sum shouldBe totalNumberOfEntries
      }
    }

    "support streaming" in {
      val resultsF = sqlQuery.streamAs[Result]().runWith(Sink.seq)
      whenReady(resultsF) { results =>
        results.map(_.count).sum shouldBe totalNumberOfEntries
      }
    }
  }

  "SQL parameterized query" should {
    val fromDateTime   = LocalDateTime.of(2015, 9, 12, 0, 0, 0, 0)
    val untilDateTime  = fromDateTime.plusDays(1)
    val countryIsoCode = "US"

    val sqlQuery: SQLQuery = dsql"""
      |SELECT FLOOR(__time to HOUR) AS hourTime, count(*) AS "count"
      |FROM wikipedia
      |WHERE "__time" BETWEEN ${fromDateTime} AND ${untilDateTime} AND countryIsoCode = ${countryIsoCode}
      |GROUP BY 1
      |""".stripMargin

    "be expressed as a parameterized query with three parameters" in {
      sqlQuery.query.count(_ == '?') shouldBe 3
      sqlQuery.parameters.size shouldBe 3

      sqlQuery.parameters(0) shouldBe SQLQueryParameter(SQLQueryParameterType.Timestamp, "2015-09-12 00:00:00")
      sqlQuery.parameters(1) shouldBe SQLQueryParameter(SQLQueryParameterType.Timestamp, "2015-09-13 00:00:00")
      sqlQuery.parameters(2) shouldBe SQLQueryParameter(SQLQueryParameterType.Varchar, "US")
    }

    "successfully be interpreted by Druid" in {
      val resultsF = sqlQuery.execute()
      whenReady(resultsF) { response =>
        response.list[Result].map(_.count).sum shouldBe usOnlyNumberOfEntries
      }
    }

    "support streaming" in {
      val resultsF = sqlQuery.streamAs[Result]().runWith(Sink.seq)
      whenReady(resultsF) { results =>
        results.map(_.count).sum shouldBe usOnlyNumberOfEntries
      }
    }
  }
}
Example 117
Source File: ParameterConversions.scala From scruid with Apache License 2.0 | 5 votes |
package ing.wbaa.druid.sql

import java.sql.Timestamp
import java.time.{ Instant, LocalDate, LocalDateTime }

import scala.language.implicitConversions

import ing.wbaa.druid.{ DruidConfig, SQLQueryParameter, SQLQueryParameterType }

trait ParameterConversions {
  implicit def char2Param(v: Char): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Char, v.toString)

  implicit def string2Param(v: String): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Varchar, v)

  implicit def byte2Param(v: Byte): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Tinyint, v.toString)

  implicit def short2Param(v: Short): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Smallint, v.toString)

  implicit def int2Param(v: Int): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Integer, v.toString)

  implicit def long2Param(v: Long): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Bigint, v.toString)

  implicit def float2Param(v: Float): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Float, v.toString)

  implicit def double2Param(v: Double): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Double, v.toString)

  implicit def boolean2Param(v: Boolean): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Boolean, v.toString)

  implicit def localDate2Param(v: LocalDate)(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Date, v.format(config.FormatterDate))

  implicit def localDateTime2Param(
      v: LocalDateTime
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, v.format(config.FormatterDateTime))

  implicit def timestamp2Param(v: Timestamp)(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v.toInstant))

  implicit def instant2Param(
      v: Instant
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v))
}
Example 118
Source File: FallbackExperimentStrategy.scala From izanami with Apache License 2.0 | 5 votes |
package izanami.experiments

import java.time.LocalDateTime

import akka.http.scaladsl.util.FastFuture
import izanami._
import izanami.scaladsl.{ExperimentClient, ExperimentsClient}
import play.api.libs.json.{JsObject, Json}

import scala.concurrent.Future

object FallbackExperimentStrategy {
  def apply(fallback: Experiments): FallbackExperimentStrategy =
    new FallbackExperimentStrategy(fallback)
}

class FallbackExperimentStrategy(fallback: Experiments) extends ExperimentsClient {

  override def experiment(id: String): Future[Option[ExperimentClient]] =
    FastFuture.successful(fallback.experiments.find(_.id == id).map { fb =>
      ExperimentClient(this, fb.experiment)
    })

  override def list(pattern: Seq[String]): Future[Seq[ExperimentClient]] =
    FastFuture.successful(
      fallback.experiments.map(fb => ExperimentClient(this, fb.experiment)).filter(ec => ec.matchPattern(pattern))
    )

  override def tree(pattern: Seq[String], clientId: String): Future[JsObject] =
    FastFuture.successful(
      fallback.experiments
        .filter(_.enabled)
        .filter(ec => ec.matchPatterns(pattern))
        .map { _.tree }
        .foldLeft(Json.obj())(_ deepMerge _)
    )

  override def getVariantFor(experimentId: String, clientId: String): Future[Option[Variant]] =
    FastFuture.successful(fallback.experiments.find(_.id == experimentId).map(_.variant))

  override def markVariantDisplayed(experimentId: String, clientId: String): Future[ExperimentVariantDisplayed] = {
    val experiment: ExperimentFallback = fallback.experiments
      .find(_.id == experimentId)
      .getOrElse(ExperimentFallback(experimentId, "", "", false, Variant("", "", "")))
    FastFuture.successful(
      ExperimentVariantDisplayed(
        s"${experiment.id}:${experiment.variant.id}:${clientId}:${System.currentTimeMillis()}",
        experiment.id,
        clientId,
        experiment.variant,
        LocalDateTime.now(),
        0,
        experiment.variant.id
      )
    )
  }

  override def markVariantWon(experimentId: String, clientId: String): Future[ExperimentVariantWon] = {
    val experiment: ExperimentFallback = fallback.experiments
      .find(_.id == experimentId)
      .getOrElse(ExperimentFallback(experimentId, "", "", false, Variant("", "", "")))
    FastFuture.successful(
      ExperimentVariantWon(
        s"${experiment.id}:${experiment.variant.id}:${clientId}:${System.currentTimeMillis()}",
        experiment.id,
        clientId,
        experiment.variant,
        LocalDateTime.now(),
        0,
        experiment.variant.id
      )
    )
  }
}
Example 119
Source File: ExperimentVariantEventTest.scala From izanami with Apache License 2.0 | 5 votes |
package domains.abtesting

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, Sink, Source}
import domains.Key
import domains.abtesting.events._
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import test.IzanamiSpec

class ExperimentVariantEventTest extends IzanamiSpec with ScalaFutures with IntegrationPatience {

  "ExperimentVariantEvent" must {
    "aggregate event" in {

      implicit val system: ActorSystem = ActorSystem()

      val variantId = "vId"
      val variant = Variant(variantId, "None", None, Traffic(0), None)
      val flow: Flow[ExperimentVariantEvent, VariantResult, NotUsed] =
        ExperimentVariantEvent.eventAggregation("experiment.id", 1, ChronoUnit.HOURS)

      val firstDate = LocalDateTime.now().minus(5, ChronoUnit.HOURS)

      val experimentKey = Key(s"experiment:id")
      def experimentVariantEventKey(counter: Int): ExperimentVariantEventKey =
        ExperimentVariantEventKey(experimentKey, variantId, s"client:id:$counter", "namespace", s"$counter")
      def clientId(i: Int): String = s"client:id:$i"
      def date(i: Int): LocalDateTime = firstDate.plus(15 * i, ChronoUnit.MINUTES)

      val source = (1 to 20)
        .flatMap { counter =>
          val d = date(counter)
          val key = experimentVariantEventKey(counter)

          counter match {
            case i if i % 2 > 0 =>
              List(ExperimentVariantDisplayed(key, experimentKey, clientId(i), variant, d, 0, variantId))
            case i =>
              List(
                ExperimentVariantDisplayed(key, experimentKey, clientId(i), variant, d, 0, variantId),
                ExperimentVariantWon(key, experimentKey, clientId(i), variant, d, 0, variantId)
              )
          }
        }

      val expectedEvents = Seq(
        ExperimentResultEvent(experimentKey, variant, date(1), 0.0, "vId"),
        ExperimentResultEvent(experimentKey, variant, date(5), 40.0, "vId"),
        ExperimentResultEvent(experimentKey, variant, date(9), 44.44444444444444, "vId"),
        ExperimentResultEvent(experimentKey, variant, date(13), 46.15384615384615, "vId"),
        ExperimentResultEvent(experimentKey, variant, date(17), 47.05882352941177, "vId")
      )

      val evts = Source(source).via(flow).runWith(Sink.seq).futureValue
      val allEvents = evts.flatMap(_.events)

      allEvents must be(expectedEvents)
    }
  }
}
Example 120
Source File: WebhookInstances.scala From izanami with Apache License 2.0 | 5 votes |
package domains.webhook

import java.time.LocalDateTime

import domains.webhook.Webhook.WebhookKey
import domains.{Domain, IsAllowed, Key}
import domains.auth.AuthInfo

object WebhookInstances {

  import play.api.libs.json._
  import play.api.libs.functional.syntax._

  private val reads: Reads[Webhook] = (
    (__ \ "clientId").read[WebhookKey] and
    (__ \ "callbackUrl").read[String] and
    (__ \ "domains").read[Seq[Domain.Domain]].orElse(Reads.pure(Seq.empty[Domain.Domain])) and
    (__ \ "patterns").read[Seq[String]].orElse(Reads.pure(Seq.empty[String])) and
    (__ \ "types").read[Seq[String]].orElse(Reads.pure(Seq.empty[String])) and
    (__ \ "headers").read[JsObject].orElse(Reads.pure(Json.obj())) and
    (__ \ "created").read[LocalDateTime].orElse(Reads.pure(LocalDateTime.now())) and
    (__ \ "isBanned").read[Boolean].orElse(Reads.pure(false))
  )(Webhook.apply _)

  private val writes = Json.writes[Webhook]

  implicit val format = Format(reads, writes)
}
Example 121
Source File: ExperimentVariantEventInstances.scala From izanami with Apache License 2.0 | 5 votes |
package domains.abtesting.events

import java.time.LocalDateTime

import domains.Key
import domains.abtesting.ExperimentInstances._
import play.api.libs.json._

object ExperimentVariantEventKeyInstances {

  implicit val format: Format[ExperimentVariantEventKey] = Format(
    Key.format.map { k =>
      ExperimentVariantEventKey(k)
    },
    Writes[ExperimentVariantEventKey](vk => Key.format.writes(vk.key))
  )
}

object ExperimentVariantDisplayedInstances {
  implicit val format = {
    implicit val kf: Format[ExperimentVariantEventKey] = ExperimentVariantEventKeyInstances.format
    implicit val dateTimeReads: Reads[LocalDateTime] =
      Reads.localDateTimeReads("yyyy-MM-dd'T'HH:mm:ss.SSS")
    implicit val dateTimeWrite: Writes[LocalDateTime] =
      Writes.temporalWrites("yyyy-MM-dd'T'HH:mm:ss.SSS")
    Json.format[ExperimentVariantDisplayed]
  }
}

object ExperimentVariantWonInstances {
  implicit val format = {
    implicit val kf: Format[ExperimentVariantEventKey] = ExperimentVariantEventKeyInstances.format
    implicit val dateTimeReads: Reads[LocalDateTime] =
      Reads.localDateTimeReads("yyyy-MM-dd'T'HH:mm:ss.SSS")
    implicit val dateTimeWrite: Writes[LocalDateTime] =
      Writes.temporalWrites("yyyy-MM-dd'T'HH:mm:ss.SSS")
    Json.format[ExperimentVariantWon]
  }
}

object ExperimentVariantEventInstances {

  private val reads: Reads[ExperimentVariantEvent] = Reads[ExperimentVariantEvent] {
    case event
        if (event \ "@type")
          .asOpt[String]
          .contains("VariantDisplayedEvent") =>
      ExperimentVariantDisplayedInstances.format.reads(event)
    case event if (event \ "@type").asOpt[String].contains("VariantWonEvent") =>
      ExperimentVariantWonInstances.format.reads(event)
    case _ => JsError("error.bad.format")
  }

  private val writes: Writes[ExperimentVariantEvent] = Writes[ExperimentVariantEvent] {
    case e: ExperimentVariantDisplayed =>
      ExperimentVariantDisplayedInstances.format.writes(e) ++ Json.obj("@type" -> "VariantDisplayedEvent")
    case e: ExperimentVariantWon =>
      ExperimentVariantWonInstances.format.writes(e).as[JsObject] ++ Json.obj("@type" -> "VariantWonEvent")
  }

  implicit val format = Format(reads, writes)
}
Example 122
Source File: ExperimentInstances.scala From izanami with Apache License 2.0 | 5 votes |
package domains.abtesting

import java.time.LocalDateTime

import domains.abtesting.events.ExperimentVariantEventInstances
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
import play.api.libs.json.Writes.temporalWrites

object ExperimentInstances {

  import libs.json._

  // Traffic
  val trafficReads: Reads[Traffic] = {
    import Ordering.Double.TotalOrdering
    __.read[Double](min(0.0) keepAnd max(1.0)).map(Traffic.apply)
  }
  val trafficWrites: Writes[Traffic] = Writes[Traffic] { t =>
    JsNumber(t.traffic)
  }
  implicit val trafficFormat: Format[Traffic] = Format(trafficReads, trafficWrites)
  implicit val trafficEq: cats.Eq[Traffic]    = cats.Eq.fromUniversalEquals

  private val datePattern = "yyyy-MM-dd HH:mm:ss"

  private val currentCampaignFormat = {
    implicit val dateFormat: Format[LocalDateTime] =
      Format(localDateTimeReads(datePattern), temporalWrites[LocalDateTime, String](datePattern))
    Json.format[CurrentCampaign]
  }
  private val closedCampaignFormat = {
    implicit val dateFormat: Format[LocalDateTime] =
      Format(localDateTimeReads(datePattern), temporalWrites[LocalDateTime, String](datePattern))
    Json.format[ClosedCampaign]
  }

  private val campaignReads: Reads[Campaign] = Reads {
    case js: JsObject if (js \ "won").asOpt[String].isDefined => closedCampaignFormat.reads(js)
    case js: JsObject                                         => currentCampaignFormat.reads(js)
    case _                                                    => JsError("jsobject.expected")
  }
  private val campaignWrite: Writes[Campaign] = Writes {
    case c: CurrentCampaign => currentCampaignFormat.writes(c)
    case c: ClosedCampaign  => closedCampaignFormat.writes(c)
  }
  implicit val campaignFormat: Format[Campaign] = Format(campaignReads, campaignWrite)
  implicit val campaignEq: cats.Eq[Campaign]    = cats.Eq.fromUniversalEquals

  implicit val variantFormat: Format[Variant] = Json.format[Variant]
  implicit val variantEq: cats.Eq[Variant]    = cats.Eq.fromUniversalEquals

  implicit val experimentResultEventFormat: Format[ExperimentResultEvent] =
    Json.format[ExperimentResultEvent]

  implicit val format: Format[Experiment]        = Json.format[Experiment]
  implicit val experimentEq: cats.Eq[Experiment] = cats.Eq.fromUniversalEquals

  implicit val variantResultFormat: Format[VariantResult] = {
    import domains.abtesting.events.ExperimentVariantEvent
    implicit val eveFormat: Format[ExperimentVariantEvent] = ExperimentVariantEventInstances.format
    Json.format[VariantResult]
  }

  implicit val experimentResultFormat: Format[ExperimentResult] =
    Json.format[ExperimentResult]
}
Example 123
Source File: EventsController.scala From izanami with Apache License 2.0 | 5 votes |
package controllers

import akka.actor.ActorSystem
import controllers.actions.SecuredAuthContext
import domains.Domain.Domain
import domains.events.{EventStore, EventStoreContext}
import play.api.libs.EventSource
import play.api.libs.EventSource.{EventDataExtractor, EventIdExtractor, EventNameExtractor}
import play.api.libs.json.{JsString, Json}
import play.api.mvc.{AbstractController, ActionBuilder, AnyContent, ControllerComponents}
import libs.http.HttpContext
import akka.stream.scaladsl.Flow
import scala.util.Success
import scala.util.Failure
import libs.logs.IzanamiLogger
import java.time.LocalDateTime
import play.api.libs.json.JsValue
import scala.concurrent.duration.DurationDouble
import domains.auth.AuthInfo
import domains.Key

class EventsController(system: ActorSystem,
                       AuthAction: ActionBuilder[SecuredAuthContext, AnyContent],
                       cc: ControllerComponents)(implicit r: HttpContext[EventStoreContext])
    extends AbstractController(cc) {

  import libs.http._
  import domains.events.Events._
  import system.dispatcher

  private implicit val nameExtractor =
    EventNameExtractor[IzanamiEvent](_ => None) //Some(event.`type`))
  private implicit val idExtractor =
    EventIdExtractor[IzanamiEvent](event => Some(s"${event._id}")) //Some(event.key.key))
  private implicit val dataExtractor =
    EventDataExtractor[IzanamiEvent](event => Json.stringify(event.toJson))

  def allEvents(patterns: String, domains: String) =
    events(domains.split(",").toIndexedSeq, patterns)

  def eventsForADomain(domain: String, patterns: String) =
    events(domain.split(",").toIndexedSeq, patterns)

  val logEvent = Flow[IzanamiEvent].map { event =>
    event
  }

  case class KeepAliveEvent() extends IzanamiEvent {
    val _id: Long                          = 0
    val domain: Domain                     = domains.Domain.Unknown
    val authInfo: Option[AuthInfo.Service] = None
    val key: Key                           = Key("na")
    def timestamp: LocalDateTime           = LocalDateTime.now()
    val `type`: String                     = "KEEP_ALIVE"
    val payload: JsValue                   = Json.obj()
  }

  val keepAlive = Flow[IzanamiEvent].keepAlive(30.seconds, () => KeepAliveEvent())

  // TODO abilitations
  private def events[T <: IzanamiEvent](domains: Seq[String], patterns: String) =
    AuthAction.asyncTask[EventStoreContext] { ctx =>
      val allPatterns: Seq[String] = ctx.authorizedPatterns ++ patterns.split(",").toList

      val lastEventId = ctx.request.headers.get("Last-Event-ID").map(_.toLong)
      val allDomains  = domains.map(JsString).flatMap(_.validate[Domain].asOpt)

      EventStore
        .events(allDomains, allPatterns, lastEventId)
        .map { source =>
          val eventSource = (source via keepAlive via logEvent via EventSource.flow).watchTermination() {
            (_, fDone) =>
              fDone.onComplete {
                case Success(_) => IzanamiLogger.debug("SSE disconnected")
                case Failure(e) => IzanamiLogger.error("Error during SSE ", e)
              }
              fDone
          }
          Ok.chunked(eventSource).as("text/event-stream")
        }
    }
}
Example 124
Source File: ClickGenerator.scala From Apache-Spark-2x-Machine-Learning-Cookbook with MIT License | 5 votes |
package spark.ml.cookbook.chapter13

import java.time.LocalDateTime
import scala.util.Random._

case class ClickEvent(userId: String, ipAddress: String, time: String, url: String, statusCode: String)

object ClickGenerator {

  val statusCodeData = Seq(200, 404, 500)
  val urlData = Seq("http://www.fakefoo.com",
    "http://www.fakefoo.com/downloads",
    "http://www.fakefoo.com/search",
    "http://www.fakefoo.com/login",
    "http://www.fakefoo.com/settings",
    "http://www.fakefoo.com/news",
    "http://www.fakefoo.com/reports",
    "http://www.fakefoo.com/images",
    "http://www.fakefoo.com/css",
    "http://www.fakefoo.com/sounds",
    "http://www.fakefoo.com/admin",
    "http://www.fakefoo.com/accounts"
  )
  val ipAddressData = generateIpAddress()
  val timeStampData = generateTimeStamp()
  val userIdData    = generateUserId()

  def generateIpAddress(): Seq[String] = {
    for (n <- 1 to 255) yield s"127.0.0.$n"
  }

  def generateTimeStamp(): Seq[String] = {
    val now = LocalDateTime.now()
    for (n <- 1 to 1000) yield LocalDateTime.of(now.toLocalDate, now.toLocalTime.plusSeconds(n)).toString
  }

  def generateUserId(): Seq[Int] = {
    for (id <- 1 to 1000) yield id
  }

  def generateClicks(clicks: Int = 1): Seq[String] = {
    0.until(clicks).map(i => {
      val statusCode = statusCodeData(nextInt(statusCodeData.size))
      val ipAddress  = ipAddressData(nextInt(ipAddressData.size))
      val timeStamp  = timeStampData(nextInt(timeStampData.size))
      val url        = urlData(nextInt(urlData.size))
      val userId     = userIdData(nextInt(userIdData.size))

      s"$userId,$ipAddress,$timeStamp,$url,$statusCode"
    })
  }

  def parseClicks(data: String): ClickEvent = {
    val fields = data.split(",")
    new ClickEvent(fields(0), fields(1), fields(2), fields(3), fields(4))
  }
}
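A quick way to exercise the generator end to end, round-tripping through the CSV line format it produces (the demo object is ours, not part of the original sources):

import spark.ml.cookbook.chapter13.{ClickEvent, ClickGenerator}

object ClickGeneratorDemo extends App {
  // Generate five CSV click lines and parse them straight back
  val lines: Seq[String]      = ClickGenerator.generateClicks(5)
  val events: Seq[ClickEvent] = lines.map(ClickGenerator.parseClicks)
  events.foreach(e => println(s"user ${e.userId} hit ${e.url} -> ${e.statusCode}"))
}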
Example 125
Source File: DatabaseITest.scala From cuttle with Apache License 2.0 | 5 votes |
package com.criteo.cuttle

import java.time.{Instant, LocalDateTime, ZoneOffset}
import java.time.temporal.ChronoUnit

import scala.concurrent.Future

import cats.effect.IO
import doobie.implicits._
import doobie.scalatest.IOChecker

import com.criteo.cuttle.Auth.User

class DatabaseITest extends DatabaseSuite with IOChecker with TestScheduling {

  val dbConfig = DatabaseConfig(
    Seq(DBLocation("localhost", 3388)),
    dbName,
    "root",
    ""
  )

  // IOChecker needs a transactor for performing its queries
  override val transactor: doobie.Transactor[IO] =
    Database.newHikariTransactor(dbConfig).allocated.unsafeRunSync()._1

  test("should establish the connection and instantiate a transactor") {
    assert(Database.connect(dbConfig).isInstanceOf[doobie.Transactor[IO]])
  }

  test("should validate getPausedJobIdsQuery") {
    Database.reset()
    Database.connect(dbConfig)
    check(queries.getPausedJobIdsQuery)
  }

  test("should validate paused jobs queries") {
    Database.reset()
    val xa = Database.connect(dbConfig)

    val id = "id1"
    val job = Job(id, testScheduling) { _ =>
      Future.successful(Completed)
    }
    val pausedJob = PausedJob(job.id, User("user1"), Instant.now().truncatedTo(ChronoUnit.SECONDS))

    assert(queries.pauseJob(pausedJob).transact(xa).unsafeRunSync() == 1)
    assert(queries.getPausedJobs.transact(xa).unsafeRunSync() == Seq(pausedJob))
  }

  test("paused_jobs migration(1) should set default values for old pauses") {
    Database.reset()

    Database.schemaEvolutions.head.transact(transactor).unsafeRunSync()
    sql"INSERT INTO paused_jobs VALUES ('1')".update.run.transact(transactor).unsafeRunSync()
    val id = sql"SELECT * FROM paused_jobs".query[String].unique.transact(transactor).unsafeRunSync()
    assert(id == "1")

    Database.schemaEvolutions(1).transact(transactor).unsafeRunSync()
    val pausedJob = sql"SELECT * FROM paused_jobs".query[PausedJob].unique.transact(transactor).unsafeRunSync()
    assert(pausedJob.id == "1")
    assert(pausedJob.user == User("not defined user"))
    assert(pausedJob.date == LocalDateTime.parse("1991-11-01T15:42:00").toInstant(ZoneOffset.UTC))
  }

  test("we should be able to retrieve finished executions") {
    Database.reset()

    Database.doSchemaUpdates.transact(transactor).unsafeRunSync()

    val ctx  = TestContext()
    val date = Some(Instant.now())
    val el   = ExecutionLog("id", "hello", date, date, ctx.asJson, ExecutionStatus.ExecutionSuccessful, None, 10)

    (0 to 100).foreach { i =>
      queries.logExecution(el.copy(s"${el.id}-$i"), ctx.logIntoDatabase).transact(transactor).unsafeRunSync()
      val executionLogSize = queries.getExecutionLogSize(Set("hello")).transact(transactor).unsafeRunSync()
      assert(executionLogSize == i + 1)
    }
  }
}
Example 126
Source File: PipelineAction.scala From marvin-engine-executor with Apache License 2.0 | 5 votes |
package org.marvin.executor.actions

import java.time.LocalDateTime
import java.util.NoSuchElementException

import akka.Done
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.pattern.ask
import akka.util.Timeout
import org.marvin.artifact.manager.ArtifactSaver
import org.marvin.artifact.manager.ArtifactSaver.SaveToRemote
import org.marvin.exception.MarvinEExecutorException
import org.marvin.executor.actions.PipelineAction.{PipelineExecute, PipelineExecutionStatus}
import org.marvin.executor.proxies.BatchActionProxy
import org.marvin.executor.proxies.EngineProxy.{ExecuteBatch, Reload}
import org.marvin.model._
import org.marvin.util.{JsonUtil, LocalCache}

import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.Success

object PipelineAction {
  case class PipelineExecute(protocol: String, params: String)
  case class PipelineExecutionStatus(protocol: String)
}

class PipelineAction(metadata: EngineMetadata) extends Actor with ActorLogging {
  implicit val ec = context.dispatcher

  var artifactSaver: ActorRef = _
  var cache: LocalCache[BatchExecution] = _

  override def preStart() = {
    artifactSaver = context.actorOf(ArtifactSaver.build(metadata), name = "artifactSaver")
    cache = new LocalCache[BatchExecution](maximumSize = 10000L, defaultTTL = 30.days)
  }

  override def receive = {
    case PipelineExecute(protocol, params) =>
      implicit val futureTimeout = Timeout(metadata.pipelineTimeout milliseconds)

      log.info(s"Starting pipeline execution. Protocol: [$protocol] and Params: [$params].")

      cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Working))

      try {
        for (actionName <- metadata.pipelineActions) {
          val engineActionMetadata = metadata.actionsMap(actionName)
          val _actor: ActorRef =
            context.actorOf(Props(new BatchActionProxy(engineActionMetadata)), name = actionName.concat("Actor"))
          Await.result((_actor ? Reload(protocol)), futureTimeout.duration)
          Await.result((_actor ? ExecuteBatch(protocol, params)), futureTimeout.duration)
          context stop _actor

          val futures: ListBuffer[Future[Done]] = ListBuffer[Future[Done]]()
          for (artifactName <- engineActionMetadata.artifactsToPersist) {
            futures += (artifactSaver ? SaveToRemote(artifactName, protocol)).mapTo[Done]
          }

          if (futures.nonEmpty) Future.sequence(futures).onComplete {
            case Success(response) =>
              log.info(s"All artifacts from [$actionName] were saved with success!! [$response]")
          }
        }
      } catch {
        case e: Exception =>
          cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Failed))
          throw e
      }

      cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Finished))

    case PipelineExecutionStatus(protocol) =>
      log.info(s"Getting pipeline execution status for protocol $protocol.")

      try {
        sender ! JsonUtil.toJson(cache.load(protocol).get)
      } catch {
        case _: NoSuchElementException =>
          sender ! akka.actor.Status.Failure(new MarvinEExecutorException(s"Protocol $protocol not found!"))
      }

    case Done =>
      log.info("Work Done!")

    case _ =>
      log.warning("Invalid message!!")
  }
}
Example 127
Source File: package.scala From albedo with MIT License | 5 votes |
package ws.vinta.albedo

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import org.apache.spark.sql.SparkSession

package object settings {
  private val spark = SparkSession.builder().getOrCreate()
  private val sc = spark.sparkContext

  val dataDir: String       = sc.getConf.get("spark.albedo.dataDir", "./spark-data")
  val checkpointDir: String = sc.getConf.get("spark.albedo.checkpointDir", "./spark-data/checkpoint")

  def today: String = {
    val now = LocalDateTime.now()
    val formatter = DateTimeFormatter.ofPattern("yyyyMMdd")
    now.format(formatter)
  }

  def md5(text: String): String = {
    java.security.MessageDigest.getInstance("MD5")
      .digest(text.getBytes())
      .map(0xFF & _)
      .map { "%02x".format(_) }
      .foldLeft("") { _ + _ }
  }
}
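Both helpers are deterministic given their inputs; `md5` hex-encodes the 16-byte digest. Note the package object grabs a `SparkSession` eagerly, so one must already be active before touching any member:

import ws.vinta.albedo.settings

settings.md5("foo") // "acbd18db4cc2f85cedef654fccc4a4d8" (the standard MD5 of "foo")
settings.today      // e.g. "20200115", depending on the current date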
Example 128
Source File: Rfc3339UtilTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.play.controllers

import java.time.{ LocalDateTime, ZoneId, ZoneOffset, ZonedDateTime }

import org.scalatest.{ FunSpec, MustMatchers }

class Rfc3339UtilTest extends FunSpec with MustMatchers {

  val dtz    = ZoneId.of("UTC")
  val offset = ZoneOffset.UTC
  //noinspection ScalaStyle
  val date = ZonedDateTime.of(LocalDateTime.ofEpochSecond(1451911387L, 0, offset), dtz)

  describe("Rfc3339UtilTest") {
    it("should parse RFC3339 DateTime") {
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26-00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452-01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T16:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T14:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26.345200Z[UTC]"
    }
    it("should parse RFC3339 Date") {
      Rfc3339Util.parseDate("2007-05-01").toString mustBe "2007-05-01"
      Rfc3339Util.parseDate("2008-05-01").toString mustBe "2008-05-01"
      Rfc3339Util.parseDate("2007-08-01").toString mustBe "2007-08-01"
      Rfc3339Util.parseDate("2007-05-08").toString mustBe "2007-05-08"
    }
    it("should write DateTime") {
      Rfc3339Util.writeDateTime(date) mustBe "2016-01-04T12:43:07.0000+0000"
    }
    it("should write Date") {
      Rfc3339Util.writeDate(date.toLocalDate) mustBe "2016-01-04"
    }
  }
}
Example 129
Source File: ItTestPlugin.scala From Waves with MIT License | 5 votes |
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import sbt.Keys._
import sbt.Tests.Group
import sbt._

// Separate projects for integration tests because of IDEA: https://youtrack.jetbrains.com/issue/SCL-14363#focus=streamItem-27-3061842.0-0
object ItTestPlugin extends AutoPlugin {

  object autoImport extends ItKeys
  import autoImport._

  override def projectSettings: Seq[Def.Setting[_]] =
    inConfig(Test)(
      Seq(
        logDirectory := {
          val runId = Option(System.getenv("RUN_ID")).getOrElse {
            val formatter = DateTimeFormatter.ofPattern("MM-dd--HH_mm_ss")
            formatter.format(LocalDateTime.now()) // git branch?
          }
          val r = target.value / "logs" / runId
          IO.createDirectory(r)
          r
        },
        // Example: SCALATEST_EXCLUDE_TAGS="package1.Tag1 package2.Tag2 package3.Tag3"
        testOptions += {
          val excludeTags = sys.env.get("SCALATEST_EXCLUDE_TAGS").fold(Seq.empty[String])(Seq("-l", _))
          val args = Seq("-fFW", (logDirectory.value / "summary.log").toString) ++ excludeTags
          Tests.Argument(TestFrameworks.ScalaTest, args: _*)
        },
        parallelExecution in Test := true,
        tags in test += Tags.ForkedTestGroup -> 1,
        tags in testOnly += Tags.ForkedTestGroup -> 1,
        tags in testQuick += Tags.ForkedTestGroup -> 1,
        testGrouping := {
          // ffs, sbt!
          // https://github.com/sbt/sbt/issues/3266
          val javaHomeValue     = javaHome.value
          val logDirectoryValue = logDirectory.value
          val envVarsValue      = envVars.value
          val javaOptionsValue  = javaOptions.value

          for {
            group <- testGrouping.value
            suite <- group.tests
          } yield Group(
            suite.name,
            Seq(suite),
            Tests.SubProcess(
              ForkOptions(
                javaHome = javaHomeValue,
                outputStrategy = outputStrategy.value,
                bootJars = Vector.empty[java.io.File],
                workingDirectory = Option(baseDirectory.value),
                runJVMOptions = Vector(
                  "-Dwaves.it.logging.appender=FILE",
                  s"-Dwaves.it.logging.dir=${logDirectoryValue / suite.name.replaceAll("""(\w)\w*\.""", "$1.")}" // foo.bar.Baz -> f.b.Baz
                ) ++ javaOptionsValue,
                connectInput = false,
                envVars = envVarsValue
              ))
          )
        }
      ))
}

trait ItKeys {
  val logDirectory = taskKey[File]("The directory where logs of integration tests are written")
}
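Because the plugin does not override `trigger`, it stays opt-in and must be enabled per project. A hedged sketch of the wiring (module name is illustrative); with it enabled, running `RUN_ID=build-42 sbt node-it/test` would route per-suite logs under `target/logs/build-42`:

// build.sbt (illustrative)
lazy val `node-it` = (project in file("node-it"))
  .enablePlugins(ItTestPlugin)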
Example 130
Source File: SparkTemperatureProcessor.scala From smart-meter with MIT License | 5 votes |
package com.logimethods.nats.connector.spark.app

import java.util.Properties
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._

import com.datastax.spark.connector.streaming._
import com.datastax.spark.connector.SomeColumns

import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}

object SparkTemperatureProcessor extends App with SparkStreamingProcessor {
  val log = LogManager.getRootLogger
  log.setLevel(Level.WARN)

  val (properties, target, logLevel, sc, ssc, inputNatsStreaming, inputSubject, outputSubject, clusterId,
       outputNatsStreaming, natsUrl, streamingDuration) = setupStreaming(args)

  // Temperatures //

  val temperatures =
    if (inputNatsStreaming) {
      NatsToSparkConnector
        .receiveFromNatsStreaming(classOf[Tuple2[Long, Float]], StorageLevel.MEMORY_ONLY, clusterId)
        .withNatsURL(natsUrl)
        .withSubjects(inputSubject)
        .withDataDecoder(dataDecoder)
        .asStreamOf(ssc)
    } else {
      NatsToSparkConnector
        .receiveFromNats(classOf[Tuple2[Long, Float]], StorageLevel.MEMORY_ONLY)
        .withProperties(properties)
        .withSubjects(inputSubject)
        .withDataDecoder(dataDecoder)
        .asStreamOf(ssc)
    }

  // Ideally, should be the AVG
  val singleTemperature = temperatures.reduceByKey(Math.max(_, _))

  if (logLevel.contains("TEMPERATURE")) {
    singleTemperature.print()
  }

  singleTemperature.saveToCassandra("smartmeter", "temperature")

  val temperatureReport = singleTemperature.map({ case (epoch, temperature) =>
    (s"""{"epoch": $epoch, "temperature": $temperature}""")
  })
  SparkToNatsConnectorPool.newPool()
    .withProperties(properties)
    .withSubjects(outputSubject) // "smartmeter.extract.temperature"
    .publishToNats(temperatureReport)

  // Start //
  ssc.start()
  ssc.awaitTermination()
}
Example 131
Source File: SparkProcessor.scala From smart-meter with MIT License | 5 votes |
package com.logimethods.nats.connector.spark.app

import java.util.Properties
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._

import io.nats.client.Nats._
import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}

trait SparkProcessor {
  def setup(args: Array[String]) = {
    val inputSubject = args(0)
//    val inputNatsStreaming = inputSubject.toUpperCase.contains("STREAMING")
    val outputSubject = args(1)
//    val outputNatsStreaming = outputSubject.toUpperCase.contains("STREAMING")
    println("Will process messages from '" + inputSubject + "' to '" + outputSubject + "'")

    val logLevel = scala.util.Properties.envOrElse("LOG_LEVEL", "INFO")
    println("LOG_LEVEL = " + logLevel)

    val targets = scala.util.Properties.envOrElse("TARGETS", "ALL")
    println("TARGETS = " + targets)

    val cassandraUrl = System.getenv("CASSANDRA_URL")
    println("CASSANDRA_URL = " + cassandraUrl)

    val sparkMasterUrl = System.getenv("SPARK_MASTER_URL")
    println("SPARK_MASTER_URL = " + sparkMasterUrl)

    val sparkCoresMax = System.getenv("SPARK_CORES_MAX")
    println("SPARK_CORES_MAX = " + sparkCoresMax)

    val conf = new SparkConf()
      .setAppName(args(2))
      .setMaster(sparkMasterUrl)
      .set("spark.cores.max", sparkCoresMax)
      .set("spark.cassandra.connection.host", cassandraUrl)
    val sc = new SparkContext(conf)

//    val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
//    val ssc = new StreamingContext(sc, new Duration(streamingDuration));
///    ssc.checkpoint("/spark/storage")

    val properties = new Properties()
    val natsUrl = System.getenv("NATS_URI")
    println("NATS_URI = " + natsUrl)
    properties.put("servers", natsUrl)
    properties.put(PROP_URL, natsUrl)

    val clusterId = System.getenv("NATS_CLUSTER_ID")

    val inputNatsStreaming  = inputSubject.toUpperCase.contains("STREAMING")
    val outputNatsStreaming = outputSubject.toUpperCase.contains("STREAMING")

    (properties, targets, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId,
     outputNatsStreaming, natsUrl)
  }

  def dataDecoder: Array[Byte] => Tuple2[Long, Float] = bytes => {
    val buffer = ByteBuffer.wrap(bytes)
    val epoch  = buffer.getLong()
    val value  = buffer.getFloat()
    (epoch, value)
  }
}

trait SparkStreamingProcessor extends SparkProcessor {
  def setupStreaming(args: Array[String]) = {
    val (properties, target, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId,
         outputNatsStreaming, natsUrl) = setup(args)

    val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
    println("STREAMING_DURATION = " + streamingDuration)

    val ssc = new StreamingContext(sc, new Duration(streamingDuration))
//    ssc.checkpoint("/spark/storage")

    (properties, target, logLevel, sc, ssc, inputNatsStreaming, inputSubject, outputSubject, clusterId,
     outputNatsStreaming, natsUrl, streamingDuration)
  }
}
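`dataDecoder` assumes each NATS payload is exactly 12 bytes: a big-endian `Long` epoch followed by a `Float` value. A matching encoder for producing test messages against the same wire format (a sketch, not part of the original sources):

import java.nio.ByteBuffer

def dataEncoder: Tuple2[Long, Float] => Array[Byte] = { case (epoch, value) =>
  // ByteBuffer defaults to big-endian, matching getLong/getFloat on the decoding side
  ByteBuffer.allocate(12).putLong(epoch).putFloat(value).array()
}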
Example 132
Source File: SparkPredictionTrainer.scala From smart-meter with MIT License | 5 votes |
package com.logimethods.nats.connector.spark.app

import java.util.Properties
import java.io.File
import java.io.Serializable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._

import io.nats.client.ConnectionFactory._
import java.nio.ByteBuffer

import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

import com.logimethods.connector.nats.to_spark._
import com.logimethods.scala.connector.spark.to_nats._

import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator

import java.util.function._

import java.time.{LocalDateTime, ZoneOffset}
import java.time.DayOfWeek._

import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel

object SparkPredictionTrainer extends App with SparkPredictionProcessor {
  log.setLevel(Level.WARN)

  val (properties, targets, logLevel, sc, inputNatsStreaming, inputSubject, outputSubject, clusterId,
       outputNatsStreaming, natsUrl) = setup(args)

  val streamingDuration = scala.util.Properties.envOrElse("STREAMING_DURATION", "2000").toInt
  println("STREAMING_DURATION = " + streamingDuration)

  new Thread(new Runnable {
    def run() {
      while (true) {
        try {
          val data  = SparkPredictionProcessor.getData(sc, THRESHOLD)
          val model = trainer.fit(data)
          model.write.overwrite.save(PREDICTION_MODEL_PATH)
          println("New model of size " + data.count() + " trained: " + model.uid)
          Thread.sleep(streamingDuration)
        } catch {
          case e: Throwable => log.error(e)
        }
      }
    }
  }).start()
}
Example 133
Source File: ParameterMappers.scala From neotypes with MIT License | 5 votes |
package neotypes
package implicits.mappers

import java.time.{Duration, LocalDate, LocalDateTime, LocalTime, Period, OffsetDateTime, OffsetTime, ZonedDateTime}
import java.util.UUID

import mappers.ParameterMapper

import org.neo4j.driver.v1.Value
import org.neo4j.driver.v1.types.{IsoDuration, Point}

import scala.collection.Iterable
import scala.jdk.CollectionConverters._

trait ParameterMappers {
  implicit final val BooleanParameterMapper: ParameterMapper[Boolean] =
    ParameterMapper.fromCast(Boolean.box)

  implicit final val ByteArrayParameterMapper: ParameterMapper[Array[Byte]] =
    ParameterMapper.identity

  implicit final val DoubleParameterMapper: ParameterMapper[Double] =
    ParameterMapper.fromCast(Double.box)

  implicit final val DurationParameterMapper: ParameterMapper[Duration] =
    ParameterMapper.identity

  implicit final val FloatParameterMapper: ParameterMapper[Float] =
    ParameterMapper.fromCast(Float.box)

  implicit final val IntParameterMapper: ParameterMapper[Int] =
    ParameterMapper.fromCast(Int.box)

  implicit final val IsoDurationParameterMapper: ParameterMapper[IsoDuration] =
    ParameterMapper.identity

  implicit final val LocalDateParameterMapper: ParameterMapper[LocalDate] =
    ParameterMapper.identity

  implicit final val LocalDateTimeParameterMapper: ParameterMapper[LocalDateTime] =
    ParameterMapper.identity

  implicit final val LocalTimeParameterMapper: ParameterMapper[LocalTime] =
    ParameterMapper.identity

  implicit final val LongParameterMapper: ParameterMapper[Long] =
    ParameterMapper.fromCast(Long.box)

  implicit final val OffsetDateTimeParameterMapper: ParameterMapper[OffsetDateTime] =
    ParameterMapper.identity

  implicit final val OffsetTimeParameterMapper: ParameterMapper[OffsetTime] =
    ParameterMapper.identity

  implicit final val PeriodParameterMapper: ParameterMapper[Period] =
    ParameterMapper.identity

  implicit final val PointParameterMapper: ParameterMapper[Point] =
    ParameterMapper.identity

  implicit final val StringParameterMapper: ParameterMapper[String] =
    ParameterMapper.identity

  implicit final val UUIDParameterMapper: ParameterMapper[UUID] =
    ParameterMapper[String].contramap(_.toString)

  implicit final val ValueParameterMapper: ParameterMapper[Value] =
    ParameterMapper.identity

  implicit final val ZonedDateTimeParameterMapper: ParameterMapper[ZonedDateTime] =
    ParameterMapper.identity

  private final def iterableParameterMapper[T](mapper: ParameterMapper[T]): ParameterMapper[Iterable[T]] =
    ParameterMapper.fromCast { col =>
      col.iterator.map(v => mapper.toQueryParam(v).underlying).asJava
    }

  implicit final def collectionParameterMapper[T, C[_]](implicit mapper: ParameterMapper[T], ev: C[T] <:< Iterable[T]): ParameterMapper[C[T]] =
    iterableParameterMapper(mapper).contramap(ev)

  private final def iterableMapParameterMapper[V](mapper: ParameterMapper[V]): ParameterMapper[Iterable[(String, V)]] =
    ParameterMapper.fromCast { col =>
      col.iterator.map {
        case (key, v) => key -> mapper.toQueryParam(v).underlying
      }.toMap.asJava
    }

  implicit final def mapParameterMapper[V, M[_, _]](implicit mapper: ParameterMapper[V], ev: M[String, V] <:< Iterable[(String, V)]): ParameterMapper[M[String, V]] =
    iterableMapParameterMapper(mapper).contramap(ev)

  implicit final def optionAnyRefParameterMapper[T](implicit mapper: ParameterMapper[T]): ParameterMapper[Option[T]] =
    ParameterMapper.fromCast { optional =>
      optional.map(v => mapper.toQueryParam(v).underlying).orNull
    }
}
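Since `ParameterMapper` is contravariant in its type parameter, instances for wrapper types can be derived from existing ones with `contramap`, exactly as the `UUID` instance above does. For a hypothetical `UserId` value class (the type is ours, shown only to illustrate the pattern):

import neotypes.mappers.ParameterMapper

final case class UserId(value: String) extends AnyVal

// Reuse the String instance rather than writing a mapper from scratch
implicit val userIdParameterMapper: ParameterMapper[UserId] =
  ParameterMapper[String].contramap(_.value)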
Example 134
Source File: Executor.scala From neo4j-spark-connector with Apache License 2.0 | 5 votes |
package org.neo4j.spark

import java.time.{LocalDate, LocalDateTime, OffsetTime, ZoneOffset, ZonedDateTime}
import java.util
import java.sql.Timestamp

import org.apache.spark.SparkContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types.StructType
import org.neo4j.spark.dataframe.CypherTypes
import org.neo4j.spark.utils.{Neo4jSessionAwareIterator, Neo4jUtils}

import scala.collection.JavaConverters._

object Executor {

  def convert(value: AnyRef): Any = value match {
    case it: util.Collection[_] => it.toArray()
    case m: java.util.Map[_, _] => m.asScala
    case _                      => Neo4jUtils.convert(value)
  }

  def toJava(parameters: Map[String, Any]): java.util.Map[String, Object] = {
    parameters.mapValues(toJava).asJava
  }

  private def toJava(x: Any): AnyRef = x match {
    case y: Seq[_] => y.asJava
    case _         => x.asInstanceOf[AnyRef]
  }

  val EMPTY = Array.empty[Any]

  val EMPTY_RESULT = new CypherResult(new StructType(), Iterator.empty)

  class CypherResult(val schema: StructType, val rows: Iterator[Array[Any]]) {
    def sparkRows: Iterator[Row] = rows.map(row => new GenericRowWithSchema(row, schema))

    def fields = schema.fieldNames
  }

  def execute(sc: SparkContext, query: String, parameters: Map[String, AnyRef]): CypherResult = {
    execute(Neo4jConfig(sc.getConf), query, parameters)
  }

  private def rows(result: Iterator[_]) = {
    var i = 0
    // Consume the iterator while counting; checking hasNext alone would loop forever
    while (result.hasNext) {
      result.next()
      i = i + 1
    }
    i
  }

  def execute(config: Neo4jConfig, query: String, parameters: Map[String, Any], write: Boolean = false): CypherResult = {
    val result = new Neo4jSessionAwareIterator(config, query, toJava(parameters), write)
    if (!result.hasNext) {
      return EMPTY_RESULT
    }
    val peek     = result.peek()
    val keyCount = peek.size()
    if (keyCount == 0) {
      return new CypherResult(new StructType(), Array.fill[Array[Any]](rows(result))(EMPTY).toIterator)
    }
    val keys   = peek.keys().asScala
    val fields = keys.map(k => (k, peek.get(k).`type`())).map(keyType => CypherTypes.field(keyType))
    val schema = StructType(fields)
    val it = result.map(record => {
      val row = new Array[Any](keyCount)
      var i   = 0
      while (i < keyCount) {
        val value = convert(record.get(i).asObject())
        row.update(i, value)
        i = i + 1
      }
      row
    })
    new CypherResult(schema, it)
  }
}
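A minimal read-path sketch built on the API above (the query text and property names are illustrative, not taken from the connector's docs):

import org.apache.spark.SparkContext
import org.apache.spark.sql.Row

def personNames(sc: SparkContext): Iterator[Row] = {
  val result = Executor.execute(sc, "MATCH (p:Person) RETURN p.name AS name", Map.empty)
  result.sparkRows // each Row carries the schema inferred from the first record's value types
}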
Example 135
Source File: package.scala From fs2-cron with Apache License 2.0 | 5 votes |
package eu.timepit

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
import java.util.concurrent.TimeUnit

import cats.ApplicativeError
import cats.effect.{Concurrent, Sync, Timer}
import cron4s.expr.CronExpr
import cron4s.lib.javatime._
import fs2.Stream

import scala.concurrent.duration.FiniteDuration

package object fs2cron {

  def sleepCron[F[_]: Sync](cronExpr: CronExpr)(implicit timer: Timer[F]): Stream[F, Unit] =
    durationFromNow(cronExpr).flatMap(Stream.sleep[F])

  def schedule[F[_]: Concurrent, A](tasks: List[(CronExpr, Stream[F, A])])(implicit
      timer: Timer[F]
  ): Stream[F, A] = {
    val scheduled = tasks.map { case (cronExpr, task) => awakeEveryCron[F](cronExpr) >> task }
    Stream.emits(scheduled).covary[F].parJoinUnbounded
  }
}
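This excerpt references `awakeEveryCron` and `durationFromNow`, which live in the same package object but were cut from the listing. A sketch of what they plausibly look like, assuming cron4s's javatime support makes `cronExpr.next` return an `Option[LocalDateTime]`; the signatures below are assumptions inferred from the call sites above:

// Sketch of the elided helpers, under the assumptions stated above.
def evalNow[F[_]: Sync]: Stream[F, LocalDateTime] =
  Stream.eval(Sync[F].delay(LocalDateTime.now))

def durationFromNow[F[_]: Sync](cronExpr: CronExpr): Stream[F, FiniteDuration] =
  evalNow.flatMap { from =>
    cronExpr.next(from) match {
      case Some(next) =>
        val millis = from.until(next, ChronoUnit.MILLIS)
        Stream.emit(FiniteDuration(millis, TimeUnit.MILLISECONDS))
      case None =>
        Stream.raiseError[F](new IllegalArgumentException(s"No next execution time for $cronExpr"))
    }
  }

def awakeEveryCron[F[_]: Sync](cronExpr: CronExpr)(implicit timer: Timer[F]): Stream[F, Unit] =
  sleepCron(cronExpr).repeat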
Example 136
Source File: RollingFileLogger.scala From odin with Apache License 2.0 | 4 votes |
package io.odin.loggers

import java.nio.file.{Files, Path, Paths}
import java.time.{Instant, LocalDateTime}
import java.time.format.DateTimeFormatter
import java.util.TimeZone
import java.util.concurrent.TimeUnit

import cats.Monad
import cats.effect.concurrent.Ref
import cats.effect.{Concurrent, ContextShift, Fiber, Resource, Timer}
import cats.syntax.all._
import io.odin.formatter.Formatter
import io.odin.{Level, Logger, LoggerMessage}

import scala.concurrent.duration.{FiniteDuration, _}

object RollingFileLogger {

  def apply[F[_]](
      fileNamePattern: LocalDateTime => String,
      maxFileSizeInBytes: Option[Long],
      rolloverInterval: Option[FiniteDuration],
      formatter: Formatter,
      minLevel: Level
  )(implicit F: Concurrent[F], timer: Timer[F], cs: ContextShift[F]): Resource[F, Logger[F]] = {
    new RollingFileLoggerFactory(
      fileNamePattern,
      maxFileSizeInBytes,
      rolloverInterval,
      formatter,
      minLevel,
      FileLogger.apply[F]
    ).mk
  }

  private[odin] class RefLogger[F[_]: Timer: Monad](
      current: Ref[F, Logger[F]],
      override val minLevel: Level
  ) extends DefaultLogger[F](minLevel) {

    def log(msg: LoggerMessage): F[Unit] = current.get.flatMap(_.log(msg))

    override def log(msgs: List[LoggerMessage]): F[Unit] = current.get.flatMap(_.log(msgs))
  }

  private[odin] class RollingFileLoggerFactory[F[_]](
      fileNamePattern: LocalDateTime => String,
      maxFileSizeInBytes: Option[Long],
      rolloverInterval: Option[FiniteDuration],
      formatter: Formatter,
      minLevel: Level,
      underlyingLogger: (String, Formatter, Level) => Resource[F, Logger[F]],
      fileSizeCheck: Path => Long = Files.size
  )(implicit F: Concurrent[F], timer: Timer[F], cs: ContextShift[F]) {

    val df: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss")

    // Note: `allocate`, which opens the underlying FileLogger together with a fiber
    // watching for rollover conditions, is referenced below but elided from this excerpt.

    def mk: Resource[F, Logger[F]] = {
      val logger = for {
        ((logger, watcherFiber), release) <- allocate.allocated
        refLogger <- Ref.of(logger)
        refRelease <- Ref.of(release)
        _ <- F.start(rollingLoop(watcherFiber, refLogger, refRelease))
      } yield {
        (new RefLogger(refLogger, minLevel), refRelease)
      }
      Resource.make(logger)(_._2.get.flatten).map {
        case (logger, _) => logger
      }
    }

    def now: F[Long] = timer.clock.realTime(TimeUnit.MILLISECONDS)

    // Each time the watcher fiber completes (i.e. a rollover is due), allocate a fresh
    // logger, swap it into the Ref, release the old one, and recurse.
    def rollingLoop(watcher: Fiber[F, Unit], logger: Ref[F, Logger[F]], release: Ref[F, F[Unit]]): F[Unit] =
      for {
        _ <- watcher.join
        oldRelease <- release.get
        ((newLogger, newWatcher), newRelease) <- allocate.allocated
        _ <- logger.set(newLogger)
        _ <- release.set(newRelease)
        _ <- oldRelease
        _ <- rollingLoop(newWatcher, logger, release)
      } yield ()
  }
}
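A hedged usage sketch wiring the factory into an application; the effect type, paths, and limits are illustrative choices, not defaults taken from odin:

import java.nio.file.Paths
import cats.effect.{ExitCode, IO, IOApp, Resource}
import io.odin.{Level, Logger}
import io.odin.formatter.Formatter
import io.odin.loggers.RollingFileLogger
import scala.concurrent.duration._

object Example extends IOApp {
  val loggerResource: Resource[IO, Logger[IO]] =
    RollingFileLogger[IO](
      dt => Paths.get("logs", s"app-${dt.toLocalDate}.log").toString, // fresh file name per rollover
      maxFileSizeInBytes = Some(10L * 1024 * 1024),                   // roll at ~10 MiB
      rolloverInterval = Some(1.day),                                 // or at least once a day
      Formatter.default,
      Level.Info
    )

  def run(args: List[String]): IO[ExitCode] =
    loggerResource.use(logger => logger.info("started").as(ExitCode.Success))
}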