org.joda.time.DateTimeZone Scala Examples
The following examples show how to use org.joda.time.DateTimeZone.
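
All of the snippets below revolve around a handful of DateTimeZone calls. A minimal sketch of that core API (the zone ID and values are illustrative):

    import org.joda.time.{DateTime, DateTimeZone}

    val utc = DateTimeZone.UTC                           // built-in UTC zone
    val helsinki = DateTimeZone.forID("Europe/Helsinki") // look up a zone by ID

    val now = DateTime.now(utc)         // current instant, rendered in UTC
    val local = now.withZone(helsinki)  // same instant, rendered in another zone

    DateTimeZone.setDefault(utc)        // process-wide default for new DateTimes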
Example 1
Source File: DateTimeNoMillisSerializer.scala From avoin-voitto with MIT License
package liigavoitto.util

import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.ISODateTimeFormat
import org.json4s.CustomSerializer
import org.json4s.JsonAST.{ JNull, JString }

case object DateTimeNoMillisSerializer extends CustomSerializer[DateTime](format => (
  {
    case JString(s) => ISODateTimeFormat.dateTimeNoMillis().withZone(Time.zone).parseDateTime(s)
    case JNull => null
  },
  {
    case d: DateTime => JString(ISODateTimeFormat.dateTimeNoMillis().withZone(Time.zone).print(d))
  }
))

object Time {
  val zone = DateTimeZone.forID("Europe/Helsinki")
}
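
Wiring the serializer into json4s looks roughly like this; a minimal sketch (the Formats setup and sample timestamp are assumptions, not part of the original file):

    import org.json4s.DefaultFormats
    import org.json4s.native.Serialization

    implicit val formats = DefaultFormats + DateTimeNoMillisSerializer

    // Prints an ISO-8601 string without millis, in the Europe/Helsinki zone
    val json = Serialization.write(new DateTime(2016, 6, 3, 13, 2, 0, Time.zone))
    val parsed = Serialization.read[DateTime](json)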
Example 2
Source File: TemporalBounds.scala From recogito2 with Apache License 2.0
package services.entity

import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._

case class TemporalBounds(from: DateTime, to: DateTime) {

  import TemporalBounds._

  // TODO make this smarter - for now, we'll just print to/from years
  override def toString() =
    if (from == to) {
      yearFormatter.print(from)
    } else {
      s"${yearFormatter.print(from)}/${yearFormatter.print(to)}"
    }
}

object TemporalBounds {

  // For convenience
  private val yearFormatter = DateTimeFormat.forPattern("yyyy").withZone(DateTimeZone.UTC)
  private val dateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd").withZone(DateTimeZone.UTC)

  implicit val dateFormat = Format(
    JsPath.read[JsString].map { json => dateFormatter.parseDateTime(json.value) },
    Writes[DateTime] { dt => Json.toJson(dateFormatter.print(dt)) }
  )

  private def flexDateWrite(dt: DateTime): JsValue =
    if (dt.monthOfYear == 1 && dt.dayOfMonth == 1 && dt.minuteOfDay == 0)
      Json.toJson(dt.year.get)
    else
      Json.toJson(dt)

  // Note: the original file also defines flexDateRead, which this excerpt dropped;
  // the version below is a minimal reconstruction that mirrors flexDateWrite
  // (an assumption, not the verbatim original)
  private def flexDateRead(json: JsValue): DateTime = json match {
    case JsNumber(year) => new DateTime(DateTimeZone.UTC).withDate(year.toInt, 1, 1).withTime(0, 0, 0, 0)
    case _ => dateFormatter.parseDateTime(json.as[JsString].value)
  }

  implicit val temporalBoundsFormat: Format[TemporalBounds] = (
    (JsPath \ "from").format[JsValue].inmap[DateTime](flexDateRead, flexDateWrite) and
    (JsPath \ "to").format[JsValue].inmap[DateTime](flexDateRead, flexDateWrite)
  )(TemporalBounds.apply, unlift(TemporalBounds.unapply))

  def computeUnion(bounds: Seq[TemporalBounds]): TemporalBounds = {
    val from = bounds.map(_.from.getMillis).min
    val to = bounds.map(_.to.getMillis).max
    TemporalBounds(
      new DateTime(from, DateTimeZone.UTC),
      new DateTime(to, DateTimeZone.UTC))
  }

  def fromYears(from: Int, to: Int): TemporalBounds = {
    val f = new DateTime(DateTimeZone.UTC).withDate(from, 1, 1).withTime(0, 0, 0, 0)
    val t = new DateTime(DateTimeZone.UTC).withDate(to, 1, 1).withTime(0, 0, 0, 0)
    TemporalBounds(f, t)
  }
}
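
fromYears, computeUnion and the implicit Format combine like this; a sketch (the years are made up):

    val roman = TemporalBounds.fromYears(-27, 476)
    val medieval = TemporalBounds.fromYears(476, 1453)

    val union = TemporalBounds.computeUnion(Seq(roman, medieval)) // spans -27 to 1453
    val json = Json.toJson(union)                                 // via temporalBoundsFormat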
Example 3
Source File: SimpleWebSocketActor.scala From monix-sample with Apache License 2.0
package engine

import akka.actor.{Actor, ActorRef, Props}
import engine.SimpleWebSocketActor.Next
import monix.execution.Scheduler
import monix.reactive.Observable
import monix.execution.Ack.Continue
import monix.execution.cancelables.CompositeCancelable
import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.json.{JsValue, Json, Writes}
import scala.concurrent.duration._
import engine.BackPressuredWebSocketActor._

class SimpleWebSocketActor[T: Writes]
  (producer: Observable[T], out: ActorRef)(implicit s: Scheduler)
  extends Actor {

  def receive: Receive = {
    case Next(jsValue) =>
      out ! jsValue
  }

  private[this] val subscription = CompositeCancelable()

  override def preStart(): Unit = {
    super.preStart()

    val source = {
      val initial = Observable.evalOnce(initMessage(now()))
      val obs = initial ++ producer.map(x => Json.toJson(x))
      val timeout = obs.debounce(3.seconds).map(_ => keepAliveMessage(now()))

      Observable.merge(obs, timeout)
    }

    subscription += source.subscribe { jsValue =>
      self ! Next(jsValue)
      Continue
    }
  }

  override def postStop(): Unit = {
    subscription.cancel()
    super.postStop()
  }

  def now(): Long =
    DateTime.now(DateTimeZone.UTC).getMillis
}

object SimpleWebSocketActor {
  case class Next(value: JsValue)
}
Example 4
Source File: PlayGlobalSettings.scala From play-silhouette-postgres-async-seed with Apache License 2.0
package utils

import java.util.TimeZone

import jdub.async.Database
import org.joda.time.DateTimeZone
import play.api.{ Application, GlobalSettings }
import services.database.Schema

object PlayGlobalSettings extends GlobalSettings {
  override def onStart(app: Application) = {
    DateTimeZone.setDefault(DateTimeZone.UTC)
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))

    val cnf = play.api.Play.current.configuration
    val host = cnf.getString("db.host").getOrElse("127.0.0.1")
    val port = 5432
    val database = cnf.getString("db.database")
    val username = cnf.getString("db.username").getOrElse("silhouette")
    val password = cnf.getString("db.password")

    Database.open(username, host, port, password, database)
    Schema.update()

    super.onStart(app)
  }

  override def onStop(app: Application) = {
    Database.close()
    super.onStop(app)
  }
}
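
Setting both defaults keeps Joda-Time and the JDK in agreement; afterwards, zone-less constructors pick up UTC. A small sketch of the effect:

    import java.util.TimeZone
    import org.joda.time.{DateTime, DateTimeZone}

    DateTimeZone.setDefault(DateTimeZone.UTC)
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))

    // Both now render in UTC, regardless of the host machine's zone
    println(new DateTime())       // e.g. 2016-01-01T12:00:00.000Z
    println(new java.util.Date())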
Example 5
Source File: DateTimeDifferenceTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.dt

import be.cetic.tsimulus.timeseries.TimeSeries
import org.joda.time.{DateTimeZone, LocalDateTime, Duration}

class DateTimeDifferenceTimeSeries(a: TimeSeries[LocalDateTime], b: TimeSeries[LocalDateTime]) extends TimeSeries[Duration] {

  override def compute(times: Stream[LocalDateTime]): Stream[(LocalDateTime, Option[Duration])] = {
    val z = a.compute(times) zip b.compute(times)

    z.map(s => {
      if (s._1._2.isEmpty || s._2._2.isEmpty) (s._1._1, None)
      else (s._1._1, Some(new Duration(s._1._2.get.toDateTime(DateTimeZone.UTC), s._2._2.get.toDateTime(DateTimeZone.UTC))))
    })
  }

  override def compute(time: LocalDateTime): Option[Duration] = {
    val aTime = a.compute(time)
    val bTime = b.compute(time)

    if (aTime.isEmpty || bTime.isEmpty) None
    else Some(new Duration(aTime.get.toDateTime(DateTimeZone.UTC), bTime.get.toDateTime(DateTimeZone.UTC)))
  }
}
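
The conversion trick the class relies on: anchoring two LocalDateTimes in the same zone (UTC here) makes their difference a well-defined Duration. For example:

    import org.joda.time.{DateTimeZone, Duration, LocalDateTime}

    val start = new LocalDateTime(2020, 1, 1, 0, 0)
    val end = new LocalDateTime(2020, 1, 2, 6, 0)

    val d = new Duration(start.toDateTime(DateTimeZone.UTC), end.toDateTime(DateTimeZone.UTC))
    d.getStandardHours // 30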
Example 6
Source File: LogisticTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.binary

import be.cetic.tsimulus.timeseries.TimeSeries
import org.joda.time.{DateTimeZone, LocalDateTime}

import scala.util.Random

case class LogisticTimeSeries(base: TimeSeries[Double], location: Double, scale: Double, seed: Int) extends TimeSeries[Boolean] {

  override def compute(times: Stream[LocalDateTime]) = {
    def logit(x: Double) = 1 / (1 + Math.exp(- ((x - location) / scale)))

    base.compute(times).map { case (t, v) => (t, v.map(x => {
      val r = new Random(seed + t.toDateTime(DateTimeZone.UTC).getMillis)
      r.nextDouble() < logit(x)
    }))}
  }

  override def compute(time: LocalDateTime): Option[Boolean] = {
    val r = new Random(seed + time.toDateTime(DateTimeZone.UTC).getMillis)
    def logit(x: Double) = 1 / (1 + Math.exp(- ((x - location) / scale)))
    base.compute(time).map(x => r.nextDouble() < logit(x))
  }
}
Example 7
Source File: CorrelatedTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.composite

import be.cetic.tsimulus.timeseries.TimeSeries
import org.joda.time.{DateTimeZone, LocalDateTime}

import scala.util.Random

case class CorrelatedTimeSeries(base: TimeSeries[Double],
                                seed: Int,
                                rho: Double) extends TimeSeries[Double] {

  val rho_square = rho * rho

  override def compute(times: Stream[LocalDateTime]) = {
    val r = new Random(seed)
    base.compute(times)
        .map { case (t, v) => (t, v.map(a => (rho * a) + (math.sqrt(1 - rho_square) * r.nextGaussian))) }
  }

  override def compute(time: LocalDateTime): Option[Double] = {
    val r = new Random(seed + time.toDateTime(DateTimeZone.UTC).getMillis)
    base.compute(time) match {
      case None => None
      case Some(x) => Some((rho * x) + (math.sqrt(1 - rho_square) * r.nextGaussian))
    }
  }
}
Example 8
Source File: DateTimeConverter.scala From seahorse with Apache License 2.0
package ai.deepsense.commons.datetime

import java.sql.Timestamp

import org.joda.time.format.{DateTimeFormatter, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}

trait DateTimeConverter {
  val zone: DateTimeZone = DateTimeZone.getDefault
  val dateTimeFormatter: DateTimeFormatter = ISODateTimeFormat.dateTime()

  def toString(dateTime: DateTime): String = dateTime.toString(dateTimeFormatter)

  def parseDateTime(s: String): DateTime = dateTimeFormatter.parseDateTime(s).withZone(zone)

  def parseTimestamp(s: String): Timestamp = new Timestamp(parseDateTime(s).getMillis)

  def now: DateTime = new DateTime(zone)

  def fromMillis(millis: Long): DateTime = new DateTime(zone).withMillis(millis)

  def dateTime(
      year: Int,
      monthOfyear: Int,
      dayOfMonth: Int,
      hourOfDay: Int = 0,
      minutesOfHour: Int = 0,
      secondsOfMinute: Int = 0): DateTime =
    new DateTime(year, monthOfyear, dayOfMonth, hourOfDay, minutesOfHour, secondsOfMinute, zone)

  def dateTimeFromUTC(
      year: Int,
      monthOfyear: Int,
      dayOfMonth: Int,
      hourOfDay: Int = 0,
      minutesOfHour: Int = 0,
      secondsOfMinute: Int = 0): DateTime =
    new DateTime(
      year,
      monthOfyear,
      dayOfMonth,
      hourOfDay,
      minutesOfHour,
      secondsOfMinute,
      DateTimeZone.UTC).withZone(DateTimeConverter.zone)
}

object DateTimeConverter extends DateTimeConverter
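
Typical calls against the companion object; a sketch (the timestamp is made up):

    // ISO string -> DateTime in the default zone -> java.sql.Timestamp
    val ts = DateTimeConverter.parseTimestamp("2015-05-14T15:56:11.000Z")

    // Build the instant in UTC, then view it in the default zone
    val dt = DateTimeConverter.dateTimeFromUTC(2015, 5, 14, 12)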
Example 9
Source File: KibanaForwarder.scala From shield with MIT License
package shield.actors.listeners

import akka.actor.{Actor, ActorLogging}
import com.amazonaws.auth.{AWSCredentials, DefaultAWSCredentialsProviderChain}
import com.typesafe.config.Config
import shield.actors.RestartLogging
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTimeZone, DateTime}
import shield.aws.AWSSigningConfig
import shield.metrics.Instrumented
import spray.client.pipelining._
import spray.http.HttpResponse
import shield.aws.AWSImplicits._
import spray.json.DefaultJsonProtocol._
import spray.json._

// todo: ensure useful mapping on the index
class KibanaForwarder(id: String, host: String, indexPrefix: String, ttype: String, maxOutstanding: Int, signingParams: AWSSigningConfig)
  extends Actor with ActorLogging with RestartLogging with Instrumented {

  implicit val ctx = context.dispatcher

  // todo: timeout?
  val awsSigningConfig = signingParams
  val pipeline = sendReceive
  val dayFormat = DateTimeFormat.forPattern("yyyy.MM.dd")

  val outstandingCounter = metrics.counter("outstandingPosts", id)
  val droppedMeter = metrics.meter("droppedAccessLogs", id)
  val postTimer = timing("postToKibana", id)

  def receive = {
    case LogsFlushed =>
      outstandingCounter -= 1

    case AccessLogs(buffer) =>
      if (buffer.nonEmpty) {
        if (outstandingCounter.count >= maxOutstanding) {
          droppedMeter.mark(buffer.length)
        } else postTimer {
          outstandingCounter += 1

          val date = DateTimeFormat.forPattern("yyyy.MM.dd").print(DateTime.now(DateTimeZone.UTC))
          // todo: CompactPrint is 1% cpu under load tests. Faster serialization library?
          val orderedCommands = buffer.flatMap { doc =>
            List(
              JsObject(
                "index" -> JsObject(
                  "_index" -> JsString(s"$indexPrefix-$date"),
                  "_type" -> JsString(ttype)
                )
              ).toJson.compactPrint,
              doc.toJson.compactPrint
            )
          }
          val req = Post(s"$host/_bulk", orderedCommands.mkString("\n") + "\n").withAWSSigning(awsSigningConfig)
          pipeline(req) andThen LogCollector.handleResults(self, droppedMeter, log, buffer.length)
        }
      }
  }
}
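
The per-day index name is just the UTC date printed through a yyyy.MM.dd formatter; a quick sketch (the prefix is illustrative):

    import org.joda.time.{DateTime, DateTimeZone}
    import org.joda.time.format.DateTimeFormat

    val date = DateTimeFormat.forPattern("yyyy.MM.dd").print(DateTime.now(DateTimeZone.UTC))
    val indexName = s"access-logs-$date" // e.g. "access-logs-2016.11.08"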
Example 10
Source File: AuthUtil.scala From shield with MIT License
package shield.aws

import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec

import org.apache.commons.codec.binary.Hex
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone}
import spray.http.HttpHeaders.RawHeader
import spray.http.HttpRequest

object HexBytesUtil {

  def hex2bytes(hex: String): Array[Byte] = {
    hex.replaceAll("[^0-9A-Fa-f]", "").sliding(2, 2).toArray.map(Integer.parseInt(_, 16).toByte)
  }

  def bytes2hex(bytes: Array[Byte], sep: Option[String] = None): String = {
    sep match {
      case None => bytes.map("%02x".format(_)).mkString
      case _ => bytes.map("%02x".format(_)).mkString(sep.get)
    }
  }
}
Example 11
Source File: DateTimeUtilsTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.utils.date

import com.salesforce.op.test.TestCommon
import org.joda.time.{DateTime, DateTimeZone}
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class DateTimeUtilsTest extends FlatSpec with TestCommon {

  val dateStr = "2017-03-29T14:00:07.000Z"
  val date = DateTime.parse(dateStr)
  val now = DateTime.now(DateTimeZone.UTC)

  Spec(DateTimeUtils.getClass) should "parse date in Iso format" in {
    DateTimeUtils.parse(dateStr) shouldBe date.getMillis
  }

  it should "parse date in yyyy-MM-dd HH:mm:ss.SSS format" in {
    val formattedStr = "2017-03-29 14:00:07.000"
    DateTimeUtils.parse(formattedStr) shouldBe date.getMillis
  }

  it should "parse Unix" in {
    DateTimeUtils.parseUnix(now.getMillis) shouldBe now.toString("YYYY/MM/dd")
  }

  it should "get range between two dates" in {
    val numberOfDays = 500
    val diff = DateTimeUtils.getRange(
      date.minusDays(numberOfDays).toString("YYYY/MM/dd"),
      date.toString("YYYY/MM/dd")
    )
    diff.length shouldBe numberOfDays + 1
  }

  it should "get date difference days from start date" in {
    val datePlusDays = DateTimeUtils.getDatePlusDays(now.toString("YYYY/MM/dd"), 31)
    datePlusDays shouldBe now.plusDays(31).toString("YYYY/MM/dd")
  }
}
Example 12
Source File: DruidEventBeam.scala From gimel with Apache License 2.0
package com.paypal.gimel.druid.util

import com.metamx.tranquility.beam.{Beam, ClusteredBeamTuning}
import com.metamx.tranquility.druid.{DruidBeams, DruidLocation, DruidRollup, SpecificDruidDimensions}
import com.metamx.tranquility.spark.BeamFactory
import com.metamx.tranquility.typeclass.Timestamper
import io.druid.data.input.impl.TimestampSpec
import org.apache.curator.framework.CuratorFrameworkFactory
import org.apache.curator.retry.BoundedExponentialBackoffRetry
import org.joda.time.{DateTime, DateTimeZone, Period}

import com.paypal.gimel.druid.conf.DruidClientConfiguration

// Note: the excerpt dropped the enclosing object along with its configuration
// holder. The object wrapper, druidConfig, init and the Timestamper below are a
// minimal reconstruction (an assumption, not the verbatim original).
object DruidEventBeam {

  private var druidConfig: DruidClientConfiguration = _

  def init(config: DruidClientConfiguration): Unit = {
    if (druidConfig == null) {
      druidConfig = config
    }
  }

  // Tranquility needs to know how to extract each event's timestamp (assumed UTC here)
  implicit val timestamper = new Timestamper[Map[String, Any]] {
    def timestamp(event: Map[String, Any]): DateTime =
      new DateTime(event(druidConfig.timestamp_field).toString, DateTimeZone.UTC)
  }

  lazy val BeamInstance: Beam[Map[String, Any]] = {
    // Tranquility uses ZooKeeper (through Curator framework) for coordination.
    val curator = CuratorFrameworkFactory.newClient(
      druidConfig.zookeeper,
      new BoundedExponentialBackoffRetry(100, 3000, 5)
    )
    curator.start()

    // Transforms List[DruidDimensions] from the DruidClientConfiguration to List[String]
    val dimensions = druidConfig
      .dimensions
      .map(_.name)

    // Transforms List[DruidMetrics] from the DruidClientConfiguration to List[AggregatorFactory]
    val aggregators = druidConfig
      .metrics
      .map(_.getAggregator)

    // Building a Druid Beam
    DruidBeams
      .builder()
      .curator(curator)
      .discoveryPath(druidConfig.discoveryPath)
      .location(DruidLocation.create(druidConfig.indexService, druidConfig.datasource))
      .rollup(DruidRollup(SpecificDruidDimensions(dimensions),
        aggregators, DruidUtility.fetchQueryGranularity(druidConfig.queryGranularity)))
      .tuning(
        ClusteredBeamTuning(
          segmentGranularity = druidConfig.segmentGranularity,
          windowPeriod = new Period(druidConfig.windowPeriod),
          partitions = druidConfig.numPartitions,
          replicants = druidConfig.numReplicants
        )
      )
      .timestampSpec(new TimestampSpec(druidConfig.timestamp_field, "iso", null))
      .buildBeam()
  }
}

class DruidEventBeam(config: DruidClientConfiguration) extends BeamFactory[Map[String, Any]] {
  // Return a singleton, so the same connection is shared across all tasks in the same JVM.
  def makeBeam: Beam[Map[String, Any]] = {
    DruidEventBeam.init(config)
    DruidEventBeam.BeamInstance
  }
}
Example 13
Source File: ImplicitJodaTimeGenerators.scala From scalacheck-ops with Apache License 2.0
package org.scalacheck.ops.time.joda

import org.joda.time.chrono._
import org.joda.time.{Chronology, DateTime, DateTimeZone, LocalDateTime}
import org.scalacheck.Arbitrary
import org.scalacheck.Gen._
import org.scalacheck.ops.time.joda.ChronologyOps._

import scala.collection.JavaConverters._

trait ImplicitJodaTimeGenerators {

  implicit val arbDateTimeZone: Arbitrary[DateTimeZone] = {
    val ids = DateTimeZone.getAvailableIDs.asScala.toSeq
    val zones = ids map DateTimeZone.forID
    Arbitrary(oneOf(zones))
  }

  implicit def arbChronology(implicit arbZone: Arbitrary[DateTimeZone]): Arbitrary[Chronology] = {
    val chronologyBuilders: Seq[DateTimeZone => Chronology] = Seq(
      BuddhistChronology.getInstance,
      CopticChronology.getInstance,
      EthiopicChronology.getInstance,
      GregorianChronology.getInstance,
      IslamicChronology.getInstance,
      ISOChronology.getInstance,
      JulianChronology.getInstance
    )
    Arbitrary {
      for {
        build <- oneOf(chronologyBuilders)
        zone <- arbZone.arbitrary
      } yield build(zone)
    }
  }

  implicit def arbDateTime(implicit params: JodaTimeParams = JodaDateTimeGenerators.defaultParams): Arbitrary[DateTime] = {
    val maxMillis = params.chronology.maxMillis
    Arbitrary(chooseNum(0, maxMillis).flatMap(new DateTime(_, params.chronology)))
  }

  implicit def arbLocalDateTime(implicit params: JodaTimeParams = JodaLocalDateTimeGenerators.defaultParams): Arbitrary[LocalDateTime] = {
    val maxMillis = params.chronology.maxMillis
    Arbitrary(chooseNum(0, maxMillis).flatMap(new LocalDateTime(_, params.chronology)))
  }
}
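
Mixed into a test (or imported from an object extending the trait), these make ScalaCheck pick random zones and instants. A sketch (the wrapper object is an assumption):

    import org.joda.time.{DateTime, DateTimeZone}
    import org.scalacheck.Arbitrary.arbitrary

    object JodaGens extends ImplicitJodaTimeGenerators
    import JodaGens._

    val zone: Option[DateTimeZone] = arbitrary[DateTimeZone].sample
    val dt: Option[DateTime] = arbitrary[DateTime].sample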
Example 14
Source File: LPFCrosswalk.scala From recogito2 with Apache License 2.0
package services.entity.builtin.importer.crosswalks.geojson.lpf

import com.vividsolutions.jts.geom.{Geometry, GeometryCollection}
import java.io.InputStream
import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.json._
import play.api.libs.functional.syntax._
import services.HasGeometrySafe
import services.entity._
import services.entity.builtin.importer.crosswalks.geojson.BaseGeoJSONCrosswalk

object LPFCrosswalk extends BaseGeoJSONCrosswalk {

  private def toEntityRecord(identifier: String, f: LPFFeature) = EntityRecord(
    f.id,
    identifier,
    DateTime.now().withZone(DateTimeZone.UTC),
    None, // lastChangedAt
    f.title,
    f.descriptions.map(_.toDescription),
    f.namings.map(_.toName),
    f.normalizedGeometry,
    f.normalizedGeometry.map(_.getCentroid.getCoordinate),
    None, // country code
    None, // temporal bounds
    f.placetypes.map(_.label),
    None, // priority
    Seq.empty[Link] // TODO create from matches
  )

  def fromJsonLines(identifier: String)(record: String): Option[EntityRecord] =
    super.fromJson[LPFFeature](record, { f => toEntityRecord(identifier, f) })

  def fromGeoJSON(identifier: String)(in: InputStream): Seq[EntityRecord] = {
    play.api.Logger.info("Parsing FeatureCollection")
    val maybeFc = Json.fromJson[LPFFeatureCollection](Json.parse(in))
    if (maybeFc.isError) play.api.Logger.error(maybeFc.toString)
    else play.api.Logger.info("Successful.")

    val fc = maybeFc.get
    fc.features.map(toEntityRecord(identifier, _))
  }
}

case class LPFFeature(
  id: String,
  title: String,
  countryCode: Option[String],
  namings: Seq[Naming],
  parthood: Seq[Parthood],
  placetypes: Seq[PlaceType],
  descriptions: Seq[LPFDescription],
  exactMatches: Seq[String],
  closeMatches: Seq[String],
  geometry: Option[Geometry]) {

  lazy val links =
    closeMatches.map(Link(_, LinkType.CLOSE_MATCH)) ++
    exactMatches.map(Link(_, LinkType.EXACT_MATCH))

  lazy val normalizedGeometry = geometry.map { _ match {
    case geom: GeometryCollection =>
      if (geom.getNumGeometries == 1) geom.getGeometryN(0)
      else geom
    case geom => geom
  }}
}

object LPFFeature extends HasGeometrySafe {

  implicit val lpfReads: Reads[LPFFeature] = (
    (JsPath \ "@id").read[String] and
    (JsPath \ "properties" \ "title").read[String] and
    (JsPath \ "properties" \ "ccode").readNullable[String] and
    (JsPath \ "namings").readNullable[Seq[Naming]].map(_.getOrElse(Seq.empty[Naming])) and
    (JsPath \ "parthood").readNullable[Seq[Parthood]].map(_.getOrElse(Seq.empty[Parthood])) and
    (JsPath \ "placetypes").readNullable[Seq[PlaceType]].map(_.getOrElse(Seq.empty[PlaceType])) and
    (JsPath \ "description").readNullable[Seq[LPFDescription]].map(_.getOrElse(Seq.empty[LPFDescription])) and
    (JsPath \ "exactMatch").readNullable[Seq[String]].map(_.getOrElse(Seq.empty[String])) and
    (JsPath \ "closeMatch").readNullable[Seq[String]].map(_.getOrElse(Seq.empty[String])) and
    (JsPath \ "geometry").readNullable[Geometry]
      .map[Option[Geometry]] {
        case Some(x) if x == null => None // Avoids Some(null) that happens for bad GeoJSON
        case other => other
      }
  )(LPFFeature.apply _)
}

// TODO we really need to get rid of this redundancy...
case class LPFFeatureCollection(features: Seq[LPFFeature])

object LPFFeatureCollection {
  implicit val lpfFeatureCollectionReads: Reads[LPFFeatureCollection] =
    (JsPath \ "features").read[Seq[LPFFeature]].map(LPFFeatureCollection(_))
}
Example 15
Source File: PelagiosGeoJSONCrosswalk.scala From recogito2 with Apache License 2.0
package services.entity.builtin.importer.crosswalks.geojson

import com.vividsolutions.jts.geom.{Coordinate, Geometry}
import java.io.InputStream
import services.HasGeometry
import services.entity._
import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.json._
import play.api.libs.functional.syntax._

object PelagiosGeoJSONCrosswalk extends BaseGeoJSONCrosswalk {

  def fromGeoJSON(identifier: String)(in: InputStream): Seq[EntityRecord] = {
    val fc = Json.fromJson[PelagiosGazetteerFeatureCollection](Json.parse(in)).get
    fc.features.map(f => EntityRecord(
      f.uri,
      identifier,
      DateTime.now().withZone(DateTimeZone.UTC),
      None, // lastChangedAt
      f.title,
      f.descriptions,
      f.names,
      f.geometry,
      f.geometry.map(_.getCentroid.getCoordinate),
      None, // country code
      None, // temporal bounds
      Seq.empty[String], // subjects
      None, // priority
      Seq.empty[Link]
    ))
  }
}

case class PelagiosGazetteerFeature(
  uri                 : String,
  title               : String,
  descriptions        : Seq[Description],
  names               : Seq[Name],
  geometry            : Option[Geometry],
  representativePoint : Option[Coordinate])

case class PelagiosGazetteerFeatureCollection(features: Seq[PelagiosGazetteerFeature])

object PelagiosGazetteerFeature extends HasGeometry {

  implicit val pelagiosGeoJSONReads: Reads[PelagiosGazetteerFeature] = (
    (JsPath \ "uri").read[String] and
    (JsPath \ "title").read[String] and
    (JsPath \ "descriptions").readNullable[Seq[Description]].map(_.getOrElse(Seq.empty[Description])) and
    (JsPath \ "names").readNullable[Seq[Name]].map(_.getOrElse(Seq.empty[Name])) and
    (JsPath \ "geometry").readNullable[Geometry]
      .map[Option[Geometry]] {
        case Some(x) if x == null => None // Avoids Some(null) that happens for bad GeoJSON
        case other => other
      } and
    (JsPath \ "representative_point").readNullable[Coordinate]
  )(PelagiosGazetteerFeature.apply _)
}

object PelagiosGazetteerFeatureCollection {
  implicit val pelagiosGazetteerFeatureCollectionRead: Reads[PelagiosGazetteerFeatureCollection] =
    (JsPath \ "features").read[Seq[PelagiosGazetteerFeature]].map(PelagiosGazetteerFeatureCollection(_))
}
Example 16
Source File: PelagiosRDFCrosswalk.scala From recogito2 with Apache License 2.0
package services.entity.builtin.importer.crosswalks.rdf

import java.io.{File, FileInputStream, InputStream}
import services.entity._
import org.joda.time.{DateTime, DateTimeZone}
import org.pelagios.Scalagios
import org.pelagios.api.PeriodOfTime

object PelagiosRDFCrosswalk {

  private def toLinks(uris: Seq[String], linkType: LinkType.Value) =
    uris.map(uri => Link(EntityRecord.normalizeURI(uri), linkType))

  private def convertPeriodOfTime(period: PeriodOfTime): TemporalBounds = {
    val startDate = period.start
    val endDate = period.end.getOrElse(startDate)
    TemporalBounds(
      new DateTime(startDate).withZone(DateTimeZone.UTC),
      new DateTime(endDate).withZone(DateTimeZone.UTC))
  }

  def fromRDF(filename: String, identifier: String): InputStream => Seq[EntityRecord] = {

    def convertPlace(place: org.pelagios.api.gazetteer.Place) =
      EntityRecord(
        EntityRecord.normalizeURI(place.uri),
        identifier,
        DateTime.now().withZone(DateTimeZone.UTC),
        None,
        place.label,
        place.descriptions.map(l => Description(l.chars, l.lang)),
        place.names.map(l => Name(l.chars, l.lang)),
        place.location.map(_.geometry),
        place.location.map(_.pointLocation),
        None, // country code
        place.temporalCoverage.map(convertPeriodOfTime(_)),
        place.category.map(category => Seq(category.toString)).getOrElse(Seq.empty[String]),
        None, // priority
        {
          toLinks(place.closeMatches, LinkType.CLOSE_MATCH) ++
          toLinks(place.exactMatches, LinkType.EXACT_MATCH)
        })

    // Return crosswalk function
    { stream: InputStream =>
      Scalagios.readPlaces(stream, filename).map(convertPlace).toSeq }
  }

  def readFile(file: File, identifier: String): Seq[EntityRecord] =
    fromRDF(file.getName, identifier)(new FileInputStream(file))
}
Example 17
Source File: TrafficMonitoring.scala From CM-Well with Apache License 2.0
package trafficshaping

import actions.{MarkdownTable, MarkdownTuple}
import akka.util.Timeout
import cmwell.ctrl.config.Jvms
import cmwell.domain.{FileContent, FileInfoton, SystemFields, VirtualInfoton}
import k.grid.Grid
import java.net.InetAddress

import scala.concurrent.Future
import akka.pattern.ask
import org.joda.time.{DateTime, DateTimeZone}

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try

object TrafficMonitoring {

  implicit val timeout = Timeout(5.seconds)

  def mergeCounters(m1: Map[String, RequestCounter], m2: Map[String, RequestCounter]): Map[String, RequestCounter] = {
    val keyset = m1.keySet ++ m2.keySet
    keyset.map { key =>
      val v1 = m1.get(key).map(_.counter).getOrElse(0L)
      val v2 = m2.get(key).map(_.counter).getOrElse(0L)
      key -> RequestCounter(v1 + v2)
    }.toMap
  }

  def nslookup(ip: String): String = Try(InetAddress.getByName(ip)).map(_.getHostName).getOrElse("NA")

  def traffic(path: String, dc: String): Future[Option[VirtualInfoton]] = {
    val setFut = Grid.jvms(Jvms.WS).map { jvm =>
      (Grid.selectActor(CongestionAnalyzer.name, jvm) ? GetTrafficData).mapTo[TrafficData]
    }
    val futSet = cmwell.util.concurrent.successes(setFut)
    futSet.map { set =>
      val trafficData = set.foldLeft(TrafficData(Map.empty[String, RequestorCounter])) {
        case (r1, r2) =>
          val keyset = r1.requestors.keySet ++ r2.requestors.keySet
          val newMap = keyset.map { key =>
            val v1 = r1.requestors.getOrElse(key, RequestorCounter(NoPenalty, Map.empty[String, RequestCounter]))
            val v2 = r2.requestors.getOrElse(key, RequestorCounter(NoPenalty, Map.empty[String, RequestCounter]))
            key -> RequestorCounter(PenaltyStage.chooseHighest(v1.penalty, v2.penalty),
              mergeCounters(v1.requestsCounters, v2.requestsCounters))
          }.toMap
          TrafficData(newMap)
      }

      val reqMap = trafficData.requestors
      val reqTypes = reqMap.values.flatMap(_.requestsCounters.keySet).toSet.toSeq.sorted
      val header = MarkdownTuple("IP", "Host", "Proc Duration", "Plan").add(reqTypes)

      val reqTuples = reqMap.map { r =>
        val requestorCounters = r._2.requestsCounters
        val allCounters = reqTypes.map(t => t -> RequestCounter(0L)).toMap ++ requestorCounters
        val counters = allCounters.toSeq.sortBy(_._1).map(_._2.counter.toString)
        MarkdownTuple(r._1, nslookup(r._1), r._2.requestsTime.toString, r._2.penalty.toString).add(counters)
      }.toSeq

      val table = MarkdownTable(header, reqTuples)
      val statusText = if (TrafficShaper.isEnabled) "### Traffic shaping is enabled" else "### Traffic shaping is disabled"
      val content = statusText + "\n\n\n" + table.get

      Some(
        VirtualInfoton(FileInfoton(SystemFields(path, new DateTime(DateTimeZone.UTC), "VirtualInfoton", dc, None, "", "http"),
          content = Some(FileContent(content.getBytes, "text/x-markdown"))))
      )
    }
  }
}
Example 18
Source File: UserActivityFeed.scala From recogito2 with Apache License 2.0
package services.contribution.feed.user

import com.sksamuel.elastic4s.searches.RichSearchResponse
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram
import org.elasticsearch.search.aggregations.bucket.terms.Terms
import org.joda.time.{DateTime, DateTimeZone}
import play.api.mvc.{AnyContent, Request}
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
import services.ContentType
import services.contribution.{ContributionAction, ItemType}
import services.document.DocumentService
import services.user.User

object UserActivityFeed {

  private def parseAggregations(response: RichSearchResponse) = {
    val overTime = response.aggregations.getAs[InternalFilter]("over_time")
      .getAggregations.get("per_day").asInstanceOf[InternalDateHistogram]

    // Note: ES result is in ascending order, but we want descending (most recent first)
    overTime.getBuckets.asScala.toSeq.reverse.map { bucket =>
      val timestamp = new DateTime(bucket.getKey.asInstanceOf[DateTime].getMillis, DateTimeZone.UTC)

      val byUser: Seq[ActivityPerUser] = bucket.getAggregations.get("by_user").asInstanceOf[Terms]
        .getBuckets.asScala.map { bucket =>

          val byDocument: Seq[UserActivityPerDocument] = bucket.getAggregations.get("by_doc_id").asInstanceOf[Terms]
            .getBuckets.asScala.map { bucket =>

              // ActivityEntry is a flattened version of the three last nesting levels (action, item type and content type)
              val entries = bucket.getAggregations.get("by_action").asInstanceOf[Terms]
                .getBuckets.asScala.flatMap { bucket =>
                  val thisAction = ContributionAction.withName(bucket.getKeyAsString)

                  bucket.getAggregations.get("by_item_type").asInstanceOf[Terms]
                    .getBuckets.asScala.flatMap { bucket =>
                      val thisType = ItemType.withName(bucket.getKeyAsString)

                      bucket.getAggregations.get("by_content_type").asInstanceOf[Terms]
                        .getBuckets.asScala.flatMap { bucket =>
                          // Content types also include "super types" ("TEXT", "IMAGE") which exist as
                          // shorthands to simplify ES query, but are not actual valid content
                          // types - just skip those
                          val maybeContentType = ContentType.withName(bucket.getKeyAsString)
                          maybeContentType.map { contentType =>
                            UserActivityFeedEntry(thisAction, thisType, contentType, bucket.getDocCount)
                          }
                        }
                    }
                }

              RawUserActivityPerDocument(bucket.getKeyAsString, bucket.getDocCount, entries)
            }

          ActivityPerUser(bucket.getKeyAsString, bucket.getDocCount, byDocument)
        }

      UserActivityPerDay(timestamp, bucket.getDocCount, byUser)
    }
  }

  def fromSearchResponse(
    loggedInAs: Option[String],
    response: RichSearchResponse
  )(implicit
    documents: DocumentService,
    request: Request[AnyContent],
    ctx: ExecutionContext
  ) = {
    val rawFeed = parseAggregations(response)

    // Get all distinct doc IDs in the feed and check if the current user has read permissions
    val docIds = rawFeed.flatMap { perDay =>
      perDay.users.flatMap { perUser =>
        perUser.documents.map { _.asInstanceOf[RawUserActivityPerDocument].documentId }
      }
    }.toSeq.distinct

    documents.getDocumentRecordsByIdWithAccessLevel(docIds, loggedInAs).map { docsAndPermissions =>
      // Filter the raw feed, so that only documents with sufficient runtime permissions show up
      val visibleFeed = rawFeed.map(_.filter(docsAndPermissions))

      // Inject document metadata (which isn't stored in ElasticSearch)
      val docs = docsAndPermissions.map(_._1)
      visibleFeed.map(_.enrich(docs))
    }
  }
}
Example 19
Source File: DocumentActivityFeed.scala From recogito2 with Apache License 2.0
package services.contribution.feed.document

import com.sksamuel.elastic4s.searches.RichSearchResponse
import java.util.UUID
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram
import org.elasticsearch.search.aggregations.bucket.terms.Terms
import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.json._
import play.api.libs.functional.syntax._
import play.api.mvc.{AnyContent, Request}
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
import services.contribution.{ContributionAction, ItemType}
import services.document.{DocumentService, ExtendedDocumentMetadata}

case class DocumentActivityFeed(
  url: String,
  title: String,
  author: Option[String],
  owner: String,
  activities: Seq[DocumentDayActivity])

object DocumentActivityFeed {

  implicit val documentActivityFeedWrites: Writes[DocumentActivityFeed] = (
    (JsPath \ "url").write[String] and
    (JsPath \ "title").write[String] and
    (JsPath \ "author").writeNullable[String] and
    (JsPath \ "owner").write[String] and
    (JsPath \ "timeline").write[Seq[DocumentDayActivity]]
  )(unlift(DocumentActivityFeed.unapply))

  private def parseAggregations(
    documentId: String,
    response: RichSearchResponse,
    doc: ExtendedDocumentMetadata
  )(implicit request: Request[AnyContent]) = {
    val overTime = response.aggregations.getAs[InternalFilter]("over_time")
      .getAggregations.get("per_day").asInstanceOf[InternalDateHistogram]

    // Note: ES result is in ascending order, but we want descending (most recent first)
    val activities = overTime.getBuckets.asScala.toSeq.reverse.map { bucket =>
      val timestamp = new DateTime(bucket.getKey.asInstanceOf[DateTime].getMillis, DateTimeZone.UTC)

      val byUser: Seq[DocumentActivityByUser] = bucket.getAggregations.get("by_user").asInstanceOf[Terms]
        .getBuckets.asScala.map { bucket =>

          val byPart: Seq[DocumentActivityByPart] = bucket.getAggregations.get("by_part").asInstanceOf[Terms]
            .getBuckets.asScala.map { bucket =>

              // ActivityEntry is a flattened version of the two last nesting levels (action and type)
              val entries = bucket.getAggregations.get("by_action").asInstanceOf[Terms]
                .getBuckets.asScala.flatMap { bucket =>
                  val thisAction = ContributionAction.withName(bucket.getKeyAsString)
                  bucket.getAggregations.get("by_item_type").asInstanceOf[Terms]
                    .getBuckets.asScala.map { bucket =>
                      DocumentActivityFeedEntry(thisAction, ItemType.withName(bucket.getKeyAsString), bucket.getDocCount)
                    }
                }

              DocumentActivityByPart.build(
                UUID.fromString(bucket.getKeyAsString), bucket.getDocCount, entries, doc)
            }

          DocumentActivityByUser(bucket.getKeyAsString, bucket.getDocCount, byPart)
        }

      DocumentDayActivity(timestamp, bucket.getDocCount, byUser)
    }

    DocumentActivityFeed(
      controllers.document.routes.DocumentController.initialDocumentView(documentId).absoluteURL,
      doc.title,
      doc.author,
      doc.ownerName,
      activities)
  }

  def fromSearchResponse(
    documentId: String,
    response: RichSearchResponse
  )(implicit
    request: Request[AnyContent],
    documents: DocumentService,
    ctx: ExecutionContext
  ) = documents.getExtendedMeta(documentId).map { _ match {
    case Some((doc, _)) =>
      parseAggregations(documentId, response, doc)

    case None =>
      // Should never happen - let it crash
      throw new Exception("Data integrity error: activity feed for document that is not in the DB")
  }}
}
Example 20
Source File: ContributionSpec.scala From recogito2 with Apache License 2.0
package services.contribution

import java.util.UUID
import services.ContentType
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json.Json
import play.api.test._
import play.api.test.Helpers._
import scala.io.Source

@RunWith(classOf[JUnitRunner])
class ContributionSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")
  private val madeAt = DateTime.parse("2016-06-03T13:02:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC)

  "The sample Contribution" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/contribution/contribution.json").getLines().mkString("\n")
      val result = Json.fromJson[Contribution](Json.parse(json))

      // Parsed without errors?
      result.isSuccess must equalTo(true)

      val contribution = result.get
      contribution.action must equalTo(ContributionAction.CONFIRM_BODY)
      contribution.madeBy must equalTo("rainer")
      contribution.madeAt must equalTo(madeAt)

      val item = contribution.affectsItem
      item.itemType must equalTo(ItemType.PLACE_BODY)
      item.documentId must equalTo("98muze1cl3saib")
      item.documentOwner must equalTo("rainer")
      item.filepartId must equalTo(Some(UUID.fromString("a7126845-16ac-434b-99bd-0f297e227822")))
      item.contentType must equalTo(Some(ContentType.TEXT_PLAIN))
      item.annotationId must equalTo(Some(UUID.fromString("7cfa1504-26de-45ef-a590-8b60ea8a60e8")))
      item.annotationVersionId must equalTo(Some(UUID.fromString("e868423f-5ea9-42ed-bb7d-5e1fac9195a0")))

      contribution.affectsUsers must equalTo(Seq("otheruser"))
    }
  }

  "JSON serialization/parsing roundtrip" should {

    "yield an equal Contribution" in {
      val contribution = Contribution(
        ContributionAction.DELETE_BODY,
        "rainer",
        madeAt,
        Item(
          ItemType.COMMENT_BODY,
          "98muze1cl3saib",
          "rainer",
          Some(UUID.fromString("7ccbf5dd-335b-4d59-bff6-d8d59d977825")),
          Some(ContentType.TEXT_TEIXML),
          Some(UUID.fromString("7cfa1504-26de-45ef-a590-8b60ea8a60e8")),
          Some(UUID.fromString("e868423f-5ea9-42ed-bb7d-5e1fac9195a0")),
          Some("just a comment"),
          None
        ),
        Seq("rainer"),
        None)

      // Convert to JSON
      val serialized = Json.prettyPrint(Json.toJson(contribution))

      val parseResult = Json.fromJson[Contribution](Json.parse(serialized))
      parseResult.isSuccess must equalTo(true)
      parseResult.get must equalTo(contribution)
    }
  }
}
Example 21
Source File: RelationSpec.scala From recogito2 with Apache License 2.0
package services.annotation.relation

import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.DateTimeFormat
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.test._
import play.api.test.Helpers._
import play.api.libs.json.Json
import scala.io.Source
import services.annotation.{Annotation, AnnotationBody}

@RunWith(classOf[JUnitRunner])
class RelationSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")

  import services.annotation.BackendAnnotation._

  "The sample annotation" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/annotation/annotation-with-relation.json").getLines().mkString("\n")
      val result = Json.fromJson[Annotation](Json.parse(json))

      result.isSuccess must equalTo(true)

      val relations = result.get.relations
      relations.size must equalTo(1)
      relations.head.bodies.size must equalTo(1)

      val body = relations.head.bodies.head
      body.hasType must equalTo(AnnotationBody.TAG)
      body.lastModifiedBy must equalTo(Some("rainer"))
      body.lastModifiedAt must equalTo(DateTime.parse("2018-05-07T15:31:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC))
      body.value must equalTo("flyingTo")
    }
  }
}
Example 22
Source File: VisitSpec.scala From recogito2 with Apache License 2.0
package services.visit

import java.util.UUID
import services.ContentType
import services.RuntimeAccessLevel
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json.Json
import play.api.test._
import play.api.test.Helpers._
import scala.io.Source

@RunWith(classOf[JUnitRunner])
class VisitSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")
  private val visitedAt = DateTime.parse("2016-11-08T07:27:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC)

  "The sample Visit" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/visit/visit.json").getLines().mkString("\n")
      val result = Json.fromJson[Visit](Json.parse(json))

      result.isSuccess must equalTo(true)

      val visit = result.get
      visit.url must equalTo("http://recogito.pelagios.org/document/fb2f3hm1ihnwgn/part/1/edit")
      visit.referer must equalTo(Some("http://recogito.pelagios.org/rainer"))
      visit.visitedAt must equalTo(visitedAt)
      visit.responseFormat must equalTo("text/html")
      visit.accessLevel must equalTo(Some(RuntimeAccessLevel.READ_ALL))

      val client = visit.client
      client.userAgent must equalTo("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/52.0.2743.116 Chrome/52.0.2743.116 Safari/537.36")
      client.browser must equalTo("CHROME")
      client.os must equalTo("LINUX")
      client.deviceType must equalTo("COMPUTER")

      val item = visit.visitedItem.get
      item.documentId must equalTo("fb2f3hm1ihnwgn")
      item.documentOwner must equalTo("rainer")
      item.filepartId must equalTo(Some(UUID.fromString("a7126845-16ac-434b-99bd-0f297e227822")))
      item.contentType must equalTo(Some(ContentType.TEXT_PLAIN))
    }
  }

  "JSON serialization/parsing roundtrip" should {

    "yield an equal Visit" in {
      val visit = Visit(
        "http://recogito.pelagios.org/document/fb2f3hm1ihnwgn/part/1/edit",
        Some("http://recogito.pelagios.org/rainer"),
        visitedAt,
        Client(
          "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
          "CHROME",
          "LINUX",
          "COMPUTER"
        ),
        "text/html",
        Some(VisitedItem(
          "fb2f3hm1ihnwgn",
          "rainer",
          Some(UUID.randomUUID),
          Some(ContentType.TEXT_PLAIN)
        )),
        Some(RuntimeAccessLevel.READ_ALL)
      )

      // Convert to JSON
      val serialized = Json.prettyPrint(Json.toJson(visit))

      val parseResult = Json.fromJson[Visit](Json.parse(serialized))
      parseResult.isSuccess must equalTo(true)
      parseResult.get must equalTo(visit)
    }
  }
}
Example 23
Source File: BaseEndpoints.scala From service-container with Apache License 2.0
package com.github.vonnagy.service.container.http

import akka.actor.ActorSystem
import akka.http.scaladsl.model.StatusCodes
import akka.japi.Util._
import com.github.vonnagy.service.container.http.directives.CIDRDirectives
import com.github.vonnagy.service.container.http.routing.RoutedEndpoints
import com.github.vonnagy.service.container.service.ServicesManager.ShutdownService
import org.joda.time.{DateTime, DateTimeZone}

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.{Failure, Success}

class BaseEndpoints(implicit system: ActorSystem, executor: ExecutionContext)
  extends RoutedEndpoints with CIDRDirectives {

  lazy val config = system.settings.config.getConfig("container.http")
  lazy val serviceActor = system.actorSelection("/user/service")
  implicit val marshaller = plainMarshaller

  val route = {
    path("favicon.ico") {
      complete(StatusCodes.NoContent)
    } ~
    path("ping") {
      complete("pong: ".concat(new DateTime(System.currentTimeMillis(), DateTimeZone.UTC).toString))
    } ~
    path("shutdown") {
      post {
        cidrFilter(immutableSeq(config.getStringList("cidr.allow")), immutableSeq(config.getStringList("cidr.deny"))) { ctx =>
          ctx.complete("The system is being shutdown: ".concat(new DateTime(System.currentTimeMillis(), DateTimeZone.UTC).toString)) andThen {
            case _ =>
              // Send a message to the root actor of this service
              serviceActor.resolveOne()(3 seconds).onComplete {
                case Success(ref) => ref ! ShutdownService(true)
                case Failure(_) => sys.exit()
              }
          }
        }
      }
    }
  }
}
Example 24
Source File: CreateIndexTest.scala From stream-reactor with Apache License 2.0
package com.datamountaineer.streamreactor.connect.elastic6

import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.connect.elastic6.indexname.CreateIndex
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CreateIndexTest extends AnyWordSpec with Matchers {

  "CreateIndex" should {
    "create an index name without suffix when suffix not set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA")
      CreateIndex.getIndexName(kcql) shouldBe "index_name"
    }

    "create an index name with suffix when suffix is set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA WITHINDEXSUFFIX=_suffix_{YYYY-MM-dd}")
      val formattedDateTime = new DateTime(DateTimeZone.UTC).toString("YYYY-MM-dd")
      CreateIndex.getIndexName(kcql) shouldBe s"index_name_suffix_$formattedDateTime"
    }
  }
}
Example 25
Source File: ManifestUploader.scala From teamcity-s3-plugin with Apache License 2.0
package com.gu.teamcity

import java.io.ByteArrayInputStream
import java.util.Date

import jetbrains.buildServer.messages.{BuildMessage1, DefaultMessagesInfo, Status}
import jetbrains.buildServer.serverSide.{BuildServerAdapter, SRunningBuild}
import org.joda.time.{DateTime, DateTimeZone}
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._

import scala.util.{Failure, Success}

class ManifestUploader(config: S3ConfigManager, s3: S3) extends BuildServerAdapter {

  override def beforeBuildFinish(runningBuild: SRunningBuild) {
    import scala.collection.convert.wrapAsScala._

    if (!runningBuild.isHasInternalArtifactsOnly) {
      val properties = Seq(
        "projectName" -> S3Plugin.cleanFullName(runningBuild),
        "buildNumber" -> runningBuild.getBuildNumber,
        "startTime" -> new DateTime(runningBuild.getStartDate).withZone(DateTimeZone.UTC).toString //Joda default is ISO8601
      ) ++ runningBuild.getRevisions.flatMap(revision => Seq(
        "revision" -> revision.getRevision,
        "vcsURL" -> revision.getRoot.getProperties.get("url")
      )) ++ Option(runningBuild.getBranch).map(b =>
        "branch" -> b.getDisplayName
      ).orElse(runningBuild.getVcsRootEntries.headOption.map(r =>
        "branch" -> r.getProperties.get("branch")
      ))

      val propertiesJSON = pretty(render(properties.foldLeft(JObject())(_ ~ _)))
      val jsBytes = propertiesJSON.getBytes("UTF-8")

      config.buildManifestBucket.map { bucket =>
        s3.upload(bucket, runningBuild, "build.json", new ByteArrayInputStream(jsBytes), jsBytes.length) match {
          case Failure(e) => runningBuild.getBuildLog().message(s"Error uploading manifest: ${e.getMessage}",
            Status.ERROR, new Date, DefaultMessagesInfo.MSG_BUILD_FAILURE, DefaultMessagesInfo.SOURCE_ID, null)
          case Success(_) => runningBuild.getBuildLog().message("Manifest S3 upload complete",
            Status.NORMAL, new Date, DefaultMessagesInfo.MSG_TEXT, DefaultMessagesInfo.SOURCE_ID, null)
        }
      }
    }
  }

  private def normalMessage(text: String) =
    new BuildMessage1(DefaultMessagesInfo.SOURCE_ID, DefaultMessagesInfo.MSG_TEXT, Status.NORMAL, new Date, text)
}
Example 26
Source File: Scheduler.scala From cave with MIT License
package actors

import java.util.concurrent.{Executor, TimeUnit}

import akka.actor.{Actor, ActorLogging}
import akka.pattern.ask
import akka.util.Timeout
import com.cave.metrics.data.evaluator.AlertParser
import com.cave.metrics.data.{Check, Schedule}
import init.{AwsWrapper, Init}
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.{Minutes, LocalTime, DateTime, DateTimeZone}

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

object Scheduler {
  object DoWork
  object Die
  case class NotificationUrlChange(newUrl: String)
}

class Scheduler(schedule: Schedule, awsWrapper: AwsWrapper) extends Actor with ActorLogging with AlertParser {

  private[actors] def leader = Init.leader
  var notificationUrl: String = schedule.notificationUrl
  implicit val timeout = Timeout(2, TimeUnit.SECONDS)

  val (waitTime, period) = getSchedule(schedule.alert.period)
  val Formatter = ISODateTimeFormat.dateTimeNoMillis()

  implicit val executor = context.dispatcher.asInstanceOf[Executor with ExecutionContext]
  private val queueCheckSchedule = context.system.scheduler.schedule(waitTime, period, self, Scheduler.DoWork)

  override def receive = {
    case Scheduler.DoWork =>
      leader ? Leadership.IsLeader onComplete {
        case scala.util.Success(imLeader: Boolean) =>
          if (imLeader) {
            awsWrapper.sendMessage(Check(Schedule(schedule.orgName, schedule.teamName, schedule.clusterName, notificationUrl, schedule.alert), now()))
          }

        case scala.util.Success(e) =>
          log.error("Unexpected result returned by the leader actor: " + e)

        case scala.util.Failure(t) =>
          log.error("Failed to query the leader actor, error was " + t)
      }

    case Scheduler.NotificationUrlChange(url) =>
      log.debug(s"Updating the notification URL, from $notificationUrl to $url.")
      notificationUrl = url

    case Scheduler.Die =>
      context stop self
  }

  override def postStop(): Unit = queueCheckSchedule.cancel()

  // Note: the excerpt dropped now() and nowLocal(); the two one-liners below are
  // minimal reconstructions (assumptions, not the verbatim originals)
  private[actors] def now(): DateTime = new DateTime(DateTimeZone.UTC)

  private[actors] def nowLocal(): LocalTime = new LocalTime(DateTimeZone.UTC)

  private[actors] def getSchedule(alertPeriod: String): (FiniteDuration, FiniteDuration) =
    parseAll(duration, alertPeriod) match {
      case Success(p, _) => (0.minutes, p)
      case NoSuccess(_, message) =>
        parseAll(daily, alertPeriod) match {
          case Success(time, _) => (getWait(nowLocal(), time), 1.day)
          case NoSuccess(_, message2) =>
            sys.error(s"Unexpected alert period $alertPeriod. Not a duration ($message) and not a daily scheduler ($message2).")
        }
    }

  private[actors] def getWait(now: LocalTime, until: LocalTime): FiniteDuration = {
    val wait = Minutes.minutesBetween(now, until).getMinutes
    val minutes = if (wait < 0) 1440 + wait else wait
    minutes.minutes
  }
}
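
getWait wraps negative differences around midnight so a daily schedule always yields a non-negative wait; a worked example:

    import org.joda.time.{LocalTime, Minutes}

    val now = new LocalTime(23, 30)
    val until = new LocalTime(0, 15)

    val wait = Minutes.minutesBetween(now, until).getMinutes // -1395
    val minutes = if (wait < 0) 1440 + wait else wait        // 45 minutes until 00:15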
Example 27
Source File: InfluxClientFactory.scala From cave with MIT License
package com.cave.metrics.data.influxdb

import java.util.concurrent.Executors

import com.cave.metrics.data.Metric
import com.typesafe.config.Config
import org.joda.time.{DateTimeZone, DateTime}

import collection.JavaConversions._
import scala.concurrent.ExecutionContext

case class InfluxConfiguration(default: InfluxClusterConfig, alternates: Map[String, InfluxClusterConfig]) {

  val alts = alternates.map { case (name, config) => s"Name: $name, Config: $config" }
  println(s"Default: $default, Alters: $alts")
}

object InfluxConfiguration {

  def apply(config: Config) = {
    val default = InfluxClusterConfig(config.getString("url"), config.getString("user"), config.getString("pass"))

    val alternates = config.getConfigList("alternates") map { conf =>
      conf.getString("name") -> InfluxClusterConfig(conf.getString("url"), default.user, default.pass)
    }

    new InfluxConfiguration(default, alternates.toMap)
  }
}

class InfluxClientFactory(config: InfluxConfiguration) {

  def createClient(clusterConfig: InfluxClusterConfig): (InfluxClient, ExecutionContext) =
    new InfluxClient(clusterConfig) -> ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())

  val defaultClient = createClient(config.default)
  val alternates = config.alternates map { case (name, clusterConfig) => name -> createClient(clusterConfig) }

  def getClient(name: Option[String]): (InfluxClient, ExecutionContext) = name match {
    case None => defaultClient
    case Some(clusterName) => alternates.getOrElse(clusterName, default = defaultClient)
  }

  def sendMetrics(metrics: Seq[Metric]): Unit = {
    val now = new DateTime().withZone(DateTimeZone.UTC).getMillis / 1000
    val maxDelay = metrics.foldLeft(0L) { case (delay, metric) =>
      Math.max(delay, Math.abs(metric.timestamp - now))
    }

    val (defaultClient, defaultContext) = getClient(None)
    defaultClient.putMetricData(Seq(
      Metric("writer-delay", now, maxDelay, Map(Metric.Organization -> Metric.Internal))
    ))(defaultContext)

    metrics.groupBy(_.tags.get(Metric.Cluster)) map { case (cluster, metricSeq) =>
      val (client, context) = getClient(cluster)
      client.putMetricData(metricSeq)(context)
    }
  }

  def close(): Unit = {
    defaultClient._1.close()
    alternates foreach { case (_, (client, _)) => client.close() }
  }
}
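
The delay computation relies on the fact that withZone changes only the rendering, never the instant; epoch millis are zone-independent:

    import org.joda.time.{DateTime, DateTimeZone}

    val t = new DateTime() // host default zone
    assert(t.getMillis == t.withZone(DateTimeZone.UTC).getMillis)

    val nowSeconds = t.withZone(DateTimeZone.UTC).getMillis / 1000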
Example 28
Source File: Token.scala From cave with MIT License
package com.cave.metrics.data

import org.apache.commons.lang3.RandomStringUtils
import org.joda.time.format.ISODateTimeFormat.{dateTime, dateTimeParser}
import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.functional.syntax._
import play.api.libs.json._

case class Token(id: Option[String], description: String, value: String, created: DateTime)

object Token {

  final val KeyId = "id"
  final val KeyDescription = "description"
  final val KeyValue = "value"
  final val KeyCreated = "created"

  final val DefaultName = "default"

  implicit val datetimeReads: Reads[DateTime] = __.read[String].map(dateTimeParser.parseDateTime)

  implicit val datetimeWrites = new Writes[DateTime] {
    def writes(value: DateTime) = JsString(dateTime.print(value))
  }

  implicit val tokenReads: Reads[Token] = (
    (__ \ KeyId).readNullable[String] and
    (__ \ KeyDescription).read[String] and
    (__ \ KeyValue).read[String] and
    (__ \ KeyCreated).read[DateTime]
  )(Token.apply _)

  implicit val tokenWrites: Writes[Token] = (
    (__ \ KeyId).writeNullable[String] and
    (__ \ KeyDescription).write[String] and
    (__ \ KeyValue).write[String] and
    (__ \ KeyCreated).write[DateTime]
  )(unlift(Token.unapply))

  val secureRandom = new java.security.SecureRandom

  def createToken(description: String): Token = new Token(None, description,
    RandomStringUtils.random(56, 0, 0, true, true, null, secureRandom),
    new DateTime().withZone(DateTimeZone.UTC)
  )
}
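
createToken stamps every new token with a UTC creation time; a usage sketch (the description is made up):

    val token = Token.createToken("api access")
    // token.created is in UTC; token.value is a 56-char alphanumeric random string

    val json = Json.toJson(token)           // via tokenWrites
    val back = Json.fromJson[Token](json)   // via tokenReads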
Example 29
Source File: JsonFormatsTest.scala From courscala with Apache License 2.0
package org.coursera.common.jsonformat

import org.coursera.common.collection.Enum
import org.coursera.common.collection.EnumSymbol
import org.coursera.common.stringkey.StringKey
import org.coursera.common.stringkey.StringKeyFormat
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
import org.joda.time.Duration
import org.joda.time.Instant
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Format
import play.api.libs.json.JsNumber
import play.api.libs.json.JsString
import play.api.libs.json.JsSuccess
import play.api.libs.json.Json

class JsonFormatsTest extends AssertionsForJUnit {

  import JsonFormatsTest._

  @Test
  def stringKey(): Unit = {
    val id = TestId(2, "test")
    val idString = StringKey.stringify(id)
    assert(JsString(idString) === Json.toJson(id))
    assert(JsSuccess(id) === Json.fromJson[TestId](JsString(idString)))
    assert(JsString(s"invalid stuff $idString").validate[TestId].isError)
  }

  @Test
  def enums(): Unit = {
    assertResult(Color.Amber)(JsString("Amber").as[Color])
    assertResult(JsString("Green"))(Json.toJson(Color.Green))
  }

  @Test
  def instant(): Unit = {
    import JsonFormats.Implicits.instantFormat
    val testInstant = new Instant(137)
    assertResult(JsNumber(137))(Json.toJson(testInstant))
    assertResult(Some(testInstant))(Json.parse("137").asOpt[Instant])
  }

  @Test
  def duration(): Unit = {
    import JsonFormats.Implicits.durationFormat
    val testDuration = Duration.millis(137L)
    assertResult(JsNumber(137))(Json.toJson(testDuration))
    assertResult(Some(testDuration))(Json.parse("137").asOpt[Duration])
  }

  @Test
  def dateTime(): Unit = {
    import JsonFormats.Implicits.dateTimeFormat
    val testDatetime = new DateTime(2010, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)
    assertResult(JsNumber(1262304000000L))(Json.toJson(testDatetime))
    assertResult(Some(testDatetime))(
      Json.parse("1262304000000").asOpt[DateTime].map(_.withZone(DateTimeZone.UTC)))
  }
}

object JsonFormatsTest {

  case class TestId(part1: Int, part2: String)

  object TestId {
    implicit val stringKeyFormat: StringKeyFormat[TestId] =
      StringKeyFormat.caseClassFormat((apply _).tupled, unapply)
    implicit val format: Format[TestId] = JsonFormats.stringKeyFormat[TestId]
  }

  sealed trait Color extends EnumSymbol

  object Color extends Enum[Color] {
    case object Red extends Color
    case object Amber extends Color
    case object Green extends Color

    implicit val format: Format[Color] = JsonFormats.enumFormat(Color)
  }
}
Example 30
Source File: JodaSerializer.scala From scio with Apache License 2.0
package com.spotify.scio.coders.instances.kryo

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime, LocalTime}
import org.joda.time.chrono.ISOChronology

// Note: the excerpt garbled this first serializer, splicing the start of a
// LocalDateTime write into the tail of a LocalDate read. The write/read pair
// below is a minimal reconstruction that round-trips consistently; it is an
// assumption, not the verbatim original.
private[coders] class JodaLocalDateTimeSerializer extends Serializer[LocalDateTime] {
  setImmutable(true)

  def write(kryo: Kryo, output: Output, ldt: LocalDateTime): Unit = {
    output.writeInt(ldt.getYear, false)
    output.writeByte(ldt.getMonthOfYear)
    output.writeByte(ldt.getDayOfMonth)
    output.writeInt(ldt.getMillisOfDay, false)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[LocalDateTime]): LocalDateTime = {
    val year = input.readInt(false)
    val month = input.readByte().toInt
    val day = input.readByte().toInt
    val millisOfDay = input.readInt(false)
    new LocalDate(year, month, day).toLocalDateTime(LocalTime.fromMillisOfDay(millisOfDay))
  }
}

private[coders] class JodaDateTimeSerializer extends Serializer[DateTime] {
  setImmutable(true)

  def write(kryo: Kryo, output: Output, dt: DateTime): Unit = {
    output.writeLong(dt.getMillis)
    output.writeString(dt.getZone.getID)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[DateTime]): DateTime = {
    val millis = input.readLong()
    val zone = DateTimeZone.forID(input.readString())
    new DateTime(millis, zone)
  }
}
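
Once registered with Kryo, the DateTime serializer round-trips both the instant and the zone ID; a sketch (ignoring the private[coders] qualifier, which would normally hide these classes):

    import com.esotericsoftware.kryo.Kryo
    import com.esotericsoftware.kryo.io.{Input, Output}
    import org.joda.time.{DateTime, DateTimeZone}

    val kryo = new Kryo()
    kryo.register(classOf[DateTime], new JodaDateTimeSerializer)

    val output = new Output(256)
    kryo.writeObject(output, DateTime.now(DateTimeZone.forID("Europe/Helsinki")))
    output.close()

    val input = new Input(output.toBytes)
    val restored = kryo.readObject(input, classOf[DateTime])
    // restored preserves both the millis and the Europe/Helsinki zone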
Example 31
Source File: JodaSerializerTest.scala From scio with Apache License 2.0
package com.spotify.scio.coders.instances.kryo

import com.spotify.scio.coders.{CoderTestUtils, KryoAtomicCoder, KryoOptions}
import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime, LocalTime}
import org.scalacheck._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatestplus.scalacheck.Checkers

import scala.jdk.CollectionConverters._
import scala.util.Try

class JodaSerializerTest extends AnyFlatSpec with Checkers {
  // TODO: remove this once https://github.com/scalatest/scalatest/issues/1090 is addressed
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  implicit val dateTimeArb = Arbitrary {
    for {
      year <- Gen.choose(-292275054, 292278993)
      month <- Gen.choose(1, 12)
      maxDayOfMonth <- Try {
        Gen.const(new LocalDateTime(year, month, 1, 0, 0).dayOfMonth().getMaximumValue)
      }.getOrElse(Gen.fail)
      day <- Gen.choose(1, maxDayOfMonth)
      hour <- Gen.choose(0, 23)
      minute <- Gen.choose(0, 59)
      second <- Gen.choose(0, 59)
      ms <- Gen.choose(0, 999)
      tz <- Gen.oneOf(DateTimeZone.getAvailableIDs.asScala.toSeq)
      attempt <- Try {
        val ldt = new DateTime(year, month, day, hour, minute, second, ms, DateTimeZone.forID(tz))
        Gen.const(ldt)
      }.getOrElse(Gen.fail)
    } yield attempt
  }

  implicit val localDateTimeArb = Arbitrary {
    Arbitrary.arbitrary[DateTime].map(_.toLocalDateTime)
  }

  implicit val localTimeArb = Arbitrary {
    Arbitrary.arbitrary[LocalDateTime].map(_.toLocalTime)
  }

  implicit val localDateArb = Arbitrary {
    Arbitrary.arbitrary[LocalDateTime].map(_.toLocalDate)
  }

  val coder = new KryoAtomicCoder[Any](KryoOptions())

  def roundTripProp[T](value: T): Prop = Prop.secure {
    CoderTestUtils.testRoundTrip(coder, value)
  }

  "KryoAtomicCoder" should "roundtrip LocalDate" in {
    check(roundTripProp[LocalDate] _)
  }

  it should "roundtrip LocalTime" in {
    check(roundTripProp[LocalTime] _)
  }

  it should "roundtrip LocalDateTime" in {
    check(roundTripProp[LocalDateTime] _)
  }

  it should "roundtrip DateTime" in {
    check(roundTripProp[DateTime] _)
  }
}
Example 32
Source File: DateTimeConverter.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.commons.datetime

import java.sql.Timestamp

import org.joda.time.format.{DateTimeFormatter, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}

trait DateTimeConverter {
  val zone: DateTimeZone = DateTimeZone.getDefault
  val dateTimeFormatter: DateTimeFormatter = ISODateTimeFormat.dateTime()

  def toString(dateTime: DateTime): String = dateTime.toString(dateTimeFormatter)

  def parseDateTime(s: String): DateTime = dateTimeFormatter.parseDateTime(s).withZone(zone)

  def parseTimestamp(s: String): Timestamp = new Timestamp(parseDateTime(s).getMillis)

  def now: DateTime = new DateTime(zone)

  def fromMillis(millis: Long): DateTime = new DateTime(zone).withMillis(millis)

  def dateTime(
      year: Int,
      monthOfyear: Int,
      dayOfMonth: Int,
      hourOfDay: Int = 0,
      minutesOfHour: Int = 0,
      secondsOfMinute: Int = 0): DateTime =
    new DateTime(year, monthOfyear, dayOfMonth, hourOfDay, minutesOfHour, secondsOfMinute, zone)

  def dateTimeFromUTC(
      year: Int,
      monthOfyear: Int,
      dayOfMonth: Int,
      hourOfDay: Int = 0,
      minutesOfHour: Int = 0,
      secondsOfMinute: Int = 0): DateTime =
    new DateTime(
      year,
      monthOfyear,
      dayOfMonth,
      hourOfDay,
      minutesOfHour,
      secondsOfMinute,
      DateTimeZone.UTC).withZone(DateTimeConverter.zone)
}

object DateTimeConverter extends DateTimeConverter
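A brief usage sketch of the converter object above (the timestamp literal is illustrative):

val dt = DateTimeConverter.parseDateTime("2015-06-01T12:30:00.000Z")   // DateTime in the default zone
val ts = DateTimeConverter.parseTimestamp("2015-06-01T12:30:00.000Z")  // java.sql.Timestamp
val iso = DateTimeConverter.toString(DateTimeConverter.fromMillis(0L)) // ISO-8601 string for the epoch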
Example 33
Source File: ContainerProvider.scala From azure-kusto-spark with Apache License 2.0 | 5 votes |
package com.microsoft.kusto.spark.utils

import com.microsoft.azure.kusto.data.Client
import com.microsoft.kusto.spark.utils.CslCommandsGenerator.generateCreateTmpStorageCommand
import com.microsoft.kusto.spark.utils.{KustoDataSourceUtils => KDSU}
import org.joda.time.{DateTime, DateTimeZone, Period}

import scala.collection.JavaConverters._

class ContainerProvider[A](val dmClient: Client, val clusterAlias: String, val command: String, cacheEntryCreator: ContainerAndSas => A) {
  private var roundRobinIdx = 0
  private var storageUris: Seq[A] = Seq.empty
  private var lastRefresh: DateTime = new DateTime(DateTimeZone.UTC)
  private val myName = this.getClass.getSimpleName

  def getContainer: A = {
    // Refresh if storageExpiryMinutes have passed since last refresh for this cluster as SAS should be valid for at least 120 minutes
    if (storageUris.isEmpty ||
        new Period(new DateTime(DateTimeZone.UTC), lastRefresh).getMinutes > KustoConstants.storageExpiryMinutes) {
      refresh
    } else {
      roundRobinIdx = (roundRobinIdx + 1) % storageUris.size
      storageUris(roundRobinIdx)
    }
  }

  def getAllContainers: Seq[A] = {
    if (storageUris.isEmpty ||
        new Period(new DateTime(DateTimeZone.UTC), lastRefresh).getMinutes > KustoConstants.storageExpiryMinutes) {
      refresh
    }
    storageUris
  }

  private def refresh = {
    val res = dmClient.execute(command)
    val storage = res.getPrimaryResults.getData.asScala.map(row => {
      val parts = row.get(0).toString.split('?')
      cacheEntryCreator(ContainerAndSas(parts(0), '?' + parts(1)))
    })

    if (storage.isEmpty) {
      KDSU.reportExceptionAndThrow(myName, new RuntimeException("Failed to allocate temporary storage"), "writing to Kusto", clusterAlias)
    }

    lastRefresh = new DateTime(DateTimeZone.UTC)
    storageUris = scala.util.Random.shuffle(storage)
    roundRobinIdx = 0
    storage(roundRobinIdx)
  }
}
Example 34
Source File: TimeFormat.scala From flint with Apache License 2.0 | 5 votes |
package com.twosigma.flint.timeseries.time

import java.util.concurrent.TimeUnit

import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter, ISODateTimeFormat }
import org.joda.time.{ DateTime, DateTimeZone }

import scala.util.Try

object TimeFormat {

  protected[flint] def parseNano(text: String, timeZone: DateTimeZone = DateTimeZone.UTC): Long =
    parse(text, timeZone, timeUnit = TimeUnit.NANOSECONDS)

  private val formatters: List[DateTimeFormatter] = List(
    // Double `HH` formatter
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm"),
    DateTimeFormat.forPattern("yyyyMMdd"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm"),
    DateTimeFormat.forPattern("yyyy-MM-dd"),
    // Single `H` formatter
    DateTimeFormat.forPattern("yyyyMMdd H:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyyMMdd H:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd H:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyy-MM-dd H:mm:ss.SSS Z"),
    // ISO DateTime
    ISODateTimeFormat.dateTimeParser()
  )
}
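The extract stops before the private parse helper that parseNano delegates to. A minimal sketch of what such a helper could look like, under the assumption that it simply tries each formatter in order (illustrative, not flint's actual implementation; it would live inside the TimeFormat object so it can see formatters):

import java.util.concurrent.TimeUnit

import org.joda.time.DateTimeZone
import scala.util.Try

// Hypothetical helper: try each formatter until one parses, then convert
// the resulting epoch-milliseconds into the requested unit.
protected[flint] def parse(text: String, timeZone: DateTimeZone, timeUnit: TimeUnit): Long = {
  val parsed = formatters.view
    .flatMap(fmt => Try(fmt.withZone(timeZone).parseDateTime(text)).toOption)
    .headOption
    .getOrElse(throw new IllegalArgumentException(s"Could not parse: $text"))
  timeUnit.convert(parsed.getMillis, TimeUnit.MILLISECONDS)
}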
Example 35
Source File: PointRDDExtensionsSpec.scala From reactiveinflux-spark with Apache License 2.0 | 5 votes |
package com.pygmalios.reactiveinflux.extensions

import com.holdenkarau.spark.testing.SharedSparkContext
import com.pygmalios.reactiveinflux.Point.Measurement
import com.pygmalios.reactiveinflux._
import com.pygmalios.reactiveinflux.extensions.PointRDDExtensionsSpec._
import com.pygmalios.reactiveinflux.spark._
import com.pygmalios.reactiveinflux.spark.extensions.PointRDDExtensions
import org.joda.time.{DateTime, DateTimeZone}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, FlatSpec}

import scala.concurrent.duration._

@RunWith(classOf[JUnitRunner])
class PointRDDExtensionsSpec extends FlatSpec with SharedSparkContext with BeforeAndAfter {

  before {
    withInflux(_.create())
  }

  after {
    withInflux(_.drop())
  }

  behavior of "saveToInflux"

  it should "write single point to Influx" in {
    val points = List(point1)
    val rdd = sc.parallelize(points)

    // Execute
    rdd.saveToInflux()

    // Assert
    assert(PointRDDExtensions.totalBatchCount == 1)
    assert(PointRDDExtensions.totalPointCount == 1)

    val result = withInflux(
      _.query(Query(s"SELECT * FROM $measurement1"))
        .result
        .singleSeries)

    assert(result.rows.size == 1)

    val row = result.rows.head
    assert(row.time == point1.time)
    assert(row.values.size == 5)
  }

  it should "write 1000 points to Influx" in {
    val points = (1 to 1000).map { i =>
      Point(
        time = point1.time.plusMinutes(i),
        measurement = point1.measurement,
        tags = point1.tags,
        fields = point1.fields
      )
    }
    val rdd = sc.parallelize(points)

    // Execute
    rdd.saveToInflux()

    // Assert
    assert(PointRDDExtensions.totalBatchCount == 8)
    assert(PointRDDExtensions.totalPointCount == 1000)

    val result = withInflux(
      _.query(Query(s"SELECT * FROM $measurement1"))
        .result
        .singleSeries)

    assert(result.rows.size == 1000)
  }
}

object PointRDDExtensionsSpec {
  implicit val params: ReactiveInfluxDbName = ReactiveInfluxDbName("test")
  implicit val awaitAtMost: Duration = 1.second

  val measurement1: Measurement = "measurement1"
  val point1 = Point(
    time = new DateTime(1983, 1, 10, 7, 43, 10, 3, DateTimeZone.UTC),
    measurement = measurement1,
    tags = Map("tagKey1" -> "tagValue1", "tagKey2" -> "tagValue2"),
    fields = Map(
      "fieldKey1" -> StringFieldValue("fieldValue1"),
      "fieldKey2" -> BigDecimalFieldValue(10.7)))
}
Example 36
Source File: EventStatsServlet.scala From spark-streaming-demo with Apache License 2.0 | 5 votes |
package com.datastax.examples.meetup

import org.joda.time.{DateTimeZone, DateTime, Duration}
import org.scalatra.scalate.ScalateSupport
import org.scalatra.{CorsSupport, ScalatraServlet}

import scala.concurrent.Await
import scala.concurrent.duration._
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json._

class EventStatsServlet() extends ScalatraServlet with CorsSupport with JacksonJsonSupport with ScalateSupport {

  protected implicit val jsonFormats: Formats = DefaultFormats

  before() {
    contentType = formats("json")
  }

  options("/*") {
    response.setHeader("Access-Control-Allow-Headers", request.getHeader("Access-Control-Request-Headers"))
  }

  get("/trending") {
    val time = new DateTime(DateTimeZone.UTC)

    // Scan 5 second intervals within the past 1 minute.
    // Stop as soon as first successful found.
    val result = (for (i <- Stream range (0, 12); v = getTrendingTopics(i, time); if v.nonEmpty) yield v).headOption

    // Order topics by count in desc order and take top 20
    result.map(r => r.toIndexedSeq.sortBy(_._2).reverse.take(20))
  }

  get("/countries") {
    val attendeesByCountry = Event.dimensions("attending", "ALL")

    Await.result(attendeesByCountry, 5 seconds)
      .map { case (a, b) => Map("code" -> a.toUpperCase, "value" -> b) }
  }

  get("/") {
    contentType = "text/html"
    layoutTemplate("dashboard.ssp")
  }

  def roundDateTime(t: DateTime, d: Duration) = {
    t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis)
  }

  def getTrendingTopics(i: Int, time: DateTime) = {
    val t = roundDateTime(time minusSeconds 5 * i, Duration.standardSeconds(5))
    val trendingTopics = Event.dimensions("trending", "S" + t.toString("yyyyMMddHHmmss"))
    Await.result(trendingTopics, 5 seconds)
  }
}
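roundDateTime snaps a timestamp to the nearest multiple of a duration, which is how the 5-second trending buckets above are formed. A worked example (reusing the method body; the timestamp is illustrative):

import org.joda.time.{DateTime, DateTimeZone, Duration}

def roundDateTime(t: DateTime, d: Duration) =
  t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis)

val t = new DateTime(2020, 1, 1, 12, 0, 7, 300, DateTimeZone.UTC)
// 7.3s past the minute is closer to the 5s boundary than to 10s:
roundDateTime(t, Duration.standardSeconds(5)) // 2020-01-01T12:00:05.000Z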
Example 37
Source File: DateUtilsSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.common.util

import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike

class DateUtilsSpec extends Matchers with AnyFunSpecLike {

  private val iso8601format = ISODateTimeFormat.dateTimeNoMillis()
  private val iso8601withMillis = ISODateTimeFormat.dateTime()

  import DateUtils._

  val now = DateTime.now()
  val f = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
  val dt = f.parseDateTime("2017-01-10 23:13:26")

  describe("When using DateUtils") {
    it("converts to ISO format") {
      iso8601(now, false) shouldBe iso8601format.print(now)
      iso8601(now, true) shouldBe iso8601withMillis.print(now)
    }

    it("converts to UTC") {
      val dt = new DateTime(1234567000, DateTimeZone.UTC)
      dtFromUtcSeconds(1234567) shouldBe dt
      dtFromIso8601("1970-01-15T06:56:07Z") shouldBe dt
    }

    it("implicitly converts to the wrapper") {
      val dtw: DateTimeWrapper = dt
      dtw shouldBe DateTimeWrapper(dt)
    }

    it("sorts and compares dates") {
      val dtw: DateTimeWrapper = dt
      dtw.compare(now) should be < 0
      dtw.compare(now, dt) should be > 0
      Seq(now, dt).sorted shouldBe Seq(dt, now)
    }
  }
}
Example 38
Source File: ClickhouseJsonSupport.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.dsl.marshalling

import com.crobox.clickhouse.time.IntervalStart
import org.joda.time.format.{DateTimeFormatter, DateTimeFormatterBuilder, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
import spray.json.{JsNumber, JsString, JsValue, JsonFormat, deserializationError, _}

import scala.util.Try

trait ClickhouseJsonSupport {

  // NOTE: this extract elides the regex extractors (month, date, msTimestamp, timestamp),
  // the formatter/readFormatter values and the Unix-epoch constants used below, as well as
  // the format's write side. The enclosing object name is taken from the companion test,
  // which imports ClickhouseJsonSupport.ClickhouseIntervalStartFormat.
  implicit object ClickhouseIntervalStartFormat extends JsonFormat[IntervalStart] {

    override def read(json: JsValue): IntervalStart =
      json match {
        case JsString(value) =>
          value match {
            case month(relativeMonth, timezoneId) =>
              new DateTime(UnixStartTimeWithoutTimeZone)
                .withZoneRetainFields(DateTimeZone.forID(timezoneId))
                .plusMonths(relativeMonth.toInt - RelativeMonthsSinceUnixStart)
                .withZone(DateTimeZone.UTC)
            case date(dateOnly, timezoneId) =>
              // should handle quarter and year grouping as it returns a date
              formatter
                .parseDateTime(dateOnly)
                .withZoneRetainFields(DateTimeZone.forID(timezoneId))
                .withZone(DateTimeZone.UTC)
            case msTimestamp(millis) => new DateTime(millis.toLong, DateTimeZone.UTC)
            case timestamp(secs)     => new DateTime(secs.toLong * 1000, DateTimeZone.UTC)
            case _ =>
              // sometimes clickhouse mistakenly returns a long / int value as JsString. Therefor, first try to
              // parse it as a long...
              val dateTime = Try {
                new DateTime(value.toLong, DateTimeZone.UTC)
              }.toOption

              // continue with parsing using the formatter
              dateTime.getOrElse {
                try {
                  formatter.parseDateTime(value)
                } catch {
                  case _: IllegalArgumentException => error(s"Couldn't parse $value into valid date time")
                  case _: UnsupportedOperationException =>
                    error("Unsupported operation, programmatic misconfiguration?")
                }
              }
          }
        case JsNumber(millis) => new DateTime(millis.longValue, DateTimeZone.UTC)
        case _                => throw DeserializationException(s"Unknown date format read from clickhouse for $json")
      }

    def error(v: Any): DateTime = {
      val example = readFormatter.print(0)
      deserializationError(
        f"'$v' is not a valid date value. Dates must be in compact ISO-8601 format, e.g. '$example'"
      )
    }
  }
}

object ClickhouseJsonSupport extends DefaultJsonProtocol with ClickhouseJsonSupport
Example 39
Source File: ClickhouseIntervalStartFormatTest.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.dsl.marshalling

import com.crobox.clickhouse.dsl.marshalling.ClickhouseJsonSupport.ClickhouseIntervalStartFormat
import org.joda.time.{DateTime, DateTimeZone}
import spray.json.{JsNumber, JsString}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ClickhouseIntervalStartFormatTest extends AnyFlatSpec with Matchers {

  val zone = DateTimeZone.forID("Europe/Bucharest")

  it should "read using month relative" in {
    ClickhouseIntervalStartFormat.read(
      JsString(s"${ClickhouseIntervalStartFormat.RelativeMonthsSinceUnixStart + 3}_$zone")
    ) should be(new DateTime("1970-04-01T00:00:00.000+02:00", DateTimeZone.UTC))
  }

  it should "read using 0 as JsString" in {
    ClickhouseIntervalStartFormat.read(JsString("0")) should be(
      new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC)
    )
  }

  it should "read using 0 as JsNumber" in {
    ClickhouseIntervalStartFormat.read(JsNumber(0)) should be(
      new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC)
    )
  }

  it should "read date only" in {
    ClickhouseIntervalStartFormat.read(JsString(s"1970-12-17_$zone")) should be(
      new DateTime("1970-12-17T00:00:00.000+02:00", DateTimeZone.UTC)
    )
  }

  it should "read timestamp" in {
    val date = DateTime.now(DateTimeZone.UTC)
    ClickhouseIntervalStartFormat.read(JsString(s"${date.getMillis}")) should be(date)
    ClickhouseIntervalStartFormat.read(JsNumber(date.getMillis)) should be(date)
  }
}
Example 40
Source File: BaseJobServiceTest.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2018, Yahoo Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.job.service

import java.util.UUID

import com.google.common.io.Closer
import com.yahoo.maha.core.DailyGrain
import com.yahoo.maha.jdbc.JdbcConnection
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.{FunSuite, Matchers}

trait BaseJobServiceTest extends FunSuite with Matchers {

  protected var dataSource: Option[HikariDataSource] = None
  protected var jdbcConnection: Option[JdbcConnection] = None
  protected val closer: Closer = Closer.create()

  final val REGISTRY = "er"
  protected[this] val fromDate: String = DailyGrain.toFormattedString(DateTime.now(DateTimeZone.UTC).minusDays(7))
  protected[this] val toDate: String = DailyGrain.toFormattedString(DateTime.now(DateTimeZone.UTC))

  val h2dbId: String = UUID.randomUUID().toString.replace("-", "")

  initJdbcToH2()

  val mahaJobWorkerTable =
    s"""
       | create table maha_worker_job(
       | jobId NUMBER(10) PRIMARY KEY,
       | jobType VARCHAR(100),
       | jobStatus VARCHAR(100),
       | jobResponse VARCHAR(100),
       | numAcquired NUMBER(2),
       | createdTimestamp TIMESTAMP,
       | acquiredTimestamp TIMESTAMP,
       | endedTimestamp TIMESTAMP,
       | jobParentId NUMBER(10),
       | jobRequest VARCHAR(100),
       | hostname VARCHAR(100),
       | cubeName VARCHAR(100),
       | isDeleted NUMBER(1)
       | );
     """.stripMargin

  val now = new DateTime()

  def initJdbcToH2(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl(s"jdbc:h2:mem:$h2dbId;MODE=Oracle;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("h2.test.database.password")
    config.setMaximumPoolSize(1)
    dataSource = Option(new HikariDataSource(config))
    jdbcConnection = dataSource.map(JdbcConnection(_))
    assert(jdbcConnection.isDefined)
  }

  val result = jdbcConnection.get.execute(mahaJobWorkerTable)
  assert(result.isSuccess, s"Failed to create job table $result")
}
Example 41
Source File: ThrottlingConfig.scala From maha with Apache License 2.0 | 5 votes |
package com.yahoo.maha.worker.throttling

import java.util.concurrent.Executors

import com.yahoo.maha.core.Engine
import com.yahoo.maha.job.service.{JobMetadata, JobStatus, JobType}
import com.yahoo.maha.worker.request.MahaWorkerRequest
import grizzled.slf4j.Logging
import org.joda.time.{DateTime, DateTimeZone}

import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration._
import scala.util.{Failure, Success}

case class EngineBasedThrottler(throttlingConfig: EngineThrottlingConfig, jobMetadata: JobMetadata, jobMetaExecConfig: JobMetaExecConfig) extends Throttler with Logging {

  implicit val executor = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(jobMetaExecConfig.poolSize))

  override def throttle(mahaWorkerRequest: MahaWorkerRequest): Boolean = {

    val engine: Engine = mahaWorkerRequest.engine

    val jobType: JobType = {
      val jobTypeOption = JobType.getJobType(engine)
      require(jobTypeOption.isDefined, s"Unable to get the job type for engine $engine")
      jobTypeOption.get
    }

    var timesChecked = 0
    var countOfRunningJobs = getRunningJobs(engine, jobType)
    while (countOfRunningJobs > throttlingConfig.countThreshold && timesChecked < throttlingConfig.maxChecks) {
      warn(s"Throttling: Number of running jobs ($countOfRunningJobs) exceeds threshold (${throttlingConfig.countThreshold}). Checked $timesChecked times.")
      Thread.sleep(throttlingConfig.checkDelayMs)
      countOfRunningJobs = getRunningJobs(engine, jobType)
      timesChecked += 1
    }
    if (timesChecked == throttlingConfig.maxChecks && countOfRunningJobs > throttlingConfig.countThreshold) {
      warn(s"Timeout: Count of running jobs exceeds threshold even after ${throttlingConfig.checkDelayMs * throttlingConfig.maxChecks} ms. Continuing to process to avoid increasing PULSAR/KAFKA backlog.")
      //monManager.incrementMetric(Metrics.ThrottleCheckTimeouts)
    }
    info(s"Number of running jobs ($countOfRunningJobs) below threshold (${throttlingConfig.countThreshold}), proceeding to process message.")

    timesChecked > 0
  }

  def getRunningJobs(engine: Engine, jobType: JobType): Int = {
    try {
      val jobCreatedTs = DateTime.now(DateTimeZone.UTC).minusMinutes(throttlingConfig.lookbackMins)
      val countOfRunningJobsFuture = jobMetadata.countJobsByTypeAndStatus(jobType, JobStatus.RUNNING, jobCreatedTs)

      Await.result(countOfRunningJobsFuture, jobMetaExecConfig.maxWaitMills millis)

      val runningJobCount: Int = if (countOfRunningJobsFuture.value.isEmpty) {
        warn(s"Failed to get the runningJobCount in ${jobMetaExecConfig.maxWaitMills}")
        0
      } else {
        countOfRunningJobsFuture.value.get match {
          case Success(count) => count
          case Failure(t) =>
            error(s"Failed to get the result from jobMeta ${t.getMessage}", t)
            0
        }
      }
      runningJobCount
    } catch {
      case e: Exception =>
        e.printStackTrace()
        0
    }
  }
}
Example 42
Source File: DerivedFunctionTest.scala From maha with Apache License 2.0 | 5 votes |
package com.yahoo.maha.core

import java.util.TimeZone

import com.yahoo.maha.core.DruidDerivedFunction._
import org.joda.time.DateTimeZone
import org.json4s.JObject
import org.scalatest.{FunSuiteLike, Matchers}

class DerivedFunctionTest extends FunSuiteLike with Matchers {
  test("Create a DECODE_DIM failure cases") {
    val minLengthCatch = intercept[IllegalArgumentException] {
      new DECODE_DIM("fieldName", "tooFewArgs")
    }
    assert(minLengthCatch.getMessage.contains("Usage: DECODE( expression , search , result [, search , result]... [, default] )"))
  }

  test("Create value DECODE_DIM") {
    val newDecode = new DECODE_DIM("fieldName", "arg1", "decodeVal1", "arg2", "decodeVal2", "default")
    val newDecodeWithoutDefault = new DECODE_DIM("fieldName", "arg1", "decodeVal1", "arg2", "decodeVal2")
    assert(newDecode.apply("arg1") == Some("decodeVal1"))
    assert(newDecode.apply("arg3") == Some("default"))
    assert(newDecodeWithoutDefault.apply("arg3") == None)
    assert(newDecode.apply.isDefinedAt("arg20"))
  }

  test("Attempt LOOKUP_WITH_DECODE fail") {
    val minLengthCatch = intercept[IllegalArgumentException] {
      new LOOKUP_WITH_DECODE("fieldNameSpace", "valueCol", dimensionOverrideMap = Map.empty, "tooFewArgs")
    }
    assert(minLengthCatch.getMessage.contains("Usage: DECODE( expression , search , result [, search , result]... [, default] )"))
  }

  test("Failure to get interval date with blank format") {
    val thrown = intercept[IllegalArgumentException] {
      GET_INTERVAL_DATE.checkFormat("")
    }
    assert(thrown.getMessage.contains("Format for get_interval_date must be d|w|m|day|yr not"))
  }

  test("All Derived Functions should generate proper JSON Strings.") {
    val gid = GET_INTERVAL_DATE("fieldName", "yyyyMMdd")
    val dow = DAY_OF_WEEK("fieldName")
    val dtf = DATETIME_FORMATTER("fieldName", 0, 10)
    val dd = DECODE_DIM("fieldName", "arg1", "decodeVal1", "arg2", "decodeVal2", "default")
    val js = JAVASCRIPT("fieldName", "function(x) { return x > 0; }")
    val rgx = REGEX("fieldName", "blah", 0, true, "t")
    val lu = LOOKUP("namespace", "val", Map("a" -> "b"))
    val lwd = LOOKUP_WITH_DECODE("namespace", "valCol", Map("b" -> "a"), "arg1", "decodeVal1", "arg2", "decodeVal2", "default")
    val lwe = LOOKUP_WITH_EMPTY_VALUE_OVERRIDE("namespace", "valCol", "ovr", Map("c" -> "d"))
    val lwo = LOOKUP_WITH_DECODE_ON_OTHER_COLUMN("namespace", "valCol", "valToCheck", "valIfMatched", "valIfNot", Map("2" -> "4", "b" -> "a"))
    val ltf = LOOKUP_WITH_TIMEFORMATTER("namespace", "valCol", "yyyyMMdd", "yyyy", Map("do" -> "dont"), Some("override"))
    val ldr = LOOKUP_WITH_DECODE_RETAIN_MISSING_VALUE("namespace", "valCol", true, true, Map("rtn" -> "not"), "arg1", "decodeVal1", "arg2", "decodeVal2", "default")
    val dtz = DRUID_TIME_FORMAT("format", DateTimeZone.forID("Asia/Jakarta"))
    val dpg = DRUID_TIME_FORMAT_WITH_PERIOD_GRANULARITY("format", "P1D", DateTimeZone.forID("Asia/Jakarta"))
    val rc = TIME_FORMAT_WITH_REQUEST_CONTEXT("yyyy")
    val lwt = LOOKUP_WITH_TIMESTAMP("namespace", "val", "fmt", Map.empty, Some("ovrVal"), asMillis = false)

    val resultArray = List(gid, dow, dtf, dd, js, rgx, lu, lwd, lwe, lwo, ltf, ldr, dtz, dpg, rc, lwt)

    val expectedJSONs = List(
      """{"function_type":"GET_INTERVAL_DATE","fieldName":"fieldName","format":"yyyyMMdd"}""",
      """{"function_type":"DAY_OF_WEEK","fieldName":"fieldName"}""",
      """{"function_type":"DATETIME_FORMATTER","fieldName":"fieldName","index":0,"length":10}""",
      """{"function_type":"DECODE_DIM","fieldName":"fieldName","args":"arg1,decodeVal1,arg2,decodeVal2,default"}""",
      """{"function_type":"JAVASCRIPT","fieldName":"fieldName","function":"function(x) { return x > 0; }"}""",
      """{"function_type":"REGEX","fieldName":"fieldName","expr":"blah","index":0,"replaceMissingValue":true,"replaceMissingValueWith":"t"}""",
      """{"function_type":"LOOKUP","lookupNamespace":"namespace","valueColumn":"val","dimensionOverrideMap":{"a":"b"}}""",
      """{"function_type":"LOOKUP_WITH_DECODE","lookupNamespace":"namespace","valueColumn":"valCol","dimensionOverrideMap":{"b":"a"},"args":"arg1,decodeVal1,arg2,decodeVal2,default"}""",
      """{"function_type":"LOOKUP_WITH_EMPTY_VALUE_OVERRIDE","lookupNamespace":"namespace","valueColumn":"valCol","overrideValue":"ovr","dimensionOverrideMap":{"c":"d"}}""",
      """{"function_type":"LOOKUP_WITH_DECODE_ON_OTHER_COLUMN","lookupNamespace":"namespace","columnToCheck":"valCol","valueToCheck":"valToCheck","columnIfValueMatched":"valIfMatched","columnIfValueNotMatched":"valIfNot","dimensionOverrideMap":{"2":"4","b":"a"}}""",
      """{"function_type":"LOOKUP_WITH_TIMEFORMATTER","lookupNamespace":"namespace","valueColumn":"valCol","inputFormat":"yyyyMMdd","resultFormat":"yyyy","dimensionOverrideMap":{"do":"dont"}}""",
      """{"function_type":"LOOKUP_WITH_DECODE_RETAIN_MISSING_VALUE","lookupNamespace":"namespace","valueColumn":"valCol","retainMissingValue":true,"injective":true,"dimensionOverrideMap":{"rtn":"not"},"args":"arg1,decodeVal1,arg2,decodeVal2,default"}""",
      """{"function_type":"DRUID_TIME_FORMAT","format":"format","zone":"Asia/Jakarta"}""",
      """{"function_type":"DRUID_TIME_FORMAT_WITH_PERIOD_GRANULARITY","format":"format","period":"P1D","zone":"Asia/Jakarta"}""",
      """{"function_type":"TIME_FORMAT_WITH_REQUEST_CONTEXT","format":"yyyy"}""",
      """{"function_type":"LOOKUP_WITH_TIMESTAMP","lookupNamespace":"namespace","valueColumn":"val","resultFormat":"fmt","dimensionOverrideMap":{},"overrideValue":"ovrVal","asMillis":false}"""
    )

    import org.json4s._
    import org.json4s.jackson.JsonMethods._
    implicit val formats = DefaultFormats

    val allJSONs: List[JObject] = resultArray.map(expn => expn.asJSON)
    val allJsonStrings: List[String] = allJSONs.map(json => compact(json))
    assert(allJsonStrings.forall(str => expectedJSONs.contains(str)))
  }
}
Example 43
Source File: UTCFormatSpec.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.ops.v4

import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.WordSpec
import play.api.libs.json.{Format, Json}

case class NotUTC(when: DateTime)
object NotUTC {
  implicit val format: Format[NotUTC] = Json.format[NotUTC]
}

case class UseUTC(when: DateTime)
object UseUTC extends UTCFormats {
  implicit val format: Format[UseUTC] = Json.format[UseUTC]
}

class UTCFormatSpec extends WordSpec {

  private[this] val pacificTimeZone = DateTimeZone.forID("US/Pacific")

  "Json.format by default" should {
    "deserialize with the current time zone" in {
      val dt = new DateTime(pacificTimeZone)
      assertResult(DateTimeZone.getDefault) {
        val notUTC = Json.toJson(NotUTC(dt)).as[NotUTC]
        notUTC.when.getZone
      }
    }
  }

  "UTCFormats" should {
    "override the standard Format[DateTime]" in {
      val dt = new DateTime(pacificTimeZone)
      assertResult(DateTimeZone.UTC) {
        val useUTC = Json.toJson(UseUTC(dt)).as[UseUTC]
        useUTC.when.getZone
      }
    }
  }
}
Example 44
Source File: AuditService.scala From vat-api with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.vatapi.services

import javax.inject.Inject

import org.joda.time.{DateTime, DateTimeZone}
import play.api.Logger
import play.api.libs.json.{Json, Writes}
import play.api.mvc.Request
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.audit.AuditExtensions
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.audit.model.ExtendedDataEvent
import uk.gov.hmrc.vatapi.models.audit.AuditEvent
import uk.gov.hmrc.vatapi.resources.BusinessResult

import scala.concurrent.ExecutionContext

class AuditService @Inject()(auditConnector: AuditConnector) {

  val logger: Logger = Logger(this.getClass)

  def audit[T](event: AuditEvent[T])(
    implicit hc: HeaderCarrier,
    fmt: Writes[T],
    request: Request[_],
    ec: ExecutionContext
  ): BusinessResult[Unit] = {
    logger.debug(s"[AuditService][audit] Generating ${event.auditType} audit event for vat-api.")

    val auditEvent = ExtendedDataEvent(
      auditSource = "vat-api",
      auditType = event.auditType,
      tags = AuditExtensions.auditHeaderCarrier(hc).toAuditTags(event.transactionName, request.path),
      detail = Json.toJson(event.detail),
      generatedAt = DateTime.now(DateTimeZone.UTC)
    )

    BusinessResult.success(auditConnector.sendExtendedEvent(auditEvent))
  }
}
Example 45
Source File: UnitSpec.scala From vat-api with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.vatapi

import org.joda.time.{DateTime, DateTimeZone, LocalDate}
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.{AnyWordSpec, AsyncWordSpec}
import play.api.test.{DefaultAwaitTimeout, FutureAwaits}

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.language.postfixOps

trait BaseUnitSpec extends Matchers with OptionValues with TestUtils with FutureAwaits with DefaultAwaitTimeout {
  implicit val timeout: FiniteDuration = 5 seconds

  def await[T](f: Future[T])(implicit duration: FiniteDuration = timeout): T =
    Await.result(f, duration)
}

trait UnitSpec extends AnyWordSpec with BaseUnitSpec {
  implicit def extractAwait[A](future: Future[A]): A = await[A](future)

  def await[A](future: Future[A])(implicit timeout: Duration): A = Await.result(future, timeout)
}

trait AsyncUnitSpec extends AsyncWordSpec with BaseUnitSpec

trait TestUtils {
  private val vrnGenerator = VrnGenerator()

  def now: DateTime = DateTime.now(DateTimeZone.UTC)
  def generateVrn = vrnGenerator.nextVrn()

  implicit def toLocalDate(d: DateTime): LocalDate = d.toLocalDate
}

object TestUtils extends TestUtils
Example 46
Source File: DateHelper.scala From dependency with MIT License | 5 votes |
package io.flow.dependency.www.lib

import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.DateTimeFormat

object DateHelper {

  private[this] val EasternTime = DateTimeZone.forID("America/New_York")
  private[this] val DefaultLabel = "N/A"

  def shortDate(dateTime: DateTime): String = shortDateOption(Some(dateTime))

  def shortDateOption(dateTime: Option[DateTime], default: String = DefaultLabel): String = {
    dateTime match {
      case None => default
      case Some(dt) => DateTimeFormat.shortDate.withZone(EasternTime).print(dt)
    }
  }

  def longDateTime(dateTime: DateTime): String = longDateTimeOption(Some(dateTime))

  def longDateTimeOption(dateTime: Option[DateTime], default: String = DefaultLabel): String = {
    dateTime match {
      case None => default
      case Some(dt) => DateTimeFormat.longDateTime.withZone(EasternTime).print(dt)
    }
  }
}
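Sample outputs for the helpers above (the short-date rendering is locale-dependent, so the first value is indicative only):

import org.joda.time.{DateTime, DateTimeZone}

DateHelper.shortDate(new DateTime(2020, 7, 4, 12, 0, DateTimeZone.UTC)) // e.g. "7/4/20" in US Eastern
DateHelper.shortDateOption(None)                                        // "N/A"
DateHelper.longDateTimeOption(None, default = "unknown")                // "unknown"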
Example 47
Source File: DiffWrappers.scala From filo with Apache License 2.0 | 5 votes |
package org.velvia.filo.codecs

import org.joda.time.{DateTime, DateTimeZone}
import scala.language.postfixOps
import scalaxy.loops._

import org.velvia.filo._
import org.velvia.filo.vector._

abstract class DiffPrimitiveWrapper[A: TypedReaderProvider, P](dpv: DiffPrimitiveVector)
    extends NaMaskAvailable[P](dpv.naMask) {
  val info = dpv.info
  val _len = dpv.len
  val dataReader = TypedBufferReader[A](FastBufferReader(dpv.dataAsByteBuffer()), info.nbits, info.signed)
  val baseReader = FastBufferReader(dpv.base)

  final def length: Int = _len

  final def foreach[B](fn: P => B): Unit = {
    if (isEmptyMask) { // every value available!
      for { i <- 0 until length optimized } { fn(apply(i)) }
    } else {
      for { i <- 0 until length optimized } { if (isAvailable(i)) fn(apply(i)) }
    }
  }
}

// NOTE: DiffDateTimeWrapperBase, which defines millisBase and millisReader, is elided in this extract.
class DiffDateTimeWithTZWrapper(ddtv: DiffDateTimeVector) extends DiffDateTimeWrapperBase(ddtv) {
  import TypedBufferReader._

  val tzBase: Byte = ddtv.vars.baseTz
  val tzReader = TypedBufferReader[Int](FastBufferReader(ddtv.tzAsByteBuffer),
                                        ddtv.tzInfo.nbits, ddtv.tzInfo.signed)

  final def apply(i: Int): DateTime = {
    val zone = DateTimeZone.forOffsetMillis((tzBase + tzReader.read(i)) * VectorBuilder.FifteenMinMillis)
    new DateTime(millisBase + millisReader.read(i), zone)
  }
}
Example 48
Source File: JsonJacksonMarshallerTest.scala From wix-http-testkit with MIT License | 5 votes |
package com.wix.e2e.http.json

import java.time.LocalDateTime
import java.util.Optional

import com.fasterxml.jackson.databind.ObjectMapper
import com.wix.e2e.http.api.Marshaller
import com.wix.e2e.http.json.MarshallingTestObjects.SomeCaseClass
import com.wix.test.random._
import org.joda.time.DateTimeZone.UTC
import org.joda.time.{DateTime, DateTimeZone}
import org.specs2.mutable.Spec
import org.specs2.specification.Scope

class JsonJacksonMarshallerTest extends Spec {

  trait ctx extends Scope {
    val someStr = randomStr
    val javaDateTime = LocalDateTime.now()
    val someCaseClass = SomeCaseClass(randomStr, randomInt)
    val dateTime = new DateTime
    val dateTimeUTC = new DateTime(UTC)

    val marshaller: Marshaller = new JsonJacksonMarshaller
  }

  "JsonJacksonMarshaller" should {

    "marshall scala option properly" in new ctx {
      marshaller.unmarshall[Option[String]](marshaller.marshall(Some(someStr))) must beSome(someStr)
    }

    "marshall scala case classes properly" in new ctx {
      marshaller.unmarshall[SomeCaseClass](marshaller.marshall(someCaseClass)) must_=== someCaseClass
    }

    "marshall datetime without zone" in new ctx {
      marshaller.unmarshall[DateTime](marshaller.marshall(dateTime.withZone(DateTimeZone.getDefault))) must_=== dateTime.withZone(UTC)
    }

    "marshall date time to textual format in UTC" in new ctx {
      marshaller.marshall(dateTime) must contain(dateTime.withZone(UTC).toString)
    }

    "marshall java.time objects" in new ctx {
      marshaller.unmarshall[LocalDateTime](marshaller.marshall(javaDateTime)) must_=== javaDateTime
    }

    "marshall java 8 Optional" in new ctx {
      marshaller.unmarshall[Optional[DateTime]](marshaller.marshall(dateTimeUTC)) must_=== Optional.of(dateTimeUTC)
      marshaller.unmarshall[Optional[SomeCaseClass]](marshaller.marshall(someCaseClass)) must_=== Optional.of(someCaseClass)
    }

    "expose jackson object mapper to allow external configuration" in new ctx {
      marshaller.asInstanceOf[JsonJacksonMarshaller].configure must beAnInstanceOf[ObjectMapper]
    }
  }
}

object MarshallingTestObjects {
  case class SomeCaseClass(s: String, i: Int)
}
Example 49
Source File: AbstractJsonSerializer.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.common.formats

import com.fasterxml.jackson.core.JsonFactory
import com.typesafe.config.ConfigFactory
import org.joda.time.DateTimeZone
import org.joda.time.format.ISODateTimeFormat

object SettingsHelper {
  val config = ConfigFactory.load()
  val dataCenter = config.getString("dataCenter.id")
}

class AbstractJsonSerializer {
  val jsonFactory = new JsonFactory()
  val dateFormatter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC)
}

trait NsSplitter {
  def splitNamespaceField(field: String) = field.lastIndexOf(".") match {
    case -1 => "nn" -> field
    case i  => field.substring(i + 1) -> field.substring(0, i).replace('.', '_')
  }

  def reverseNsTypedField(field: String) = {
    if (field == "_all") "allFields"
    else if (field.startsWith("system.") || field.startsWith("content.") || field.startsWith("link.")) field
    else {
      val (ns, typedField) = splitNamespaceField(field)
      "fields." + ns + "." + typedField.replace('.', '_')
    }
  }
}
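To make the namespace-splitting convention concrete, a small illustrative use of NsSplitter (the results follow directly from the code above):

object NsDemo extends NsSplitter

NsDemo.splitNamespaceField("title.dc")            // ("dc", "title"): namespace is whatever follows the last dot
NsDemo.splitNamespaceField("title")               // ("nn", "title"): no namespace
NsDemo.reverseNsTypedField("title.dc")            // "fields.dc.title"
NsDemo.reverseNsTypedField("system.lastModified") // unchanged: "system.lastModified"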
Example 50
Source File: KustoAzureFsSetupCache.scala From azure-kusto-spark with Apache License 2.0 | 5 votes |
package com.microsoft.kusto.spark.utils

import org.joda.time.{DateTime, DateTimeZone, Period}

import scala.collection.mutable.Map

private[kusto] object KustoAzureFsSetupCache {
  private var storageAccountKeyMap: Map[String, String] = Map.empty[String, String]
  private var storageSasMap: Map[String, String] = Map.empty[String, String]
  private var nativeAzureFsSet = false
  private var lastRefresh: DateTime = new DateTime(DateTimeZone.UTC)

  // Return 'true' iff the entry exists in the cache. If it doesn't, or differs - update the cache
  // now is typically 'new DateTime(DateTimeZone.UTC)'
  def updateAndGetPrevStorageAccountAccess(account: String, secret: String, now: DateTime): Boolean = {
    var secretCached = storageAccountKeyMap.getOrElse(account, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageAccountKeyMap.remove(account)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageAccountKeyMap.put(account, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevSas(container: String, account: String, secret: String, now: DateTime): Boolean = {
    val key = container + "." + account
    var secretCached = storageSasMap.getOrElse(key, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageSasMap.remove(key)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageSasMap.put(key, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevNativeAzureFs(now: DateTime): Boolean = {
    if (nativeAzureFsSet || checkIfRefreshNeeded(now)) true
    else {
      nativeAzureFsSet = true
      false
    }
  }

  private[kusto] def checkIfRefreshNeeded(utcNow: DateTime) = {
    new Period(utcNow, lastRefresh).getMinutes > KustoConstants.sparkSettingsRefreshMinutes
  }
}
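A sketch of how a caller might consult the cache before re-applying Hadoop storage settings (the account name and key below are placeholders, and the surrounding configuration step is an assumption):

import org.joda.time.{DateTime, DateTimeZone}

val now = new DateTime(DateTimeZone.UTC)
val alreadyCached = KustoAzureFsSetupCache.updateAndGetPrevStorageAccountAccess(
  "myaccount", "account-key-placeholder", now)
if (!alreadyCached) {
  // first sighting (or a changed secret): apply the Hadoop configuration here
}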
Example 51
Source File: ScheduleSpec.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion

import java.time.format.DateTimeFormatter
import java.time.{DayOfWeek, ZoneId, ZonedDateTime}

import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.WordSpec

class ScheduleSpec extends WordSpec {

  "`with` function of DayOfWeek" should {
    "shift the date to the expected" in {
      val dt = ZonedDateTime.parse("2019-11-20T00:00:00Z")
      assert(
        dt.`with`(DayOfWeek.of(1)) === ZonedDateTime.parse("2019-11-18T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(2)) === ZonedDateTime.parse("2019-11-19T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(3)) === ZonedDateTime.parse("2019-11-20T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(4)) === ZonedDateTime.parse("2019-11-21T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(5)) === ZonedDateTime.parse("2019-11-22T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(6)) === ZonedDateTime.parse("2019-11-23T00:00:00Z") &&
          dt.`with`(DayOfWeek.of(7)) === ZonedDateTime.parse("2019-11-24T00:00:00Z")
      )
    }
  }

  "`with` function of DayOfWeek" should {
    "shift the date same as withDayOfWeek in Joda time" in {
      val dateTimeFormatStr = "yyyy-MM-dd'T'HH:mm:ss"
      val datetimeFormat = DateTimeFormatter.ofPattern(dateTimeFormatStr)
      val javaDt = ZonedDateTime.parse("2019-11-20T00:00:00Z").withZoneSameInstant(ZoneId.of("UTC"))
      val jodaDt = new DateTime("2019-11-20T00:00:00Z").withZone(DateTimeZone.UTC)
      assert(
        javaDt.`with`(DayOfWeek.of(1)).format(datetimeFormat) === jodaDt.withDayOfWeek(1).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(2)).format(datetimeFormat) === jodaDt.withDayOfWeek(2).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(3)).format(datetimeFormat) === jodaDt.withDayOfWeek(3).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(4)).format(datetimeFormat) === jodaDt.withDayOfWeek(4).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(5)).format(datetimeFormat) === jodaDt.withDayOfWeek(5).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(6)).format(datetimeFormat) === jodaDt.withDayOfWeek(6).toString(dateTimeFormatStr) &&
          javaDt.`with`(DayOfWeek.of(7)).format(datetimeFormat) === jodaDt.withDayOfWeek(7).toString(dateTimeFormatStr)
      )
    }
  }

  "withZoneSameLocal" should {
    "be consistent with toDateTime in joda time" in {
      val dateTimeFormatStr = "yyyy-MM-dd'T'HH:mm:ss"
      val datetimeFormat = DateTimeFormatter.ofPattern(dateTimeFormatStr)
      val javaDt = ZonedDateTime.parse("2019-11-18T00:00:00Z").withZoneSameLocal(ZoneId.of("UTC"))
      val jodaDt = new DateTime("2019-11-18T00:00:00Z").toDateTime(DateTimeZone.UTC)
      assert(javaDt.format(datetimeFormat) === jodaDt.toString(dateTimeFormatStr))
    }
  }
}
Example 52
Source File: Rfc3339UtilTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.play.controllers

import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.{FunSpec, MustMatchers}

class Rfc3339UtilTest extends FunSpec with MustMatchers {

  val dtz = DateTimeZone.UTC
  val date = new DateTime(1451911387284L, dtz)

  describe("Rfc3339UtilTest") {
    it("should parse RFC3339 DateTime") {
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.000Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.300Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452-01:00").withZone(dtz).toString mustBe "2007-05-01T16:43:26.345Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+01:00").withZone(dtz).toString mustBe "2007-05-01T14:43:26.345Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.345Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.300Z"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26+00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.000Z"
    }
    it("should parse RFC3339 Date") {
      Rfc3339Util.parseDate("2007-05-01").toString mustBe "2007-05-01"
      Rfc3339Util.parseDate("2008-05-01").toString mustBe "2008-05-01"
      Rfc3339Util.parseDate("2007-08-01").toString mustBe "2007-08-01"
      Rfc3339Util.parseDate("2007-05-08").toString mustBe "2007-05-08"
    }
    it("should write DateTime") {
      Rfc3339Util.writeDateTime(date.withZone(dtz)) mustBe "2016-01-04T12:43:07.284000+0000"
    }
    it("should write Date") {
      Rfc3339Util.writeDate(date.toLocalDate) mustBe "2016-01-04"
    }
  }
}
Example 53
Source File: Dates.scala From play-ui with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.play.views.formatting

import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone, LocalDate}
import play.api.i18n.Lang

object Dates {

  private val MonthNamesInWelsh = Map(
    1 -> "Ionawr", 2 -> "Chwefror", 3 -> "Mawrth", 4 -> "Ebrill",
    5 -> "Mai", 6 -> "Mehefin", 7 -> "Gorffennaf", 8 -> "Awst",
    9 -> "Medi", 10 -> "Hydref", 11 -> "Tachwedd", 12 -> "Rhagfyr")

  private val WeekDaysInWelsh = Map(
    1 -> "Dydd Llun", 2 -> "Dydd Mawrth", 3 -> "Dydd Mercher", 4 -> "Dydd Iau",
    5 -> "Dydd Gwener", 6 -> "Dydd Sadwrn", 7 -> "Dydd Sul")

  private[formatting] val dateFormat =
    DateTimeFormat.forPattern("d MMMM y").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val dateFormatAbbrMonth =
    DateTimeFormat.forPattern("d MMM y").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val shortDateFormat =
    DateTimeFormat.forPattern("yyyy-MM-dd").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val easyReadingDateFormat =
    DateTimeFormat.forPattern("EEEE d MMMM yyyy").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val easyReadingTimestampFormat =
    DateTimeFormat.forPattern("h:mmaa").withZone(DateTimeZone.forID("Europe/London"))

  def formatDate(date: LocalDate) = dateFormat.print(date)

  def formatDateAbbrMonth(date: LocalDate) = dateFormatAbbrMonth.print(date)

  def formatDate(date: Option[LocalDate], default: String) = date match {
    case Some(d) => dateFormat.print(d)
    case None => default
  }

  def formatDateTime(date: DateTime) = dateFormat.print(date)

  def formatEasyReadingTimestamp(date: Option[DateTime], default: String)(implicit lang: Lang) = {
    val englishEasyDate: DateTime => String = d =>
      s"${easyReadingTimestampFormat.print(d).toLowerCase}, ${easyReadingDateFormat.print(d)}"
    val welshEasyDate: DateTime => String = d =>
      s"${easyReadingTimestampFormat.print(d).toLowerCase}, ${WeekDaysInWelsh(d.getDayOfWeek)} ${d.getDayOfMonth} ${MonthNamesInWelsh(d.getMonthOfYear)} ${d.getYear}"
    val formatter = lang.code match {
      case "cy" => welshEasyDate
      case _ => englishEasyDate
    }
    date.fold(default)(formatter)
  }

  def shortDate(date: LocalDate) = shortDateFormat.print(date)

  def formatDays(days: Int) = s"$days day${if (days > 1) "s" else ""}"
}
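Sample renderings from the formatters above (results follow directly from the patterns defined in the object):

import org.joda.time.LocalDate

Dates.formatDate(new LocalDate(2018, 4, 20))          // "20 April 2018"
Dates.formatDateAbbrMonth(new LocalDate(2018, 4, 20)) // "20 Apr 2018"
Dates.shortDate(new LocalDate(2018, 4, 20))           // "2018-04-20"
Dates.formatDays(1)                                   // "1 day"
Dates.formatDays(7)                                   // "7 days"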
Example 54
Source File: LoginStatusSpec.scala From play-ui with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.play.views.layouts

import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.{Matchers, WordSpec}
import play.api.i18n._
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.Helpers.contentAsString
import uk.gov.hmrc.play.views.html.layouts.loginStatus
import play.api.test.Helpers._
import play.api.i18n.Messages.Implicits._

class LoginStatusSpec extends WordSpec with Matchers {

  implicit val application = new GuiceApplicationBuilder()
    .configure(Map("play.i18n.langs" -> List("en", "cy")))
    .build()

  "The loginStatus" should {
    val userName = "Ivor"
    val previouslyLoggedInAt = new DateTime(2018, 4, 20, 16, 20, 0, 0, DateTimeZone.forID("Europe/London"))

    "show the first login message in English" in {
      implicit val lang = Lang("en")
      val content = contentAsString(loginStatus(userName, None, "logoutUrl"))
      content should include("Ivor, this is the first time you have logged in")
    }

    "show the first login message in Welsh" in {
      implicit val lang = Lang("cy")
      val content = contentAsString(loginStatus(userName, None, "logoutUrl"))
      content should include("Ivor, dyma’r tro cyntaf i chi fewngofnodi")
    }

    "show the previous login message in English" in {
      implicit val lang = Lang("en")
      val content = contentAsString(loginStatus(userName, Some(previouslyLoggedInAt), "logoutUrl"))
      content should include("Ivor, you last signed in 4:20pm, Friday 20 April 2018")
    }

    "show the previous login message in Welsh (with the day and month in Welsh)" in {
      implicit val lang = Lang("cy")
      val content = contentAsString(loginStatus(userName, Some(previouslyLoggedInAt), "logoutUrl"))
      content should include("Ivor, y tro diwethaf i chi fewngofnodi oedd 4:20pm, Dydd Gwener 20 Ebrill 2018")
    }
  }
}
Example 55
Source File: RestFormats.scala From http-verbs with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.http.controllers

import org.joda.time.format.ISODateTimeFormat
import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime}
import play.api.libs.json._

import scala.util.Try

object RestFormats extends RestFormats

trait RestFormats {

  private val dateTimeFormat = ISODateTimeFormat.dateTime.withZoneUTC
  private val localDateRegex = """^(\d\d\d\d)-(\d\d)-(\d\d)$""".r

  implicit val localDateTimeRead: Reads[LocalDateTime] = new Reads[LocalDateTime] {
    override def reads(json: JsValue): JsResult[LocalDateTime] =
      json match {
        case JsString(s) =>
          Try {
            JsSuccess(new LocalDateTime(dateTimeFormat.parseDateTime(s), DateTimeZone.UTC))
          }.getOrElse {
            JsError(s"Could not parse $s as a DateTime with format ${dateTimeFormat.toString}")
          }
        case _ => JsError(s"Expected value to be a string, was actually $json")
      }
  }

  implicit val localDateTimeWrite: Writes[LocalDateTime] = new Writes[LocalDateTime] {
    def writes(dateTime: LocalDateTime): JsValue =
      JsString(dateTimeFormat.print(dateTime.toDateTime(DateTimeZone.UTC)))
  }

  implicit val dateTimeRead: Reads[DateTime] = new Reads[DateTime] {
    override def reads(json: JsValue): JsResult[DateTime] =
      json match {
        case JsString(s) =>
          Try {
            JsSuccess(dateTimeFormat.parseDateTime(s))
          }.getOrElse {
            JsError(s"Could not parse $s as a DateTime with format ${dateTimeFormat.toString}")
          }
        case _ => JsError(s"Expected value to be a string, was actually $json")
      }
  }

  implicit val dateTimeWrite: Writes[DateTime] = new Writes[DateTime] {
    def writes(dateTime: DateTime): JsValue = JsString(dateTimeFormat.print(dateTime))
  }

  implicit val localDateRead: Reads[LocalDate] = new Reads[LocalDate] {
    override def reads(json: JsValue): JsResult[LocalDate] =
      json match {
        case JsString(s @ localDateRegex(y, m, d)) =>
          Try {
            JsSuccess(new LocalDate(y.toInt, m.toInt, d.toInt))
          }.getOrElse {
            JsError(s"$s is not a valid date")
          }
        case JsString(s) => JsError(s"Cannot parse $s as a LocalDate")
        case _           => JsError(s"Expected value to be a string, was actually $json")
      }
  }

  implicit val localDateWrite: Writes[LocalDate] = new Writes[LocalDate] {
    def writes(date: LocalDate): JsValue =
      JsString("%04d-%02d-%02d".format(date.getYear, date.getMonthOfYear, date.getDayOfMonth))
  }

  implicit val dateTimeFormats = Format(dateTimeRead, dateTimeWrite)
  implicit val localDateTimeFormats = Format(localDateTimeRead, localDateTimeWrite)
  implicit val localDateFormats = Format(localDateRead, localDateWrite)
}
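For illustration, a round-trip through the implicit formats above (a sketch; the epoch value is arbitrary):

import org.joda.time.{DateTime, DateTimeZone}
import play.api.libs.json.Json
import uk.gov.hmrc.http.controllers.RestFormats._

val dt = new DateTime(0L, DateTimeZone.UTC)
val js = Json.toJson(dt)    // JsString("1970-01-01T00:00:00.000Z")
val back = js.as[DateTime]  // parsed back via dateTimeRead, in UTC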
Example 56
Source File: RestFormatsSpec.scala From http-verbs with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.http.controllers

import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime}
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{JsSuccess, _}

class RestFormatsSpec extends AnyWordSpecLike with Matchers {

  "localDateTimeRead" should {
    "return a LocalDateTime for correctly formatted JsString" in {
      val testDate = new LocalDateTime(0)
      val jsValue = RestFormats.localDateTimeWrite.writes(testDate)

      val JsSuccess(result, _) = RestFormats.localDateTimeRead.reads(jsValue)
      result shouldBe testDate
    }

    "return a JsError for a json value that is not a JsString" in {
      RestFormats.localDateTimeRead.reads(Json.obj()) shouldBe a[JsError]
    }

    "return a JsError for a JsString that is not a well-formatted date" in {
      RestFormats.localDateTimeRead.reads(JsString("not a valid date")) shouldBe a[JsError]
    }
  }

  "dateTimeRead" should {
    "return a DateTime in zone UTC for correctly formatted JsString" in {
      val testDate = new DateTime(0)
      val jsValue = RestFormats.dateTimeWrite.writes(testDate)

      val JsSuccess(result, _) = RestFormats.dateTimeRead.reads(jsValue)
      result shouldBe testDate.withZone(DateTimeZone.UTC)
    }

    "return a JsError for a json value that is not a JsString" in {
      RestFormats.dateTimeRead.reads(Json.obj()) shouldBe a[JsError]
    }

    "return a JsError for a JsString that is not a well-formatted date" in {
      RestFormats.dateTimeRead.reads(JsString("not a valid date")) shouldBe a[JsError]
    }
  }

  "localDateRead" should {
    "return a LocalDate in zone UTC for correctly formatted JsString" in {
      val json = JsString("1994-05-01")
      val expectedDate = new LocalDate(1994, 5, 1)

      val JsSuccess(result, _) = RestFormats.localDateRead.reads(json)
      result shouldBe expectedDate
    }

    "return a JsError for a json value that is not a JsString" in {
      RestFormats.localDateRead.reads(Json.obj()) shouldBe a[JsError]
    }

    "return a JsError for a JsString that is not a well-formatted date" in {
      RestFormats.localDateRead.reads(JsString("not a valid date")) shouldBe a[JsError]
    }

    "return a JsError for a JsString that is well formatted but has bad values" in {
      RestFormats.localDateRead.reads(JsString("1994-13-32")) shouldBe a[JsError]
    }
  }
}
Example 57
Source File: Example3.scala From sfseize with Apache License 2.0 | 5 votes |
package org.eichelberger.sfc.examples.quickstart object Example3 extends App { import org.eichelberger.sfc._ import org.eichelberger.sfc.Dimensions._ import org.eichelberger.sfc.SpaceFillingCurve._ import org.joda.time.{DateTimeZone, DateTime} // create the dimensions that can manage user-space val x = DefaultDimensions.createLongitude(18) // ~153 meters per cell (@ equator) val y = DefaultDimensions.createLatitude(17) // ~153 meters per cell val t = DefaultDimensions.createNearDateTime( new DateTime(1970, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC")), new DateTime(2010, 12, 31, 23, 59, 59, DateTimeZone.forID("UTC")), 20 ) // compose the curve with dimensions val curve = new ComposedCurve( RowMajorCurve(20, 35), Seq( t, new ComposedCurve( CompactHilbertCurve(18, 17), Seq(x, y) ) ) ) // hashing points in user space val point = Seq( new DateTime(1998, 4, 7, 21, 15, 11, DateTimeZone.forID("UTC")), // t -78.49, // x 38.04 // y ) val hash = curve.pointToHash(point) println(s"$point -> $hash") // fetching user-space cells from hash value val cell = curve.hashToCell(hash) println(s"$cell <- $hash") // identify the top-level index-ranges that cover a query val query = Cell(Seq( DefaultDimensions.createNearDateTime( new DateTime(1998, 6, 15, 0, 0, 0, DateTimeZone.forID("UTC")), new DateTime(1998, 7, 15, 23, 59, 59, DateTimeZone.forID("UTC")), 0 ), DefaultDimensions.createDimension("x", -80.0, -79.0, 0), DefaultDimensions.createDimension("y", 38.0, 39.0, 0) )) val ranges = curve.getRangesCoveringCell(query).toList println(s"Number of ranges: ${ranges.size}") val totalCells = ranges.map(_.size).sum println(s"Total cells in ranges: $totalCells") }
Example 58
Source File: package.scala From pureconfig with Mozilla Public License 2.0 | 5 votes |
package pureconfig.module

import org.joda.time.{ DateTimeZone, Duration, Instant, Interval }
import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter }
import pureconfig.{ ConfigConvert, ConfigReader }
import pureconfig.ConfigConvert.{ catchReadError, viaNonEmptyString }

package object joda {
  implicit def instantConfigConvert: ConfigConvert[Instant] =
    ConfigConvert[Long].xmap(new Instant(_), _.getMillis)

  implicit def intervalConfigConvert: ConfigConvert[Interval] =
    viaNonEmptyString[Interval](catchReadError(Interval.parseWithOffset), _.toString)

  implicit def durationConfigConvert: ConfigConvert[Duration] =
    viaNonEmptyString[Duration](catchReadError(Duration.parse), _.toString)

  implicit def dateTimeFormatterConfigConvert: ConfigReader[DateTimeFormatter] =
    ConfigReader.fromNonEmptyString[DateTimeFormatter](catchReadError(DateTimeFormat.forPattern))

  implicit def dateTimeZoneConfigConvert: ConfigConvert[DateTimeZone] =
    viaNonEmptyString[DateTimeZone](catchReadError(DateTimeZone.forID), _.getID)
}
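A usage sketch, assuming a PureConfig version that exposes ConfigSource (0.12+) and automatic derivation via pureconfig.generic.auto; the config key and zone ID below are illustrative:

import org.joda.time.DateTimeZone
import pureconfig.generic.auto._
import pureconfig.module.joda._

case class AppConf(zone: DateTimeZone)

// assumes the ConfigSource API introduced in PureConfig 0.12
val conf = pureconfig.ConfigSource
  .string("""{ zone = "Europe/Helsinki" }""")
  .loadOrThrow[AppConf]
conf.zone.getID // "Europe/Helsinki"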
Example 59
Source File: MLFlowModelSpec.scala From ForestFlow with Apache License 2.0 | 5 votes |
package ai.forestflow.serving.MLFlow

import ai.forestflow.serving.impl.{LocalFileArtifactReader, MLFlowH2OLoader, UnsupportedServableFlavor}
import ai.forestflow.serving.interfaces.{ArtifactReader, Loader}
import ai.forestflow.utils.SourceStorageProtocols
import ai.forestflow.utils.ThrowableImplicits._
import cats.syntax.either._
import io.circe.generic.extras._
import io.circe.generic.extras.semiauto.deriveDecoder
import io.circe.{CursorOp, Decoder, DecodingFailure}
import org.joda.time.{DateTimeZone, LocalDateTime}

case class MLFlowModelSpec(
  artifactReader: ArtifactReader,
  runId: Option[String],
  timeCreated: Long,
  flavors: Map[String, Loader]
) {
  def getServableFlavor: Option[(String, Loader)] =
    flavors.collectFirst { case (flavor, loader) if !loader.isInstanceOf[UnsupportedServableFlavor] => (flavor, loader) }
}

object MLFlowModelSpec {

  implicit val config: Configuration = {
    val baseConfig = Configuration.default.withSnakeCaseMemberNames
    baseConfig.copy(transformMemberNames = baseConfig.transformMemberNames andThen {
      // from snake_case in class to snake_case file
      case "artifact_reader" => "artifact_path"
      case "time_created"    => "utc_time_created" // utc_time_created is a string!
      case other             => other
    })
  }

  implicit val decodeTimeCreated: Decoder[Long] = Decoder.decodeString.emap { tm: String =>
    Either.catchNonFatal[Long]({
      var ts = tm.replace(" ", "T")
      if (ts.takeRight(1) != "Z") ts = ts + "Z"
      val ll = LocalDateTime.parse(tm.replace(" ", "T")).toDateTime(DateTimeZone.UTC)
      ll.getMillis
    }).leftMap(t => s"timeCreated Decoder Failed: ${t.printableStackTrace}")
  }.handleErrorWith(_ => Decoder.decodeLong)

  implicit val decodeMLFlowModel: Decoder[MLFlowModelSpec] = deriveDecoder[MLFlowModelSpec]

  implicit val decodeArtifactReaderString: Decoder[ArtifactReader] = Decoder.decodeOption[String].emap { artifactPath: Option[String] =>
    Either.catchNonFatal[ArtifactReader]({
      artifactPath match {
        case Some(path) => ArtifactReader.getArtifactReader(path)
        case _          => LocalFileArtifactReader("")
      }
    }).leftMap(t => s"Artifact Reader Decoder Failed: ${t.printableStackTrace}")
  }

  implicit val decodeServableFlavor: Decoder[Map[String, Loader]] = Decoder.decodeMap[String, Map[String, String]].emap { flavors =>
    Either.catchNonFatal[Map[String, Loader]](
      flavors
        .map { case (flavor, props) => (flavor.toLowerCase, props) }
        .map {
          case (f @ "h2o_mojo", props) => f -> MLFlowH2OLoader(dataPath = props.getOrElse("data", ""), version = props.get("h2o_version"))
          case (f, props)              => f -> UnsupportedServableFlavor(props)
          // Unreachable after the catch-all case above; retained from the original extract as a comment:
          // case (f, props) => throw DecodingFailure(s"Unexpected or unsupported flavor type [$f] with props $props", List[CursorOp]())
          // TODO: Support POJO?
          // case (f, _) => p -> BasicSourceProvider()
        }
    ).leftMap(t => t.printableStackTrace)
  }
}
Example 60
Source File: GoogleCalendarMappers.scala From HAT2.0 with GNU Affero General Public License v3.0 | 5 votes |
package org.hatdex.hat.she.mappers

import java.util.UUID

import org.hatdex.hat.api.models.{ EndpointQuery, EndpointQueryFilter, FilterOperator, PropertyQuery }
import org.hatdex.hat.api.models.applications.{ DataFeedItem, DataFeedItemContent, DataFeedItemLocation, DataFeedItemTitle, LocationAddress }
import org.joda.time.{ DateTime, DateTimeZone }
import play.api.libs.json.{ JsValue, Json }

import scala.util.Try

class GoogleCalendarMapper extends DataEndpointMapper {
  override protected val dataDeduplicationField: Option[String] = Some("id")

  def dataQueries(fromDate: Option[DateTime], untilDate: Option[DateTime]): Seq[PropertyQuery] = {
    val eventDateTimePropertyQuery = PropertyQuery(
      List(
        EndpointQuery("calendar/google/events", None, Some(Seq(
          dateFilter(fromDate, untilDate).map(f ⇒ EndpointQueryFilter("start.dateTime", None, f))).flatten), None)),
      Some("start.dateTime"), Some("descending"), None)

    val dateOnlyFilter = if (fromDate.isDefined) {
      Some(FilterOperator.Between(
        Json.toJson(fromDate.map(_.toString("yyyy-MM-dd"))),
        Json.toJson(untilDate.map(_.toString("yyyy-MM-dd")))))
    } else {
      None
    }

    val eventDatePropertyQuery = PropertyQuery(
      List(
        EndpointQuery("calendar/google/events", None, Some(Seq(
          dateOnlyFilter.map(f ⇒ EndpointQueryFilter("start.date", None, f))).flatten), None)),
      Some("start.date"), Some("descending"), None)

    Seq(eventDateTimePropertyQuery, eventDatePropertyQuery)
  }

  def cleanHtmlTags(input: String): String = {
    input.replaceAll("<br/?>", "\n")
      .replaceAll("&nbsp;", " ")
      .replaceAll("<a [^>]*>([^<]*)</a>", "$1")
  }

  def mapDataRecord(recordId: UUID, content: JsValue, tailRecordId: Option[UUID] = None, tailContent: Option[JsValue] = None): Try[DataFeedItem] = {
    for {
      startDate ← Try((content \ "start" \ "dateTime").asOpt[DateTime]
        .getOrElse((content \ "start" \ "date").as[DateTime])
        .withZone((content \ "start" \ "timeZone").asOpt[String]
          .flatMap(z ⇒ Try(DateTimeZone.forID(z)).toOption)
          .getOrElse(DateTimeZone.getDefault)))
      endDate ← Try((content \ "end" \ "dateTime").asOpt[DateTime]
        .getOrElse((content \ "end" \ "date").as[DateTime])
        .withZone((content \ "end" \ "timeZone").asOpt[String]
          .flatMap(z ⇒ Try(DateTimeZone.forID(z)).toOption)
          .getOrElse(DateTimeZone.getDefault)))
      timeIntervalString ← Try(eventTimeIntervalString(startDate, Some(endDate)))
      itemContent ← Try(DataFeedItemContent((content \ "description").asOpt[String].map(cleanHtmlTags), None, None, None))
      location ← Try(DataFeedItemLocation(
        geo = None,
        address = (content \ "location").asOpt[String].map(l ⇒ LocationAddress(None, None, Some(l), None, None)), // TODO integrate with geocoding API for full location information?
        tags = None))
    } yield {
      val title = DataFeedItemTitle(s"${(content \ "summary").as[String]}",
        Some(s"${timeIntervalString._1} ${timeIntervalString._2.getOrElse("")}"), Some("event"))
      val loc = Some(location).filter(l ⇒ l.address.isDefined || l.geo.isDefined || l.tags.isDefined)
      DataFeedItem("google", startDate, Seq("event"), Some(title), Some(itemContent), loc)
    }
  }
}
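The three rewrites inside cleanHtmlTags can be seen on a small illustrative input:

// the same three rewrites that cleanHtmlTags applies, in order:
"Agenda<br/>&nbsp;see <a href=\"http://example.com\">the doc</a>"
  .replaceAll("<br/?>", "\n")
  .replaceAll("&nbsp;", " ")
  .replaceAll("<a [^>]*>([^<]*)</a>", "$1")
// => "Agenda\n see the doc"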
Example 61
Source File: DashcoinSpout.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.examples.blockchain.spouts

import com.raphtory.core.components.Spout.SpoutTrait
import com.raphtory.examples.blockchain.LitecoinTransaction
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
import scalaj.http.Http
import scalaj.http.HttpRequest
import spray.json._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps

class DashcoinSpout extends SpoutTrait {

  var blockcount    = 1
  val rpcuser       = System.getenv().getOrDefault("DASHCOIN_USERNAME", "").trim
  val rpcpassword   = System.getenv().getOrDefault("DASHCOIN_PASSWORD", "").trim
  val serverAddress = System.getenv().getOrDefault("DASHCOIN_NODE", "").trim
  val id            = "scala-jsonrpc"
  val baseRequest   = Http(serverAddress).auth(rpcuser, rpcpassword).header("content-type", "text/plain")

  //************* MESSAGE HANDLING BLOCK
  override def ProcessSpoutTask(message: Any): Unit = message match {
    case StartSpout   => AllocateSpoutTask(Duration(1, MILLISECONDS), "parseBlock")
    case "parseBlock" => running()
    case _            => println("message not recognized!")
  }

  def running(): Unit =
    try {
      for (i <- 1 to 10) {
        getTransactions()
        blockcount += 1
        if (blockcount % 1000 == 0) println(s"Parsed block $blockcount at ${DateTime.now(DateTimeZone.UTC).getMillis}")
      }
      AllocateSpoutTask(Duration(1, NANOSECONDS), "parseBlock")
    } catch {
      case e: java.net.SocketTimeoutException => // swallow the timeout; the next scheduled task retries
      case e: spray.json.DeserializationException => AllocateSpoutTask(Duration(10, SECONDS), "parseBlock")
    }

  def getTransactions(): Unit = {
    val re        = request("getblockhash", blockcount.toString).execute().body.toString.parseJson.asJsObject
    val blockID   = re.fields("result")
    val blockData = request("getblock", s"$blockID,2").execute().body.toString.parseJson.asJsObject
    val result    = blockData.fields("result")
    val time      = result.asJsObject.fields("time")
    for (transaction <- result.asJsObject().fields("tx").asInstanceOf[JsArray].elements)
      sendTuple(LitecoinTransaction(time, blockcount, blockID, transaction))
    //val time = transaction.asJsObject.fields("time")
  }

  def request(command: String, params: String = ""): HttpRequest =
    baseRequest.postData(s"""{"jsonrpc": "1.0", "id":"$id", "method": "$command", "params": [$params] }""")
}
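For reference, the request helper posts a Bitcoin-style JSON-RPC 1.0 body; the calls below are hypothetical (a real run needs DASHCOIN_NODE, DASHCOIN_USERNAME and DASHCOIN_PASSWORD set):

// request("getblockhash", "1200") posts:
//   {"jsonrpc": "1.0", "id":"scala-jsonrpc", "method": "getblockhash", "params": [1200] }
// getTransactions() then fetches the block with verbosity 2, so transactions come inlined:
//   {"jsonrpc": "1.0", "id":"scala-jsonrpc", "method": "getblock", "params": ["<block hash>",2] }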
Example 62
Source File: SparqlTriggeredProcessorMonitor.scala From CM-Well with Apache License 2.0 | 5 votes |
package actions

import com.typesafe.scalalogging.LazyLogging
import k.grid.{ Grid, WhoAreYou, WhoIAm }
import akka.pattern._
import akka.util.Timeout
import cmwell.ctrl.checkers.StpChecker.{ RequestStats, ResponseStats, Row, Table }
import cmwell.domain.{ FileContent, FileInfoton, SystemFields, VirtualInfoton }
import cmwell.ws.Settings
import org.joda.time.{ DateTime, DateTimeZone }

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object SparqlTriggeredProcessorMonitor extends LazyLogging {
  implicit val timeout = Timeout(30.seconds)

  def getAddress =
    (stpManager ? WhoAreYou)
      .mapTo[WhoIAm]
      .map(_.address)
      .recover { case err: Throwable => "NA" }

  def stpManager = Grid.serviceRef("sparql-triggered-processor-manager")

  def jobsDataToTuple(lines: Iterable[Row]) =
    for (line <- lines) yield MarkdownTuple(line.toSeq: _*)

  def generateTables(path: String, dc: String, isAdmin: Boolean): Future[Option[VirtualInfoton]] = {
    val jobsDataFuture = (stpManager ? RequestStats(isAdmin))
      .mapTo[ResponseStats]
      .map { case ResponseStats(tables) => tables }

    val future = for {
      address <- getAddress
      tables  <- jobsDataFuture
    } yield {
      val title =
        s"""
           |# Sparql Triggered Processor Monitor<br>
           |## Current host: $address <br>
           """.stripMargin

      val tablesFormattedData = tables.map { table =>
        val mdTable = MarkdownTable(
          header = MarkdownTuple(table.header.toSeq: _*),
          body = jobsDataToTuple(table.body).toSeq
        )
        s"""
           |${table.title.mkString("### ", "<br>\n### ", "<br>")}
           |${mdTable.get} <hr>""".stripMargin
      }

      title + "\n\n" + tablesFormattedData.mkString("\n\n")
    }

    future.map { content =>
      Some(
        VirtualInfoton(
          FileInfoton(
            SystemFields(path, new DateTime(DateTimeZone.UTC), "VirtualInfoton", dc, None, "", "http"),
            None,
            content = Some(FileContent(content.getBytes("utf-8"), "text/x-markdown"))))
      )
    }
  }
}
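For orientation, a sketch of the MarkdownTable/MarkdownTuple helpers as they are used above (the data is invented; the helpers live elsewhere in the CM-Well web service code):

val mdTable = MarkdownTable(
  header = MarkdownTuple("Path", "Sensor", "Received Infotons"),
  body = Seq(MarkdownTuple("/example/stp/job-1", "sensor-A", "42"))
)
// mdTable.get renders the rows that end up in the virtual infoton's text/x-markdown content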
Example 63
Source File: ActiveInfotonHandler.scala From CM-Well with Apache License 2.0 | 5 votes |
package actions

import cmwell.domain._
import cmwell.ws.Settings
import org.joda.time.{ DateTime, DateTimeZone }
import wsutil.StringExtensions

object ActiveInfotonHandler {
  // private[this] val activeInfotons = Set[String]("/proc","/proc/node")

  def wrapInfotonReply(infoton: Option[Infoton]): Option[Infoton] = infoton match {
    case Some(i) if requiresWrapping(i.systemFields.path) => Some(wrap(i))
    case i => i
  }

  private[this] def requiresWrapping(path: String): Boolean = path match {
    case "/" => true
    case p if p.startsWith("/proc/") => true
    case p if p.dropTrailingChars('/') == "/meta/ns" => true
    case _ => false
  }

  import scala.language.implicitConversions
  import VirtualInfoton._

  // todo why do we have to invoke v2i explicitly if it's an implicit def ?!
  private[this] def wrap(infoton: Infoton): Infoton = infoton match {
    case cp @ CompoundInfoton(SystemFields("/", _, _, _, _, _, _), _, children, _, length, total) =>
      cp.copy(
        children = v2i(VirtualInfoton(ObjectInfoton(SystemFields("/proc", new DateTime(DateTimeZone.UTC), "VirtualInfoton", Settings.dataCenter, None, "", "http")))) +: children,
        total = total + 1,
        length = length + 1)
    case cp @ CompoundInfoton(SystemFields("/meta/ns", _, _, _, _, _, _), _, children, _, length, total) =>
      cp.copy(
        children = v2i(VirtualInfoton(ObjectInfoton(SystemFields("/meta/ns/sys", new DateTime(DateTimeZone.UTC), "VirtualInfoton", Settings.dataCenter, None, "", "http")))) +:
          v2i(VirtualInfoton(ObjectInfoton(SystemFields("/meta/ns/nn", new DateTime(DateTimeZone.UTC), "VirtualInfoton", Settings.dataCenter, None, "", "http")))) +: children,
        total = total + 2,
        length = length + 2)
    case i => i
  }

  // def isActiveInfotonPath(path: String): Boolean = activeInfotons.contains(path)
}
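The wrapping behavior, sketched (rootListing is hypothetical; constructing a real CompoundInfoton needs more of cmwell.domain than shown here):

// A listing of "/" gains a virtual "/proc" child (length and total grow by 1);
// "/meta/ns" gains "/meta/ns/sys" and "/meta/ns/nn"; any other path passes through unchanged.
val wrapped = ActiveInfotonHandler.wrapInfotonReply(Some(rootListing))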