org.joda.time.format.DateTimeFormat Scala Examples
The following examples show how to use org.joda.time.format.DateTimeFormat.
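Before the project examples, here is a minimal, self-contained sketch of the core DateTimeFormat API that most of them rely on: building a formatter from a pattern, printing a DateTime, and parsing one back. The pattern and values below are illustrative only.

import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}

object DateTimeFormatBasics extends App {
  // Build an immutable, thread-safe formatter from a pattern, pinned to UTC.
  val fmt: DateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC()

  // Printing: DateTime -> String
  val printed: String = fmt.print(new DateTime(2016, 3, 1, 12, 30, 0, 0, DateTimeZone.UTC)) // "2016-03-01 12:30:00"

  // Parsing: String -> DateTime
  val parsed: DateTime = fmt.parseDateTime("2016-03-01 12:30:00")

  println(printed + " / " + parsed)
}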
Example 1
Source File: DateTimeTools.scala From pertax-frontend with Apache License 2.0 | 9 votes |
package util

import com.google.inject.{Inject, Singleton}
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
import org.joda.time.{DateTime, _}
import play.api.Logger
import uk.gov.hmrc.time.CurrentTaxYear

import scala.util.{Failure, Success, Try}
import java.time.{LocalDateTime => JavaLDT}

object DateTimeTools extends CurrentTaxYear {

  //Timezone causing problem on dev server
  val defaultTZ = DateTimeZone.forID("Europe/London")
  val unixDateFormat = "yyyy-MM-dd"
  val unixDateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss"
  val humanDateFormat = "dd MMMMM yyyy"

  //Returns for example 1516 in March 2016
  def previousAndCurrentTaxYear = previousAndCurrentTaxYearFromGivenYear(current.currentYear)

  def previousAndCurrentTaxYearFromGivenYear(year: Int) = {
    def y = year
    (y - 1).toString.takeRight(2) + (y).toString.takeRight(2)
  }

  private def formatter(pattern: String): DateTimeFormatter = DateTimeFormat.forPattern(pattern).withZone(defaultTZ)

  def short(dateTime: LocalDate) = formatter("dd/MM/yyy").print(dateTime)

  def asHumanDateFromUnixDate(unixDate: String): String =
    Try(DateTimeFormat.forPattern(humanDateFormat).print(DateTime.parse(unixDate))) match {
      case Success(v) => v
      case Failure(e) => {
        Logger.warn("Invalid date parse in DateTimeTools.asHumanDateFromUnixDate: " + e)
        unixDate
      }
    }

  def toPaymentDate(dateTime: JavaLDT): LocalDate =
    new LocalDate(dateTime.getYear, dateTime.getMonthValue, dateTime.getDayOfMonth)

  override def now: () => DateTime = DateTime.now
}

@Singleton
class DateTimeTools @Inject()() {

  def showSendTaxReturnByPost = {
    val start = new DateTime(s"${DateTime.now().getYear}-11-01T00:00:00Z")
    val end = new DateTime(s"${DateTime.now().getYear + 1}-01-31T23:59:59Z")
    !DateTime.now().isAfter(start) && DateTime.now().isBefore(end)
  }
}
Example 2
Source File: WarcHeaders.scala From ArchiveSpark with MIT License | 6 votes |
package org.archive.archivespark.sparkling.warc

import java.nio.charset.Charset
import java.util.UUID

import org.archive.archivespark.sparkling.Sparkling
import org.archive.archivespark.sparkling.util.DigestUtil
import org.joda.time.Instant
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter, ISODateTimeFormat}

object WarcHeaders {
  val UTF8: Charset = Charset.forName(Sparkling.DefaultCharset)
  val ArcDateTimeFormat: DateTimeFormatter = DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC
  val WarcDateTimeFormat: DateTimeFormatter = ISODateTimeFormat.dateTimeNoMillis

  val Br = "\r\n"

  def arcFile(info: WarcFileMeta, filename: String): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("filedesc://")
    header.append(filename)
    header.append(" 0.0.0.0 ")
    header.append(ArcDateTimeFormat.print(info.created))
    header.append(" text/plain ")

    val headerBody = StringBuilder.newBuilder
    // Internet Archive: Name of gathering organization with no white space (http://archive.org/web/researcher/ArcFileFormat.php)
    headerBody.append("1 0 " + info.publisher.replace(" ", "")).append(Br)
    headerBody.append("URL IP-address Archive-date Content-type Archive-length").append(Br)

    val headerBodyStr: String = headerBody.toString
    val headerBodyBlob: Array[Byte] = headerBodyStr.getBytes(UTF8)

    header.append(headerBodyBlob.length).append(Br)
    header.append(headerBodyStr).append(Br)

    header.toString().getBytes(UTF8)
  }

  def warcFile(meta: WarcFileMeta, filename: String): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("WARC/1.0").append(Br)
    header.append("WARC-Type: warcinfo").append(Br)
    header.append("WARC-Date: " + WarcDateTimeFormat.print(Instant.now)).append(Br)
    header.append("WARC-Filename: " + filename).append(Br)
    header.append("WARC-Record-ID: " + newRecordID()).append(Br)
    header.append("Content-Type: application/warc-fields").append(Br)

    val headerBody = StringBuilder.newBuilder
    headerBody.append("software: " + meta.software).append(Br)
    headerBody.append("format: WARC File Format 1.0").append(Br)
    headerBody.append("conformsTo: http://bibnum.bnf.fr/WARC/WARC_ISO_28500_version1_latestdraft.pdf").append(Br)
    headerBody.append("publisher: " + meta.publisher).append(Br)
    headerBody.append("created: " + WarcDateTimeFormat.print(meta.created)).append(Br)
    headerBody.append(Br * 3)

    val headerBodyStr = headerBody.toString()
    val headerBodyBlob = headerBodyStr.getBytes(UTF8)

    header.append("Content-Length: " + headerBodyBlob.length).append(Br)
    header.append(Br)
    header.append(headerBodyStr)

    header.toString().getBytes(UTF8)
  }

  def warcResponseRecord(meta: WarcRecordMeta, content: Array[Byte], payload: Array[Byte]): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("WARC/1.0").append(Br)
    header.append("WARC-Type: response").append(Br)
    header.append("WARC-Target-URI: " + meta.url).append(Br)
    header.append("WARC-Date: " + WarcDateTimeFormat.print(meta.timestamp)).append(Br)
    header.append("WARC-Payload-Digest: sha1:" + DigestUtil.sha1Base32(payload)).append(Br)
    if (meta.ip.isDefined) header.append("WARC-IP-Address: " + meta.ip.get).append(Br)
    header.append("WARC-Record-ID: " + meta.recordId.getOrElse(newRecordID())).append(Br)
    header.append("Content-Type: application/http; msgtype=response").append(Br)
    header.append("Content-Length: " + content.length).append(Br)
    header.append(Br)

    header.toString().getBytes(UTF8)
  }

  def http(statusLine: String, headers: Map[String, String]): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append(statusLine).append(Br)
    for ((key, value) <- headers) {
      header.append(s"$key: $value").append(Br)
    }
    header.append(Br)
    header.toString().getBytes(UTF8)
  }

  private def newRecordID(): String = "<urn:uuid:" + UUID.randomUUID() + ">"
}
Example 3
Source File: Config.scala From opencv-darts with GNU General Public License v3.0 | 5 votes |
package darts

import org.joda.time.format.DateTimeFormat

import scala.xml.{NodeSeq, XML}
import java.io.File

import org.bytedeco.javacpp.opencv_core.{Mat, Point, Scalar}

class Config(val id: String) {
  def int(path: String): Int = Config.int(id, path)
  def bool(path: String): Boolean = Config.bool(id, path)
  def str(path: String): String = Config.str(id, path)
}

object Config {
  val Cyan = new Scalar(255, 255, 0, 0)
  val Blue = new Scalar(255, 100, 0, 0)
  val Purple = new Scalar(255, 0, 255, 0)
  val Yellow = new Scalar(0, 255, 255, 0)
  val Red = new Scalar(0, 0, 255, 0)
  val Green = new Scalar(0, 255, 0, 0)
  val Black = new Scalar(0, 0, 0, 0)
  val BlackMat = new Mat(Black)
  val WhiteMat = new Mat(new Scalar(255, 255, 255, 0))

  val conversion = 1f
  val nums = List(6, 13, 4, 18, 1, 20, 5, 12, 9, 14, 11, 8, 16, 7, 19, 3, 17, 2, 15, 10).map(_ / conversion)
  val distancesFromBull = Array(14, 28, 174, 192, 284, 300).map(_ / conversion)
  val bull = new Point((400 / conversion).toInt, (400 / conversion).toInt)

  val timeFormatter = DateTimeFormat.forPattern("Y-MMM-d_H-mm_ss-SS");

  val file = new File("config.xml")
  if (!file.isFile) {
    println("config.xml not found. To start with, copy config-sample.xml from the project.")
    System.exit(2)
  }
  var lastModified = file.lastModified()
  var xml = XML.loadFile("config.xml")
  var x: NodeSeq = xml \\ "DartsConfig"

  def camRoot(id: String): NodeSeq = {
    if (file.lastModified() != lastModified) {
      try {
        xml = XML.loadFile("config.xml")
      } catch {
        case e: Exception => {
          Thread.sleep(20)
          xml = XML.loadFile("config.xml")
        }
      }
      x = xml \\ "DartsConfig"
      lastModified = file.lastModified
      println("config reloaded")
    }
    (x \\ "camera").filter(n => (n \ "@camId").text == id)
  }

  def int(path: String): Int = (path.split("/").foldLeft(x)((root, key) => root \\ key)).text.toInt
  def bool(path: String): Boolean = (path.split("/").foldLeft(x)((root, key) => root \\ key)).text.toInt == 1
  def str(path: String): String = (path.split("/").foldLeft(x)((root, key) => root \\ key)).text
  def float(path: String): Float = (path.split("/").foldLeft(x)((root, key) => root \\ key)).text.toFloat

  def int(id: String, path: String): Int = {
    val cr = camRoot(id)
    val x = (path.split("/").foldLeft(cr)((root, key) => root \\ key))
    x.text.toInt
  }
  def bool(id: String, path: String): Boolean = (path.split("/").foldLeft(camRoot(id))((root, key) => root \\ key)).text.toInt == 1
  def str(id: String, path: String): String = (path.split("/").foldLeft(camRoot(id))((root, key) => root \\ key)).text

  def getConfig(id: String): Config = new Config(id)
}
Example 4
Source File: TypedBigQueryIT.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.bigquery

import com.google.protobuf.ByteString
import com.spotify.scio._
import com.spotify.scio.bigquery.client.BigQuery
import com.spotify.scio.testing._
import magnolify.scalacheck.auto._
import org.apache.beam.sdk.options.PipelineOptionsFactory
import org.joda.time.format.DateTimeFormat
import org.joda.time.{Instant, LocalDate, LocalDateTime, LocalTime}
import org.scalacheck._
import org.scalatest.BeforeAndAfterAll

import scala.util.Random

object TypedBigQueryIT {
  @BigQueryType.toTable
  case class Record(
    bool: Boolean,
    int: Int,
    long: Long,
    float: Float,
    double: Double,
    string: String,
    byteString: ByteString,
    timestamp: Instant,
    date: LocalDate,
    time: LocalTime,
    datetime: LocalDateTime
  )

  // Workaround for millis rounding error
  val epochGen = Gen.chooseNum[Long](0L, 1000000000000L).map(x => x / 1000 * 1000)
  implicit val arbByteString = Arbitrary(Gen.alphaStr.map(ByteString.copyFromUtf8))
  implicit val arbInstant = Arbitrary(epochGen.map(new Instant(_)))
  implicit val arbDate = Arbitrary(epochGen.map(new LocalDate(_)))
  implicit val arbTime = Arbitrary(epochGen.map(new LocalTime(_)))
  implicit val arbDatetime = Arbitrary(epochGen.map(new LocalDateTime(_)))

  private val recordGen = {
    implicitly[Arbitrary[Record]].arbitrary
  }

  private val table = {
    val TIME_FORMATTER = DateTimeFormat.forPattern("yyyyMMddHHmmss")
    val now = Instant.now().toString(TIME_FORMATTER)
    val spec =
      "data-integration-test:bigquery_avro_it.records_" + now + "_" + Random.nextInt(Int.MaxValue)
    Table.Spec(spec)
  }
  private val records = Gen.listOfN(1000, recordGen).sample.get
  private val options = PipelineOptionsFactory
    .fromArgs(
      "--project=data-integration-test",
      "--tempLocation=gs://data-integration-test-eu/temp"
    )
    .create()
}

class TypedBigQueryIT extends PipelineSpec with BeforeAndAfterAll {
  import TypedBigQueryIT._

  override protected def beforeAll(): Unit = {
    val sc = ScioContext(options)
    sc.parallelize(records).saveAsTypedBigQueryTable(table)

    sc.run()
    ()
  }

  override protected def afterAll(): Unit =
    BigQuery.defaultInstance().tables.delete(table.ref)

  "TypedBigQuery" should "read records" in {
    val sc = ScioContext(options)
    sc.typedBigQuery[Record](table) should containInAnyOrder(records)
    sc.run()
  }
}
Example 5
Source File: DateTimeUtil.scala From aerosolve with Apache License 2.0 | 5 votes |
package com.airbnb.common.ml.util

import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeConstants}

object DateTimeUtil {

  private val DATE_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd")

  def getDateTimeFromString(dateStr: String): DateTime = {
    DateTime.parse(dateStr, DATE_FORMATTER)
  }

  def getDaysSinceEpoch(date: DateTime): Long = {
    date.getMillis / DateTimeConstants.MILLIS_PER_DAY
  }

  def dateStringFromInt(daysSinceEpoch: Long): String = {
    DATE_FORMATTER.print(daysSinceEpoch * DateTimeConstants.MILLIS_PER_DAY)
  }

  def daysRange(begin: String, end: String): Int = {
    (
      getDaysSinceEpoch(getDateTimeFromString(end)) -
        getDaysSinceEpoch(getDateTimeFromString(begin))
    ).toInt
  }
}
Example 6
Source File: MetaParamType.scala From DataQuality with GNU Lesser General Public License v3.0 | 5 votes |
package models.meta

import models.metrics.Metric
import org.joda.time.format.DateTimeFormat
import org.squeryl.PrimitiveTypeMode._

import scala.util.Try

object MetaParamType extends Enumeration {

  val double = ParamValidationVal("DOUBLE", validateDouble)
  val integer = ParamValidationVal("INTEGER", validateInt)
  val string = ParamValidationVal("STRING", validateString)
  val metric = ParamValidationVal("METRIC", validateMetric)
  val dateFormat = ParamValidationVal("DATE_FORMAT", validateDateFormat)
  val proportion = ParamValidationVal("PROPORTION", validateProportion)

  protected case class ParamValidationVal(name: String, validationFunc: (String) => Boolean) extends super.Val() {
    override def toString(): String = this.name
  }

  implicit def convert(value: Value): ParamValidationVal = value.asInstanceOf[ParamValidationVal]

  private def validateDouble(value: String): Boolean = Try(value.toDouble).isSuccess
  private def validateInt(value: String): Boolean = Try(value.toInt).isSuccess
  private def validateString(value: String): Boolean = Try(value.toString).isSuccess
  private def validateMetric(value: String): Boolean = Metric.getIdList().toList.contains(value)
  private def validateDateFormat(value: String): Boolean = Try{DateTimeFormat.forPattern(value)}.isSuccess
  private def validateProportion(value: String): Boolean = {
    Try(value.toDouble).toOption match {
      case Some(v) if v >= 0 && v <= 1 => true
      case _ => false
    }
  }
}
Example 7
Source File: MetaParamType.scala From DataQuality with GNU Lesser General Public License v3.0 | 5 votes |
package dbmodel.meta

import dbmodel.metrics.Metric
import org.joda.time.format.DateTimeFormat
import org.squeryl.PrimitiveTypeMode._

import scala.util.Try

object MetaParamType extends Enumeration {

  val double = ParamValidationVal("DOUBLE", validateDouble)
  val integer = ParamValidationVal("INTEGER", validateInt)
  val string = ParamValidationVal("STRING", validateString)
  val metric = ParamValidationVal("METRIC", validateMetric)
  val dateFormat = ParamValidationVal("DATE_FORMAT", validateDateFormat)
  val proportion = ParamValidationVal("PROPORTION", validateProportion)

  protected case class ParamValidationVal(name: String, validationFunc: (String) => Boolean) extends super.Val() {
    override def toString(): String = this.name
  }

  implicit def convert(value: Value): ParamValidationVal = value.asInstanceOf[ParamValidationVal]

  private def validateDouble(value: String): Boolean = Try(value.toDouble).isSuccess
  private def validateInt(value: String): Boolean = Try(value.toInt).isSuccess
  private def validateString(value: String): Boolean = Try(value.toString).isSuccess
  private def validateMetric(value: String): Boolean = Metric.getIdList().toList.contains(value)
  private def validateDateFormat(value: String): Boolean = Try{DateTimeFormat.forPattern(value)}.isSuccess
  private def validateProportion(value: String): Boolean = {
    Try(value.toDouble).toOption match {
      case Some(v) if v >= 0 && v <= 1 => true
      case _ => false
    }
  }
}
Example 8
Source File: KibanaForwarder.scala From shield with MIT License | 5 votes |
package shield.actors.listeners

import akka.actor.{Actor, ActorLogging}
import com.amazonaws.auth.{AWSCredentials, DefaultAWSCredentialsProviderChain}
import com.typesafe.config.Config
import shield.actors.RestartLogging
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTimeZone, DateTime}
import shield.aws.AWSSigningConfig
import shield.metrics.Instrumented
import spray.client.pipelining._
import spray.http.HttpResponse
import shield.aws.AWSImplicits._
import spray.json.DefaultJsonProtocol._
import spray.json._

// todo: ensure useful mapping on the index
class KibanaForwarder(id: String, host: String, indexPrefix: String, ttype: String, maxOutstanding: Int, signingParams: AWSSigningConfig)
  extends Actor with ActorLogging with RestartLogging with Instrumented {

  implicit val ctx = context.dispatcher

  // todo: timeout?
  val awsSigningConfig = signingParams
  val pipeline = sendReceive
  val dayFormat = DateTimeFormat.forPattern("yyyy.MM.dd")

  val outstandingCounter = metrics.counter("outstandingPosts", id)
  val droppedMeter = metrics.meter("droppedAccessLogs", id)
  val postTimer = timing("postToKibana", id)

  def receive = {
    case LogsFlushed =>
      outstandingCounter -= 1
    case AccessLogs(buffer) =>
      if (buffer.nonEmpty) {
        if (outstandingCounter.count >= maxOutstanding) {
          droppedMeter.mark(buffer.length)
        } else postTimer {
          outstandingCounter += 1

          val date = DateTimeFormat.forPattern("yyyy.MM.dd").print(DateTime.now(DateTimeZone.UTC))
          // todo: CompactPrint is 1% cpu under load tests. Faster serialization library?
          val orderedCommands = buffer.flatMap { doc =>
            List(
              JsObject(
                "index" -> JsObject(
                  "_index" -> JsString(s"$indexPrefix-$date"),
                  "_type" -> JsString(ttype)
                )
              ).toJson.compactPrint,
              doc.toJson.compactPrint
            )
          }
          val req = Post(s"$host/_bulk", orderedCommands.mkString("\n") + "\n").withAWSSigning(awsSigningConfig)
          pipeline(req) andThen LogCollector.handleResults(self, droppedMeter, log, buffer.length)
        }
      }
  }
}
Example 9
Source File: AuthUtil.scala From shield with MIT License | 5 votes |
package shield.aws

import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec

import org.apache.commons.codec.binary.Hex
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone}
import spray.http.HttpHeaders.RawHeader
import spray.http.HttpRequest

object HexBytesUtil {

  def hex2bytes(hex: String): Array[Byte] = {
    hex.replaceAll("[^0-9A-Fa-f]", "").sliding(2, 2).toArray.map(Integer.parseInt(_, 16).toByte)
  }

  def bytes2hex(bytes: Array[Byte], sep: Option[String] = None): String = {
    sep match {
      case None => bytes.map("%02x".format(_)).mkString
      case _ => bytes.map("%02x".format(_)).mkString(sep.get)
    }
  }
}
Example 10
Source File: CutOffTime.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.aggregators

import com.salesforce.op.utils.date.DateTimeUtils
import org.joda.time.format.DateTimeFormat

case class CutOffTime(cType: CutOffTimeType, timeMs: Option[Long])

object CutOffTime {

  // scalastyle:off
  def UnixEpoch(sinceEpoch: Long): CutOffTime = CutOffTime(
    cType = CutOffTimeTypes.UnixEpoch,
    timeMs = Some(math.max(sinceEpoch, 0L))
  )

  def DaysAgo(daysAgo: Int): CutOffTime = CutOffTime(
    cType = CutOffTimeTypes.DaysAgo,
    timeMs = Some(DateTimeUtils.now().withTimeAtStartOfDay().minusDays(daysAgo).getMillis)
  )

  def WeeksAgo(weeksAgo: Int): CutOffTime = CutOffTime(
    cType = CutOffTimeTypes.WeeksAgo,
    timeMs = Some(DateTimeUtils.now().withTimeAtStartOfDay().minusWeeks(weeksAgo).getMillis)
  )

  def DDMMYYYY(ddMMyyyy: String): CutOffTime = CutOffTime(
    cType = CutOffTimeTypes.DDMMYYYY,
    timeMs = Some(
      DateTimeFormat.forPattern("ddMMyyyy").parseDateTime(ddMMyyyy).withZone(DateTimeUtils.DefaultTimeZone).getMillis
    )
  )

  def NoCutoff(): CutOffTime = CutOffTime(cType = CutOffTimeTypes.NoCutoff, timeMs = None)
  // scalastyle:on
}
Example 11
Source File: TaxiFareOps.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.flink

import java.util.Locale

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

import scala.util.{ Failure, Success, Try }

import cloudflow.flink.avro._

object TaxiFareOps {

  @transient val timeFormatter =
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withLocale(Locale.US).withZoneUTC();

  def fromString(fare: String): Try[TaxiFare] = {
    def parseFloat(s: String) =
      if (s.length() > 0) s.toFloat else 0.0f

    def parseDateTime(s: String) =
      DateTime.parse(s, timeFormatter)

    val tokens = fare.split(",")
    if (tokens.length != 8) Failure(new RuntimeException(s"Invalid record: $fare"))
    else
      Try {
        val rideId = tokens(0).toLong

        new TaxiFare(
          rideId,
          tokens(1).toLong,
          tokens(4),
          tokens(2).toLong,
          parseDateTime(tokens(3)).getMillis(),
          parseFloat(tokens(5)),
          parseFloat(tokens(6)),
          parseFloat(tokens(7))
        )
      }.transform(s ⇒ Success(s), e ⇒ Failure(new RuntimeException(s"Invalid record: $fare", e)))
  }

  def getEventTime(fare: TaxiFare): Long = fare.startTime
}
Example 12
Source File: TaxiRideOps.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.flink

import java.util.Locale

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

import scala.util.{ Failure, Success, Try }

import cloudflow.flink.avro._

object TaxiRideOps {

  @transient val timeFormatter =
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withLocale(Locale.US).withZoneUTC();

  def fromString(ride: String): Try[TaxiRide] = {
    def parseFloat(s: String) =
      if (s.length() > 0) s.toFloat else 0.0f

    def parseDateTime(s: String) =
      DateTime.parse(s, timeFormatter)

    val tokens = ride.split(",")
    if (tokens.length != 11) Failure(new RuntimeException(s"Invalid record: $ride"))
    else
      Try {
        val rideId = tokens(0).toLong

        val (isStart, startTime, endTime) = tokens(1) match {
          case "START" ⇒ (true, parseDateTime(tokens(2)), parseDateTime(tokens(3)))
          case "END"   ⇒ (false, parseDateTime(tokens(3)), parseDateTime(tokens(2)))
          case _       ⇒ throw new RuntimeException(s"Invalid record: $ride")
        }

        new TaxiRide(
          rideId,
          isStart,
          tokens(9).toLong,
          tokens(8).toShort,
          tokens(10).toLong,
          parseFloat(tokens(4)),
          parseFloat(tokens(5)),
          parseFloat(tokens(6)),
          parseFloat(tokens(7)),
          startTime.getMillis(),
          endTime.getMillis()
        )
      }.transform(s ⇒ Success(s), e ⇒ Failure(new RuntimeException(s"Invalid record: $ride", e)))
  }

  def getEventTime(ride: TaxiRide): Long =
    if (ride.isStart) ride.startTime else ride.endTime
}
Example 13
Source File: QueryGuardConfigs.scala From gimel with Apache License 2.0 | 5 votes |
package com.paypal.gimel.common.conf

import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}

object QueryGuardConfigs {
  val JOB_TTL: String = "gimel.sql.query.guard.timeout.sec"
  val DELAY_TTL: String = "gimel.sql.query.guard.delay.timeout.sec"
}

object QueryGuardConstants {
  val DEFAULT_JOB_TTL: Int = 2100000
  val DEFAULT_DELAY_TTL: Int = 10000

  // val DEFAULT_JOB_TTL: Int = 60000
  // val DEFAULT_DELAY_TTL: Int = 1000
  val EXCEPTION_MSG_FORMAT: String =
    """Gimel Query Guard Exception : Your SQL has exceeded the maximum limit of %s.
      | Query Start Time : %s
      | Query Abort Time : %s
      | Please start a dedicated Spark Kernel with Gimel to run long running queries.
      | Please tune your SQL to pull only required data by applying right filters, this will also help minimize the runtime & process only required data from source system.""".stripMargin

  val LOCAL_DATETIME_FORMATTER: DateTimeFormatter = DateTimeFormat.fullDateTime()
}
Example 14
Source File: TemporalBounds.scala From recogito2 with Apache License 2.0 | 5 votes |
package services.entity

import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._

case class TemporalBounds(from: DateTime, to: DateTime) {

  import TemporalBounds._

  // TODO make this smarter - for now, we'll just print to/from years
  override def toString() =
    if (from == to) {
      yearFormatter.print(from)
    } else {
      s"${yearFormatter.print(from)}/${yearFormatter.print(to)}"
    }
}

object TemporalBounds {

  // For convenience
  private val yearFormatter = DateTimeFormat.forPattern("yyyy").withZone(DateTimeZone.UTC)
  private val dateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd").withZone(DateTimeZone.UTC)

  implicit val dateFormat = Format(
    JsPath.read[JsString].map { json => dateFormatter.parseDateTime(json.value) },
    Writes[DateTime] { dt => Json.toJson(dateFormatter.print(dt)) }
  )

  private def flexDateWrite(dt: DateTime): JsValue =
    if (dt.monthOfYear == 1 && dt.dayOfMonth == 1 && dt.minuteOfDay == 0)
      Json.toJson(dt.year.get)
    else
      Json.toJson(dt)

  implicit val temporalBoundsFormat: Format[TemporalBounds] = (
    (JsPath \ "from").format[JsValue].inmap[DateTime](flexDateRead, flexDateWrite) and
    (JsPath \ "to").format[JsValue].inmap[DateTime](flexDateRead, flexDateWrite)
  )(TemporalBounds.apply, unlift(TemporalBounds.unapply))

  def computeUnion(bounds: Seq[TemporalBounds]): TemporalBounds = {
    val from = bounds.map(_.from.getMillis).min
    val to = bounds.map(_.to.getMillis).max
    TemporalBounds(
      new DateTime(from, DateTimeZone.UTC),
      new DateTime(to, DateTimeZone.UTC))
  }

  def fromYears(from: Int, to: Int): TemporalBounds = {
    val f = new DateTime(DateTimeZone.UTC).withDate(from, 1, 1).withTime(0, 0, 0, 0)
    val t = new DateTime(DateTimeZone.UTC).withDate(to, 1, 1).withTime(0, 0, 0, 0)
    TemporalBounds(f, t)
  }
}
Example 15
Source File: BackupAdminController.scala From recogito2 with Apache License 2.0 | 5 votes |
package controllers.admin.backup

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import com.mohiva.play.silhouette.api.Silhouette
import controllers.{BaseAuthController, Security}
import controllers.document.BackupReader
import javax.inject.{Inject, Singleton}
import services.ContentType
import services.annotation.AnnotationService
import services.document.DocumentService
import services.generated.tables.records.DocumentFilepartRecord
import services.user.UserService
import services.user.Roles._
import services.visit.VisitService
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.webjars.play.WebJarsUtil
import play.api.Configuration
import play.api.mvc.{ControllerComponents, ResponseHeader, Result}
import play.api.libs.Files.TemporaryFileCreator
import play.api.http.HttpEntity
import scala.concurrent.{ExecutionContext, Future}
import storage.db.DB
import storage.es.migration.AnnotationMigrationUtil
import transform.tiling.TilingService

@Singleton
class BackupAdminController @Inject() (
    val components: ControllerComponents,
    val config: Configuration,
    val migrationUtil: AnnotationMigrationUtil,
    val users: UserService,
    val visits: VisitService,
    val silhouette: Silhouette[Security.Env],
    implicit val db: DB,
    implicit val tilingService: TilingService,
    implicit val annotations: AnnotationService,
    implicit val documents: DocumentService,
    implicit val ctx: ExecutionContext,
    implicit val system: ActorSystem,
    implicit val tmpFileCreator: TemporaryFileCreator,
    implicit val webJarsUtil: WebJarsUtil
  ) extends BaseAuthController(components, config, documents, users) with BackupReader {

  def restore = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    request.body.asMultipartFormData.flatMap(_.file("backup")) match {
      case Some(formData) =>
        restoreBackup(formData.ref.path.toFile, runAsAdmin = true, forcedOwner = None).map { case (doc, fileparts) =>
          Ok
        }.recover { case t: Throwable =>
          t.printStackTrace()
          InternalServerError
        }

      case None =>
        Future.successful(BadRequest)
    }
  }

  def exportVisits = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    visits.scrollExport().map { path =>
      val fmt = DateTimeFormat.forPattern("yyyy-MM-dd")
      val source = FileIO.fromPath(path)
      val filename = s"visits-exported-${fmt.print(DateTime.now)}.csv"
      Result(
        header = ResponseHeader(200, Map("Content-Disposition" -> s"""attachment; filename="${filename}"""")),
        body = HttpEntity.Streamed(source, None, Some("text/csv"))
      )
    }
  }

  def deleteVisitsOlderThan(date: Option[String]) = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    date match {
      case Some(_) =>
        Future.successful(BadRequest("User-provided dates not supported yet."))

      case _ =>
        val cutoffDate = DateTime.now minusMonths 6
        visits.deleteOlderThan(cutoffDate).map { success =>
          if (success) Ok("Done.") else InternalServerError("Something went wrong.")
        }
    }
  }
}
Example 16
Source File: ContributionSpec.scala From recogito2 with Apache License 2.0 | 5 votes |
package services.contribution

import java.util.UUID
import services.ContentType
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json.Json
import play.api.test._
import play.api.test.Helpers._
import scala.io.Source

@RunWith(classOf[JUnitRunner])
class ContributionSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")

  private val madeAt = DateTime.parse("2016-06-03T13:02:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC)

  "The sample Contribution" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/contribution/contribution.json").getLines().mkString("\n")
      val result = Json.fromJson[Contribution](Json.parse(json))

      // Parsed without errors?
      result.isSuccess must equalTo(true)

      val contribution = result.get
      contribution.action must equalTo(ContributionAction.CONFIRM_BODY)
      contribution.madeBy must equalTo("rainer")
      contribution.madeAt must equalTo(madeAt)

      val item = contribution.affectsItem
      item.itemType must equalTo(ItemType.PLACE_BODY)
      item.documentId must equalTo("98muze1cl3saib")
      item.documentOwner must equalTo("rainer")
      item.filepartId must equalTo(Some(UUID.fromString("a7126845-16ac-434b-99bd-0f297e227822")))
      item.contentType must equalTo(Some(ContentType.TEXT_PLAIN))
      item.annotationId must equalTo(Some(UUID.fromString("7cfa1504-26de-45ef-a590-8b60ea8a60e8")))
      item.annotationVersionId must equalTo(Some(UUID.fromString("e868423f-5ea9-42ed-bb7d-5e1fac9195a0")))

      contribution.affectsUsers must equalTo(Seq("otheruser"))
    }
  }

  "JSON serialization/parsing roundtrip" should {

    "yield an equal Contribution" in {
      val contribution = Contribution(
        ContributionAction.DELETE_BODY,
        "rainer",
        madeAt,
        Item(
          ItemType.COMMENT_BODY,
          "98muze1cl3saib",
          "rainer",
          Some(UUID.fromString("7ccbf5dd-335b-4d59-bff6-d8d59d977825")),
          Some(ContentType.TEXT_TEIXML),
          Some(UUID.fromString("7cfa1504-26de-45ef-a590-8b60ea8a60e8")),
          Some(UUID.fromString("e868423f-5ea9-42ed-bb7d-5e1fac9195a0")),
          Some("just a comment"),
          None
        ),
        Seq("rainer"),
        None)

      // Convert to JSON
      val serialized = Json.prettyPrint(Json.toJson(contribution))

      val parseResult = Json.fromJson[Contribution](Json.parse(serialized))
      parseResult.isSuccess must equalTo(true)
      parseResult.get must equalTo(contribution)
    }
  }
}
Example 17
Source File: RelationSpec.scala From recogito2 with Apache License 2.0 | 5 votes |
package services.annotation.relation

import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.DateTimeFormat
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.test._
import play.api.test.Helpers._
import play.api.libs.json.Json
import scala.io.Source
import services.annotation.{Annotation, AnnotationBody}

@RunWith(classOf[JUnitRunner])
class RelationSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")

  import services.annotation.BackendAnnotation._

  "The sample annotation" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/annotation/annotation-with-relation.json").getLines().mkString("\n")
      val result = Json.fromJson[Annotation](Json.parse(json))
      result.isSuccess must equalTo(true)

      val relations = result.get.relations
      relations.size must equalTo(1)
      relations.head.bodies.size must equalTo(1)

      val body = relations.head.bodies.head
      body.hasType must equalTo(AnnotationBody.TAG)
      body.lastModifiedBy must equalTo(Some("rainer"))
      body.lastModifiedAt must equalTo(DateTime.parse("2018-05-07T15:31:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC))
      body.value must equalTo("flyingTo")
    }
  }
}
Example 18
Source File: VisitSpec.scala From recogito2 with Apache License 2.0 | 5 votes |
package services.visit

import java.util.UUID
import services.ContentType
import services.RuntimeAccessLevel
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import org.joda.time.{ DateTime, DateTimeZone }
import org.joda.time.format.DateTimeFormat
import play.api.libs.json.Json
import play.api.test._
import play.api.test.Helpers._
import scala.io.Source

@RunWith(classOf[JUnitRunner])
class VisitSpec extends Specification {

  private val DATE_TIME_PATTERN = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")

  private val visitedAt = DateTime.parse("2016-11-08T07:27:00Z", DATE_TIME_PATTERN).withZone(DateTimeZone.UTC)

  "The sample Visit" should {

    "be properly created from JSON" in {
      val json = Source.fromFile("test/resources/services/visit/visit.json").getLines().mkString("\n")
      val result = Json.fromJson[Visit](Json.parse(json))
      result.isSuccess must equalTo(true)

      val visit = result.get
      visit.url must equalTo("http://recogito.pelagios.org/document/fb2f3hm1ihnwgn/part/1/edit")
      visit.referer must equalTo(Some("http://recogito.pelagios.org/rainer"))
      visit.visitedAt must equalTo(visitedAt)
      visit.responseFormat must equalTo("text/html")
      visit.accessLevel must equalTo(Some(RuntimeAccessLevel.READ_ALL))

      val client = visit.client
      client.userAgent must equalTo("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/52.0.2743.116 Chrome/52.0.2743.116 Safari/537.36")
      client.browser must equalTo("CHROME")
      client.os must equalTo("LINUX")
      client.deviceType must equalTo("COMPUTER")

      val item = visit.visitedItem.get
      item.documentId must equalTo("fb2f3hm1ihnwgn")
      item.documentOwner must equalTo("rainer")
      item.filepartId must equalTo(Some(UUID.fromString("a7126845-16ac-434b-99bd-0f297e227822")))
      item.contentType must equalTo(Some(ContentType.TEXT_PLAIN))
    }
  }

  "JSON serialization/parsing roundtrip" should {

    "yield an equal Visit" in {
      val visit = Visit(
        "http://recogito.pelagios.org/document/fb2f3hm1ihnwgn/part/1/edit",
        Some("http://recogito.pelagios.org/rainer"),
        visitedAt,
        Client(
          "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
          "CHROME",
          "LINUX",
          "COMPUTER"
        ),
        "text/html",
        Some(VisitedItem(
          "fb2f3hm1ihnwgn",
          "rainer",
          Some(UUID.randomUUID),
          Some(ContentType.TEXT_PLAIN)
        )),
        Some(RuntimeAccessLevel.READ_ALL)
      )

      // Convert to JSON
      val serialized = Json.prettyPrint(Json.toJson(visit))

      val parseResult = Json.fromJson[Visit](Json.parse(serialized))
      parseResult.isSuccess must equalTo(true)
      parseResult.get must equalTo(visit)
    }
  }
}
Example 19
Source File: ScraperJob.scala From core with Apache License 2.0 | 5 votes |
package com.smartbackpackerapp.scraper

import cats.effect.IO
import cats.instances.list._
import cats.syntax.apply._
import cats.syntax.traverse._
import com.smartbackpackerapp.common.IOApp
import com.smartbackpackerapp.model._
import org.joda.time.Seconds
import org.joda.time.format.DateTimeFormat

object ScraperJob extends IOApp {

  private val ctx = new ScraperModule[IO]

  val visaIndexProgram: IO[Unit] =
    for {
      _       <- putStrLn("Starting visa index scraping job")
      ranking <- ctx.visaRestrictionsIndexParser.parse
      _       <- putStrLn("Starting visa index inserting data job")
      _       <- ctx.visaRestrictionsInsertData.run(ranking)
      _       <- putStrLn("Visa index scraping job done")
    } yield ()

  val updateCountriesProgram: IO[Unit] =
    for {
      _ <- putStrLn("Starting countries updating data job")
      _ <- ctx.countryInsertData.runUpdate
      _ <- putStrLn("Countries updating data job DONE")
    } yield ()

  val countriesProgram: IO[Unit] =
    for {
      _ <- putStrLn("Starting countries inserting data job")
      _ <- ctx.countryInsertData.run
      _ <- putStrLn("Countries inserting data job DONE")
    } yield ()

  val visaCategoriesProgram: IO[Unit] =
    for {
      _ <- putStrLn("Starting visa categories inserting data job")
      _ <- ctx.visaCategoryInsertData.run
      _ <- putStrLn("Visa categories inserting data job DONE")
    } yield ()

  def visaRequirementsProgramFor(from: CountryCode): IO[Unit] =
    for {
      _ <- putStrLn(s"${from.value} >> Starting visa requirements job")
      _ <- ctx.visaRequirementsInsertData.run(from)
      _ <- putStrLn(s"${from.value} >> Visa requirements job DONE")
    } yield ()

  val visaRequirementsProgram: IO[Unit] = {
    ctx.scraperConfig.countriesCode() flatMap { codes =>
      codes.traverse(c => visaRequirementsProgramFor(c)) *> IO.unit
    }
  }

  def healthInfoProgramFor(cc: CountryCode): IO[Unit] =
    for {
      _ <- putStrLn("Starting health info inserting data job")
      _ <- ctx.healthInfoInsertData.run(cc)
      _ <- putStrLn("Health info inserting data job DONE")
    } yield ()

  val healthInfoProgram: IO[Unit] = {
    ctx.scraperConfig.countriesCode() flatMap { codes =>
      codes.traverse(c => healthInfoProgramFor(c)) *> IO.unit
    }
  }

  case object MissingArgument extends Exception("There should be only one argument with one of the following values: `loadCountries`, `updateCountries`, `loadVisaCategories`, `visaRequirements`, `visaRanking` or `healthInfo`")

  def readArgs(args: List[String]): IO[Unit] = {
    val ifEmpty = IO.raiseError[Unit](MissingArgument)
    args.headOption.fold(ifEmpty) {
      case "loadCountries"      => countriesProgram
      case "updateCountries"    => updateCountriesProgram
      case "loadVisaCategories" => visaCategoriesProgram
      case "visaRequirements"   => visaRequirementsProgram
      case "visaRanking"        => visaIndexProgram
      case "healthInfo"         => healthInfoProgram
      case _                    => ifEmpty
    }
  }

  override def start(args: List[String]): IO[Unit] = {
    lazy val fmt = DateTimeFormat.forPattern("H:m:s.S")

    for {
      start <- getTime
      _     <- if (ctx.devDbUrl.nonEmpty) putStrLn(s"DEV DB connection established: ${ctx.devDbUrl}")
               else putStrLn(s"DB connection established: ${ctx.dbUrl}")
      _     <- putStrLn(s"Starting job at ${start.toString(fmt)}")
      _     <- readArgs(args)
      end   <- getTime
      _     <- putStrLn(s"Finished job at ${end.toString(fmt)}. Duration ${Seconds.secondsBetween(start, end).getSeconds} seconds")
    } yield ()
  }
}
Example 20
Source File: MetaDataProvider.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.nio.file.Path
import java.util.jar.Manifest
import java.util.zip.ZipFile

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import uk.gov.hmrc.releaser.github.CommitSha

import scala.collection.JavaConversions._
import scala.io.Source
import scala.util.{Failure, Success, Try}

trait MetaDataProvider {
  def fromJarFile(p: Path): Try[ArtefactMetaData]
  def fromCommitManifest(p: Path): Try[ArtefactMetaData]
}

case class ArtefactMetaData(sha: CommitSha, commitAuthor: String, commitDate: DateTime)

class ArtefactMetaDataProvider extends MetaDataProvider {
  import ArtefactMetaDataProvider._

  def fromJarFile(p: Path): Try[ArtefactMetaData] = {
    Try { new ZipFile(p.toFile) }.flatMap { jarFile =>
      jarFile.entries().filter(_.getName == "META-INF/MANIFEST.MF").toList.headOption.map { ze =>
        val man = new Manifest(jarFile.getInputStream(ze))
        ArtefactMetaData(
          man.getMainAttributes.getValue("Git-Head-Rev"),
          man.getMainAttributes.getValue("Git-Commit-Author"),
          gitCommitDateFormat.parseDateTime(man.getMainAttributes.getValue("Git-Commit-Date"))
        )
      }.toTry(new Exception(s"Failed to retrieve manifest from $p"))
    }
  }

  def fromCommitManifest(p: Path): Try[ArtefactMetaData] = {
    Try {
      val map = Source.fromFile(p.toFile)
        .getLines().toSeq
        .map(_.split("="))
        .map { case Array(key, value) => key.trim -> value.trim }.toMap

      ArtefactMetaData(map("sha"), map("author"), gitCommitDateFormat.parseDateTime(map("date")))
    }
  }
}

object ArtefactMetaDataProvider {

  val gitCommitDateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

  implicit class OptionPimp[A](opt: Option[A]) {
    def toTry(e: Exception): Try[A] = opt match {
      case Some(x) => Success(x)
      case None => Failure(e)
    }
  }
}
Example 21
Source File: SchedulerDataManager.scala From cave with MIT License | 5 votes |
package com.cave.metrics.data.postgresql

import java.sql.Timestamp

import com.cave.metrics.data.AwsConfig
import com.cave.metrics.data.postgresql.Tables._
import org.joda.time.format.DateTimeFormat
import org.joda.time.DateTime

import scala.slick.jdbc.{GetResult, StaticQuery => Q}
import scala.slick.driver.PostgresDriver.simple._

class SchedulerDataManager(awsConfig: AwsConfig) extends DatabaseConnection(awsConfig) {

  def leadershipTermTimeoutSeconds = awsConfig.leadershipTermTimeoutSeconds
  def leadershipTermLengthSeconds = awsConfig.leadershipTermLengthSeconds

  def DBDateTimeFormatter = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss Z")

  implicit val getSchedulersResult = GetResult(r => SchedulersRow(r.<<, r.<<, r.<<))

  def takeLeadership(hostname: String): Boolean = {
    db.withTransaction { implicit session =>
      val termTimeout = new DateTime().minusSeconds(leadershipTermTimeoutSeconds)
      val timeoutSql = DBDateTimeFormatter.print(termTimeout)

      val sql = s"BEGIN; SELECT * FROM schedulers WHERE created_at < '$timeoutSql' FOR UPDATE"
      val query = Q.queryNA[SchedulersRow](sql)

      def updateTimestamp(): Boolean =
        Schedulers.filter(_.createdAt < new Timestamp(termTimeout.getMillis))
          .map(s => (s.name, s.createdAt)).update(hostname, new Timestamp(System.currentTimeMillis())) == 1

      try {
        query.list.length == 1 && (updateTimestamp() || {
          session.rollback()
          false
        })
      } catch {
        case e: Exception =>
          log.error(e)
          session.rollback()
          false
      }
    }
  }
}
Example 22
Source File: SchedulerDataManagerSpec.scala From cave with MIT License | 5 votes |
package com.cave.metrics.data.postgresql

import java.sql.Timestamp

import com.cave.metrics.data.postgresql.Tables._
import org.joda.time.format.DateTimeFormat
import org.scalatest.BeforeAndAfter

import scala.slick.driver.H2Driver.simple._
import scala.slick.jdbc.StaticQuery

class SchedulerDataManagerSpec extends AbstractDataManagerSpec with BeforeAndAfter {

  val hostname_1 = "host1"
  val hostname_2 = "host2"
  val hostname_3 = "host3"

  var dm: SchedulerDataManager = _

  before {
    dm = new SchedulerDataManager(awsConfig) {
      override def DBDateTimeFormatter = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss")
      override def leadershipTermTimeoutSeconds = 30
    }
    Schedulers += SchedulersRow(1, "initialValue", new Timestamp(System.currentTimeMillis() - 1000 * 60))
  }

  "Scheduler Data Manager" should "update Schedulers table" in {
    Schedulers.list.head.name should be("initialValue")

    assert(dm.takeLeadership(hostname_1), "Expected success")
    Schedulers.list.head.name should be(hostname_1)

    assert(dm.takeLeadership(hostname_3) == false, "Expected success")
    Schedulers.list.head.name should be(hostname_1)

    assert(dm.extendLeadership(hostname_2) == false, "Expected success")
    Schedulers.list.head.name should be(hostname_1)

    Thread.sleep(1500)
    assert(dm.extendLeadership(hostname_1), "A-hostname was not able to extend its leadership")
    Schedulers.list.head.name should be(hostname_1)
  }

  it should "not update the leader if one is active" in {
    StaticQuery.queryNA("truncate table SCHEDULERS").execute
    Schedulers += SchedulersRow(1, hostname_1, new Timestamp(System.currentTimeMillis() - 1000 * 20))
    Schedulers.list.length should be(1)

    assert(!dm.takeLeadership(hostname_2), "Expected failure")
    Schedulers.list.head.name should be(hostname_1)

    Thread.sleep(100)
    assert(dm.extendLeadership(hostname_1), "Expected success")
    Schedulers.list.head.name should be(hostname_1)
  }

  it should "not give leadership to host3 when host2 is the leader" in {
    StaticQuery.queryNA("truncate table SCHEDULERS").execute
    Schedulers += SchedulersRow(1, hostname_1, new Timestamp(System.currentTimeMillis() - 1000 * 31))
    Schedulers.list.length should be(1)

    assert(dm.takeLeadership(hostname_2), "Expected success")
    Schedulers.list.head.name should be(hostname_2)

    assert(!dm.takeLeadership(hostname_3), "Expected failure")
    Schedulers.list.head.name should be(hostname_2)

    assert(!dm.takeLeadership(hostname_1), "Expected failure")
    Schedulers.list.head.name should be(hostname_2)
  }

  it should "be thread safe" in {
    StaticQuery.queryNA("truncate table SCHEDULERS").execute
    Schedulers.list.length should be(0)
    Schedulers += SchedulersRow(1, hostname_1, new Timestamp(System.currentTimeMillis() - 1000 * 360))
    Schedulers.list.length should be(1)
    Schedulers.list.head.name should be(hostname_1)

    import scala.slick.jdbc.{GetResult, StaticQuery => Q}
    val sql = s"BEGIN; select * from SCHEDULERS FOR UPDATE"
    val query = Q.queryNA[SchedulersRow](sql)
    query.list.length should be(1)

    assert(!dm.takeLeadership(hostname_1), "Expected failure")
    assert(!dm.takeLeadership(hostname_2), "Expected failure")
    assert(!dm.takeLeadership(hostname_3), "Expected failure")
    assert(!dm.extendLeadership(hostname_1), "Expected failure")
    assert(!dm.extendLeadership(hostname_2), "Expected failure")
    assert(!dm.extendLeadership(hostname_3), "Expected failure")

    Schedulers.list.head.name should be(hostname_1)
  }
}
Example 23
Source File: DistCacheExampleTest.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.examples.extra

import com.spotify.scio.bigquery._
import com.spotify.scio.examples.common.ExampleData
import com.spotify.scio.io._
import com.spotify.scio.testing._
import org.joda.time.format.DateTimeFormat

class DistCacheExampleTest extends PipelineSpec {

  val fmt = DateTimeFormat.forPattern("yyyyMMdd")
  def d2t(date: String): Long = fmt.parseDateTime(date).getMillis / 1000

  val in = Seq(
    TableRow("timestamp" -> d2t("20150101")),
    TableRow("timestamp" -> d2t("20150102")),
    TableRow("timestamp" -> d2t("20150103")),
    TableRow("timestamp" -> d2t("20150201")),
    TableRow("timestamp" -> d2t("20150202")),
    TableRow("timestamp" -> d2t("20150301"))
  )

  val distCache = Map(1 -> "Jan", 2 -> "Feb", 3 -> "Mar")
  val expected = Seq("Jan 3", "Feb 2", "Mar 1")

  "DistCacheExample" should "work" in {
    JobTest[com.spotify.scio.examples.extra.DistCacheExample.type]
      .args("--output=out.txt")
      .input(TableRowJsonIO(ExampleData.EXPORTED_WIKI_TABLE), in)
      .distCache(DistCacheIO("gs://dataflow-samples/samples/misc/months.txt"), distCache)
      .output(TextIO("out.txt"))(coll => coll should containInAnyOrder(expected))
      .run()
  }
}
Example 24
Source File: ToTableRow.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.extra.bigquery

import com.spotify.scio.extra.bigquery.AvroConverters.AvroConversionException

import java.math.{BigDecimal => JBigDecimal}
import java.nio.ByteBuffer
import java.util

import com.spotify.scio.bigquery.TableRow
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericFixed, IndexedRecord}
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.BaseEncoding
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, LocalDate, LocalTime}

import scala.jdk.CollectionConverters._

private[bigquery] trait ToTableRow {
  private lazy val encodingPropName: String = "bigquery.bytes.encoder"
  private lazy val base64Encoding: BaseEncoding = BaseEncoding.base64()
  private lazy val hexEncoding: BaseEncoding = BaseEncoding.base16()

  // YYYY-[M]M-[D]D
  private[this] val localDateFormatter =
    DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC()

  // YYYY-[M]M-[D]D[( |T)[H]H:[M]M:[S]S[.DDDDDD]]
  private[this] val localTimeFormatter =
    DateTimeFormat.forPattern("HH:mm:ss.SSSSSS")

  // YYYY-[M]M-[D]D[( |T)[H]H:[M]M:[S]S[.DDDDDD]][time zone]
  private[this] val timestampFormatter =
    DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS")

  private[bigquery] def toTableRowField(fieldValue: Any, field: Schema.Field): Any =
    fieldValue match {
      case x: CharSequence => x.toString
      case x: Enum[_] => x.name()
      case x: JBigDecimal => x.toString
      case x: Number => x
      case x: Boolean => x
      case x: GenericFixed => encodeByteArray(x.bytes(), field.schema())
      case x: ByteBuffer => encodeByteArray(toByteArray(x), field.schema())
      case x: util.Map[_, _] => toTableRowFromMap(x.asScala, field)
      case x: java.lang.Iterable[_] => toTableRowFromIterable(x.asScala, field)
      case x: IndexedRecord => AvroConverters.toTableRow(x)
      case x: LocalDate => localDateFormatter.print(x)
      case x: LocalTime => localTimeFormatter.print(x)
      case x: DateTime => timestampFormatter.print(x)
      case _ =>
        throw AvroConversionException(
          s"ToTableRow conversion failed:" +
            s"could not match ${fieldValue.getClass}"
        )
    }

  private def toTableRowFromIterable(iterable: Iterable[Any], field: Schema.Field): util.List[_] =
    iterable
      .map { item =>
        if (item.isInstanceOf[Iterable[_]] || item.isInstanceOf[Map[_, _]]) {
          throw AvroConversionException(
            s"ToTableRow conversion failed for item $item: " +
              s"iterable and map types not supported"
          )
        }
        toTableRowField(item, field)
      }
      .toList
      .asJava

  private def toTableRowFromMap(map: Iterable[Any], field: Schema.Field): util.List[_] =
    map
      .map { case (k, v) =>
        new TableRow()
          .set("key", toTableRowField(k, field))
          .set("value", toTableRowField(v, field))
      }
      .toList
      .asJava

  private def encodeByteArray(bytes: Array[Byte], fieldSchema: Schema): String =
    Option(fieldSchema.getProp(encodingPropName)) match {
      case Some("BASE64") => base64Encoding.encode(bytes)
      case Some("HEX") => hexEncoding.encode(bytes)
      case Some(encoding) => throw AvroConversionException(s"Unsupported encoding $encoding")
      case None => base64Encoding.encode(bytes)
    }

  private def toByteArray(buffer: ByteBuffer) = {
    val copy = buffer.asReadOnlyBuffer
    val bytes = new Array[Byte](copy.limit)
    copy.rewind
    copy.get(bytes)
    bytes
  }
}
Example 25
Source File: Binders.scala From vat-api with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.vatapi.resources

import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
import play.api.mvc.{PathBindable, QueryStringBindable}
import uk.gov.hmrc.domain.Vrn
import uk.gov.hmrc.vatapi.models.{FinancialDataQueryParams, ObligationsQueryParams, OptEither}

import scala.util.{Failure, Success, Try}

object Binders {

  implicit def vrnBinder(implicit stringBinder: PathBindable[String]) = new PathBindable[Vrn] {

    val vrnRegex = """^\d{9}$"""

    def unbind(key: String, vrn: Vrn): String = stringBinder.unbind(key, vrn.value)

    def bind(key: String, value: String): Either[String, Vrn] = {
      if (value.matches(vrnRegex)) {
        Right(Vrn(value))
      } else {
        Left("ERROR_VRN_INVALID")
      }
    }
  }

  implicit def obligationsQueryParamsBinder(implicit stringBinder: QueryStringBindable[String]) =
    new QueryStringBindable[ObligationsQueryParams] {

      override def bind(key: String, params: Map[String, Seq[String]]): OptEither[ObligationsQueryParams] = {
        val from = stringBinder.bind("from", params)
        val to = stringBinder.bind("to", params)
        val status = stringBinder.bind("status", params)
        val query = ObligationsQueryParams.from(from, to, status)
        if (query.isRight) Some(Right(query.right.get)) else Some(Left(query.left.get))
      }

      override def unbind(key: String, value: ObligationsQueryParams): String = stringBinder.unbind(key, value.map(key).toString)
    }

  implicit def financialDataQueryParamsBinder(implicit stringBinder: QueryStringBindable[String]) =
    new QueryStringBindable[FinancialDataQueryParams] {

      override def bind(key: String, params: Map[String, Seq[String]]): OptEither[FinancialDataQueryParams] = {
        val from = stringBinder.bind("from", params)
        val to = stringBinder.bind("to", params)
        val query = FinancialDataQueryParams.from(from, to)
        if (query.isRight) Some(Right(query.right.get)) else Some(Left(query.left.get))
      }

      override def unbind(key: String, value: FinancialDataQueryParams): String = stringBinder.unbind(key, value.map(key).toString)
    }

  val format: String = "yyy-MM-dd"

  implicit val dateQueryParamsBinder = new QueryStringBindable[LocalDate] {

    override def unbind(key: String, date: LocalDate): String = date.toString

    override def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, LocalDate]] =
      for {
        dates <- params.get(key)
      } yield
        Try {
          DateTimeFormat.forPattern(format).parseLocalDate(dates(0))
        } match {
          case Success(v) => Right(v)
          case Failure(_) => Left("ERROR_INVALID_DATE")
        }
  }
}
Example 26
Source File: Time14Util.scala From ArchiveSpark with MIT License | 5 votes |
package org.archive.archivespark.sparkling.util

import org.joda.time.DateTime
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}

import scala.util.Try

object Time14Util {
  val TimestampFill: String = "20000101000000"
  val TimestampFormat: DateTimeFormatter = DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC

  def fix(timestamp: String): String = {
    val fill = TimestampFill
    val length = fill.length
    if (timestamp.length < length) timestamp + fill.takeRight(length - timestamp.length) else timestamp
  }

  def parse(timestamp: String, fix: Boolean = true): DateTime =
    TimestampFormat.parseDateTime(if (fix) this.fix(timestamp) else timestamp)

  def validate(timestamp: String): Option[String] = {
    val fixed = fix(timestamp)
    if (Try(parse(fixed, fix = false)).isSuccess) Some(fixed) else None
  }
}
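A short illustration of how the helpers above compose; the results are deterministic because TimestampFormat is pinned to UTC, and the input strings are made up for the example.

// Illustrative calls against Time14Util as defined above:
Time14Util.fix("2016")          // "20160101000000" (padded with the tail of TimestampFill)
Time14Util.parse("2016")        // 2016-01-01T00:00:00.000Z (fix is applied by default)
Time14Util.validate("20160102") // Some("20160102000000")
Time14Util.validate("not a ts") // None (the padded string does not parse)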
Example 27
Source File: StreamingTSExample.scala From spark-riak-connector with Apache License 2.0 | 5 votes |
package com.basho.riak.spark.examples.streaming

import java.util.UUID

import kafka.serializer.StringDecoder
import org.apache.spark.sql.Row
import org.apache.spark.streaming.Durations
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.{SparkConf, SparkContext}
import com.basho.riak.spark.streaming._
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

object StreamingTSExample {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf(true)
      .setAppName("Simple Spark Streaming to Riak TS Demo")

    setSparkOpt(sparkConf, "spark.master", "local")
    setSparkOpt(sparkConf, "spark.riak.connection.host", "127.0.0.1:8087")
    setSparkOpt(sparkConf, "kafka.broker", "127.0.0.1:9092")

    val sc = new SparkContext(sparkConf)
    val streamCtx = new StreamingContext(sc, Durations.seconds(15))

    val kafkaProps = Map[String, String](
      "metadata.broker.list" -> sparkConf.get("kafka.broker"),
      "client.id" -> UUID.randomUUID().toString
    )

    KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](streamCtx, kafkaProps,
      Set[String]("ingest-ts")
    ) map { case (key, value) =>
      val mapper = new ObjectMapper()
      mapper.registerModule(DefaultScalaModule)
      val wr = mapper.readValue(value, classOf[Map[String, String]])
      Row(
        wr("weather"),
        wr("family"),
        DateTime.parse(wr("time"), DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS")).getMillis,
        wr("temperature"),
        wr("humidity"),
        wr("pressure"))
    } saveToRiakTS "ts_weather_demo"

    streamCtx.start()
    println("Spark streaming context started. Spark UI could be found at http://SPARK_MASTER_HOST:4040")
    println("NOTE: if you're running job on the 'local' master open http://localhost:4040")
    streamCtx.awaitTermination()
  }

  private def setSparkOpt(sparkConf: SparkConf, option: String, defaultOptVal: String): SparkConf = {
    val optval = sparkConf.getOption(option).getOrElse(defaultOptVal)
    sparkConf.set(option, optval)
  }
}
Example 28
Source File: NpsDate.scala From nisp-frontend with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.nisp.models

import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
import play.api.data.validation.ValidationError
import play.api.libs.json._
import uk.gov.hmrc.nisp.utils.Constants

case class NpsDate(localDate: LocalDate) {
  val toNpsString: String = NpsDate.dateFormat.print(localDate)

  val taxYear: Int = {
    val year = localDate.year.get
    if (localDate.isBefore(new LocalDate(year, Constants.taxYearsStartEndMonth, Constants.taxYearStartDay))) year - 1 else year
  }
}

object NpsDate {
  private val dateFormat = DateTimeFormat.forPattern("dd/MM/yyyy")
  private val npsDateRegex = """^(\d\d)/(\d\d)/(\d\d\d\d)$""".r

  implicit val reads = new Reads[NpsDate] {
    override def reads(json: JsValue): JsResult[NpsDate] = {
      json match {
        case JsString(npsDateRegex(d, m, y)) => JsSuccess(NpsDate(new LocalDate(y.toInt, m.toInt, d.toInt)))
        case JsNull => JsError(ValidationError("Null date cannot convert to NpsDate"))
      }
    }
  }

  implicit val writes = new Writes[NpsDate] {
    override def writes(date: NpsDate): JsValue = JsString(date.toNpsString)
  }

  def taxYearEndDate(taxYear: Int): NpsDate = NpsDate(taxYear + 1, Constants.taxYearsStartEndMonth, Constants.taxYearEndDay)
  def taxYearStartDate(taxYear: Int): NpsDate = NpsDate(taxYear, Constants.taxYearsStartEndMonth, Constants.taxYearStartDay)
  def apply(year: Int, month: Int, day: Int): NpsDate = NpsDate(new LocalDate(year, month, day))
}
Example 29
Source File: AccountAccessEvent.scala From nisp-frontend with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.nisp.events

import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
import uk.gov.hmrc.nisp.models.enums.Scenario.Scenario
import uk.gov.hmrc.http.HeaderCarrier

object AccountAccessEvent {
  def apply(nino: String,
            statePensionAge: LocalDate,
            statePensionAmount: BigDecimal,
            statePensionForecast: BigDecimal,
            dateOfBirth: LocalDate,
            name: String,
            contractedOutFlag: Boolean = false,
            forecastScenario: Scenario,
            copeAmount: BigDecimal,
            authenticationProvider: String)(implicit hc: HeaderCarrier): AccountAccessEvent =
    new AccountAccessEvent(
      nino,
      statePensionAge,
      statePensionAmount,
      statePensionForecast,
      dateOfBirth,
      name,
      contractedOutFlag,
      forecastScenario,
      copeAmount,
      authenticationProvider
    )
}

class AccountAccessEvent(nino: String,
                         statePensionAge: LocalDate,
                         statePensionAmount: BigDecimal,
                         statePensionForecast: BigDecimal,
                         dateOfBirth: LocalDate,
                         name: String,
                         contractedOutFlag: Boolean,
                         forecastScenario: Scenario,
                         copeAmount: BigDecimal,
                         authenticationProvider: String)(implicit hc: HeaderCarrier)
  extends NispBusinessEvent("AccountPage",
    Map(
      "nino" -> nino,
      "StatePensionAge" -> DateTimeFormat.forPattern("dd/MM/yyyy").print(statePensionAge),
      "StatePensionAmount" -> statePensionAmount.toString(),
      "StatePensionForecast" -> statePensionForecast.toString(),
      "DateOfBirth" -> DateTimeFormat.forPattern("dd/MM/yyyy").print(dateOfBirth),
      "Name" -> name,
      "ContractedOut" -> contractedOutFlag.toString,
      "ForecastScenario" -> forecastScenario.toString,
      "COPEAmount" -> copeAmount.toString(),
      "AuthenticationProvider" -> authenticationProvider
    )
  )
Example 30
Source File: DateValues.scala From avoin-voitto with MIT License | 5 votes |
package liigavoitto.journalist.values

import java.util.Locale

import liigavoitto.scores.Match
import org.joda.time.format.DateTimeFormat

trait DateValues {
  implicit val mtch: Match
  implicit val lang: String

  private lazy val locale = lang match {
    case "fi" => new Locale("fi", "FI")
    case "sv" => new Locale("sv", "SE")
  }

  private lazy val formatter = DateTimeFormat.forPattern("EEEE").withLocale(locale)

  lazy val day = mtch.date.toString(formatter)
}
Example 31
Source File: ScoresApiSupport.scala From avoin-voitto with MIT License | 5 votes |
package liigavoitto.scores

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import liigavoitto.util.Logging
import org.joda.time.format.DateTimeFormat

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{ Failure, Properties, Success, Try }

trait ScoresApiSupport extends Logging {
  implicit val system: ActorSystem
  implicit val ec = system.dispatcher
  implicit val fm = ActorMaterializer()

  val oneHundredMegabytes = 100000000

  val apiUrl = Properties.envOrElse("SCORES_API_URL", "http://scores.api.yle.fi/v0/")
  val scoresAuth = Map[String, String](
    "app_id" -> Properties.envOrElse("SCORES_API_APP_ID", ""),
    "app_key" -> Properties.envOrElse("SCORES_API_APP_KEY", "")
  )

  val dateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss")
  val timeout = 15.seconds

  protected def get(url: String) = {
    Try {
      val request = HttpRequest(GET, url)
      log.info("REQUEST: " + request)
      Http().singleRequest(request).map(r => getStr(r))
    } match {
      case Success(s) => s
      case Failure(e) =>
        log.warn(s"Failed to get $url: " + e.getMessage)
        e.printStackTrace()
        throw new RuntimeException("Failure: " + e)
    }
  }

  protected def getStr(r: HttpResponse) = {
    Try {
      val entity = Await.result(r.entity.withSizeLimit(oneHundredMegabytes).toStrict(timeout), timeout)
      entity.data.decodeString("UTF-8")
    } match {
      case Success(s) => s
      case Failure(e) => throw new RuntimeException(s"Scores api failure: " + e.getMessage)
    }
  }
}
Example 32
Source File: TimeFormat.scala From flint with Apache License 2.0 | 5 votes |
package com.twosigma.flint.timeseries.time

import java.util.concurrent.TimeUnit

import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter, ISODateTimeFormat }
import org.joda.time.{ DateTime, DateTimeZone }

import scala.concurrent.duration.TimeUnit
import scala.util.Try

object TimeFormat {

  protected[flint] def parseNano(text: String, timeZone: DateTimeZone = DateTimeZone.UTC): Long =
    parse(text, timeZone, timeUnit = TimeUnit.NANOSECONDS)

  private val formatters: List[DateTimeFormatter] = List(
    // Double `HH` formatter
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm Z"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm:ss"),
    DateTimeFormat.forPattern("yyyyMMdd HH:mm"),
    DateTimeFormat.forPattern("yyyyMMdd"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"),
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm"),
    DateTimeFormat.forPattern("yyyy-MM-dd"),
    // Single `H` formatter
    DateTimeFormat.forPattern("yyyyMMdd H:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyyMMdd H:mm:ss.SSS Z"),
    DateTimeFormat.forPattern("yyyy-MM-dd H:mm:ss.SSS"),
    DateTimeFormat.forPattern("yyyy-MM-dd H:mm:ss.SSS Z"),
    // ISO DateTime
    ISODateTimeFormat.dateTimeParser()
  )
}
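This excerpt omits the `parse` helper that `parseNano` delegates to, so how the formatter list is consumed is not shown here. The sketch below is an assumption about that missing piece, not flint's actual implementation: it simply tries each Joda formatter in order and keeps the first successful parse (the real method presumably also applies the time zone and converts to the requested time unit).

// Minimal sketch only: try each formatter until one accepts the input.
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormatter
import scala.util.{Success, Try}

def parseWithFallback(text: String, formatters: List[DateTimeFormatter]): Option[DateTime] =
  formatters.iterator
    .map(f => Try(f.parseDateTime(text)))
    .collectFirst { case Success(dt) => dt }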
Example 33
Source File: DateUtilsSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.common.util

import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike

class DateUtilsSpec extends Matchers with AnyFunSpecLike {

  private val iso8601format = ISODateTimeFormat.dateTimeNoMillis()
  private val iso8601withMillis = ISODateTimeFormat.dateTime()

  import DateUtils._

  val now = DateTime.now()
  val f = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
  val dt = f.parseDateTime("2017-01-10 23:13:26")

  describe("When using DateUtils") {
    it("converts to ISO format") {
      iso8601(now, false) shouldBe iso8601format.print(now)
      iso8601(now, true) shouldBe iso8601withMillis.print(now)
    }

    it("converts to UTC") {
      val dt = new DateTime(1234567000, DateTimeZone.UTC)
      dtFromUtcSeconds(1234567) shouldBe dt
      dtFromIso8601("1970-01-15T06:56:07Z") shouldBe dt
    }

    it("implicitly converts to the wrapper") {
      val dtw: DateTimeWrapper = dt
      dtw shouldBe DateTimeWrapper(dt)
    }

    it("sorts and compares dates") {
      val dtw: DateTimeWrapper = dt
      dtw.compare(now) should be < 0
      dtw.compare(now, dt) should be > 0
      Seq(now, dt).sorted shouldBe Seq(dt, now)
    }
  }
}
Example 34
Source File: ColumnFunctionTest.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.dsl.column

import com.crobox.clickhouse.dsl.language.ClickhouseTokenizerModule
import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.{ClickhouseClientSpec, TestSchemaClickhouseQuerySpec}
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.scalatest.concurrent.ScalaFutures

import scala.concurrent.Future

trait ColumnFunctionTest
  extends ClickhouseClientSpec
  with TestSchemaClickhouseQuerySpec
  with ScalaFutures
  with ClickhouseTokenizerModule {

  implicit val clickhouseClient = clickClient

  protected def r(query: Column): String = {
    runSql(select(query)).futureValue.trim
  }

  protected def runSql(query: OperationalQuery): Future[String] = {
    clickhouseClient.query(toSql(query.internalQuery, None))
  }

  implicit class DDTStringify(ddt: DateTime) {

    def printAsDate: String = DateTimeFormat.forPattern("yyyy-MM-dd").print(ddt)

    def printAsDateTime: String = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").print(ddt)

    def printAsYYYYMM: String = DateTimeFormat.forPattern("yyyyMM").print(ddt)

    def toStartOfQuarter: DateTime = {
      val remainder = (ddt.getMonthOfYear - 1) % 3
      ddt.withDayOfMonth(1).minusMonths(remainder)
    }

    def toStartOfMin(min: Int): DateTime = {
      val remainder = ddt.getMinuteOfHour % min
      ddt
        .withSecondOfMinute(0)
        .withMillisOfSecond(0)
        .minusMinutes(remainder)
    }

    def toStartOfHr: DateTime = {
      ddt
        .withMinuteOfHour(0)
        .withSecondOfMinute(0)
        .withMillisOfSecond(0)
    }
  }
}
Example 35
Source File: ExampleMahaService.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.api.jersey.example

import java.io.File
import java.util.UUID

import com.yahoo.maha.core.ddl.OracleDDLGenerator
import com.yahoo.maha.jdbc.{JdbcConnection, List, Seq}
import com.yahoo.maha.service.{DefaultMahaService, MahaService, MahaServiceConfig}
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import grizzled.slf4j.Logging
import org.apache.commons.io.FileUtils
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

object ExampleMahaService extends Logging {

  val REGISTRY_NAME = "academic"

  private var dataSource: Option[HikariDataSource] = None
  private var jdbcConnection: Option[JdbcConnection] = None
  val h2dbId = UUID.randomUUID().toString.replace("-", "")
  val today: String = DateTimeFormat.forPattern("yyyy-MM-dd").print(DateTime.now())
  val yesterday: String = DateTimeFormat.forPattern("yyyy-MM-dd").print(DateTime.now().minusDays(1))

  def initJdbcToH2(): Unit = {
    val config = new HikariConfig()
    config.setJdbcUrl(s"jdbc:h2:mem:$h2dbId;MODE=Oracle;DB_CLOSE_DELAY=-1")
    config.setUsername("sa")
    config.setPassword("h2.test.database.password")
    config.setMaximumPoolSize(2)
    dataSource = Option(new HikariDataSource(config))
    jdbcConnection = dataSource.map(new JdbcConnection(_))
    assert(jdbcConnection.isDefined, "Failed to connect to h2 local server")
  }

  def getMahaService(scope: String = "main"): MahaService = {
    val jsonString = FileUtils.readFileToString(new File(s"src/$scope/resources/maha-service-config.json"))
      .replaceAll("h2dbId", s"$h2dbId")
    initJdbcToH2()
    val mahaServiceResult = MahaServiceConfig.fromJson(jsonString.getBytes("utf-8"))
    if (mahaServiceResult.isFailure) {
      mahaServiceResult.leftMap { res =>
        error(s"Failed to launch Example MahaService, MahaService Error list is: ${res.list.toList}")
      }
    }
    val mahaServiceConfig = mahaServiceResult.toOption.get
    val mahaService: MahaService = new DefaultMahaService(mahaServiceConfig)
    stageStudentData(mahaServiceConfig)
    mahaService
  }

  def stageStudentData(mahaServiceConfig: MahaServiceConfig): Unit = {
    val ddlGenerator = new OracleDDLGenerator
    val erRegistryConfig = mahaServiceConfig.registry.get(ExampleMahaService.REGISTRY_NAME).get
    val erRegistry = erRegistryConfig.registry
    erRegistry.factMap.values.foreach { publicFact =>
      publicFact.factList.foreach { fact =>
        val ddl = ddlGenerator.toDDL(fact)
        assert(jdbcConnection.get.executeUpdate(ddl).isSuccess)
      }
    }

    val insertSql =
      """
        INSERT INTO student_grade_sheet (year, section_id, student_id, class_id, total_marks, date, comment)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      """

    val rows: List[Seq[Any]] = List(
      Seq(1, 100, 213, 200, 125, ExampleMahaService.today, "some comment")
    )

    rows.foreach { row =>
      val result = jdbcConnection.get.executeUpdate(insertSql, row)
      assert(result.isSuccess)
    }

    var count = 0
    jdbcConnection.get.queryForObject("select * from student_grade_sheet") { rs =>
      while (rs.next()) {
        count += 1
      }
    }
    assert(rows.size == count)
  }
}
Example 36
Source File: Metrics.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.client

import java.util.concurrent.TimeUnit

import akka.actor.Actor
import cool.graph.cuid.Cuid
import cool.graph.shared.externalServices.KinesisPublisher
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString}

import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal

object FeatureMetric extends Enumeration {
  type FeatureMetric = Value
  val Subscriptions           = Value("backend/api/subscriptions")
  val Filter                  = Value("backend/feature/filter")
  val NestedMutations         = Value("backend/feature/nested-mutation")
  val ApiSimple               = Value("backend/api/simple")
  val ApiRelay                = Value("backend/api/relay")
  val ApiFiles                = Value("backend/api/files")
  val ServersideSubscriptions = Value("backend/feature/sss")
  val RequestPipeline         = Value("backend/feature/rp")                 // add this!
  val PermissionQuery         = Value("backend/feature/permission-queries") // add this!
  val Authentication          = Value("backend/feature/authentication")
  val Algolia                 = Value("backend/feature/algolia")            // add this!
  val Auth0                   = Value("backend/feature/integration-auth0")
  val Digits                  = Value("backend/feature/integration-digits")
}

case class ApiFeatureMetric(ip: String,
                            date: DateTime,
                            projectId: String,
                            clientId: String,
                            usedFeatures: List[String],
                            // Should be false when we can't determine. This is the case for subscriptions.
                            // Is always false for File api.
                            isFromConsole: Boolean)

class FeatureMetricActor(
    metricsPublisher: KinesisPublisher,
    interval: Int
) extends Actor {
  import context.dispatcher

  val metrics = mutable.Buffer.empty[ApiFeatureMetric]
  val FLUSH   = "FLUSH"
  val tick = context.system.scheduler.schedule(
    initialDelay = FiniteDuration(interval, TimeUnit.SECONDS),
    interval = FiniteDuration(interval, TimeUnit.SECONDS),
    receiver = self,
    message = FLUSH
  )

  override def postStop() = tick.cancel()

  def receive = {
    case metric: ApiFeatureMetric => metrics += metric
    case FLUSH                    => flushMetrics()
  }

  def flushMetrics() = {
    val byProject = metrics.groupBy(_.projectId) map {
      case (projectId, metrics) =>
        JsObject(
          "requestCount"        -> JsNumber(metrics.length),
          "projectId"           -> JsString(projectId),
          "usedIps"             -> JsArray(metrics.map(_.ip).distinct.take(10).toVector.map(JsString(_))),
          "features"            -> JsArray(metrics.flatMap(_.usedFeatures).distinct.toVector.map(JsString(_))),
          "date"                -> JsString(metrics.head.date.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z").withZoneUTC())),
          "version"             -> JsString("1"),
          "justConsoleRequests" -> JsBoolean(metrics.forall(_.isFromConsole))
        )
    }

    byProject.foreach { json =>
      try {
        metricsPublisher.putRecord(json.toString, shardId = Cuid.createCuid())
      } catch {
        case NonFatal(e) => println(s"Putting kinesis FeatureMetric failed: ${e.getMessage} ${e.toString}")
      }
    }
    metrics.clear()
  }
}
Example 37
Source File: DateHelper.scala From dependency with MIT License | 5 votes |
package io.flow.dependency.www.lib

import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.DateTimeFormat

object DateHelper {

  private[this] val EasternTime = DateTimeZone.forID("America/New_York")
  private[this] val DefaultLabel = "N/A"

  def shortDate(
    dateTime: DateTime
  ): String = shortDateOption(Some(dateTime))

  def shortDateOption(
    dateTime: Option[DateTime],
    default: String = DefaultLabel
  ): String = {
    dateTime match {
      case None => default
      case Some(dt) => {
        DateTimeFormat.shortDate.withZone(EasternTime).print(dt)
      }
    }
  }

  def longDateTime(
    dateTime: DateTime
  ): String = longDateTimeOption(Some(dateTime))

  def longDateTimeOption(
    dateTime: Option[DateTime],
    default: String = DefaultLabel
  ): String = {
    dateTime match {
      case None => default
      case Some(dt) => {
        DateTimeFormat.longDateTime.withZone(EasternTime).print(dt)
      }
    }
  }
}
Example 38
Source File: RawModel.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.models.benchmark.generator.models

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

import scala.annotation.tailrec
import scala.io.Source
import scala.util.Random

case class RawModel(order_id: String,
                    timestamp: String,
                    client_id: Integer,
                    latitude: Double,
                    longitude: Double,
                    payment_method: String,
                    credit_card: String,
                    shopping_center: String,
                    employee: Integer) {}

object RawModel {

  val Range_client_id = (1, 300)
  val Range_payment_method = Source.fromInputStream(
    this.getClass.getClassLoader.getResourceAsStream("payment-methods.txt")).getLines().toSeq
  val Range_shopping_center = Source.fromInputStream(
    this.getClass.getClassLoader.getResourceAsStream("shopping-centers.txt")).getLines().toSeq
  val Range_employee = (1, 300)
  val Range_quantity = (1, 30)
  val Range_timestap = (0, 60)
  val Range_creditCard = (0, 9)
  val R = Random
  val DigitsCreditCard = 16

  val Range_family_product: Map[String, Map[String, Float]] = Source.fromInputStream(
    this.getClass.getClassLoader.getResourceAsStream("family-products.csv")).getLines().map(x => {
    val splitted = x.split(",")
    (splitted(0), Map(splitted(1) -> splitted(2).toFloat))
  }).toMap

  def generateShoppingCenter(): String = {
    Range_shopping_center(generateRandomInt(0, Range_shopping_center.length - 1))
  }

  def generatePaymentMethod(): String = {
    Range_payment_method(generateRandomInt(0, Range_payment_method.length - 1))
  }

  def generateTimestamp(): String = {
    val datetime = new DateTime().minusDays(generateRandomInt(Range_timestap._1, Range_timestap._2))
    DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").print(datetime)
  }

  def generateRandomInt(min: Int, max: Int): Int = {
    R.nextInt((max - min) + 1) + min
  }

  @tailrec
  def generateCreditCard(current: String): String = {
    if (current.length != DigitsCreditCard)
      generateCreditCard(current + generateRandomInt(Range_creditCard._1, Range_creditCard._2))
    else current
  }
}

trait RawModelCommonData {

  val geolocations = initGeolocations()
  val clientIdCreditCard: Map[Int, String] =
    initClientIdCreditCard((1 to RawModel.Range_client_id._2).toSeq, Map())
  val clientIdGeo: Map[Int, (Double, Double)] = initClientIdGeo(clientIdCreditCard, geolocations)

  def initGeolocations(): Seq[String] = {
    Source.fromInputStream(
      this.getClass.getClassLoader.getResourceAsStream("geolocations.csv")).getLines().toSeq
  }

  def initClientIdCreditCard(idClients: Seq[Int],
                             clientIdCreditCard: Map[Int, String]): Map[Int, String] = {
    if (idClients.size == 0) {
      clientIdCreditCard
    } else {
      val newIdClients = idClients.init
      val newClientIdCreditCard = clientIdCreditCard + (idClients.last -> RawModel.generateCreditCard(""))
      initClientIdCreditCard(newIdClients, newClientIdCreditCard)
    }
  }

  def initClientIdGeo(clientIdCreditCard: Map[Int, String],
                      geolocations: Seq[String]): Map[Int, (Double, Double)] = {
    clientIdCreditCard.map(x => {
      val index = RawModel.generateRandomInt(0, geolocations.size - 1)
      x._1 -> (geolocations(index).split(":")(0).toDouble, geolocations(index).split(":")(1).toDouble)
    })
  }
}
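A quick check of the tail-recursive helper above: generateCreditCard keeps appending random digits to the accumulator until it is 16 characters long. The snippet below is a sketch; note that touching the RawModel object also runs its initializers, which expect payment-methods.txt, shopping-centers.txt and family-products.csv to be on the classpath.

// Sketch: the recursion terminates with a 16-digit string.
val creditCard = RawModel.generateCreditCard("")
assert(creditCard.length == RawModel.DigitsCreditCard)
assert(creditCard.forall(_.isDigit))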
Example 39
Source File: Generator.scala From donut with MIT License | 5 votes |
package report.donut

import org.apache.commons.lang3.StringUtils
import org.joda.time.DateTime
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
import report.donut.gherkin.model._
import report.donut.log.Log
import report.donut.performance.PerformanceSupport
import report.donut.template.TemplateEngine
import report.donut.transformers.cucumber.{CucumberTransformer, Feature => CucumberFeature}

import scala.collection.mutable.ListBuffer
import scala.util.Try

object Generator extends Log with PerformanceSupport {

  val formatter: DateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd-HHmm")

  //this wrapper is currently used to help the java maven plugin
  def apply(resultSources: String,
            outputPath: String = "donut",
            filePrefix: String = "",
            dateTime: String,
            template: String = "default",
            countSkippedAsFailure: Boolean = false,
            countPendingAsFailure: Boolean = false,
            countUndefinedAsFailure: Boolean = false,
            countMissingAsFailure: Boolean = false,
            projectName: String,
            projectVersion: String,
            customAttributes: scala.collection.mutable.Map[String, String]): ReportConsole = {
    createReport(resultSources, outputPath, filePrefix, dateTime, template, countSkippedAsFailure,
      countPendingAsFailure, countUndefinedAsFailure, countMissingAsFailure, projectName, projectVersion,
      customAttributes.toMap) match {
      case Right(report) => ReportConsole(report)
      case Left(error) => throw DonutException(s"An error occurred while generating donut report. $error")
    }
  }

  private[donut] def createReport(resultSources: String,
                                  outputPath: String = "donut",
                                  filePrefix: String = "",
                                  datetime: String = formatter.print(DateTime.now),
                                  template: String = "default",
                                  countSkippedAsFailure: Boolean = false,
                                  countPendingAsFailure: Boolean = false,
                                  countUndefinedAsFailure: Boolean = false,
                                  countMissingAsFailure: Boolean = false,
                                  projectName: String,
                                  projectVersion: String,
                                  customAttributes: Map[String, String] = Map()): Either[String, Report] = {
    //Prepare objects
    val statusConf = StatusConfiguration(countSkippedAsFailure, countPendingAsFailure, countUndefinedAsFailure, countMissingAsFailure)
    val projectMetadata = ProjectMetadata(projectName, projectVersion, customAttributes)
    val reportStartedTimestamp = Try(formatter.parseDateTime(datetime)).getOrElse(DateTime.now)

    for {
      resultSourceList <- if (!StringUtils.isBlank(resultSources))
        Right(resultSources.split(",").map(_.trim).toList).right
      else
        Left("Unable to extract the paths to the result sources. Please use this format:- cucumber:/my/path/cucumber-reports,cucumber:/my/other/path/adapted-reports").right
      features <- timed("step1", "Loaded result sources") {
        loadResultSources(resultSourceList, statusConf).right
      }
      report <- timed("step2", "Produced report") {
        Right(Report(features, reportStartedTimestamp, projectMetadata)).right
      }
      _ <- TemplateEngine(report, s"/templates/$template/index.html").renderToHTML(outputPath, filePrefix).right
    } yield report
  }

  def loadResultSources(resultSourceList: List[String], statusConf: StatusConfiguration): Either[String, List[Feature]] = {
    var features = new ListBuffer[CucumberFeature]
    for (resultSource <- resultSourceList) {
      val result = ResultLoader(resultSource).load
      if (result.isLeft) return Left(result.left.get)
      features ++= result.right.get
    }
    val donutFeatures = CucumberTransformer.transform(features.toList, statusConf).right.get
    Try(donutFeatures.toList).toEither(_.getMessage)
  }
}

case class DonutException(mgs: String) extends Exception
Example 40
Source File: ExecutionDashboard.scala From donut with MIT License | 5 votes |
package report.donut.gherkin.model

import org.joda.time.format.DateTimeFormat

case class ExecutionDashboard(totalFeatures: Int,
                              featureMetrics: Metrics,
                              allTestsMetrics: Metrics,
                              scenarioMetrics: Metrics,
                              unitTestMetrics: Metrics,
                              stepMetrics: Metrics,
                              totalTags: Int,
                              featureFailRate: Double,
                              featurePassRate: Double,
                              scenariosFailRate: Double,
                              scenariosPassRate: Double,
                              unitTestsFailRate: Double,
                              unitTestsPassRate: Double,
                              executionDuration: String,
                              executionDateTime: String)

object ExecutionDashboard {

  def apply(implicit features: List[Feature], executionData: ExecutionData): ExecutionDashboard = {
    val featuresMetrics = executionData.allFeatureMetrics
    val allTestsMetrics = executionData.allTestsMetrics
    val scenarioMetrics = executionData.allScenarioMetrics
    val unitTestMetrics = executionData.allUnitTestMetrics

    ExecutionDashboard(
      featuresMetrics.total,
      featuresMetrics,
      allTestsMetrics,
      scenarioMetrics,
      unitTestMetrics,
      executionData.allStepMetrics,
      executionData.allTagSize,
      failRate(featuresMetrics),
      passRate(featuresMetrics),
      failRate(scenarioMetrics),
      passRate(scenarioMetrics),
      failRate(unitTestMetrics),
      passRate(unitTestMetrics),
      Duration.calculateTotalDurationStr(features.map(f => f.duration.duration)),
      DateTimeFormat.forPattern("dd/MM/yyyy HH:mm").print(executionData.timestamp))
  }

  def passRate(metrics: Metrics) = rate(metrics.passed.toDouble, metrics.total.toDouble)

  def failRate(metrics: Metrics) = rate(metrics.failed.toDouble, metrics.total.toDouble)

  def rate(metric: Double, totalMetrics: Double) = {
    if (totalMetrics > 0)
      BigDecimal((metric / totalMetrics) * 100).setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble
    else 0d
  }
}
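Two worked values for the rate helper above, to make the rounding and the zero-total guard concrete:

// (3 / 8) * 100 = 37.5 after rounding to two decimals with HALF_UP
ExecutionDashboard.rate(3d, 8d)   // == 37.5
// a total of 0 short-circuits to 0d instead of dividing by zero
ExecutionDashboard.rate(0d, 0d)   // == 0d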
Example 41
Source File: S3UploadController.scala From scuruto with MIT License | 5 votes |
package controller.upload

import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec

import _root_.controller.UploadController
import lib._
import model.Upload
import model.typebinder.UserId
import org.apache.commons.codec.binary.Base64
import org.joda.time.format.DateTimeFormat
import org.joda.time._
import skinny._

object S3UploadController extends UploadController {

  override def destination: UploadDestination = UploadDestination("s3")

  // --------------
  // sign
  val AWS_ACCESS_KEY = "AWS_ACCESS_KEY"
  val AWS_SECRET_KEY = "AWS_SECRET_KEY"

  def sign: String = {
    val userId = policiesParams.getAs[UserId]("user_id").get
    val filename = params("filename")
    val ext = filename.split('.').last
    val seed = userId.value + "_" + DateTime.now().toString + "_" + filename
    val baseDir = SkinnyConfig.stringConfigValue("upload.s3.baseDir").getOrElse("")
    val path = baseDir + new Sha1Digest(seed).digestString + "." + ext

    val bucket = SkinnyConfig.stringConfigValue("upload.s3.bucket").getOrElse(throw new IllegalArgumentException)
    val policyDocument = toJSONString(Map(
      "expiration" -> new DateTime(DateTimeZone.UTC).plusMinutes(1).toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z")),
      "conditions" -> Array(
        Map("bucket" -> bucket),
        Map("key" -> path),
        Map("Content-Type" -> policiesParams.getAs("content_type")),
        Array("content-length-range", policiesParams.getAs("size"), policiesParams.getAs("size"))
      )
    ), underscoreKeys = false)
    val policy = Base64.encodeBase64String(policyDocument.getBytes("UTF-8"))

    val hmac = Mac.getInstance("HmacSHA1")
    hmac.init(new SecretKeySpec(sys.env(AWS_SECRET_KEY).getBytes("UTF-8"), "HmacSHA1"))
    val signature = Base64.encodeBase64String(hmac.doFinal(policy.getBytes("UTF-8")))

    // add to uploads table
    Upload.createWithAttributes(
      'user_id -> userId.value,
      'original_filename -> filename,
      'filename -> path
    )

    // response
    toJSONString(Map(
      "url" -> s"https://$bucket.s3.amazonaws.com/",
      "form" -> Map(
        "AWSAccessKeyId" -> sys.env(AWS_ACCESS_KEY),
        "signature" -> signature,
        "policy" -> policy,
        "key" -> path,
        "Content-Type" -> policiesParams.getAs("content_type")
      )
    ), underscoreKeys = false)
  }

  // --------------
  override def upload: Any = throw new UnsupportedOperationException

  // --------------
  override def uploadedFileBaseUrl: UploadedBaseURL = UploadedBaseURL("s3")
}
Example 42
Source File: Rfc3339Util.scala From play-swagger with MIT License | 5 votes |
package de.zalando.play.controllers

import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, LocalDate}

object Rfc3339Util {

  private val fullDate = DateTimeFormat.forPattern("yyyy-MM-dd")
  private val shortDateTime = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")
  private val shortDTWithTicks = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")
  private val fullDTWithTicks = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'")
  private val dateTime = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSZ")

  def parseDateTime(datestring: String): DateTime =
    if (datestring.endsWith("Z") || datestring.endsWith("z")) parseFull(datestring)
    else parseParts(datestring)

  def parseDate(datestring: String): LocalDate = fullDate.parseDateTime(datestring).toLocalDate

  def writeDate(date: LocalDate): String = fullDate.print(date)

  def writeDateTime(date: DateTime): String = dateTime.print(date)

  private def parseParts(datestring: String): DateTime = {
    //step one, split off the timezone.
    val sepChar = if (datestring.indexOf('+') > 0) '+' else '-'
    val firstpart = datestring.substring(0, datestring.lastIndexOf(sepChar.toInt))
    val secondpart = datestring.substring(datestring.lastIndexOf(sepChar.toInt))
    //step two, remove the colon from the timezone offset
    val thirdpart = secondpart.substring(0, secondpart.indexOf(':')) + secondpart.substring(secondpart.indexOf(':') + 1)
    val dstring = firstpart + thirdpart
    try {
      shortDateTime.parseDateTime(dstring)
    } catch {
      case pe: IllegalArgumentException => dateTime.parseDateTime(dstring)
    }
  }

  private def parseFull(datestring: String): DateTime = {
    try {
      shortDTWithTicks.parseDateTime(datestring)
    } catch {
      case p: IllegalArgumentException => fullDTWithTicks.parseDateTime(datestring)
    }
  }
}
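A short usage sketch for the utility above; the timestamp values are illustrative and chosen to exercise both branches of parseDateTime:

val zulu   = Rfc3339Util.parseDateTime("2017-01-10T23:13:26Z")      // "Z" suffix -> parseFull
val offset = Rfc3339Util.parseDateTime("2017-01-10T23:13:26+01:00") // numeric offset -> parseParts
val date   = Rfc3339Util.parseDate("2017-01-10")
val text   = Rfc3339Util.writeDateTime(zulu)                        // printed with yyyy-MM-dd'T'HH:mm:ss.SSSSSSZ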
Example 43
Source File: Dates.scala From play-ui with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.play.views.formatting

import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone, LocalDate}
import play.api.i18n.Lang

object Dates {

  private val MonthNamesInWelsh = Map(
    1 -> "Ionawr",
    2 -> "Chwefror",
    3 -> "Mawrth",
    4 -> "Ebrill",
    5 -> "Mai",
    6 -> "Mehefin",
    7 -> "Gorffennaf",
    8 -> "Awst",
    9 -> "Medi",
    10 -> "Hydref",
    11 -> "Tachwedd",
    12 -> "Rhagfyr")

  private val WeekDaysInWelsh = Map(
    1 -> "Dydd Llun",
    2 -> "Dydd Mawrth",
    3 -> "Dydd Mercher",
    4 -> "Dydd Iau",
    5 -> "Dydd Gwener",
    6 -> "Dydd Sadwrn",
    7 -> "Dydd Sul")

  private[formatting] val dateFormat =
    DateTimeFormat.forPattern("d MMMM y").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val dateFormatAbbrMonth =
    DateTimeFormat.forPattern("d MMM y").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val shortDateFormat =
    DateTimeFormat.forPattern("yyyy-MM-dd").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val easyReadingDateFormat =
    DateTimeFormat.forPattern("EEEE d MMMM yyyy").withZone(DateTimeZone.forID("Europe/London"))
  private[formatting] val easyReadingTimestampFormat =
    DateTimeFormat.forPattern("h:mmaa").withZone(DateTimeZone.forID("Europe/London"))

  def formatDate(date: LocalDate) = dateFormat.print(date)

  def formatDateAbbrMonth(date: LocalDate) = dateFormatAbbrMonth.print(date)

  def formatDate(date: Option[LocalDate], default: String) = date match {
    case Some(d) => dateFormat.print(d)
    case None => default
  }

  def formatDateTime(date: DateTime) = dateFormat.print(date)

  def formatEasyReadingTimestamp(date: Option[DateTime], default: String)(implicit lang: Lang) = {
    val englishEasyDate: DateTime => String = d =>
      s"${easyReadingTimestampFormat.print(d).toLowerCase}, ${easyReadingDateFormat.print(d)}"
    val welshEasyDate: DateTime => String = d =>
      s"${easyReadingTimestampFormat.print(d).toLowerCase}, ${WeekDaysInWelsh(d.getDayOfWeek)} ${d.getDayOfMonth} ${MonthNamesInWelsh(d.getMonthOfYear)} ${d.getYear}"
    val formatter = lang.code match {
      case "cy" => welshEasyDate
      case _ => englishEasyDate
    }
    date.fold(default)(formatter)
  }

  def shortDate(date: LocalDate) = shortDateFormat.print(date)

  def formatDays(days: Int) = s"$days day${if (days > 1) "s" else ""}"
}
Example 44
Source File: TimePartitioningWriter.scala From etl-light with MIT License | 5 votes |
package yamrcraft.etlite.writers

import java.io.IOException

import com.typesafe.config.Config
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.joda.time.format.DateTimeFormat
import org.slf4j.LoggerFactory
import yamrcraft.etlite.EtlException
import yamrcraft.etlite.transformers.Message
import yamrcraft.etlite.utils.ConfigConversions._

import scala.collection.mutable

class TimePartitioningWriter[T](config: Config, jobId: Long, partitionId: Int,
                                writerFactory: (String, String) => Writer[T]) extends Writer[Message[T]] {

  val logger = LoggerFactory.getLogger(this.getClass)

  // config settings
  val workingFolder: String = config.getString("working-folder")
  val outputFolder: String = config.getString("output-folder")
  val partitionPattern: String = config.getString("partition.pattern")
  val folderMapping: Map[String, String] = config.getConfig("record-name-to-folder-mapping").asMap

  val fs = FileSystem.get(new Configuration())

  val partitionFormat = DateTimeFormat.forPattern(partitionPattern)
  val partitionsWriters = mutable.Map[String, Writer[T]]()

  @throws(classOf[EtlException])
  @throws(classOf[IOException])
  override def write(event: Message[T]): Unit = {
    val timestamp = event.msgTimestamp
    val baseFolder = folderMapping.getOrElse(event.msgType, event.msgType)

    val writer = writerFor(baseFolder, timestamp)
    writer.write(event.msg)
  }

  override def commit() = {
    // close all writers
    partitionsWriters foreach { case (file, writer) =>
      writer.commit()
    }
  }

  @throws(classOf[EtlException])
  private def writerFor(baseFolder: String, timestamp: Long): Writer[T] = {
    val relativeFileName = new Path(
      s"$baseFolder/${partitionFormat.print(timestamp)}/events_${baseFolder}_job${jobId}_part$partitionId")
    val tempFile = new Path(workingFolder, relativeFileName)
    val outputFile = new Path(outputFolder, relativeFileName)

    partitionsWriters.getOrElseUpdate(tempFile.toString, writerFactory(tempFile.toString, outputFile.toString))
  }
}
Example 45
Source File: package.scala From pureconfig with Mozilla Public License 2.0 | 5 votes |
package pureconfig.module

import org.joda.time.{ DateTimeZone, Duration, Instant, Interval }
import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter }
import pureconfig.{ ConfigConvert, ConfigReader }
import pureconfig.ConfigConvert.{ catchReadError, viaNonEmptyString }

package object joda {

  implicit def instantConfigConvert: ConfigConvert[Instant] =
    ConfigConvert[Long].xmap(new Instant(_), _.getMillis)

  implicit def intervalConfigConvert: ConfigConvert[Interval] =
    viaNonEmptyString[Interval](
      catchReadError(Interval.parseWithOffset), _.toString)

  implicit def durationConfigConvert: ConfigConvert[Duration] =
    viaNonEmptyString[Duration](
      catchReadError(Duration.parse), _.toString)

  implicit def dateTimeFormatterConfigConvert: ConfigReader[DateTimeFormatter] =
    ConfigReader.fromNonEmptyString[DateTimeFormatter](
      catchReadError(DateTimeFormat.forPattern))

  implicit def dateTimeZoneConfigConvert: ConfigConvert[DateTimeZone] =
    viaNonEmptyString[DateTimeZone](
      catchReadError(DateTimeZone.forID), _.getID)
}
Example 46
Source File: ConfigurableSuite.scala From pureconfig with Mozilla Public License 2.0 | 5 votes |
package pureconfig.module.joda.configurable

import org.joda.time._
import org.joda.time.format.{ DateTimeFormat, ISOPeriodFormat, PeriodFormatter }
import pureconfig.BaseSuite
import pureconfig.module.joda.arbitrary._

class ConfigurableSuite extends BaseSuite {

  val isoFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZZZ")
  implicit val dateTimeInstance = dateTimeConfigConvert(isoFormatter)
  checkArbitrary[DateTime]

  val timeFormatter = DateTimeFormat.forPattern("HH:mm:ss.SSS")
  implicit val localTimeInstance = localTimeConfigConvert(timeFormatter)
  checkArbitrary[LocalTime]

  val dateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd")
  implicit val localDateInstance = localDateConfigConvert(dateFormatter)
  checkArbitrary[LocalDate]

  val dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS")
  implicit val localDateTimeInstance = localDateTimeConfigConvert(dateTimeFormatter)
  checkArbitrary[LocalDateTime]

  val monthDayFormat = DateTimeFormat.forPattern("MM-dd")
  implicit val monthDayInstance = monthDayConfigConvert(monthDayFormat)
  checkArbitrary[MonthDay]

  val yearMonthFormat = DateTimeFormat.forPattern("yyyy-MM")
  implicit val yearMonthInstance = yearMonthConfigConvert(yearMonthFormat)
  checkArbitrary[YearMonth]

  val periodFormatter: PeriodFormatter = ISOPeriodFormat.standard()
  implicit val periodInstance = periodConfigConvert(periodFormatter)
  checkArbitrary[Period]
}