java.time.LocalDate Scala Examples
The following examples show how to use java.time.LocalDate.
The original project, source file, and license are noted above each example.
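Before the project examples, here is a minimal, self-contained sketch of the LocalDate operations they build on (construction, parsing, arithmetic, epoch-day conversion); the values and the object name are illustrative only.

import java.time.LocalDate
import java.time.format.DateTimeFormatter

object LocalDateBasics extends App {
  val fromFields = LocalDate.of(2020, 2, 29)      // explicit year/month/day
  val fromIso    = LocalDate.parse("2020-02-29")  // ISO-8601 is the default format
  val fromCustom = LocalDate.parse("29.02.2020", DateTimeFormatter.ofPattern("dd.MM.yyyy"))

  // LocalDate is immutable: arithmetic returns new instances
  val shifted = fromFields.plusDays(1).minusMonths(1)
  val days    = fromIso.toEpochDay                // days since 1970-01-01

  println(s"$fromFields $fromIso $fromCustom $shifted $days")
}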
Example 1
Source File: DamlDates.scala From daml with Apache License 2.0 | 7 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import java.time.{LocalDate, ZoneOffset}

import com.daml.api.util.TimestampConversion
import com.daml.ledger.client.binding.{Primitive => P}
import scalaz.std.stream

object DamlDates {
  val Min: LocalDate = TimestampConversion.MIN.atZone(ZoneOffset.UTC).toLocalDate
  val Max: LocalDate = TimestampConversion.MAX.atZone(ZoneOffset.UTC).toLocalDate

  val RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate: (LocalDate, LocalDate) =
    (LocalDate.parse("1582-10-05"), LocalDate.parse("1582-10-14"))

  def localDatesWithoutInjectiveFunctionToSqlDate: Stream[LocalDate] =
    stream.unfold(RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._1) { a: LocalDate =>
      if (!a.isAfter(RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._2))
        Some((a, a.plusDays(1)))
      else None
    }

  def damlDatesWithoutInjectiveFunctionToSqlDate: Stream[P.Date] =
    localDatesWithoutInjectiveFunctionToSqlDate.map(pDate)

  private def pDate(d: LocalDate): P.Date =
    P.Date
      .fromLocalDate(d)
      .getOrElse(sys.error(s"expected `P.Date` friendly `LocalDate`, but got: $d"))
}
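The 1582-10-05 to 1582-10-14 range above marks the dates skipped by the Gregorian cutover: java.sql.Date conversions go through the legacy hybrid Julian/Gregorian calendar, so LocalDate values in that window do not survive a round trip and the mapping is not injective. A rough sketch of that round-trip loss, assuming a standard JDK default calendar (the exact printed date may vary):

import java.time.LocalDate

object SqlDateCutoverCheck extends App {
  // Dates skipped by the 1582 cutover collapse onto later dates when passed
  // through java.sql.Date, so LocalDate -> java.sql.Date is not injective here.
  val skipped = LocalDate.parse("1582-10-05")
  val roundTripped = java.sql.Date.valueOf(skipped).toLocalDate
  println(s"$skipped -> $roundTripped") // expected to print a different, later date
}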
Example 2
Source File: FundamentalsParser.scala From YahooFinanceScala with MIT License | 6 votes |
package openquant.yahoofinance.impl

import java.time.format.DateTimeFormatter
import java.time.{LocalDate, ZoneId, ZonedDateTime}

import com.github.tototoshi.csv._
import openquant.yahoofinance.Fundamentals

import scala.io.Source

object FundamentalsParser extends Function1[String, Vector[Fundamentals]] {
  def apply(content: String): Vector[Fundamentals] = {
    val csvReader = CSVReader.open(Source.fromString(content))
    val fundamentals: Vector[Fundamentals] = csvReader.toStream.map { fields ⇒
      parseCSVLine(fields.toVector)
    }.toVector
    fundamentals
  }

  private def parseCSVLine(field: Vector[String]): Fundamentals = {
    require(field.length >= 2, "number of fields")
    val name = field(1)
    if (name == "N/A")
      Fundamentals(
        looksValid = false,
        symbol = field(0),
        name = name
      )
    else
      Fundamentals(
        looksValid = true,
        symbol = field(0),
        name = name
      )
  }
}
Example 3
Source File: ChronoLocalDateTest.scala From scala-js-java-time with BSD 3-Clause "New" or "Revised" License | 6 votes |
package org.scalajs.testsuite.javalib.time.chrono

import java.time.{DateTimeException, LocalTime, LocalDate}
import java.time.chrono.ChronoLocalDate

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

class ChronoLocalDateTest {
  import ChronoLocalDate._

  @Test def test_timeLineOrder(): Unit = {
    val ord = timeLineOrder
    val ds = Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX)
    for {
      d1 <- ds
      d2 <- ds
    } {
      assertEquals(math.signum(d1.compareTo(d2)), math.signum(ord.compare(d1, d2)))
    }
  }

  @Test def test_from(): Unit = {
    for (d <- Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX))
      assertEquals(d, from(d))
    for (t <- Seq(LocalTime.MIN, LocalTime.NOON, LocalTime.MAX))
      expectThrows(classOf[DateTimeException], from(t))
  }
}
Example 4
Source File: ChronoPeriodTest.scala From scala-js-java-time with BSD 3-Clause "New" or "Revised" License | 6 votes |
package org.scalajs.testsuite.javalib.time.chrono

import java.time.LocalDate
import java.time.chrono.ChronoPeriod

import org.junit.Test
import org.junit.Assert.assertEquals

class ChronoPeriodTest {
  @Test def test_between(): Unit = {
    val ds = Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX)
    for {
      d1 <- ds
      d2 <- ds
    } {
      assertEquals(d1.until(d2), ChronoPeriod.between(d1, d2))
    }
  }
}
Example 5
Source File: ApiValueImplicits.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.value.json

import java.time.{Instant, LocalDate}
import java.time.format.{DateTimeFormatter, DateTimeFormatterBuilder}

import com.daml.lf.data.Time
import com.daml.lf.value.{Value => V}

object ApiValueImplicits {

  implicit final class `ApiTimestamp additions`(private val it: V.ValueTimestamp) extends AnyVal {
    import it._
    def toInstant: Instant = value.toInstant
    def toIso8601: String = DateTimeFormatter.ISO_INSTANT.format(toInstant)
  }

  implicit final class `ApiDate additions`(private val it: V.ValueDate) extends AnyVal {
    import it._
    def toLocalDate: LocalDate = LocalDate.ofEpochDay((value.days: Int).toLong)
    def toInstant: Instant = Instant.from(toLocalDate)
    def toIso8601: String = DateTimeFormatter.ISO_LOCAL_DATE.format(toLocalDate)
  }

  // Timestamp has microsecond resolution
  private val formatter: DateTimeFormatter =
    new DateTimeFormatterBuilder().appendInstant(6).toFormatter()

  implicit final class `ApiTimestamp.type additions`(private val it: V.ValueTimestamp.type)
      extends AnyVal {
    def fromIso8601(t: String): V.ValueTimestamp = fromInstant(Instant.parse(t))
    def fromInstant(t: Instant): V.ValueTimestamp =
      V.ValueTimestamp(Time.Timestamp.assertFromInstant(t))
    def fromMillis(t: Long): V.ValueTimestamp =
      V.ValueTimestamp(Time.Timestamp.assertFromLong(micros = t * 1000L))
  }

  implicit final class `ApiDate.type additions`(private val it: V.ValueDate.type) extends AnyVal {
    def fromIso8601(t: String): V.ValueDate =
      fromLocalDate(LocalDate.parse(t, DateTimeFormatter.ISO_LOCAL_DATE))
    def fromLocalDate(t: LocalDate): V.ValueDate =
      V.ValueDate(Time.Date.assertFromDaysSinceEpoch(t.toEpochDay.toInt))
  }
}
Example 6
Source File: DamlDateGen.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import java.time.LocalDate

import com.daml.ledger.client.binding.encoding.DamlDates._
import com.daml.ledger.client.binding.{Primitive => P}
import org.scalacheck.Gen

object DamlDateGen {

  private def genSqlCompatibleLocalDate: Gen[LocalDate] = {
    // skip the range
    val upTo: Long = RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._1.toEpochDay - 1
    val upFrom: Long = RangeOfLocalDatesWithoutInjectiveFunctionToSqlDate._2.toEpochDay + 1
    Gen
      .oneOf(
        Gen.choose(Min.toEpochDay, upTo),
        Gen.choose(upFrom, Max.toEpochDay)
      )
      .map(LocalDate.ofEpochDay)
  }

  def genDamlDate: Gen[P.Date] = P.Date.subst(genSqlCompatibleLocalDate)
}
Example 7
Source File: PrimitiveSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding

import java.time.{Instant, LocalDate}

import org.scalacheck.Gen
import org.scalatest.{WordSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import shapeless.test.illTyped

import com.daml.ledger.client.binding.{Primitive => P}

class PrimitiveSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import PrimitiveSpec._

  "Primitive types" when {
    "defined concretely" should {
      "have nice companion aliases" in {
        P.List: collection.generic.TraversableFactory[P.List]
      }
    }
    "defined abstractly" should {
      "carry their phantoms" in {
        def check[A, B]() = {
          illTyped(
            "implicitly[P.ContractId[A] =:= P.ContractId[B]]",
            "Cannot prove that .*ContractId\\[A\\] =:= .*ContractId\\[B\\].")
          illTyped(
            "implicitly[P.TemplateId[A] =:= P.TemplateId[B]]",
            "Cannot prove that .*TemplateId\\[A\\] =:= .*TemplateId\\[B\\].")
          illTyped(
            "implicitly[P.Update[A] =:= P.Update[B]]",
            "Cannot prove that .*Update\\[A\\] =:= .*Update\\[B\\].")
        }
        check[Unit, Unit]()
      }
    }
  }

  "Date.fromLocalDate" should {
    import ValueSpec.dateArb

    "pass through existing dates" in forAll { d: P.Date =>
      P.Date.fromLocalDate(d: LocalDate) shouldBe Some(d)
    }

    "be idempotent" in forAll(anyLocalDateGen) { d =>
      val od2 = P.Date.fromLocalDate(d)
      od2 flatMap (P.Date.fromLocalDate(_: LocalDate)) shouldBe od2
    }

    "prove MIN, MAX are valid" in {
      import P.Date.{MIN, MAX}
      P.Date.fromLocalDate(MIN: LocalDate) shouldBe Some(MIN)
      P.Date.fromLocalDate(MAX: LocalDate) shouldBe Some(MAX)
    }
  }

  "Timestamp.discardNanos" should {
    import ValueSpec.timestampArb

    "pass through existing times" in forAll { t: P.Timestamp =>
      P.Timestamp.discardNanos(t: Instant) shouldBe Some(t)
    }

    "be idempotent" in forAll(anyInstantGen) { i =>
      val oi2 = P.Timestamp.discardNanos(i)
      oi2 flatMap (P.Timestamp.discardNanos(_: Instant)) shouldBe oi2
    }

    "prove MIN, MAX are valid" in {
      import P.Timestamp.{MIN, MAX}
      P.Timestamp.discardNanos(MIN: Instant) shouldBe Some(MIN)
      P.Timestamp.discardNanos(MAX: Instant) shouldBe Some(MAX)
    }

    "preapprove values for TimestampConversion.instantToMicros" in forAll(anyInstantGen) { i =>
      P.Timestamp.discardNanos(i) foreach { t =>
        noException should be thrownBy com.daml.api.util.TimestampConversion
          .instantToMicros(t)
      }
    }
  }
}

object PrimitiveSpec {
  private val anyLocalDateGen: Gen[LocalDate] =
    Gen.choose(LocalDate.MIN.toEpochDay, LocalDate.MAX.toEpochDay) map LocalDate.ofEpochDay

  private val anyInstantGen: Gen[Instant] =
    Gen
      .zip(
        Gen.choose(Instant.MIN.getEpochSecond, Instant.MAX.getEpochSecond),
        Gen.choose(0L, 999999999))
      .map { case (s, n) => Instant.ofEpochSecond(s, n) }
}
Example 8
Source File: ApiValueSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.model

import java.time.{Instant, LocalDate}

import org.scalatest.{Matchers, WordSpec}
import com.daml.lf.data.{Time => LfTime}
import com.daml.lf.value.Value.{ValueDate, ValueTimestamp}
import com.daml.lf.value.json.ApiValueImplicits._

class ApiValueSpec extends WordSpec with Matchers {
  "Api values" when {
    "converting Date from ISO and back" should {
      val isoDate = "2019-01-28"
      val result = ValueDate.fromIso8601(isoDate).toIso8601
      "not change the value" in {
        result shouldBe isoDate
      }
    }
    "converting Date to ISO and back" should {
      val date = ValueDate(LfTime.Date assertFromDaysSinceEpoch 10000)
      val result = ValueDate.fromIso8601(date.toIso8601)
      "not change the value" in {
        result shouldBe date
      }
    }
    "converting Date from LocalDate and back" should {
      val localDate = LocalDate.of(2019, 1, 28)
      val result = ValueDate.fromLocalDate(localDate).toLocalDate
      "not change the value" in {
        result shouldBe localDate
      }
    }
    "converting Timestamp from ISO and back" should {
      // Timestamp has microsecond resolution
      val isoDateTime = "2019-01-28T12:44:33.123456Z"
      val result = ValueTimestamp.fromIso8601(isoDateTime).toIso8601
      "not change the value" in {
        result shouldBe isoDateTime
      }
    }
    "converting Timestamp to ISO and back" should {
      val timestamp = ValueTimestamp(LfTime.Timestamp assertFromLong 123456789123456L)
      val result = ValueTimestamp.fromIso8601(timestamp.toIso8601)
      "not change the value" in {
        result shouldBe timestamp
      }
    }
    "converting Timestamp from Instant and back" should {
      // Timestamp has microsecond resolution
      val instant = Instant.ofEpochSecond(86400L * 365L * 30L, 123456L * 1000L)
      val result = ValueTimestamp.fromInstant(instant).toInstant
      "not change the value" in {
        result shouldBe instant
      }
    }
  }
}
Example 9
Source File: QuoteParser.scala From YahooFinanceScala with MIT License | 5 votes |
package openquant.yahoofinance.impl

import java.time.format.DateTimeFormatter
import java.time.{LocalDate, ZoneId, ZonedDateTime}

import com.github.tototoshi.csv._
import openquant.yahoofinance.Quote

import scala.io.Source

class QuoteParser {
  private[this] val df = DateTimeFormatter.ofPattern("yyyy-MM-dd")
  private[this] val zoneId = ZoneId.of("America/New_York")

  def parse(content: String): Vector[Quote] = {
    val csvReader = CSVReader.open(Source.fromString(content))
    val quotes: Vector[Quote] = csvReader.toStream.drop(1).map { fields ⇒
      parseCSVLine(fields.toVector)
    }.toVector
    quotes
  }

  private def parseCSVLine(field: Vector[String]): Quote = {
    require(field.length >= 7)
    Quote(
      parseDate(field(0)),
      BigDecimal(field(1)),
      BigDecimal(field(4)),
      BigDecimal(field(2)),
      BigDecimal(field(3)),
      BigDecimal(field(5)),
      BigDecimal(field(6))
    )
  }

  private def parseDate(date: String): ZonedDateTime = {
    LocalDate.parse(date, df).atStartOfDay().atZone(zoneId)
  }
}

object QuoteParser {
  def apply() = new QuoteParser
}
Example 10
Source File: DateInterval.scala From chronoscala with MIT License | 5 votes |
package jp.ne.opt.chronoscala

import java.time.{LocalDate, Period}

import jp.ne.opt.chronoscala.Imports._

case class DateInterval(startDate: LocalDate, endDate: LocalDate, step: Period)
  extends Seq[LocalDate] {

  def apply(idx: Int): LocalDate = {
    if (0 <= idx && idx < length) {
      iterator.drop(idx).next
    } else {
      throw new IndexOutOfBoundsException(idx.toString)
    }
  }

  def iterator: Iterator[LocalDate] =
    Iterator.iterate(startDate)(_ + step).takeWhile(_ <= endDate)

  def length: Int = iterator.length

  def by(step: Period): DateInterval = this.copy(step = step)
}
Example 11
Source File: WriteAndReadGenericApp.scala From parquet4s with MIT License | 5 votes |
package com.github.mjakubowski84.parquet4s.core

import java.time.{LocalDate, ZoneOffset}
import java.util.TimeZone

import com.github.mjakubowski84.parquet4s.{ParquetReader, ParquetWriter, RowParquetRecord, ValueCodecConfiguration}
import com.google.common.io.Files
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.{BINARY, INT32, INT64}
import org.apache.parquet.schema.Type.Repetition.{OPTIONAL, REQUIRED}
import org.apache.parquet.schema.{MessageType, OriginalType, Types}

object WriteAndReadGenericApp extends App {

  val ID = "id"
  val Name = "name"
  val Birthday = "birthday"
  val Schema = "user_schema"

  val path = Files.createTempDir().getAbsolutePath
  val vcc = ValueCodecConfiguration(TimeZone.getTimeZone(ZoneOffset.UTC))

  val users = List(
    (1L, "Alice", LocalDate.of(2000, 1, 1)),
    (2L, "Bob", LocalDate.of(1980, 2, 28)),
    (3L, "Cecilia", LocalDate.of(1977, 3, 15))
  ).map { case (id, name, birthday) =>
    RowParquetRecord.empty
      .add(ID, id, vcc)
      .add(Name, name, vcc)
      .add(Birthday, birthday, vcc)
  }

  // write
  implicit val schema: MessageType = Types.buildMessage()
    .addField(Types.primitive(INT64, REQUIRED).as(OriginalType.INT_64).named(ID))
    .addField(Types.primitive(BINARY, OPTIONAL).as(OriginalType.UTF8).named(Name))
    .addField(Types.primitive(INT32, OPTIONAL).as(OriginalType.DATE).named(Birthday))
    .named(Schema)

  ParquetWriter.writeAndClose(s"$path/users.parquet", users)

  // read
  val readData = ParquetReader.read[RowParquetRecord](path)
  try {
    readData.foreach { record =>
      val id = record.get[Long](ID, vcc)
      val name = record.get[String](Name, vcc)
      val birthday = record.get[LocalDate](Birthday, vcc)
      println(s"User[$ID=$id,$Name=$name,$Birthday=$birthday]")
    }
  } finally readData.close()
}
Example 12
Source File: _05_EmployeeWithCompanionObject.scala From LearningScala with Apache License 2.0 | 5 votes |
package _000_intro

import java.time.LocalDate

class EmployeeWithCompanionObject private(val firstName: String,
                                          val lastName: String,
                                          val title: String,
                                          val hireDate: LocalDate) {

  override def toString = s"EmployeeWithCompanionObject($firstName, $lastName, $title, $hireDate)"

  def +(e: EmployeeWithCompanionObject) = true
}

object EmployeeWithCompanionObject {
  // or use apply :)
  def create(firstName: String, lastName: String, title: String) =
    new EmployeeWithCompanionObject(firstName, lastName, title, LocalDate.now)

  def create(firstName: String, lastName: String, title: String, localDate: LocalDate) =
    new EmployeeWithCompanionObject(firstName, lastName, title, localDate)
}

object Runner extends App {
  val e1 = EmployeeWithCompanionObject.create("Moh", "Noor", "Dev")
  val e2 = EmployeeWithCompanionObject.create("Jack", "Well", "Dev", LocalDate.of(2005, 5, 22))
  println(e1)
  println(e2)
  println(e1 + e2)
}
Example 13
Source File: MomentDateTime.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.js

import java.time.temporal.ChronoField
import java.time.{DayOfWeek, Duration, LocalDate, LocalTime}

import dtc._
import moment.{Date, Moment, Units}

import scala.scalajs.js.Array

  def underlyingMoment: Date = copy

  def dayOfWeek: DayOfWeek = DayOfWeek.of(dayOfWeekJSToJVM(underlying.day()))
  def dayOfMonth: Int = underlying.date()
  def month: Int = underlying.month() + 1
  def year: Int = underlying.year()
  def hour: Int = underlying.hour()
  def minute: Int = underlying.minute()
  def second: Int = underlying.second()
  def millisecond: Int = underlying.millisecond()

  def withYear(year: Int): T = updated(_.year(year.toDouble))
  def withMonth(month: Int): T = updated(_.month(month.toDouble - 1))
  def withDayOfMonth(dayOfMonth: Int): T = updated(_.date(dayOfMonth.toDouble))
  def withHour(hour: Int): T = updated(_.hour(hour.toDouble))
  def withMinute(minute: Int): T = updated(_.minute(minute.toDouble))
  def withSecond(second: Int): T = updated(_.second(second.toDouble))
  def withMillisecond(millisecond: Int): T = updated(_.millisecond(millisecond.toDouble))

  def withTime(time: LocalTime): T = updated(_
    .hour(time.getHour.toDouble)
    .minute(time.getMinute.toDouble)
    .second(time.getSecond.toDouble)
    .millisecond(time.get(ChronoField.MILLI_OF_SECOND).toDouble)
  )

  def withDate(date: LocalDate): T = updated(_
    .year(date.getYear.toDouble)
    .month(date.getMonthValue.toDouble - 1)
    .date(date.getDayOfMonth.toDouble)
  )

  def toLocalDate: LocalDate = LocalDate.of(year, month, dayOfMonth)
  def toLocalTime: LocalTime = LocalTime.of(hour, minute, second, millisToNanos(millisecond))

  def yearsUntil(other: T): Long = other.underlying.diff(underlying, Units.Year).toLong
  def monthsUntil(other: T): Long = other.underlying.diff(underlying, Units.Month).toLong
  def daysUntil(other: T): Long = other.underlying.diff(underlying, Units.Day).toLong
  def millisecondsUntil(other: T): Long = other.underlying.diff(underlying, Units.Millisecond).toLong
  def secondsUntil(other: T): Long = other.underlying.diff(underlying, Units.Second).toLong
  def minutesUntil(other: T): Long = other.underlying.diff(underlying, Units.Minute).toLong
  def hoursUntil(other: T): Long = other.underlying.diff(underlying, Units.Hour).toLong

  def plus(d: Duration): T = plusMillis(d.toMillis)
  def minus(d: Duration): T = plusMillis(-d.toMillis)
  def plusDays(n: Int): T = updated(_.add(n.toDouble, Units.Day))
  def plusMonths(n: Int): T = updated(_.add(n.toDouble, Units.Month))
  def plusYears(n: Int): T = updated(_.add(n.toDouble, Units.Year))

  def plusMillis(n: Long): T

  def format(formatString: String): String = underlying.format(formatString)

  override def toString: String = underlying.toString
}

object MomentDateTime {

  def compare[T <: MomentDateTime[T]](x: T, y: T): Int =
    Ordering.Double.compare(x.underlying.value(), y.underlying.value())

  private[js] def constructorArray(date: LocalDate, time: LocalTime): Array[Int] = Array(
    date.getYear, date.getMonthValue - 1, date.getDayOfMonth,
    time.getHour, time.getMinute, time.getSecond, time.get(ChronoField.MILLI_OF_SECOND)
  )

  private[js] def utcMoment(date: LocalDate, time: LocalTime): Date =
    Moment.utc(constructorArray(date, time))
}
Example 14
Source File: providers.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.instances.moment

import java.time.LocalDate

import dtc.js.{MomentLocalDateTime, MomentZonedDateTime}
import dtc.{Provider, TimeZoneId}

object providers {
  implicit val realMomentLocalDateTimeProvider: Provider[MomentLocalDateTime] =
    new Provider[MomentLocalDateTime] {
      def currentDate(zone: TimeZoneId): LocalDate = currentTime(zone).toLocalDate
      def currentTime(zone: TimeZoneId): MomentLocalDateTime = MomentLocalDateTime.now(zone)
    }
  implicit val realMomentZonedDateTimeProvider: Provider[MomentZonedDateTime] =
    new Provider[MomentZonedDateTime] {
      def currentDate(zone: TimeZoneId): LocalDate = currentTime(zone).toLocalDate
      def currentTime(zone: TimeZoneId): MomentZonedDateTime = MomentZonedDateTime.now(zone)
    }
}
Example 15
Source File: MomentZonedDateTimeInstanceWithoutOrder.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.instances.moment

import java.time.{DayOfWeek, Duration, LocalDate, LocalTime}

import dtc.js.MomentZonedDateTime
import dtc.{Offset, TimeZoneId, Zoned}

trait MomentZonedDateTimeInstanceWithoutOrder extends Zoned[MomentZonedDateTime] {

  def capture(date: LocalDate, time: LocalTime, zone: TimeZoneId): MomentZonedDateTime =
    MomentZonedDateTime.of(date, time, zone)

  def withZoneSameInstant(x: MomentZonedDateTime, zone: TimeZoneId): MomentZonedDateTime =
    x.withZoneSameInstant(zone)
  def withZoneSameLocal(x: MomentZonedDateTime, zone: TimeZoneId): MomentZonedDateTime =
    x.withZoneSameLocal(zone)
  def zone(x: MomentZonedDateTime): TimeZoneId = x.zone

  def date(x: MomentZonedDateTime): LocalDate = x.toLocalDate
  def time(x: MomentZonedDateTime): LocalTime = x.toLocalTime

  def plus(x: MomentZonedDateTime, d: Duration): MomentZonedDateTime = x.plus(d)
  def minus(x: MomentZonedDateTime, d: Duration): MomentZonedDateTime = x.minus(d)
  def plusDays(x: MomentZonedDateTime, days: Int): MomentZonedDateTime = x.plusDays(days)
  def plusMonths(x: MomentZonedDateTime, months: Int): MomentZonedDateTime = x.plusMonths(months)
  def plusYears(x: MomentZonedDateTime, years: Int): MomentZonedDateTime = x.plusYears(years)

  def offset(x: MomentZonedDateTime): Offset = x.offset

  def withYear(x: MomentZonedDateTime, year: Int): MomentZonedDateTime = x.withYear(year)
  def withMonth(x: MomentZonedDateTime, month: Int): MomentZonedDateTime = x.withMonth(month)
  def withDayOfMonth(x: MomentZonedDateTime, dayOfMonth: Int): MomentZonedDateTime = x.withDayOfMonth(dayOfMonth)
  def withHour(x: MomentZonedDateTime, hour: Int): MomentZonedDateTime = x.withHour(hour)
  def withMinute(x: MomentZonedDateTime, minute: Int): MomentZonedDateTime = x.withMinute(minute)
  def withSecond(x: MomentZonedDateTime, second: Int): MomentZonedDateTime = x.withSecond(second)
  def withMillisecond(x: MomentZonedDateTime, millisecond: Int): MomentZonedDateTime = x.withMillisecond(millisecond)
  def withTime(x: MomentZonedDateTime, time: LocalTime): MomentZonedDateTime = x.withTime(time)
  def withDate(x: MomentZonedDateTime, date: LocalDate): MomentZonedDateTime = x.withDate(date)

  def dayOfWeek(x: MomentZonedDateTime): DayOfWeek = x.dayOfWeek
  def dayOfMonth(x: MomentZonedDateTime): Int = x.dayOfMonth
  def month(x: MomentZonedDateTime): Int = x.month
  def year(x: MomentZonedDateTime): Int = x.year
  def millisecond(x: MomentZonedDateTime): Int = x.millisecond
  def second(x: MomentZonedDateTime): Int = x.second
  def minute(x: MomentZonedDateTime): Int = x.minute
  def hour(x: MomentZonedDateTime): Int = x.hour

  def yearsUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.yearsUntil(until)
  def monthsUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.monthsUntil(until)
  def daysUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.daysUntil(until)
  def hoursUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.hoursUntil(until)
  def minutesUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.minutesUntil(until)
  def secondsUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.secondsUntil(until)
  def millisecondsUntil(x: MomentZonedDateTime, until: MomentZonedDateTime): Long = x.millisecondsUntil(until)

  def utc(x: MomentZonedDateTime): (LocalDate, LocalTime) = {
    val utcTime = x.withZoneSameInstant(TimeZoneId.UTC)
    utcTime.toLocalDate -> utcTime.toLocalTime
  }
}
Example 16
Source File: package.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.instances

import java.time.{DayOfWeek, Duration, LocalDate, LocalTime}

import dtc.{Local, Capture, TimeZoneId, Zoned}
import dtc.js.{MomentDateTime, MomentLocalDateTime, MomentZonedDateTime}

package object moment {

  implicit val momentZonedWithStrictEquality: Zoned[MomentZonedDateTime] =
    new MomentZonedDateTimeInstanceWithoutOrder {
      def compare(x: MomentZonedDateTime, y: MomentZonedDateTime): Int =
        MomentZonedDateTime.compareStrict(x, y)
    }

  implicit val momentLocalDTC: Local[MomentLocalDateTime] =
    new Local[MomentLocalDateTime] {
      def date(x: MomentLocalDateTime): LocalDate = x.toLocalDate
      def time(x: MomentLocalDateTime): LocalTime = x.toLocalTime

      def plus(x: MomentLocalDateTime, d: Duration): MomentLocalDateTime = x.plus(d)
      def minus(x: MomentLocalDateTime, d: Duration): MomentLocalDateTime = x.minus(d)
      def plusDays(x: MomentLocalDateTime, days: Int): MomentLocalDateTime = x.plusDays(days)
      def plusMonths(x: MomentLocalDateTime, months: Int): MomentLocalDateTime = x.plusMonths(months)
      def plusYears(x: MomentLocalDateTime, years: Int): MomentLocalDateTime = x.plusYears(years)

      def compare(x: MomentLocalDateTime, y: MomentLocalDateTime): Int = MomentDateTime.compare(x, y)

      def of(date: LocalDate, time: LocalTime): MomentLocalDateTime = MomentLocalDateTime.of(date, time)
      def of(
        year: Int, month: Int, day: Int,
        hour: Int, minute: Int, second: Int, millisecond: Int): MomentLocalDateTime =
        MomentLocalDateTime.of(year, month, day, hour, minute, second, millisecond)

      def withYear(x: MomentLocalDateTime, year: Int): MomentLocalDateTime = x.withYear(year)
      def withMonth(x: MomentLocalDateTime, month: Int): MomentLocalDateTime = x.withMonth(month)
      def withDayOfMonth(x: MomentLocalDateTime, dayOfMonth: Int): MomentLocalDateTime = x.withDayOfMonth(dayOfMonth)
      def withHour(x: MomentLocalDateTime, hour: Int): MomentLocalDateTime = x.withHour(hour)
      def withMinute(x: MomentLocalDateTime, minute: Int): MomentLocalDateTime = x.withMinute(minute)
      def withSecond(x: MomentLocalDateTime, second: Int): MomentLocalDateTime = x.withSecond(second)
      def withMillisecond(x: MomentLocalDateTime, millisecond: Int): MomentLocalDateTime = x.withMillisecond(millisecond)
      def withTime(x: MomentLocalDateTime, time: LocalTime): MomentLocalDateTime = x.withTime(time)
      def withDate(x: MomentLocalDateTime, date: LocalDate): MomentLocalDateTime = x.withDate(date)

      def dayOfWeek(x: MomentLocalDateTime): DayOfWeek = x.dayOfWeek
      def dayOfMonth(x: MomentLocalDateTime): Int = x.dayOfMonth
      def month(x: MomentLocalDateTime): Int = x.month
      def year(x: MomentLocalDateTime): Int = x.year
      def millisecond(x: MomentLocalDateTime): Int = x.millisecond
      def second(x: MomentLocalDateTime): Int = x.second
      def minute(x: MomentLocalDateTime): Int = x.minute
      def hour(x: MomentLocalDateTime): Int = x.hour

      def yearsUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.yearsUntil(until)
      def monthsUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.monthsUntil(until)
      def daysUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.daysUntil(until)
      def hoursUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.hoursUntil(until)
      def minutesUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.minutesUntil(until)
      def secondsUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.secondsUntil(until)
      def millisecondsUntil(x: MomentLocalDateTime, until: MomentLocalDateTime): Long = x.millisecondsUntil(until)
    }

  implicit val captureMomentLocalDateTime: Capture[MomentLocalDateTime] =
    new Capture[MomentLocalDateTime] {
      def capture(date: LocalDate, time: LocalTime, zone: TimeZoneId): MomentLocalDateTime =
        MomentZonedDateTime.of(date, time, zone).withZoneSameInstant(TimeZoneId.UTC).toLocal
    }
}
Example 17
Source File: Main.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.examples

import java.time.{LocalDate, LocalDateTime, LocalTime, Month}

import dtc.instances.localDateTime._

// scalastyle:off
object Main extends App {

  val calendar = Calendar(List(
    CalendarEvent(
      LocalDateTime.of(LocalDate.now(), LocalTime.of(10, 0)),
      LocalDateTime.of(LocalDate.now(), LocalTime.of(11, 0)),
      "Breakfast"
    ),
    CalendarEvent(
      LocalDateTime.of(LocalDate.now().minusDays(2), LocalTime.of(12, 0)),
      LocalDateTime.of(LocalDate.now().minusDays(2), LocalTime.of(14, 0)),
      "Meeting"
    ),
    CalendarEvent(
      LocalDateTime.of(2016, Month.OCTOBER, 9, 11, 0),
      LocalDateTime.of(2016, Month.OCTOBER, 9, 11, 0),
      "Birthday party"
    )
  ))

  println(calendar.eventsAfter(LocalDateTime.now().minusDays(1L)).mkString(", "))
  println(calendar.onlyWorkDays.mkString(", "))

  val period = Period(LocalDateTime.now(), LocalDateTime.now().plusDays(1L))

  println(period.durationInMinutes)
  println(period.durationInSeconds)
  println(period.hours.mkString("\n"))
}
Example 18
Source File: Main.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.examples

import java.time.{Duration, LocalDate, LocalTime}

import dtc.instances.jsDate._
import dtc.js.JSDate

import scala.scalajs.js.annotation.JSExportTopLevel

// scalastyle:off
object Main {
  @JSExportTopLevel("Main")
  def main() = {
    val calendar = Calendar(List(
      CalendarEvent(
        JSDate.of(LocalDate.now(), LocalTime.of(10, 0)),
        JSDate.of(LocalDate.now(), LocalTime.of(11, 0)),
        "Breakfast"
      ),
      CalendarEvent(
        JSDate.of(LocalDate.now().plusDays(2), LocalTime.of(12, 0)),
        JSDate.of(LocalDate.now().plusDays(2), LocalTime.of(14, 0)),
        "Meeting"
      ),
      CalendarEvent(
        JSDate.of(2016, 10, 9, 11, 0),
        JSDate.of(2016, 10, 9, 11, 0),
        "Birthday party"
      )
    ))

    println(calendar.eventsAfter(JSDate.now).mkString(", "))
    println(calendar.onlyWorkDays.mkString(", "))

    val period = Period(JSDate.now, JSDate.now.plus(Duration.ofDays(1L)))

    println(period.durationInMinutes)
    println(period.durationInSeconds)
    println(period.hours.mkString("\n"))
  }
}
Example 19
Source File: ZonedDateTimeLaws.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.laws

import java.time.temporal.ChronoUnit
import java.time.{Duration, LocalDate, LocalTime}

import cats.kernel.laws.discipline.{catsLawsIsEqToProp => p}
import cats.kernel.laws._
import cats.instances.long._
import dtc._
import dtc.syntax.zoned._
import org.scalacheck.Prop._
import org.scalacheck.{Arbitrary, Gen, Prop}

trait ZonedDateTimeLaws[A] {
  implicit def D: Zoned[A]

  val genA: Gen[A]
  val genDateAndDurationWithinSameOffset: Gen[(A, Duration)]
  val genDataSuite: Gen[ZonedDateTimeTestData[A]]
  val genLocalDate: Gen[LocalDate]
  val genLocalTime: Gen[LocalTime]
  val genValidYear: Gen[Int]
  val genTimeZone: Gen[TimeZoneId]

  def crossOffsetAddition: Prop = forAll(genDataSuite) { data =>
    val target = D.plus(data.source, data.diff)
    p(D.offset(target) <-> data.targetOffset) &&
      (D.date(target) <-> data.targetDate) &&
      (D.time(target) <-> data.targetTime.truncatedTo(ChronoUnit.MILLIS))
  }

  def localTimeAndOffsetCorrelation: Prop = forAll(genA, genTimeZone) { (date: A, zone: TimeZoneId) =>
    val target = D.withZoneSameInstant(date, zone)
    D.time(date) <-> D.time(target).plusSeconds((date.offset.seconds - target.offset.seconds).toLong)
  }

  def withZoneSameInstantGivesSameInstant: Prop = forAll(genA, genTimeZone) { (date: A, zone: TimeZoneId) =>
    val target = D.withZoneSameInstant(date, zone)
    p(D.zone(target) <-> zone) &&
      (D.millisecondsUntil(date, target) <-> 0L)
  }
}

object ZonedDateTimeLaws {
  def apply[A](
    gDateAndDurationWithinSameDST: Gen[(A, Duration)],
    gDataSuite: Gen[ZonedDateTimeTestData[A]],
    gLocalTime: Gen[LocalTime],
    gLocalDate: Gen[LocalDate],
    gValidYear: Gen[Int],
    gTimeZone: Gen[TimeZoneId])(
    implicit ev: Zoned[A],
    arbA: Arbitrary[A]): ZonedDateTimeLaws[A] = new ZonedDateTimeLaws[A] {
    def D: Zoned[A] = ev

    val genTimeZone: Gen[TimeZoneId] = gTimeZone
    val genDateAndDurationWithinSameOffset: Gen[(A, Duration)] = gDateAndDurationWithinSameDST
    val genDataSuite: Gen[ZonedDateTimeTestData[A]] = gDataSuite
    val genLocalDate: Gen[LocalDate] = gLocalDate
    val genLocalTime: Gen[LocalTime] = gLocalTime
    val genValidYear: Gen[Int] = gValidYear
    val genA: Gen[A] = arbA.arbitrary
  }
}
Example 20
Source File: ZonedDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.laws

import java.time.{Duration, LocalDate, LocalTime}

import dtc.{TimeZoneId, Zoned}
import org.scalacheck.{Arbitrary, Gen}
import org.typelevel.discipline.Laws

trait ZonedDateTimeTests[A] extends Laws {
  def generalLocalDateTimeLaws: GeneralLocalDateTimeLaws[A]
  def laws: ZonedDateTimeLaws[A]

  def zonedDateTime(implicit arbA: Arbitrary[A], arbD: Arbitrary[Duration]): RuleSet = {
    new DefaultRuleSet(
      name = "ZonedDateTime",
      parent = None,
      "[within same offset] seconds addition laws" -> generalLocalDateTimeLaws.secondsAddition,
      "[within same offset] minutes addition laws" -> generalLocalDateTimeLaws.minutesAddition,
      "[within same offset] hours addition laws" -> generalLocalDateTimeLaws.hoursAddition,
      "[within same offset] withYear laws" -> generalLocalDateTimeLaws.withYear,
      "[within same offset] withMonth laws" -> generalLocalDateTimeLaws.withMonth,
      "[within same offset] withDayOfMonth laws" -> generalLocalDateTimeLaws.withDayOfMonth,
      "[within same offset] withHour laws" -> generalLocalDateTimeLaws.withHour,
      "[within same offset] withMinute laws" -> generalLocalDateTimeLaws.withMinute,
      "[within same offset] withSecond laws" -> generalLocalDateTimeLaws.withSecond,
      "[within same offset] withMillisecond laws" -> generalLocalDateTimeLaws.withMillisecond,
      "[within same offset] withTime laws" -> generalLocalDateTimeLaws.withTime,
      "[within same offset] withDate laws" -> generalLocalDateTimeLaws.withDate,
      "[within same offset] daysUntil is consistent with addition" -> generalLocalDateTimeLaws.daysUntilIsConsistentWithPlus,
      "[within same offset] monthsUntil is consistent with addition" -> generalLocalDateTimeLaws.monthsUntilIsConsistentWithPlus,
      "[within same offset] yearsUntil counts only number of full years" -> generalLocalDateTimeLaws.yearsUntilCountsOnlyFullUnits,
      "cross-offset addition" -> laws.crossOffsetAddition,
      "withZoneSameInstant gives the same instant" -> laws.withZoneSameInstantGivesSameInstant,
      "local time difference is the offset" -> laws.localTimeAndOffsetCorrelation
    )
  }
}

object ZonedDateTimeTests {
  def apply[A: Zoned](
    gDateAndDurationWithinSameDST: Gen[(A, Duration)],
    gDataSuite: Gen[ZonedDateTimeTestData[A]],
    gValidYear: Gen[Int],
    gTimeZone: Gen[TimeZoneId])(
    implicit
    arbA: Arbitrary[A],
    arbLocalTime: Arbitrary[LocalTime],
    arbLocalDate: Arbitrary[LocalDate]): ZonedDateTimeTests[A] = new ZonedDateTimeTests[A] {

    def generalLocalDateTimeLaws: GeneralLocalDateTimeLaws[A] = GeneralLocalDateTimeLaws[A](
      gDateAndDurationWithinSameDST, arbLocalTime.arbitrary, arbLocalDate.arbitrary, gValidYear
    )

    def laws: ZonedDateTimeLaws[A] = ZonedDateTimeLaws[A](
      gDateAndDurationWithinSameDST, gDataSuite, arbLocalTime.arbitrary, arbLocalDate.arbitrary, gValidYear, gTimeZone
    )
  }
}
Example 21
Source File: LocalDateTimeLaws.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.laws

import java.time.temporal.{ChronoField, ChronoUnit}
import java.time.{LocalDate, LocalTime}

import cats.kernel.laws._
import cats.kernel.laws.discipline.{catsLawsIsEqToProp => p}
import dtc.Local
import dtc.syntax.local._
import org.scalacheck.{Gen, Prop}
import org.scalacheck.Prop.forAll

trait LocalDateTimeLaws[A] {
  implicit def D: Local[A]

  val genLocalDate: Gen[LocalDate]
  val genLocalTime: Gen[LocalTime]

  def constructorConsistency: Prop = forAll(genLocalDate, genLocalTime) { (date: LocalDate, time: LocalTime) =>
    val dt = D.of(date, time)
    p(dt.date <-> date) && (dt.time <-> time.truncatedTo(ChronoUnit.MILLIS))
  }

  def plainConstructorConsistency: Prop = forAll(genLocalDate, genLocalTime) { (date: LocalDate, time: LocalTime) =>
    val dt = D.of(
      date.getYear, date.getMonthValue, date.getDayOfMonth,
      time.getHour, time.getMinute, time.getSecond, time.get(ChronoField.MILLI_OF_SECOND))
    p(dt.date <-> date) && (dt.time <-> time.truncatedTo(ChronoUnit.MILLIS))
  }
}

object LocalDateTimeLaws {
  def apply[A](
    gLocalTime: Gen[LocalTime],
    gLocalDate: Gen[LocalDate])(
    implicit ev: Local[A]): LocalDateTimeLaws[A] = new LocalDateTimeLaws[A] {
    def D: Local[A] = ev
    val genLocalDate: Gen[LocalDate] = gLocalDate
    val genLocalTime: Gen[LocalTime] = gLocalTime
  }
}
Example 22
Source File: DateTimeLaws.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.laws

import java.time.{Duration, LocalDate, LocalTime}

import dtc._
import cats.kernel.instances.int._
import cats.kernel.instances.long._
import cats.kernel.laws.discipline.catsLawsIsEqToProp
import dtc.TimePoint
import org.scalacheck.Prop._
import org.scalacheck.{Arbitrary, Gen, Prop}
import dtc.syntax.all._
import cats.kernel.laws._

trait DateTimeLaws[A] {
  implicit def D: TimePoint[A]

  val genA: Gen[A]
  val genAdditionSafeDateAndDuration: Gen[(A, Duration)]

  // take into account that nanos are always positive in the Duration.
  private def fullNumberOfSeconds(d: Duration) = {
    val seconds = d.getSeconds
    if (seconds >= 0 || d.getNano == 0) seconds
    else seconds + 1
  }

  def additionAndSubtractionOfSameDuration: Prop = forAll(genAdditionSafeDateAndDuration) { case (x, d) =>
    D.plus(D.plus(x, d), d.negated()) <-> x
  }

  def additionOfZero: Prop = forAll(genAdditionSafeDateAndDuration) { case (x, _) =>
    D.plus(x, Duration.ZERO) <-> x
  }

  def additionOfNonZero: Prop = forAll(genAdditionSafeDateAndDuration) { case (x, d) =>
    Prop(d.isZero || (d.isNegative && D.lt(D.plus(x, d), x)) || D.gt(D.plus(x, d), x))
  }

  def millisAddition: Prop = forAll(genAdditionSafeDateAndDuration) { case (x, d) =>
    D.plus(x, d).millisecond <-> ((x.millisecond + d.toMillis) %% 1000)
  }

  def untilSelfIsAlwaysZero: Prop = forAll(genA) { x: A =>
    (D.millisecondsUntil(x, x) <-> 0L) &&
      (D.secondsUntil(x, x) <-> 0L) &&
      (D.minutesUntil(x, x) <-> 0L) &&
      (D.hoursUntil(x, x) <-> 0L) &&
      (D.daysUntil(x, x) <-> 0L) &&
      (D.monthsUntil(x, x) <-> 0L) &&
      (D.yearsUntil(x, x) <-> 0L)
  }

  def untilIsConsistentWithPlus: Prop = forAll(genAdditionSafeDateAndDuration) { case (x, d) =>
    val altered = D.plus(x, d)
    val truncated = truncateToMillis(d)

    (D.millisecondsUntil(x, altered) <-> truncated.toMillis) &&
      (D.secondsUntil(x, altered) <-> fullNumberOfSeconds(truncated)) &&
      (D.minutesUntil(x, altered) <-> fullNumberOfSeconds(truncated) / SecondsInMinute) &&
      (D.hoursUntil(x, altered) <-> fullNumberOfSeconds(truncated) / (SecondsInMinute * MinutesInHour))
  }

  def dateMustNotThrow: Prop = forAll(genA) { x: A =>
    D.date(x)
    proved
  }

  def timeMustNotThrow: Prop = forAll(genA) { x: A =>
    D.time(x)
    proved
  }

  def dateFieldsAreConsistentWithToLocalDate: Prop = forAll(genA) { x: A =>
    catsLawsIsEqToProp(x.date.getDayOfWeek <-> x.dayOfWeek) &&
      (LocalDate.of(x.year, x.month, x.dayOfMonth) <-> x.date)
  }

  def timeFieldsAreConsistentWithToLocalTime: Prop = forAll(genA) { x: A =>
    LocalTime.of(x.hour, x.minute, x.second, millisToNanos(x.millisecond)) <-> x.time
  }
}

object DateTimeLaws {
  def apply[A](gDateAndDuration: Gen[(A, Duration)])(
    implicit ev: TimePoint[A],
    arbA: Arbitrary[A]): DateTimeLaws[A] = new DateTimeLaws[A] {
    def D: TimePoint[A] = ev
    val genA: Gen[A] = arbA.arbitrary
    val genAdditionSafeDateAndDuration: Gen[(A, Duration)] = gDateAndDuration
  }
}
Example 23
Source File: LocalDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.laws

import java.time.{Duration, LocalDate, LocalTime}

import dtc.Local
import org.scalacheck.{Arbitrary, Gen}
import org.typelevel.discipline.Laws

trait LocalDateTimeTests[A] extends Laws {
  def generalLaws: GeneralLocalDateTimeLaws[A]
  def laws: LocalDateTimeLaws[A]

  def localDateTime(implicit arbA: Arbitrary[A], arbD: Arbitrary[Duration]): RuleSet = {
    new DefaultRuleSet(
      name = "LocalDateTime",
      parent = None,
      "seconds addition laws" -> generalLaws.secondsAddition,
      "minutes addition laws" -> generalLaws.minutesAddition,
      "hours addition laws" -> generalLaws.hoursAddition,
      "constructor consistency" -> laws.constructorConsistency,
      "plain constructor consistency" -> laws.plainConstructorConsistency,
      "withYear laws" -> generalLaws.withYear,
      "withMonth laws" -> generalLaws.withMonth,
      "withDayOfMonth laws" -> generalLaws.withDayOfMonth,
      "withHour laws" -> generalLaws.withHour,
      "withMinute laws" -> generalLaws.withMinute,
      "withSecond laws" -> generalLaws.withSecond,
      "withMillisecond laws" -> generalLaws.withMillisecond,
      "withTime laws" -> generalLaws.withTime,
      "withDate laws" -> generalLaws.withDate,
      "daysUntil is consistent with addition" -> generalLaws.daysUntilIsConsistentWithPlus,
      "monthsUntil is consistent with addition" -> generalLaws.monthsUntilIsConsistentWithPlus,
      "yearsUntil counts only number of full years" -> generalLaws.yearsUntilCountsOnlyFullUnits
    )
  }

  // see: https://github.com/moment/moment/issues/3029
  def monthUntilFractionHandling(implicit arbA: Arbitrary[A], arbD: Arbitrary[Duration]): RuleSet = {
    new DefaultRuleSet(
      name = "LocalDateTime",
      parent = None,
      "monthsUntil counts only number of full months" -> generalLaws.monthsUntilCountsOnlyFullUnits
    )
  }
}

object LocalDateTimeTests {
  def apply[A: Local](
    gDateAndDuration: Gen[(A, Duration)],
    gValidYear: Gen[Int])(
    implicit
    arbA: Arbitrary[A],
    arbLocalTime: Arbitrary[LocalTime],
    arbLocalDate: Arbitrary[LocalDate]): LocalDateTimeTests[A] = new LocalDateTimeTests[A] {

    def laws: LocalDateTimeLaws[A] = LocalDateTimeLaws[A](
      arbLocalTime.arbitrary, arbLocalDate.arbitrary
    )

    def generalLaws: GeneralLocalDateTimeLaws[A] = GeneralLocalDateTimeLaws[A](
      gDateAndDuration, arbLocalTime.arbitrary, arbLocalDate.arbitrary, gValidYear
    )
  }
}
Example 24
Source File: package.scala From dtc with Apache License 2.0 | 5 votes |
package dtc

import java.time.{DayOfWeek, LocalDate, LocalTime}

import cats.kernel.Eq
import org.scalacheck.Prop
import org.scalacheck.Prop._
import org.scalacheck.util.Pretty

package object laws {

  case class NotChangedValidator[T](before: T, after: T) {
    def apply[P](name: String, props: (T => P)*)(implicit E: Eq[P]): Prop = {
      val falsy = props.filter(prop => E.neqv(prop(before), prop(after)))
      if (falsy.isEmpty) proved
      else falsified :| {
        val b = Pretty.pretty(before, Pretty.Params(0))
        val a = Pretty.pretty(after, Pretty.Params(0))
        s"(Property $name changed. Before: $b, after: $a)"
      }
    }
  }

  def notChanged[T, P](before: T, after: T) = NotChangedValidator(before, after)

  // eq instances
  implicit val eqLocalTime: Eq[LocalTime] = Eq.fromUniversalEquals
  implicit val eqLocalDate: Eq[LocalDate] = Eq.fromUniversalEquals
  implicit val eqDayOfWeek: Eq[DayOfWeek] = Eq.fromUniversalEquals
}
Example 25
Source File: DTCSuite.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests

import java.time.temporal.ChronoField
import java.time.temporal.ChronoUnit._
import java.time.{Duration, LocalDate, LocalTime}

import dtc.TimeZoneId
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.typelevel.discipline.scalatest.FunSpecDiscipline

trait DTCSuite extends AnyFunSpecLike
  with Matchers
  with ScalaCheckDrivenPropertyChecks
  with FunSpecDiscipline {

  override implicit val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(
      minSuccessful = 100
    )

  private val nanoOfDayRange = ChronoField.NANO_OF_DAY.range()

  val genLocalTime: Gen[LocalTime] =
    Gen.choose(nanoOfDayRange.getMinimum, nanoOfDayRange.getMaximum).map(LocalTime.ofNanoOfDay)
  implicit val arbLocalTime: Arbitrary[LocalTime] = Arbitrary(genLocalTime)

  val genDuration: Gen[Duration] =
    Gen.choose(Long.MinValue / 1000, Long.MaxValue / 1000)
      .map(l => Duration.of(l, MILLIS))

  implicit val arbDuration = Arbitrary(genDuration)

  def genDateTimeFromSameOffsetPeriod(period: SameZoneOffsetPeriod): Gen[(LocalDate, LocalTime, TimeZoneId)] =
    for {
      date <- Gen.choose(period.startDate.toEpochDay + 1L, period.endDate.toEpochDay - 1L).map(LocalDate.ofEpochDay)
      timeBounds <- Gen.const(
        if (date == period.startDate && date == period.endDate) (period.startTime, period.endTime)
        else if (date == period.startDate) (period.startTime, LocalTime.MAX)
        else if (date == period.endDate) (LocalTime.MAX, period.endTime)
        else (LocalTime.MIN, LocalTime.MAX)
      )
      time <- Gen.choose(timeBounds._1.toNanoOfDay, timeBounds._2.toNanoOfDay).map(LocalTime.ofNanoOfDay)
    } yield (date, time, period.zone)
}
Example 26
Source File: MomentLocalDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests

import java.time.{LocalDate, LocalTime}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import dtc.instances.moment._
import dtc.js.MomentLocalDateTime
import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests}
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import dtc.instances.moment.providers.realMomentLocalDateTimeProvider

class MomentLocalDateTimeTests extends DTCSuiteJS {

  implicit val arbT: Arbitrary[MomentLocalDateTime] = Arbitrary(for {
    date <- arbitrary[LocalDate]
    time <- arbitrary[LocalTime]
  } yield MomentLocalDateTime.of(date, time))

  implicit val cogenT = cogenMomentDateTime[MomentLocalDateTime]

  val pairGen = overflowSafePairGen.map(t => (MomentLocalDateTime.of(t._1, t._2), t._3))
  val ldtTests = LocalDateTimeTests[MomentLocalDateTime](
    pairGen, genJSValidYear
  )

  checkAll("MomentLocalDateTimeTests", DateTimeTests[MomentLocalDateTime](pairGen).dateTime)
  checkAll("MomentLocalDateTimeTests", ldtTests.localDateTime)
  // see: https://github.com/moment/moment/issues/3029
  // checkAll("MomentLocalDateTimeTests", ldtTests.localDateTime)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].order)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].partialOrder)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].eqv)
  checkAll("MomentLocalDateTimeTests", ProviderTests[MomentLocalDateTime](genTimeZone).provider)
}
Example 27
Source File: MomentZonedDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests

import java.time.{Duration, LocalDate, LocalTime}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import dtc.{TimeZoneId, Zoned}
import dtc.js.MomentZonedDateTime
import dtc.laws.{DateTimeTests, ProviderTests, ZonedDateTimeTestData, ZonedDateTimeTests}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Cogen, Gen}
import dtc.instances.moment.providers.realMomentZonedDateTimeProvider

abstract class MomentZonedDateTimeTests(instance: Zoned[MomentZonedDateTime]) extends DTCSuiteJS {

  implicit val zonedInstance: Zoned[MomentZonedDateTime] = instance

  implicit val arbT: Arbitrary[MomentZonedDateTime] = Arbitrary(for {
    date <- arbitrary[LocalDate]
    time <- arbitrary[LocalTime]
    zone <- arbitrary[TimeZoneId]
  } yield MomentZonedDateTime.of(date, time, zone))

  implicit val cogenT: Cogen[MomentZonedDateTime] = cogenMomentDateTime[MomentZonedDateTime]

  val pairGen: Gen[(MomentZonedDateTime, Duration)] = for {
    zone <- arbitrary[TimeZoneId]
    pair <- overflowSafePairGen
  } yield (MomentZonedDateTime.of(pair._1, pair._2, zone), pair._3)

  def genDateFromPeriod(period: SameZoneOffsetPeriod): Gen[MomentZonedDateTime] =
    genDateTimeFromSameOffsetPeriod(period).map(tpl => MomentZonedDateTime.of(tpl._1, tpl._2, tpl._3))

  val overflowSafePairGenWithinSameOffset: Gen[(MomentZonedDateTime, Duration)] = for {
    period <- arbitrary[SameZoneOffsetPeriod]
    dateTime <- genDateFromPeriod(period)
    duration <- genDateFromPeriod(period)
      .map(other => dateTime.millisecondsUntil(other))
      .map(Duration.ofMillis)
  } yield (dateTime, duration)

  val genZonedTestDataSuite: Gen[ZonedDateTimeTestData[MomentZonedDateTime]] =
    pairGen.map { case (date, duration) =>
      val target = date.plus(duration)
      ZonedDateTimeTestData(date, duration, target.offset, target.toLocalTime, target.toLocalDate)
    }

  checkAll("MomentZonedDateTime", DateTimeTests[MomentZonedDateTime](pairGen).dateTime)
  checkAll("MomentZonedDateTime", ZonedDateTimeTests[MomentZonedDateTime](
    overflowSafePairGenWithinSameOffset,
    genZonedTestDataSuite,
    genJSValidYear,
    genTimeZone
  ).zonedDateTime)
  checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].order)
  checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].partialOrder)
  checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].eqv)
  checkAll("MomentZonedDateTime", ProviderTests[MomentZonedDateTime](genTimeZone).provider)
}

class MomentZonedDateTimeWithStrictEqualityTests
  extends MomentZonedDateTimeTests(dtc.instances.moment.momentZonedWithStrictEquality)

class MomentZonedDateTimeWithCrossZoneEqualityTests
  extends MomentZonedDateTimeTests(dtc.instances.moment.momentZonedWithCrossZoneEquality)
Example 28
Source File: JSDateTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests

import java.time.{LocalDate, LocalTime}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import dtc.instances.jsDate._
import dtc.js.JSDate
import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Cogen}
import dtc.instances.providers.realJSDateProvider

class JSDateTests extends DTCSuiteJS {

  implicit val cogenT: Cogen[JSDate] = Cogen(_.jsGetTime.toLong)

  implicit val arbT: Arbitrary[JSDate] = Arbitrary(for {
    date <- arbitrary[LocalDate]
    time <- arbitrary[LocalTime]
  } yield JSDate.of(date, time))

  val pairGen = overflowSafePairGen.map(t => (JSDate.of(t._1, t._2), t._3))
  val ldtTests = LocalDateTimeTests[JSDate](
    pairGen, genJSValidYear
  )

  checkAll("JSDate", DateTimeTests[JSDate](pairGen).dateTime)
  checkAll("JSDate", ldtTests.localDateTime)
  checkAll("JSDate", ldtTests.monthUntilFractionHandling)
  checkAll("JSDate", OrderTests[JSDate].order)
  checkAll("JSDate", OrderTests[JSDate].partialOrder)
  checkAll("JSDate", OrderTests[JSDate].eqv)
  checkAll("JSDate", ProviderTests[JSDate](genTimeZone).provider)
}
Example 29
Source File: zoned.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.cats.instances

import java.time.{DayOfWeek, Duration, LocalDate, LocalTime}

import cats.Invariant
import dtc.{Offset, TimeZoneId, Zoned}

object zoned extends CatsZonedInstances

trait CatsZonedInstances {
  implicit val zonedInvariant: Invariant[Zoned] = new Invariant[Zoned] {
    def imap[A, B](ev: Zoned[A])(f: A => B)(g: B => A): Zoned[B] = new Zoned[B] {
      def compare(x: B, y: B): Int = ev.compare(g(x), g(y))
      def zone(t: B): TimeZoneId = ev.zone(g(t))
      def millisecond(x: B): Int = ev.millisecond(g(x))
      def second(t: B): Int = ev.second(g(t))
      def minute(t: B): Int = ev.minute(g(t))
      def hour(t: B): Int = ev.hour(g(t))
      def dayOfMonth(t: B): Int = ev.dayOfMonth(g(t))
      def dayOfWeek(x: B): DayOfWeek = ev.dayOfWeek(g(x))
      def month(t: B): Int = ev.month(g(t))
      def year(t: B): Int = ev.year(g(t))
      def capture(date: LocalDate, time: LocalTime, zone: TimeZoneId): B = f(ev.capture(date, time, zone))
      def withZoneSameInstant(x: B, zone: TimeZoneId): B = f(ev.withZoneSameInstant(g(x), zone))
      def withZoneSameLocal(x: B, zone: TimeZoneId): B = f(ev.withZoneSameLocal(g(x), zone))
      def offset(x: B): Offset = ev.offset(g(x))
      def date(x: B): LocalDate = ev.date(g(x))
      def time(x: B): LocalTime = ev.time(g(x))
      def plus(x: B, d: Duration): B = f(ev.plus(g(x), d))
      def minus(x: B, d: Duration): B = f(ev.minus(g(x), d))
      def plusDays(x: B, days: Int): B = f(ev.plusDays(g(x), days))
      def plusMonths(x: B, months: Int): B = f(ev.plusMonths(g(x), months))
      def plusYears(x: B, years: Int): B = f(ev.plusYears(g(x), years))
      def withYear(x: B, year: Int): B = f(ev.withYear(g(x), year))
      def withMonth(x: B, month: Int): B = f(ev.withMonth(g(x), month))
      def withDayOfMonth(x: B, dayOfMonth: Int): B = f(ev.withDayOfMonth(g(x), dayOfMonth))
      def withHour(x: B, hour: Int): B = f(ev.withHour(g(x), hour))
      def withMinute(x: B, minute: Int): B = f(ev.withMinute(g(x), minute))
      def withSecond(x: B, second: Int): B = f(ev.withSecond(g(x), second))
      def withMillisecond(x: B, millisecond: Int): B = f(ev.withMillisecond(g(x), millisecond))
      def withTime(x: B, time: LocalTime): B = f(ev.withTime(g(x), time))
      def withDate(x: B, date: LocalDate): B = f(ev.withDate(g(x), date))
      def yearsUntil(x: B, until: B): Long = ev.yearsUntil(g(x), g(until))
      def monthsUntil(x: B, until: B): Long = ev.monthsUntil(g(x), g(until))
      def daysUntil(x: B, until: B): Long = ev.daysUntil(g(x), g(until))
      def hoursUntil(x: B, until: B): Long = ev.hoursUntil(g(x), g(until))
      def minutesUntil(x: B, until: B): Long = ev.minutesUntil(g(x), g(until))
      def secondsUntil(x: B, until: B): Long = ev.secondsUntil(g(x), g(until))
      def millisecondsUntil(x: B, until: B): Long = ev.millisecondsUntil(g(x), g(until))
      def utc(x: B): (LocalDate, LocalTime) = ev.utc(g(x))
    }
  }
}
Example 30
Source File: DateFormatterSuite.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.util

import java.time.LocalDate

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf

class DateFormatterSuite extends SparkFunSuite with SQLHelper {
  test("parsing dates") {
    DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
      withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
        val formatter = DateFormatter()
        val daysSinceEpoch = formatter.parse("2018-12-02")
        assert(daysSinceEpoch === 17867)
      }
    }
  }

  test("format dates") {
    DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
      withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
        val formatter = DateFormatter()
        val date = formatter.format(17867)
        assert(date === "2018-12-02")
      }
    }
  }

  test("roundtrip date -> days -> date") {
    Seq(
      "0050-01-01",
      "0953-02-02",
      "1423-03-08",
      "1969-12-31",
      "1972-08-25",
      "1975-09-26",
      "2018-12-12",
      "2038-01-01",
      "5010-11-17").foreach { date =>
      DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
        withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
          val formatter = DateFormatter()
          val days = formatter.parse(date)
          val formatted = formatter.format(days)
          assert(date === formatted)
        }
      }
    }
  }

  test("roundtrip days -> date -> days") {
    Seq(
      -701265,
      -371419,
      -199722,
      -1,
      0,
      967,
      2094,
      17877,
      24837,
      1110657).foreach { days =>
      DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
        withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
          val formatter = DateFormatter()
          val date = formatter.format(days)
          val parsed = formatter.parse(date)
          assert(days === parsed)
        }
      }
    }
  }

  test("parsing date without explicit day") {
    val formatter = DateFormatter("yyyy MMM")
    val daysSinceEpoch = formatter.parse("2018 Dec")
    assert(daysSinceEpoch === LocalDate.of(2018, 12, 1).toEpochDay)
  }
}
Example 31
Source File: JavaTimeJson.scala From pure-movie-server with Apache License 2.0 | 5 votes |
package pms.json

import java.time.LocalDate

import pms.core._

trait JavaTimeJson {

  implicit val localDateCirceCodec: Codec[LocalDate] = Codec.from(
    Decoder
      .apply[String]
      .map(s => LocalDate.parse(s, TimeFormatters.LocalDateFormatter)),
    Encoder
      .apply[String]
      .contramap(m => m.format(TimeFormatters.LocalDateFormatter)),
  )
}
Example 32
Source File: IMDBService.scala From pure-movie-server with Apache License 2.0 | 5 votes |
package pms.service.movie

import java.time.LocalDate

import pms.algebra.imdb._
import pms.algebra.movie._
import pms.algebra.user._
import pms.core.Fail
import pms.effects._
import pms.effects.implicits._

  private def imdbMovieToMovieCreation(imdb: IMDBMovie): MovieCreation =
    MovieCreation(
      name = MovieTitle(IMDBTitle.despook(imdb.title)),
      date = imdb.year.map { y =>
        val year = ReleaseYear.despook(y)
        val ld = LocalDate.of(year.getValue, 1, 1)
        ReleaseDate(ld)
      },
    )
}

object IMDBService {

  def async[F[_]: Async](movieAlgebra: MovieAlgebra[F], imdbAlgebra: IMDBAlgebra[F]): IMDBService[F] =
    new IMDBService[F](movieAlgebra, imdbAlgebra)
}
Example 33
Source File: DateRangeValidation.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators.validations

import java.time.{LocalDate}

import v1.models.errors.{MtdError, RuleDateRangeInvalidError}

object DateRangeValidation {

  def validate(from: String, to: String): List[MtdError] = {
    val fmtFrom = LocalDate.parse(from, dateFormat)
    val fmtTo = LocalDate.parse(to, dateFormat)
    List(
      checkIfDateRangeIsIncorrect(fmtFrom, fmtTo)
    ).flatten
  }

  private def checkIfDateRangeIsIncorrect(from: LocalDate, to: LocalDate): List[MtdError] = {
    if(!from.isBefore(to) || from.plusYears(1).isBefore(to)) {
      List(RuleDateRangeInvalidError)
    } else Nil
  }
}
Example 34
Source File: FinancialDataDateFormatValidation.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators.validations

import java.time.LocalDate

import v1.models.errors.{FinancialDataInvalidDateFromError, FinancialDataInvalidDateToError, MtdError}

object FinancialDataDateFormatValidation {

  private def parseDate(date: String): LocalDate = LocalDate.parse(date, dateFormat)
  private val minimumSupportedDate: String = "2016-04-06"

  def validate(date: String, error: MtdError): List[MtdError] =
    DateFormatValidation.validate(date, error) match {
      case NoValidationErrors => error match {
        case FinancialDataInvalidDateFromError => validateFrom(date)
        case FinancialDataInvalidDateToError => validateTo(date)
      }
      case _ => List(error)
    }

  private def validateFrom(fromDate: String): List[MtdError] = {
    if (parseDate(minimumSupportedDate).isAfter(parseDate(fromDate))) List(FinancialDataInvalidDateFromError)
    else NoValidationErrors
  }

  private def validateTo(toDate: String): List[MtdError] = {
    if (LocalDate.now().isBefore(parseDate(toDate))) List(FinancialDataInvalidDateToError)
    else NoValidationErrors
  }
}
Example 35
Source File: FinancialDataDateRangeValidation.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators.validations import java.time.LocalDate import v1.models.errors.{FinancialDataInvalidDateRangeError, MtdError} object FinancialDataDateRangeValidation { def validate(from: String, to: String): List[MtdError] = { val fmtFrom = LocalDate.parse(from, dateFormat) val fmtTo = LocalDate.parse(to, dateFormat) List( checkIfDateRangeIsIncorrect(fmtFrom, fmtTo) ).flatten } private def checkIfDateRangeIsIncorrect(from: LocalDate, to: LocalDate): List[MtdError] = { if(!from.isBefore(to) || from.plusYears(1).minusDays(1).isBefore(to)) { List(FinancialDataInvalidDateRangeError) } else Nil } }
Example 36
Source File: FinancialDataReadsUtils.scala From vat-api with Apache License 2.0 | 5 votes |
package utils import java.time.LocalDate import play.api.libs.json.{JsValue, Json, Reads} import v1.models.response.common.TaxPeriod trait FinancialDataReadsUtils { def filterNotArrayReads[T](filterName: String, notMatching: Seq[String]) (implicit rds: Reads[Seq[T]]): Reads[Seq[T]] = (json: JsValue) => { json .validate[Seq[JsValue]] .flatMap( readJson => Json .toJson(readJson.filterNot { element => (element \ filterName).asOpt[String].exists(item => notMatching.contains(item.toLowerCase())) }) .validate[Seq[T]]) } def dateCheck(taxPeriod: Option[TaxPeriod], requestToDate: String): Boolean = { val toDate = taxPeriod.fold(None: Option[LocalDate]) { l => Some(LocalDate.parse(l.to)) } toDate.fold(true) { desTo => desTo.compareTo(LocalDate.parse(requestToDate)) <= 0 } } }
Example 37
Source File: PaymentsValidatorSpec.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators import java.time.LocalDate import java.time.format.DateTimeFormatter import support.UnitSpec import v1.models.errors.{FinancialDataInvalidDateFromError, FinancialDataInvalidDateRangeError, FinancialDataInvalidDateToError, VrnFormatError} import v1.models.request.payments.PaymentsRawData class PaymentsValidatorSpec extends UnitSpec { val validator = new PaymentsValidator() private val validVrn = "123456789" private val invalidVrn = "thisIsNotAVrn" private val validFrom = "2019-01-01" private val validTo = "2019-12-31" private val dateTimeFormatter = DateTimeFormatter ofPattern "yyyy-MM-dd" "running a validation" should { "return no errors" when { "a valid request" in { validator.validate(PaymentsRawData(validVrn, Some(validFrom), Some(validTo))) shouldBe Nil } "a 'to' date is today" in { val todayDate = LocalDate.now().format(dateTimeFormatter) val yesterdayDate = LocalDate.now().minusDays(1).format(dateTimeFormatter) validator.validate(PaymentsRawData(validVrn, Some(yesterdayDate), Some(todayDate))) shouldBe List() } "a 'from' date is on the minimum supported date" in { validator.validate(PaymentsRawData(validVrn, Some("2016-04-06"), Some("2016-04-07"))) shouldBe List() } } "return VrnFormatError error" when { "an invalid Vrn is supplied" in { validator.validate(PaymentsRawData(invalidVrn, Some(validFrom), Some(validTo))) shouldBe List(VrnFormatError) } //maintain order of preference to match legacy validation "an invalid Vrn, and invalid dates are supplied" in { validator.validate(PaymentsRawData(invalidVrn, Some("invalidFromDate"), Some("invalidToDate"))) shouldBe List(VrnFormatError) } } "return only FinancialDataInvalidDateFromError error" when { "an invalid from date format is supplied" in { validator.validate(PaymentsRawData(validVrn, Some("12-31-2020"), Some(validTo))) shouldBe List(FinancialDataInvalidDateFromError) } //maintain order of preference to match legacy validation "an invalid from date and invalid to date are supplied" in { validator.validate(PaymentsRawData(validVrn, Some("12-31-2020"), Some("invalidDateTo"))) shouldBe List(FinancialDataInvalidDateFromError) } "a 'from' date is before the minimum supported date" in { validator.validate(PaymentsRawData(validVrn, Some("2016-04-05"), Some("2019-01-01"))) shouldBe List(FinancialDataInvalidDateFromError) } } "return only FinancialDataInvalidDateToError error" when { "an invalid to date format is supplied" in { validator.validate(PaymentsRawData(validVrn, Some(validFrom), Some("12-31-2020"))) shouldBe List(FinancialDataInvalidDateToError) } "a 'to' date is in the future" in { val tomorrowDate = LocalDate.now().plusDays(1).format(dateTimeFormatter) validator.validate(PaymentsRawData(validVrn, Some("2018-01-01"), Some(tomorrowDate))) shouldBe List(FinancialDataInvalidDateToError) } } "return RuleDateRangeError error" when { "invalid date range is supplied" in { validator.validate(PaymentsRawData(validVrn, Some("2018-01-01"), Some("2019-01-01"))) shouldBe List(FinancialDataInvalidDateRangeError) } } } }
Example 38
Source File: LiabilitiesValidatorSpec.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators import java.time.LocalDate import java.time.format.DateTimeFormatter import support.UnitSpec import v1.models.errors.{FinancialDataInvalidDateFromError, FinancialDataInvalidDateRangeError, FinancialDataInvalidDateToError, VrnFormatError} import v1.models.request.liabilities.LiabilitiesRawData class LiabilitiesValidatorSpec extends UnitSpec { val validator = new LiabilitiesValidator() private val validVrn = "123456789" private val invalidVrn = "thisIsNotAVrn" private val validFrom = "2019-01-01" private val validTo = "2019-12-31" private val dateTimeFormatter = DateTimeFormatter ofPattern "yyyy-MM-dd" "running a validation" should { "return no errors" when { "a valid request" in { validator.validate(LiabilitiesRawData(validVrn, Some(validFrom), Some(validTo))) shouldBe Nil } "a 'to' date is today" in { val todayDate = LocalDate.now().format(dateTimeFormatter) val yesterdayDate = LocalDate.now().minusDays(1).format(dateTimeFormatter) validator.validate(LiabilitiesRawData(validVrn, Some(yesterdayDate), Some(todayDate))) shouldBe List() } "a 'from' date is on the minimum supported date" in { validator.validate(LiabilitiesRawData(validVrn, Some("2016-04-06"), Some("2016-04-07"))) shouldBe List() } } "return VrnFormatError error" when { "an invalid Vrn is supplied" in { validator.validate(LiabilitiesRawData(invalidVrn, Some(validFrom), Some(validTo))) shouldBe List(VrnFormatError) } //maintain order of preference to match legacy validation "an invalid Vrn, and invalid dates are supplied" in { validator.validate(LiabilitiesRawData(invalidVrn, Some("invalidFromDate"), Some("invalidToDate"))) shouldBe List(VrnFormatError) } } "return only FinancialDataInvalidDateFromError error" when { "an invalid from date format is supplied" in { validator.validate(LiabilitiesRawData(validVrn, Some("12-31-2020"), Some(validTo))) shouldBe List(FinancialDataInvalidDateFromError) } //maintain order of preference to match legacy validation "an invalid from date and invalid to date are supplied" in { validator.validate(LiabilitiesRawData(validVrn, Some("12-31-2020"), Some("invalidDateTo"))) shouldBe List(FinancialDataInvalidDateFromError) } "a 'from' date is before the minimum supported date" in { validator.validate(LiabilitiesRawData(validVrn, Some("2016-04-05"), Some("2019-01-01"))) shouldBe List(FinancialDataInvalidDateFromError) } } "return only FinancialDataInvalidDateToError error" when { "an invalid to date format is supplied" in { validator.validate(LiabilitiesRawData(validVrn, Some(validFrom), Some("12-31-2020"))) shouldBe List(FinancialDataInvalidDateToError) } "a 'to' date is in the future" in { val tomorrowDate = LocalDate.now().plusDays(1).format(dateTimeFormatter) validator.validate(LiabilitiesRawData(validVrn, Some("2018-01-01"), Some(tomorrowDate))) shouldBe List(FinancialDataInvalidDateToError) } } "return RuleDateRangeError error" when { "invalid date range is supplied" in { validator.validate(LiabilitiesRawData(validVrn, Some("2018-01-01"), Some("2019-01-01"))) shouldBe List(FinancialDataInvalidDateRangeError) } } } }
Example 39
Source File: FinancialDataDateFormatValidationSpec.scala From vat-api with Apache License 2.0 | 5 votes |
package v1.controllers.requestParsers.validators.validations import java.time.LocalDate import support.UnitSpec import v1.models.errors.{FinancialDataInvalidDateFromError, FinancialDataInvalidDateToError} class FinancialDataDateFormatValidationSpec extends UnitSpec { "validate" should { "return an empty list" when { "passed a valid date" in { FinancialDataDateFormatValidation.validate("2019-02-02", FinancialDataInvalidDateFromError) shouldBe List() } "a 'from' date is on the minimum supported date" in { FinancialDataDateFormatValidation.validate("2016-04-06", FinancialDataInvalidDateFromError) shouldBe List() } "a 'to' date is today" in { lazy val dateToday = LocalDate.now().format(dateFormat) FinancialDataDateFormatValidation.validate(dateToday, FinancialDataInvalidDateFromError) shouldBe List() } } "return a list containing an error" when { "passed a date with an invalid month" in { FinancialDataDateFormatValidation.validate("2019-13-02", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "passed a date with an invalid day" in { FinancialDataDateFormatValidation.validate("2019-02-32", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "passed a date with an invalid year" in { FinancialDataDateFormatValidation.validate("201-02-02", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "passed a date with an invalid separator" in { FinancialDataDateFormatValidation.validate("2012.02-02", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "passed a date written as text" in { FinancialDataDateFormatValidation.validate("2nd Feb 2012", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "a 'from' date is before the minimum supported date" in { FinancialDataDateFormatValidation.validate("2016-04-05", FinancialDataInvalidDateFromError) shouldBe List(FinancialDataInvalidDateFromError) } "a 'to' date is in the future" in { val dateTomorrow: String = LocalDate.now().plusDays(1).format(dateFormat) FinancialDataDateFormatValidation.validate(dateTomorrow, FinancialDataInvalidDateToError) shouldBe List(FinancialDataInvalidDateToError) } } } }
Example 40
Source File: BqResultSetTest.scala From scalikejdbc-bigquery with Apache License 2.0 | 5 votes |
package scalikejdbc.bigquery import java.time.{LocalDate, LocalTime, ZoneId, ZonedDateTime} import com.google.cloud.bigquery._ import org.scalatest.flatspec.AnyFlatSpec class BqResultSetTest extends AnyFlatSpec { it should "be able to instantiate from null Schema" in { val tableResult = MockUtil.tableResultFromSeq(Nil, null) new BqResultSet(tableResult) } it should "correctly traversable" in { val row1 = Seq(BqParameter.String("first")).map(MockUtil.fieldValueFromParameter(_)) val row2 = Seq(BqParameter.String("second")).map(MockUtil.fieldValueFromParameter(_)) val row3 = Seq(BqParameter.String("third")).map(MockUtil.fieldValueFromParameter(_)) val schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); val queryResult = MockUtil.tableResultFromSeq(Seq(row1, row2, row3), schema) val resultSet = new BqResultSet(queryResult) assert(resultSet.next()) assert(resultSet.next()) assert(resultSet.next()) assert(!resultSet.next()) } it should "correctly get value" in { val row = Seq( BqParameter.Int64(42L), BqParameter.Float64(3.14159), BqParameter.Bool(true), BqParameter.String("hello"), BqParameter.Bytes(Array[Byte](104, 101, 108, 108, 111)), BqParameter.Date(LocalDate.of(2017, 3, 22)), // BqParameter.DateTime BqParameter.Time(LocalTime.of(19, 58, 0, 0)), BqParameter.Timestamp(ZonedDateTime.of(2017, 3, 22, 19, 58, 0, 0, ZoneId.of("Asia/Tokyo"))) ).map(MockUtil.fieldValueFromParameter(_)) val fields = Seq( Field.of("int64_column", LegacySQLTypeName.INTEGER), Field.of("float64_column", LegacySQLTypeName.FLOAT), Field.of("bool_column", LegacySQLTypeName.BOOLEAN), Field.of("string_column", LegacySQLTypeName.STRING), Field.of("bytes_column", LegacySQLTypeName.BYTES), Field.of("date_column", LegacySQLTypeName.STRING), Field.of("time_column", LegacySQLTypeName.STRING), Field.of("timestamp_column", LegacySQLTypeName.TIMESTAMP) ) val schema = Schema.of(fields: _*) val queryResult = MockUtil.tableResultFromSeq(Seq(row), schema) val resultSet = new BqResultSet(queryResult) assert(resultSet.next()) // int64 assert(resultSet.getInt(0) == 42) assert(resultSet.getInt("int64_column") == 42) // float64 assert(resultSet.getDouble(1) == 3.14159) assert(resultSet.getDouble("float64_column") == 3.14159) // bool assert(resultSet.getBoolean(2) == true) assert(resultSet.getBoolean("bool_column") == true) // string assert(resultSet.getString(3) == "hello") assert(resultSet.getString("string_column") == "hello") // bytes assert(resultSet.getBytes(4).sameElements(Array[Byte](104, 101, 108, 108, 111))) assert(resultSet.getBytes("bytes_column").sameElements(Array[Byte](104, 101, 108, 108, 111))) // date assert(resultSet.getDate(5).toLocalDate == LocalDate.of(2017, 3, 22)) assert(resultSet.getDate("date_column").toLocalDate == LocalDate.of(2017, 3, 22)) // time assert(resultSet.getTime(6).toLocalTime == LocalTime.of(19, 58, 0, 0)) assert(resultSet.getTime("time_column").toLocalTime == LocalTime.of(19, 58, 0, 0)) // timestamp assert(resultSet.getTimestamp(7).toInstant == ZonedDateTime.of(2017, 3, 22, 19, 58, 0, 0, ZoneId.of("Asia/Tokyo")).toInstant) assert(resultSet.getTimestamp("timestamp_column").toInstant == ZonedDateTime.of(2017, 3, 22, 19, 58, 0, 0, ZoneId.of("Asia/Tokyo")).toInstant) } }
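The assertions above move between java.sql.Date (what the JDBC-style getters return) and LocalDate via toLocalDate. The conversion in both directions is part of the JDK; a minimal sketch:

import java.sql.Date
import java.time.LocalDate

object SqlDateConversions extends App {
  val local: LocalDate = LocalDate.of(2017, 3, 22)
  val sql: Date        = Date.valueOf(local)  // LocalDate -> java.sql.Date
  assert(sql.toLocalDate == local)            // java.sql.Date -> LocalDate round-trips
}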
Example 41
Source File: GeneratableStringFormats.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen.formats import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit import java.time.{Instant, LocalDate} import de.leanovate.swaggercheck.generators.Generators import de.leanovate.swaggercheck.schema.model.formats.StringFormats import de.leanovate.swaggercheck.schema.model.{JsonPath, ValidationResult} import org.scalacheck.Gen object GeneratableStringFormats { object URLString extends GeneratableFormat[String] { override def generate: Gen[String] = Generators.url override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.URLString.validate(path, value) } object URIString extends GeneratableFormat[String] { override def generate: Gen[String] = Generators.uri override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.URIString.validate(path, value) } object UUIDString extends GeneratableFormat[String] { override def generate: Gen[String] = Gen.uuid.map(_.toString) override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.UUIDString.validate(path, value) } object EmailString extends GeneratableFormat[String] { val emailPattern = """(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])""".r override def generate: Gen[String] = Generators.email override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.EmailString.validate(path, value) } object DateString extends GeneratableFormat[String] { override def generate: Gen[String] = { Gen.choose[Int](-300000, 300000).map { diff: Int => val instant = LocalDate.ofEpochDay(diff) DateTimeFormatter.ISO_DATE.format(instant) } } override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.DateString.validate(path, value) } object DateTimeString extends GeneratableFormat[String] { override def generate: Gen[String] = { Gen.choose[Long](Long.MinValue, Long.MaxValue).map { diff: Long => val instant = Instant.now().plus(diff, ChronoUnit.NANOS) DateTimeFormatter.ISO_INSTANT.format(instant) } } override def validate(path: JsonPath, value: String): ValidationResult = StringFormats.DateTimeString.validate(path, value) } val defaultFormats = Map( "url" -> URLString, "uri" -> URIString, "uuid" -> UUIDString, "email" -> EmailString, "date" -> DateString, "date-time" -> DateTimeString ) }
Example 42
Source File: AnyThing.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.fixtures.model import java.net.{URI, URL} import java.time.temporal.ChronoUnit import java.time.{Instant, LocalDate} import java.util.UUID import de.leanovate.swaggercheck.generators.Generators import org.scalacheck.{Arbitrary, Gen} import play.api.libs.json.{Json, OFormat} import scala.util.Try case class AnyThing( anUUID: String, anURL: String, anURI: String, anEmail: String, aDate: LocalDate, aDateTime: Instant, anInt32: Int, anInt64: Long, aFloat: Float, aDouble: Double, aBoolean: Boolean, anEnum: String, aMap: Map[String, String] ) { def isValid: Boolean = { Try { UUID.fromString(anUUID) new URL(anURL) new URI(anURI) }.isSuccess && Set("V1", "V2", "V3").contains(anEnum) } } object AnyThing { implicit val jsonFormat: OFormat[AnyThing] = Json.format[AnyThing] implicit val arbitrary = Arbitrary(for { anUUID <- Gen.uuid.map(_.toString) anURL <- Generators.url anURI <- Generators.uri anEmail <- Generators.email aDate <- Arbitrary.arbitrary[Int].map(diff => LocalDate.now().plus(diff, ChronoUnit.DAYS)) aDateTime <- Arbitrary.arbitrary[Long].map(diff => Instant.now().plus(diff, ChronoUnit.NANOS)) anInt32 <- Arbitrary.arbitrary[Int] anInt64 <- Arbitrary.arbitrary[Long] aFloat <- Arbitrary.arbitrary[Float] aDouble <- Arbitrary.arbitrary[Double] aBoolean <- Arbitrary.arbitrary[Boolean] anEnum <- Gen.oneOf("V1", "V2", "V3") aMap <- Arbitrary.arbitrary[Map[String, String]] } yield AnyThing(anUUID, anURL, anURI, anEmail, aDate, aDateTime, anInt32, anInt64, aFloat, aDouble, aBoolean, anEnum, aMap)) }
Example 43
Source File: StringFormats.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model.formats import java.net.{URI, URL} import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit import java.time.{Instant, LocalDate} import java.util.UUID import de.leanovate.swaggercheck.schema.model.{JsonPath, ValidationResult} import scala.util.Try object StringFormats { object URLString extends ValueFormat[String] { override def validate(path: JsonPath, value: String): ValidationResult = if (Try(new URL(value)).isSuccess) ValidationResult.success else ValidationResult.error(s"'$value' is not an url: $path") } object URIString extends ValueFormat[String] { override def validate(path: JsonPath, value: String): ValidationResult = if (Try(new URI(value)).isSuccess) ValidationResult.success else ValidationResult.error(s"'$value' is not an uri: $path") } object UUIDString extends ValueFormat[String] { override def validate(path: JsonPath, value: String): ValidationResult = if (Try(UUID.fromString(value)).isSuccess) ValidationResult.success else ValidationResult.error(s"'$value' is not an uuid: $path") } object EmailString extends ValueFormat[String] { val emailPattern = """(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])""".r override def validate(path: JsonPath, value: String): ValidationResult = if (emailPattern.pattern.matcher(value).matches()) { ValidationResult.success } else { ValidationResult.error(s"'$value' is not an email: $path") } } object DateString extends ValueFormat[String] { override def validate(path: JsonPath, value: String): ValidationResult = if (Try(DateTimeFormatter.ISO_DATE.parse(value)).isSuccess) ValidationResult.success else ValidationResult.error(s"'$value' is not a date: $path") } object DateTimeString extends ValueFormat[String] { override def validate(path: JsonPath, value: String): ValidationResult = if (Try(DateTimeFormatter.ISO_DATE_TIME.parse(value)).isSuccess) ValidationResult.success else ValidationResult.error(s"'$value' is not a date-time: $path") } val defaultFormats = Map( "url" -> URLString, "uri" -> URIString, "uuid" -> UUIDString, "email" -> EmailString, "date" -> DateString, "date-time" -> DateTimeString ) }
Example 44
Source File: ExpiryServiceTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra import java.time.{Instant, LocalDate, ZoneOffset} import cats.effect.ExitCase import cats.implicits._ import cats.{Id, catsInstancesForId} import com.evolutiongaming.kafka.journal.ExpireAfter import com.evolutiongaming.kafka.journal.ExpireAfter.implicits._ import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpireOn.implicits._ import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpiryService.Action import com.evolutiongaming.kafka.journal.util.BracketFromMonad import org.scalatest.FunSuite import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.util.control.NonFatal class ExpiryServiceTest extends FunSuite with Matchers { import ExpiryServiceTest._ test("expireOn") { val expireAfter = 1.day.toExpireAfter val expected = LocalDate.of(2019, 12, 12).toExpireOn expireService.expireOn(expireAfter, timestamp) shouldEqual expected } for { (expiry, expireAfter, action) <- List( ( none[Expiry], 1.minute.toExpireAfter.some, Action.update(Expiry( 1.minute.toExpireAfter, LocalDate.of(2019, 12, 11).toExpireOn))), ( none[Expiry], 1.day.toExpireAfter.some, Action.update(Expiry( 1.day.toExpireAfter, LocalDate.of(2019, 12, 12).toExpireOn))), ( Expiry( 1.day.toExpireAfter, LocalDate.of(2019, 12, 11).toExpireOn).some, 1.day.toExpireAfter.some, Action.update(Expiry( 1.day.toExpireAfter, LocalDate.of(2019, 12, 12).toExpireOn))), ( Expiry( 1.day.toExpireAfter, LocalDate.of(2019, 12, 12).toExpireOn).some, 1.day.toExpireAfter.some, Action.ignore), ( Expiry( 1.day.toExpireAfter, LocalDate.of(2019, 12, 12).toExpireOn).some, none[ExpireAfter], Action.remove)) } yield { test(s"action expiry: $expiry, expireAfter: $expireAfter, action: $action") { expireService.action(expiry, expireAfter, timestamp) shouldEqual action } } } object ExpiryServiceTest { implicit val bracketId: BracketFromMonad[Id, Throwable] = new BracketFromMonad[Id, Throwable] { def F = catsInstancesForId def bracketCase[A, B](acquire: Id[A])(use: A => Id[B])(release: (A, ExitCase[Throwable]) => Id[Unit]) = { flatMap(acquire) { a => try { val b = use(a) try release(a, ExitCase.Completed) catch { case NonFatal(_) => } b } catch { case NonFatal(e) => release(a, ExitCase.Error(e)) raiseError(e) } } } def raiseError[A](a: Throwable) = throw a def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]) = fa } val timestamp: Instant = Instant.parse("2019-12-11T10:10:10.00Z") val zoneId: ZoneOffset = ZoneOffset.UTC val expireService: ExpiryService[Id] = ExpiryService[Id](zoneId) }
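The expireOn case in the first test adds the retention period to the write timestamp and keeps the UTC calendar date. Under that assumption, the same arithmetic with plain java.time (no ExpireAfter/ExpireOn wrappers) is:

import java.time.{Duration, Instant, LocalDate, ZoneOffset}

object ExpireOnSketch extends App {
  val timestamp = Instant.parse("2019-12-11T10:10:10.00Z")
  // one day after the timestamp, taken as a UTC calendar date
  val expireOn: LocalDate =
    timestamp.plus(Duration.ofDays(1)).atZone(ZoneOffset.UTC).toLocalDate
  assert(expireOn == LocalDate.of(2019, 12, 12))
}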
Example 45
Source File: LoadDataBenchmark.scala From memsql-spark-connector with Apache License 2.0 | 5 votes |
package com.memsql.spark import java.sql.{Connection, Date, DriverManager} import java.time.{Instant, LocalDate} import java.util.Properties import org.apache.spark.sql.types._ import com.github.mrpowers.spark.daria.sql.SparkSessionExt._ import org.apache.spark.sql.{SaveMode, SparkSession} import scala.util.Random // LoadDataBenchmark is written to test load data with CPU profiler // this feature is accessible in Ultimate version of IntelliJ IDEA // see https://www.jetbrains.com/help/idea/async-profiler.html#profile for more details object LoadDataBenchmark extends App { final val masterHost: String = sys.props.getOrElse("memsql.host", "localhost") final val masterPort: String = sys.props.getOrElse("memsql.port", "5506") val spark: SparkSession = SparkSession .builder() .master("local") .config("spark.sql.shuffle.partitions", "1") .config("spark.driver.bindAddress", "localhost") .config("spark.datasource.memsql.ddlEndpoint", s"${masterHost}:${masterPort}") .config("spark.datasource.memsql.database", "testdb") .getOrCreate() def jdbcConnection: Loan[Connection] = { val connProperties = new Properties() connProperties.put("user", "root") Loan( DriverManager.getConnection( s"jdbc:mysql://$masterHost:$masterPort", connProperties )) } def executeQuery(sql: String): Unit = { jdbcConnection.to(conn => Loan(conn.createStatement).to(_.execute(sql))) } executeQuery("set global default_partitions_per_leaf = 2") executeQuery("drop database if exists testdb") executeQuery("create database testdb") def genRow(): (Long, Int, Double, String) = (Random.nextLong(), Random.nextInt(), Random.nextDouble(), Random.nextString(20)) val df = spark.createDF( List.fill(1000000)(genRow()), List(("LongType", LongType, true), ("IntType", IntegerType, true), ("DoubleType", DoubleType, true), ("StringType", StringType, true)) ) val start = System.nanoTime() df.write .format("memsql") .mode(SaveMode.Append) .save("testdb.batchinsert") val diff = System.nanoTime() - start println("Elapsed time: " + diff + "ns [CSV serialization] ") executeQuery("truncate testdb.batchinsert") val avroStart = System.nanoTime() df.write .format(DefaultSource.MEMSQL_SOURCE_NAME_SHORT) .mode(SaveMode.Append) .option(MemsqlOptions.LOAD_DATA_FORMAT, "Avro") .save("testdb.batchinsert") val avroDiff = System.nanoTime() - avroStart println("Elapsed time: " + avroDiff + "ns [Avro serialization] ") }
Example 46
Source File: BatchInsertBenchmark.scala From memsql-spark-connector with Apache License 2.0 | 5 votes |
package com.memsql.spark import java.sql.{Connection, Date, DriverManager} import java.time.LocalDate import java.util.Properties import org.apache.spark.sql.types._ import com.github.mrpowers.spark.daria.sql.SparkSessionExt._ import org.apache.spark.sql.{SaveMode, SparkSession} import scala.util.Random // BatchInsertBenchmark is written to test batch insert with CPU profiler // this feature is accessible in Ultimate version of IntelliJ IDEA // see https://www.jetbrains.com/help/idea/async-profiler.html#profile for more details object BatchInsertBenchmark extends App { final val masterHost: String = sys.props.getOrElse("memsql.host", "localhost") final val masterPort: String = sys.props.getOrElse("memsql.port", "5506") val spark: SparkSession = SparkSession .builder() .master("local") .config("spark.sql.shuffle.partitions", "1") .config("spark.driver.bindAddress", "localhost") .config("spark.datasource.memsql.ddlEndpoint", s"${masterHost}:${masterPort}") .config("spark.datasource.memsql.database", "testdb") .getOrCreate() def jdbcConnection: Loan[Connection] = { val connProperties = new Properties() connProperties.put("user", "root") Loan( DriverManager.getConnection( s"jdbc:mysql://$masterHost:$masterPort", connProperties )) } def executeQuery(sql: String): Unit = { jdbcConnection.to(conn => Loan(conn.createStatement).to(_.execute(sql))) } executeQuery("set global default_partitions_per_leaf = 2") executeQuery("drop database if exists testdb") executeQuery("create database testdb") def genDate() = Date.valueOf(LocalDate.ofEpochDay(LocalDate.of(2001, 4, 11).toEpochDay + Random.nextInt(10000))) def genRow(): (Long, Int, Double, String, Date) = (Random.nextLong(), Random.nextInt(), Random.nextDouble(), Random.nextString(20), genDate()) val df = spark.createDF( List.fill(1000000)(genRow()), List(("LongType", LongType, true), ("IntType", IntegerType, true), ("DoubleType", DoubleType, true), ("StringType", StringType, true), ("DateType", DateType, true)) ) val start = System.nanoTime() df.write .format("memsql") .option("tableKey.primary", "IntType") .option("onDuplicateKeySQL", "IntType = IntType") .mode(SaveMode.Append) .save("testdb.batchinsert") val diff = System.nanoTime() - start println("Elapsed time: " + diff + "ns") }
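genDate above draws a random day within 10000 days (roughly 27 years) of 2001-04-11 and converts it to java.sql.Date for the DateType column. Isolated from the benchmark, the generator is essentially:

import java.sql.Date
import java.time.LocalDate
import scala.util.Random

object RandomSqlDates {
  private val baseEpochDay = LocalDate.of(2001, 4, 11).toEpochDay

  // random java.sql.Date within 10000 days of the base date
  def genDate(): Date =
    Date.valueOf(LocalDate.ofEpochDay(baseEpochDay + Random.nextInt(10000)))
}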
Example 47
Source File: EvalProposals.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.joex.process import java.time.{LocalDate, Period} import cats.effect.Sync import cats.implicits._ import docspell.common._ import docspell.joex.scheduler.Task import docspell.store.records.RAttachmentMeta object EvalProposals { def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] = Task { _ => Timestamp .current[F] .map { now => val metas = data.metas.map(calcCandidateWeight(now.toUtcDate)) data.copy(metas = metas) } } def calcCandidateWeight(now: LocalDate)(rm: RAttachmentMeta): RAttachmentMeta = { val list = rm.proposals.change(mp => mp.addWeights(weight(rm, mp, now))) rm.copy(proposals = list.sortByWeights) } def weight(rm: RAttachmentMeta, mp: MetaProposal, ref: LocalDate)( cand: MetaProposal.Candidate ): Double = mp.proposalType match { case MetaProposalType.DueDate => //for due dates, sort earliest on top MetaProposal .parseDate(cand) .map { ld => val p = Period.between(ref, ld) // conversion only for sorting val d = p.getYears * 365 + p.getMonths * 31 + p.getDays d.toDouble } .getOrElse(2000.0) case _ => val textLen = rm.content.map(_.length).getOrElse(0) val tagCount = cand.origin.size.toDouble val pos = cand.origin.map(_.startPosition).min val words = cand.origin.map(_.label.split(' ').length).max.toDouble val nerFac = cand.origin.map(label => nerTagFactor(label.tag, mp.proposalType)).min (1 / words) * (1 / tagCount) * positionWeight(pos, textLen) * nerFac } def positionWeight(pos: Int, total: Int): Double = if (total <= 0) 1 else { val p = math.abs(pos.toDouble / total.toDouble) if (p < 0.7) p / 2 else p } def nerTagFactor(tag: NerTag, mt: MetaProposalType): Double = tag match { case NerTag.Date => 1.0 case NerTag.Email => 0.5 case NerTag.Location => 1.0 case NerTag.Misc => 1.0 case NerTag.Organization => if (mt == MetaProposalType.CorrOrg) 0.8 else 1.0 case NerTag.Person => if ( mt == MetaProposalType.CorrPerson || mt == MetaProposalType.ConcPerson ) 0.8 else 1.0 case NerTag.Website => 0.5 } }
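The due-date branch above approximates the distance from the reference date out of a Period (years * 365 + months * 31 + days), which is only used as a sort key. For comparison, an exact day count is also available via ChronoUnit; a small sketch of both, with illustrative dates:

import java.time.{LocalDate, Period}
import java.time.temporal.ChronoUnit

object DueDateDistance extends App {
  val ref = LocalDate.of(2020, 1, 1)
  val due = LocalDate.of(2020, 3, 15)

  val p = Period.between(ref, due)                                  // P2M14D
  val approxDays = p.getYears * 365 + p.getMonths * 31 + p.getDays  // 76, the sort key used above
  val exactDays  = ChronoUnit.DAYS.between(ref, due)                // 74 (2020 is a leap year)
  assert(approxDays == 76 && exactDays == 74L)
}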
Example 48
Source File: MetaProposal.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.common import java.time.LocalDate import cats.data.NonEmptyList import cats.implicits._ import cats.kernel.Order import docspell.common.MetaProposal.Candidate import docspell.common._ import io.circe._ import io.circe.generic.semiauto._ def flatten(s: NonEmptyList[Candidate]): NonEmptyList[Candidate] = { def mergeInto( res: NonEmptyList[Candidate], el: Candidate ): NonEmptyList[Candidate] = { val l = res.map(c => if (c.ref.id == el.ref.id) c.copy(origin = c.origin ++ el.origin) else c ) if (l == res) l :+ el else l } val init = NonEmptyList.of(s.head) s.tail.foldLeft(init)(mergeInto) } implicit val jsonDecoder: Decoder[MetaProposal] = deriveDecoder[MetaProposal] implicit val jsonEncoder: Encoder[MetaProposal] = deriveEncoder[MetaProposal] }
Example 49
Source File: DoobieMeta.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.store.impl import java.time.format.DateTimeFormatter import java.time.{Instant, LocalDate} import docspell.common._ import docspell.common.syntax.all._ import com.github.eikek.calev.CalEvent import doobie._ import doobie.implicits.legacy.instant._ import doobie.util.log.Success import emil.doobie.EmilDoobieMeta import io.circe.{Decoder, Encoder} trait DoobieMeta extends EmilDoobieMeta { implicit val sqlLogging = LogHandler({ case e @ Success(_, _, _, _) => DoobieMeta.logger.trace("SQL " + e) case e => DoobieMeta.logger.error(s"SQL Failure: $e") }) def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] = Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a => e.apply(a).noSpaces ) implicit val metaCollectiveState: Meta[CollectiveState] = Meta[String].imap(CollectiveState.unsafe)(CollectiveState.asString) implicit val metaUserState: Meta[UserState] = Meta[String].imap(UserState.unsafe)(UserState.asString) implicit val metaPassword: Meta[Password] = Meta[String].imap(Password(_))(_.pass) implicit val metaIdent: Meta[Ident] = Meta[String].imap(Ident.unsafe)(_.id) implicit val metaContactKind: Meta[ContactKind] = Meta[String].imap(ContactKind.unsafe)(_.asString) implicit val metaTimestamp: Meta[Timestamp] = Meta[Instant].imap(Timestamp(_))(_.value) implicit val metaJobState: Meta[JobState] = Meta[String].imap(JobState.unsafe)(_.name) implicit val metaDirection: Meta[Direction] = Meta[Boolean].imap(flag => if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction )(d => Direction.isIncoming(d)) implicit val metaPriority: Meta[Priority] = Meta[Int].imap(Priority.fromInt)(Priority.toInt) implicit val metaLogLevel: Meta[LogLevel] = Meta[String].imap(LogLevel.unsafeString)(_.name) implicit val metaLenientUri: Meta[LenientUri] = Meta[String].imap(LenientUri.unsafe)(_.asString) implicit val metaNodeType: Meta[NodeType] = Meta[String].imap(NodeType.unsafe)(_.name) implicit val metaLocalDate: Meta[LocalDate] = Meta[String].imap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE)) implicit val metaItemState: Meta[ItemState] = Meta[String].imap(ItemState.unsafe)(_.name) implicit val metNerTag: Meta[NerTag] = Meta[String].imap(NerTag.unsafe)(_.name) implicit val metaNerLabel: Meta[NerLabel] = jsonMeta[NerLabel] implicit val metaNerLabelList: Meta[List[NerLabel]] = jsonMeta[List[NerLabel]] implicit val metaItemProposal: Meta[MetaProposal] = jsonMeta[MetaProposal] implicit val metaItemProposalList: Meta[MetaProposalList] = jsonMeta[MetaProposalList] implicit val metaLanguage: Meta[Language] = Meta[String].imap(Language.unsafe)(_.iso3) implicit val metaCalEvent: Meta[CalEvent] = Meta[String].timap(CalEvent.unsafe)(_.asString) } object DoobieMeta extends DoobieMeta { import org.log4s._ private val logger = getLogger }
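metaLocalDate above stores dates as ISO-8601 strings and parses them back. The round trip it relies on, reduced to plain java.time:

import java.time.LocalDate
import java.time.format.DateTimeFormatter

object LocalDateColumnMapping extends App {
  val toDb: LocalDate => String   = _.format(DateTimeFormatter.ISO_DATE)
  val fromDb: String => LocalDate = (s: String) => LocalDate.parse(s)

  val d = LocalDate.of(2020, 5, 17)
  assert(fromDb(toDb(d)) == d)  // "2020-05-17" parses back to the same date
}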
Example 50
Source File: ClientSideAndSharedRouteSpecExample.scala From fintrospect with Apache License 2.0 | 5 votes |
package examples.clients import java.time.LocalDate import com.twitter.finagle.http.Method.Get import com.twitter.finagle.http.{Request, Response} import com.twitter.finagle.{Http, Service} import com.twitter.util.Await import io.fintrospect.RouteSpec import io.fintrospect.formats.PlainText.ResponseBuilder._ import io.fintrospect.parameters._ import io.fintrospect.testing.TestHttpServer import io.fintrospect.util.HttpRequestResponseUtil.{headersFrom, statusAndContentFrom} object ClientSideAndSharedRouteSpecExample extends App { val theDate = Path.localDate("date") val theWeather = Query.optional.string("weather") val theUser = Header.required.string("user") val gender = FormField.optional.string("gender") val body = Body.form(gender) val sharedRouteSpec = RouteSpec() .taking(theUser) .taking(theWeather) .body(body) .at(Get) / "firstSection" / theDate val fakeServerRoute = sharedRouteSpec bindTo (dateFromPath => Service.mk[Request, Response] { request: Request => { println("URL was " + request.uri) println("Headers were " + headersFrom(request)) println("Form sent was " + (body <-- request)) println("Date send was " + dateFromPath.toString) Ok(dateFromPath.toString) } }) Await.result(new TestHttpServer(10000, fakeServerRoute).start()) val client = sharedRouteSpec bindToClient Http.newService("localhost:10000") val theCall = client(theWeather --> Option("sunny"), body --> Form(gender --> "male"), theDate --> LocalDate.of(2015, 1, 1), theUser --> System.getenv("USER")) val response = Await.result(theCall) println("Response headers: " + headersFrom(response)) println("Response: " + statusAndContentFrom(response)) }
Example 51
Source File: Payloads.scala From http-verbs with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.examples.utils import play.api.libs.json.{Json, Reads} import java.time.LocalDate import scala.io.Source object XmlPayloads { val bankHolidays: String = Source.fromFile(getClass.getResource("/bankHolidays.xml").toURI, "UTF-8").getLines.mkString } object JsonPayloads { val bankHolidays: String = Source.fromFile(getClass.getResource("/bankHolidays.json").toURI, "UTF-8").getLines.mkString val userId: String = Source.fromFile(getClass.getResource("/userId.json").toURI, "UTF-8").getLines.mkString } case class BankHolidays(events: Seq[BankHoliday]) case class BankHoliday(title: String, date: LocalDate) object BankHolidays { implicit val bhr: Reads[BankHoliday] = Json.reads[BankHoliday] val reads: Reads[BankHolidays] = Json.reads[BankHolidays] } case class User(email: String, fullName: String) object User { val writes = Json.writes[User] } case class UserIdentifier(id: String) object UserIdentifier { val reads = Json.reads[UserIdentifier] }
Example 52
Source File: ArrayOfLocalDatesReading.scala From jsoniter-scala with MIT License | 5 votes |
package com.github.plokhotnyuk.jsoniter_scala.benchmark import java.nio.charset.StandardCharsets.UTF_8 import java.time.LocalDate import com.avsystem.commons.serialization.json._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.AVSystemCodecs._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.BorerJsonEncodersDecoders._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.DslPlatformJson._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._ import com.github.plokhotnyuk.jsoniter_scala.benchmark.UPickleReaderWriters._ import com.github.plokhotnyuk.jsoniter_scala.core._ import com.rallyhealth.weejson.v1.jackson.FromJson import com.rallyhealth.weepickle.v1.WeePickle.ToScala import io.circe.parser._ import org.openjdk.jmh.annotations.Benchmark import play.api.libs.json.Json import spray.json._ class ArrayOfLocalDatesReading extends ArrayOfLocalDatesBenchmark { @Benchmark def avSystemGenCodec(): Array[LocalDate] = JsonStringInput.read[Array[LocalDate]](new String(jsonBytes, UTF_8)) @Benchmark def borer(): Array[LocalDate] = io.bullet.borer.Json.decode(jsonBytes).to[Array[LocalDate]].value @Benchmark def circe(): Array[LocalDate] = decode[Array[LocalDate]](new String(jsonBytes, UTF_8)).fold(throw _, identity) @Benchmark def dslJsonScala(): Array[LocalDate] = dslJsonDecode[Array[LocalDate]](jsonBytes) @Benchmark def jacksonScala(): Array[LocalDate] = jacksonMapper.readValue[Array[LocalDate]](jsonBytes) @Benchmark def jsoniterScala(): Array[LocalDate] = readFromArray[Array[LocalDate]](jsonBytes) @Benchmark def playJson(): Array[LocalDate] = Json.parse(jsonBytes).as[Array[LocalDate]] @Benchmark def sprayJson(): Array[LocalDate] = JsonParser(jsonBytes).convertTo[Array[LocalDate]] @Benchmark def uPickle(): Array[LocalDate] = read[Array[LocalDate]](jsonBytes) @Benchmark def weePickle(): Array[LocalDate] = FromJson(jsonBytes).transform(ToScala[Array[LocalDate]]) }
Example 53
Source File: ArrayOfLocalDateTimesBenchmark.scala From jsoniter-scala with MIT License | 5 votes |
package com.github.plokhotnyuk.jsoniter_scala.benchmark import java.nio.charset.StandardCharsets.UTF_8 import java.time.{LocalDate, LocalDateTime, LocalTime} import org.openjdk.jmh.annotations.{Param, Setup} abstract class ArrayOfLocalDateTimesBenchmark extends CommonParams { @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000")) var size: Int = 1000 var obj: Array[LocalDateTime] = _ var jsonString: String = _ var jsonBytes: Array[Byte] = _ var preallocatedBuf: Array[Byte] = _ @Setup def setup(): Unit = { obj = (1 to size).map { i => val n = Math.abs(i * 1498724053) LocalDateTime.of(LocalDate.ofEpochDay(i), LocalTime.ofNanoOfDay(((n % 86000) | 0x1) * 1000000000L + (i % 4 match { case 0 => 0 case 1 => ((n % 1000) | 0x1) * 1000000 case 2 => ((n % 1000000) | 0x1) * 1000 case 3 => (n | 0x1) % 1000000000 }))) }.toArray jsonString = obj.mkString("[\"", "\",\"", "\"]") jsonBytes = jsonString.getBytes(UTF_8) preallocatedBuf = new Array[Byte](jsonBytes.length + 100) } }
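setup() above builds each LocalDateTime by pairing a LocalDate (from an epoch day) with a LocalTime. The combination step on its own:

import java.time.{LocalDate, LocalDateTime, LocalTime}

object CombineDateAndTime extends App {
  // epoch day 1 is 1970-01-02; combined with noon it gives a full LocalDateTime
  val dt: LocalDateTime = LocalDateTime.of(LocalDate.ofEpochDay(1), LocalTime.NOON)
  assert(dt == LocalDateTime.parse("1970-01-02T12:00"))
}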
Example 54
Source File: ArrayOfLocalDatesBenchmark.scala From jsoniter-scala with MIT License | 5 votes |
package com.github.plokhotnyuk.jsoniter_scala.benchmark import java.nio.charset.StandardCharsets.UTF_8 import java.time.LocalDate import org.openjdk.jmh.annotations.{Param, Setup} abstract class ArrayOfLocalDatesBenchmark extends CommonParams { @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000")) var size: Int = 1000 var obj: Array[LocalDate] = _ var jsonString: String = _ var jsonBytes: Array[Byte] = _ var preallocatedBuf: Array[Byte] = _ @Setup def setup(): Unit = { obj = (1 to size).map(i => LocalDate.ofEpochDay(i)).toArray jsonString = obj.mkString("[\"", "\",\"", "\"]") jsonBytes = jsonString.getBytes(UTF_8) preallocatedBuf = new Array[Byte](jsonBytes.length + 100) } }
Example 55
Source File: BlogPostPage.scala From hepek with Apache License 2.0 | 5 votes |
package ba.sake.hepek.html.statik import java.time.format.DateTimeFormatter import java.time.LocalDate import scalatags.Text.all._ import ba.sake.hepek.core.RelativePath import ba.sake.hepek.utils.StringUtils import ba.sake.stone.Wither trait BlogPostPage extends StaticPage { def blogSettings: BlogSettings = BlogSettings() def categoryPosts: List[BlogPostPage] = List.empty } @Wither final case class BlogSettings( author: Option[String] = None, createDate: Option[LocalDate] = None, sections: List[Section] = List.empty, dateFormat: DateTimeFormatter = BlogSettings.DefaultDateFormat ) { def withDateFormat(df: DateTimeFormatter) = copy(dateFormat = df) def withDateFormat(df: String) = copy(dateFormat = DateTimeFormatter.ofPattern(df)) } object BlogSettings { val DefaultDateFormatPattern = "dd.MM.yyyy" val DefaultDateFormat = DateTimeFormatter.ofPattern(DefaultDateFormatPattern) } def ref(implicit caller: RelativePath): String = if (owner == caller) "#" + id else caller.relTo(owner) + "#" + id }
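BlogSettings above formats dates with a dd.MM.yyyy pattern by default. The formatting step itself is a one-liner:

import java.time.LocalDate
import java.time.format.DateTimeFormatter

object BlogDateFormat extends App {
  val fmt = DateTimeFormatter.ofPattern("dd.MM.yyyy")
  assert(LocalDate.of(2018, 7, 9).format(fmt) == "09.07.2018")
}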
Example 56
Source File: Index.scala From hepek with Apache License 2.0 | 5 votes |
package docs import java.time.LocalDate import scalatags.Text.all._ import utils.Imports.Bundle._, Classes._ import templates.HepekDocsStaticPage object Index extends HepekDocsStaticPage { private val ratios = Ratios(Ratio(1, 4, 1), Ratio(1, 1), Ratio(1, 4, 1)) private val grid = Grid.withScreenRatios( Grid.screenRatios.withSm(None).withXs(None).withLg(ratios).withMd(ratios) ) import grid._ override def pageSettings = super.pageSettings .withTitle("Welcome!") .withDescription("Hepek docs") val currYear = LocalDate.now() override def pageContent = frag( div(cls := "page-header", txtAlignCenter)( h1("Welcome!") ), row( s""" Hepek is a collection of useful projects for typesafe HTML construction: - [Components](${hepek.components.Index.ref}) - [Static site generator](${hepek.Index.ref}) - [Play framework integration](${hepek.play.Index.ref}) Components can be used in **any project**. It only depends on Scalatags and Commonmark. If you need typesafe grid, form inputs, panels, navbars, markdown, maths and lots more, this is the library you're looking for. SSG has support for automatic relative links, PDF rendering and lots more. Hepek Play seamlessly integrates components with Play framework. """.md ), super.pageContent ) }
Example 57
Source File: Parsers.scala From get-programming-with-scala with MIT License | 5 votes |
package org.example.movies.entities import java.time.LocalDate import io.circe.Decoder import io.circe.parser.decode import io.circe.generic.auto._ import scala.util.Try object Parsers { def parseInt(row: Map[String, String], key: String): Option[Int] = parseAs(row, key, _.toInt) def parseDouble(row: Map[String, String], key: String): Option[Double] = parseAs(row, key, _.toDouble) def parseString(row: Map[String, String], key: String): Option[String] = parseAs(row, key, identity) def parseFloat(row: Map[String, String], key: String): Option[Float] = parseAs(row, key, _.toFloat) def parseLocalDate(row: Map[String, String], key: String): Option[LocalDate] = parseAs(row, key, LocalDate.parse) def parseGenres(row: Map[String, String], key: String): Option[List[Genre]] = parseJsonAs[List[Genre]](row, key) private def parseJsonAs[T: Decoder](row: Map[String, String], key: String): Option[T] = for { rawJson <- parseString(row, key) // for some reason the CSV is using ' rather than " for JSON, // so we need to clean the data before decoding cleanJson = rawJson.replace("'", "\"") // rather than returning an error for invalid JSON, we return no value decodedValue <- decode[T](cleanJson).toOption } yield decodedValue private def parseAs[T](row: Map[String, String], key: String, f: String => T): Option[T] = row.get(key).flatMap { value => // if we cannot parse a value, we return no value Try(f(value)).toOption } }
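parseLocalDate above turns a failed parse into a missing value instead of an exception. The core of that pattern, without the CSV plumbing:

import java.time.LocalDate
import scala.util.Try

object SafeDateParse extends App {
  // None for anything LocalDate.parse rejects
  def safeParse(s: String): Option[LocalDate] = Try(LocalDate.parse(s)).toOption

  assert(safeParse("1995-10-30").contains(LocalDate.of(1995, 10, 30)))
  assert(safeParse("not a date").isEmpty)
}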
Example 58
Source File: Movie.scala From get-programming-with-scala with MIT License | 5 votes |
package org.example.movies.entities import java.time.LocalDate import org.slf4j.LoggerFactory case class Genre(id: Int, name: String) case class Movie(genres: List[Genre], id: Int, imdbId: String, originalLanguage: String, originalTitle: String, title: String, overview: String, popularity: Option[Float], releaseDate: Option[LocalDate], revenue: Int, budget: Int, duration: Option[Double], voteAverage: Float, voteCount: Float) object Movie { import Parsers._ private val logger = LoggerFactory.getLogger(this.getClass) def parse(row: Map[String, String]): Option[Movie] = { // header from the CSV file: // adult,belongs_to_collection,budget,genres,homepage,id,imdb_id,original_language,original_title,overview,popularity,poster_path,production_companies,production_countries,release_date,revenue,runtime,spoken_languages,status,tagline,title,video,vote_average,vote_count val movie = for { val revenue = parseInt(row, "revenue").getOrElse[Int](0) val title = parseString(row, "title").getOrElse(originalTitle) val voteAverage = parseFloat(row, "vote_average").getOrElse[Float](0) val voteCount = parseFloat(row, "vote_count").getOrElse[Float](0) Movie(genres, id, imdbId, originalLanguage, originalTitle, title, overview, popularity, releaseDate, revenue, budget, runtimeInMinutes, voteAverage, voteCount) } if (movie.isEmpty) logger.warn(s"Skipping malformed movie row") movie } }
Example 59
Source File: EventsByTagPubsubSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.query import java.time.{ LocalDate, ZoneOffset } import akka.cluster.Cluster import akka.persistence.cassandra.CassandraSpec import akka.persistence.cassandra.journal.JournalSettings import akka.persistence.query.{ EventEnvelope, NoOffset } import akka.stream.testkit.scaladsl.TestSink import com.typesafe.config.ConfigFactory import scala.concurrent.duration._ object EventsByTagPubsubSpec { val today = LocalDate.now(ZoneOffset.UTC) val config = ConfigFactory.parseString(s""" akka.actor.provider = "akka.cluster.ClusterActorRefProvider" akka.actor.serialize-messages = off akka.actor.serialize-creators = off akka.remote.netty.tcp.port = 0 akka.remote.artery.canonical.port = 0 akka.remote.netty.tcp.hostname = "127.0.0.1" akka.persistence.cassandra { query.refresh-interval = 10s events-by-tag { pubsub-notification = on flush-interval = 0ms eventual-consistency-delay = 0s } } """).withFallback(EventsByTagSpec.config) } class EventsByTagPubsubSpec extends CassandraSpec(EventsByTagPubsubSpec.config) { val journalSettings = new JournalSettings(system, system.settings.config.getConfig("akka.persistence.cassandra")) override protected def beforeAll(): Unit = { super.beforeAll() Cluster(system).join(Cluster(system).selfAddress) } "Cassandra query getEventsByTag when running clustered with pubsub enabled" must { "present new events to an ongoing getEventsByTag stream long before polling would kick in" in { val actor = system.actorOf(TestActor.props("EventsByTagPubsubSpec_a")) val blackSrc = queries.eventsByTag(tag = "black", offset = NoOffset) val probe = blackSrc.runWith(TestSink.probe[Any]) probe.request(2) probe.expectNoMessage(300.millis) actor ! "a black car" probe.within(5.seconds) { // long before refresh-interval, which is 10s probe.expectNextPF { case e @ EventEnvelope(_, _, _, "a black car") => e } } } } }
Example 60
Source File: data.scala From scalalaz-gen with Apache License 2.0 | 5 votes |
package ru.scalalaz.gen import java.nio.file.Path import java.time.format.DateTimeFormatter import java.time.{ LocalDate, ZoneOffset } import knockoff.DefaultDiscounter._ import _root_.knockoff._ case class EpisodeSettings(title: String, description: String, audio: Enclosure, page: String, date: LocalDate) { def RFCDate: String = { val dateTime = date.atStartOfDay().atOffset(ZoneOffset.UTC) dateTime.format(DateTimeFormatter.RFC_1123_DATE_TIME) } def ISODate: String = { date.format(DateTimeFormatter.ISO_DATE) } } case class Episode(settings: EpisodeSettings, content: String) { def title: String = settings.title def asHtml: String = { val blocks = knockoff(content) toXHTML(blocks).mkString } } case class EpisodeFile(path: Path, episode: Episode) case class SpecialPageSettings(title: String, date: LocalDate) { def ISODate: String = { date.format(DateTimeFormatter.ISO_DATE) } } case class Page(settings: SpecialPageSettings, content: String) { def title: String = settings.title def asHtml: String = { val blocks = knockoff(content) toXHTML(blocks).mkString } } case class PageFile(path: Path, page: Page)
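RFCDate and ISODate above render the same LocalDate for the RSS feed and the page, respectively. Under the same UTC-midnight assumption, the two formatting calls reduce to:

import java.time.{LocalDate, ZoneOffset}
import java.time.format.DateTimeFormatter

object EpisodeDates extends App {
  val date = LocalDate.of(2016, 11, 28)

  val rfc = date.atStartOfDay().atOffset(ZoneOffset.UTC)
    .format(DateTimeFormatter.RFC_1123_DATE_TIME)   // "Mon, 28 Nov 2016 00:00:00 GMT"

  val iso = date.format(DateTimeFormatter.ISO_DATE) // "2016-11-28"
}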
Example 61
Source File: EpisodeSettingsExtractor.scala From scalalaz-gen with Apache License 2.0 | 5 votes |
package ru.scalalaz.gen.parsing import java.time.LocalDate import java.time.format.{ DateTimeFormatter, DateTimeParseException } import cats.Apply import cats.data.Validated.Valid import cats.data.{ Validated, ValidatedNel } import ru.scalalaz.gen.{ Enclosure, EpisodeSettings, SpecialPageSettings } object EpisodeSettingsExtractor { import ru.scalalaz.gen.parsing.EpisodeErrors._ def fromMap( map: Map[String, Option[String]] ): ValidatedNel[PageParseError, SpecialPageSettings] = new SettingsExtractor(map).extract class SettingsExtractor(map: Map[String, Option[String]]) { def extract: ValidatedNel[PageParseError, SpecialPageSettings] = Apply[ValidatedNel[PageParseError, *]].map2( read("title").toValidatedNel, read("date").andThen(parseDate).toValidatedNel ) { case (title, date) => //val enc = Enclosure(encUrl, if (encLength != "") encLength.toInt else -1) SpecialPageSettings(title, date) } private def read(key: String): Validated[PageParseError, String] = Validated.fromOption(map.get(key).flatten, MissingKey(key)) private def optRead(key: String): Validated[PageParseError, String] = Valid(map.get(key).flatten.getOrElse("")) private def parseDate( date: String ): Validated[PageParseError, LocalDate] = { def toDate = LocalDate.parse(date, DateTimeFormatter.ISO_LOCAL_DATE) Validated .catchOnly[DateTimeParseException](toDate) .leftMap(e => InvalidDate(e.getMessage)) } } }
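parseDate above wraps the DateTimeParseException in a Validated so it can be accumulated with the other field errors. The parsing step on its own, with the exception message kept as the error value:

import java.time.LocalDate
import java.time.format.{DateTimeFormatter, DateTimeParseException}
import cats.data.Validated

object ValidatedDateParse {
  def parseDate(s: String): Validated[String, LocalDate] =
    Validated
      .catchOnly[DateTimeParseException](LocalDate.parse(s, DateTimeFormatter.ISO_LOCAL_DATE))
      .leftMap(_.getMessage)
}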
Example 62
Source File: EpisodeParserSpec.scala From scalalaz-gen with Apache License 2.0 | 5 votes |
package ru.scalalaz.gen.parsing import java.time.LocalDate import cats.data.Validated.Valid import org.scalatest.matchers.should.Matchers import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.Inside class EpisodeParserSpec extends AnyFlatSpec with Matchers with Inside { val episodeStr = """ |title=Episode#1 |page=http://scalalaz.ru/series-01.html |date=2016-11-28 |audio.url=https://scalalaz.ru/mp3/scalalaz-podcast-1.mp3 |audio.length=6 |---- |### Yoyoyo! |it is a new episode!""".stripMargin it should "parse from string" in { val result = EpisodeParser.fromString(episodeStr) inside(result) { case Valid(episode) => episode.content shouldBe "### Yoyoyo!\nit is a new episode!" val rss = episode.settings rss.title shouldBe "Episode#1" rss.page shouldBe "http://scalalaz.ru/series-01.html" rss.date shouldBe LocalDate.of(2016, 11, 28) rss.audio.url shouldBe "https://scalalaz.ru/mp3/scalalaz-podcast-1.mp3" rss.audio.length shouldBe 6 } } }
Example 63
Source File: FinaglePostgresDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.finagle.postgres import java.nio.charset.Charset import java.time.{ LocalDate, LocalDateTime, ZoneId } import java.util.{ Date, UUID } import com.twitter.finagle.postgres.values.ValueDecoder import com.twitter.util.Return import com.twitter.util.Throw import com.twitter.util.Try import io.getquill.FinaglePostgresContext import io.getquill.util.Messages.fail import io.netty.buffer.ByteBuf trait FinaglePostgresDecoders { this: FinaglePostgresContext[_] => import ValueDecoder._ type Decoder[T] = FinaglePostgresDecoder[T] case class FinaglePostgresDecoder[T]( vd: ValueDecoder[T], default: Throwable => T = (e: Throwable) => fail(e.getMessage) ) extends BaseDecoder[T] { override def apply(index: Index, row: ResultRow): T = row.getTry[T](index)(vd) match { case Return(r) => r case Throw(e) => default(e) } def orElse[U](f: U => T)(implicit vdu: ValueDecoder[U]): FinaglePostgresDecoder[T] = { val mappedVd = vdu.map[T](f) FinaglePostgresDecoder[T]( new ValueDecoder[T] { def decodeText(recv: String, text: String): Try[T] = { val t = vd.decodeText(recv, text) if (t.isReturn) t else mappedVd.decodeText(recv, text) } def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[T] = { val t = vd.decodeBinary(recv, bytes, charset) if (t.isReturn) t else mappedVd.decodeBinary(recv, bytes, charset) } } ) } } implicit def decoderDirectly[T](implicit vd: ValueDecoder[T]): Decoder[T] = FinaglePostgresDecoder(vd) def decoderMapped[U, T](f: U => T)(implicit vd: ValueDecoder[U]): Decoder[T] = FinaglePostgresDecoder(vd.map[T](f)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = FinaglePostgresDecoder[Option[T]]( new ValueDecoder[Option[T]] { def decodeText(recv: String, text: String): Try[Option[T]] = Return(d.vd.decodeText(recv, text).toOption) def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[Option[T]] = Return(d.vd.decodeBinary(recv, bytes, charset).toOption) }, _ => None ) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = decoderMapped[I, O](mapped.f)(d.vd) implicit val stringDecoder: Decoder[String] = decoderDirectly[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoderDirectly[BigDecimal] implicit val booleanDecoder: Decoder[Boolean] = decoderDirectly[Boolean] implicit val shortDecoder: Decoder[Short] = decoderDirectly[Short] implicit val byteDecoder: Decoder[Byte] = decoderMapped[Short, Byte](_.toByte) implicit val intDecoder: Decoder[Int] = decoderDirectly[Int].orElse[Long](_.toInt) implicit val longDecoder: Decoder[Long] = decoderDirectly[Long].orElse[Int](_.toLong) implicit val floatDecoder: Decoder[Float] = decoderDirectly[Float].orElse[Double](_.toFloat) implicit val doubleDecoder: Decoder[Double] = decoderDirectly[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoderDirectly[Array[Byte]] implicit val dateDecoder: Decoder[Date] = decoderMapped[LocalDateTime, Date](d => Date.from(d.atZone(ZoneId.systemDefault()).toInstant)) implicit val localDateDecoder: Decoder[LocalDate] = decoderDirectly[LocalDate].orElse[LocalDateTime](_.toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoderDirectly[LocalDateTime].orElse[LocalDate](_.atStartOfDay) implicit val uuidDecoder: Decoder[UUID] = decoderDirectly[UUID] }
Example 64
Source File: ArrayEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async import java.time.LocalDate import java.util.Date import io.getquill.PostgresAsyncContext import io.getquill.context.sql.encoding.ArrayEncoding import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } trait ArrayEncoders extends ArrayEncoding { self: PostgresAsyncContext[_] => implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col] implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayRawEncoder[BigDecimal, Col] implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col] implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col] implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col] implicit def arrayIntEncoder[Col <: Seq[Index]]: Encoder[Col] = arrayRawEncoder[Index, Col] implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col] implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col] implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col] implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col] implicit def arrayJodaDateTimeEncoder[Col <: Seq[JodaDateTime]]: Encoder[Col] = arrayEncoder[JodaDateTime, Col](_.toLocalDateTime) implicit def arrayJodaLocalDateTimeEncoder[Col <: Seq[JodaLocalDateTime]]: Encoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col] implicit def arrayJodaLocalDateEncoder[Col <: Seq[JodaLocalDate]]: Encoder[Col] = arrayRawEncoder[JodaLocalDate, Col] implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](encodeLocalDate.f) def arrayEncoder[T, Col <: Seq[T]](mapper: T => Any): Encoder[Col] = encoder[Col]((col: Col) => col.toIndexedSeq.map(mapper), SqlTypes.ARRAY) def arrayRawEncoder[T, Col <: Seq[T]]: Encoder[Col] = arrayEncoder[T, Col](identity) }
Example 65
Source File: ArrayDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async import java.time.LocalDate import java.util.Date import io.getquill.PostgresAsyncContext import io.getquill.context.sql.encoding.ArrayEncoding import io.getquill.util.Messages.fail import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } import scala.collection.compat._ import scala.reflect.ClassTag trait ArrayDecoders extends ArrayEncoding { self: PostgresAsyncContext[_] => implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawEncoder[String, Col] implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayRawEncoder[BigDecimal, Col] implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawEncoder[Boolean, Col] implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayDecoder[Short, Byte, Col](_.toByte) implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawEncoder[Short, Col] implicit def arrayIntDecoder[Col <: Seq[Index]](implicit bf: CBF[Index, Col]): Decoder[Col] = arrayRawEncoder[Index, Col] implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawEncoder[Long, Col] implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayDecoder[Double, Float, Col](_.toFloat) implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawEncoder[Double, Col] implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, Date, Col](_.toDate) implicit def arrayJodaDateTimeDecoder[Col <: Seq[JodaDateTime]](implicit bf: CBF[JodaDateTime, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, JodaDateTime, Col](_.toDateTime) implicit def arrayJodaLocalDateTimeDecoder[Col <: Seq[JodaLocalDateTime]](implicit bf: CBF[JodaLocalDateTime, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col] implicit def arrayJodaLocalDateDecoder[Col <: Seq[JodaLocalDate]](implicit bf: CBF[JodaLocalDate, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDate, Col] implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[JodaLocalDate, LocalDate, Col](decodeLocalDate.f) def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], iTag: ClassTag[I], oTag: ClassTag[O]): Decoder[Col] = AsyncDecoder[Col](SqlTypes.ARRAY)(new BaseDecoder[Col] { def apply(index: Index, row: ResultRow): Col = { row(index) match { case seq: IndexedSeq[Any] => seq.foldLeft(bf.newBuilder) { case (b, x: I) => b += mapper(x) case (_, x) => fail(s"Array at index $index contains element of ${x.getClass.getCanonicalName}, but expected $iTag") }.result() case value => fail( s"Value '$value' at index $index is not an array so it cannot be decoded to collection of $oTag" ) } } }) def arrayRawEncoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] = arrayDecoder[T, T, Col](identity) }
Example 66
Source File: PostgresEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.ndbc

import java.time.{ LocalDate, LocalDateTime, ZoneOffset }
import java.util.{ Date, UUID }

import io.getquill.dsl.CoreDsl
import io.trane.ndbc.PostgresPreparedStatement

import scala.language.implicitConversions
import scala.reflect.ClassTag

trait LowPriorityPostgresImplicits {
  this: CoreDsl =>

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: BaseEncoder[O]): BaseEncoder[I] =
    mappedBaseEncoder(mapped, e)
}

trait PostgresEncoders extends LowPriorityPostgresImplicits with io.getquill.dsl.LowPriorityImplicits {
  this: NdbcContext[_, _, PostgresPreparedStatement, _] =>

  type Encoder[T] = BaseEncoder[T]

  protected val zoneOffset: ZoneOffset

  def encoder[T, U](f: PostgresPreparedStatement => (Int, U) => PostgresPreparedStatement)(implicit ev: T => U): Encoder[T] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v)

  def arrayEncoder[T, U: ClassTag, Col <: Seq[T]](f: PostgresPreparedStatement => (Int, Array[U]) => PostgresPreparedStatement)(ev: T => U): Encoder[Col] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v.map(ev).toArray[U])

  implicit override def anyValMappedEncoder[I <: AnyVal, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] = mappedEncoder

  implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else v match {
        case None    => ps.setNull(idx)
        case Some(v) => e(idx, v, ps)
      }

  implicit def toLocalDateTime(d: Date) = LocalDateTime.ofInstant(d.toInstant(), zoneOffset)

  implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID)
  implicit val stringEncoder: Encoder[String] = encoder(_.setString)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(_.setBigDecimal)(_.bigDecimal)
  implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean)
  implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte)
  implicit val shortEncoder: Encoder[Short] = encoder(_.setShort)
  implicit val intEncoder: Encoder[Int] = encoder(_.setInteger)
  implicit val longEncoder: Encoder[Long] = encoder(_.setLong)
  implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat)
  implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(_.setByteArray)
  implicit val dateEncoder: Encoder[Date] = encoder(_.setLocalDateTime)
  implicit val localDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate)
  implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(_.setLocalDateTime)

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayEncoder[String, String, Col](_.setStringArray)(identity)
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, java.math.BigDecimal, Col](_.setBigDecimalArray)(_.bigDecimal)
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayEncoder[Boolean, java.lang.Boolean, Col](_.setBooleanArray)(_.booleanValue)
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayEncoder[Byte, java.lang.Short, Col](_.setShortArray)(identity)
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayEncoder[Short, java.lang.Short, Col](_.setShortArray)(_.shortValue)
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayEncoder[Int, java.lang.Integer, Col](_.setIntegerArray)(_.intValue)
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayEncoder[Long, java.lang.Long, Col](_.setLongArray)(_.longValue)
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayEncoder[Float, java.lang.Float, Col](_.setFloatArray)(_.floatValue)
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayEncoder[Double, java.lang.Double, Col](_.setDoubleArray)(_.doubleValue)
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, LocalDateTime, Col](_.setLocalDateTimeArray)(identity)
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, LocalDate, Col](_.setLocalDateArray)(identity)
}
Example 67
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait Encoders {
  this: JAsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]
  type EncoderSqlType = SqlTypes.SqlTypes

  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]((bd: BigDecimal) => bd.bigDecimal, SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
}
Example 68
Source File: Encodings.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.cassandra.encoding import java.time.{ Instant, LocalDate, ZonedDateTime, ZoneId } import java.util.Date import com.datastax.driver.core.{ LocalDate => CasLocalDate } import io.getquill.context.cassandra.CassandraContext trait Encodings extends CassandraMapperConversions with CassandraTypes { this: CassandraContext[_] => protected val zoneId = ZoneId.systemDefault implicit val encodeJava8LocalDate: MappedEncoding[LocalDate, CasLocalDate] = MappedEncoding(ld => CasLocalDate.fromYearMonthDay(ld.getYear, ld.getMonthValue, ld.getDayOfMonth)) implicit val decodeJava8LocalDate: MappedEncoding[CasLocalDate, LocalDate] = MappedEncoding(ld => LocalDate.of(ld.getYear, ld.getMonth, ld.getDay)) implicit val encodeJava8Instant: MappedEncoding[Instant, Date] = MappedEncoding(Date.from) implicit val decodeJava8Instant: MappedEncoding[Date, Instant] = MappedEncoding(_.toInstant) implicit val encodeJava8ZonedDateTime: MappedEncoding[ZonedDateTime, Date] = MappedEncoding(zdt => Date.from(zdt.toInstant)) implicit val decodeJava8ZonedDateTime: MappedEncoding[Date, ZonedDateTime] = MappedEncoding(d => ZonedDateTime.ofInstant(d.toInstant, zoneId)) }
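A minimal round-trip sketch of the two LocalDate mappings above; it exercises only the conversions (no Cassandra session needed) and the values are illustrative.

import java.time.LocalDate
import com.datastax.driver.core.{ LocalDate => CasLocalDate }

val java8 = LocalDate.of(2020, 2, 29)
val cas   = CasLocalDate.fromYearMonthDay(java8.getYear, java8.getMonthValue, java8.getDayOfMonth) // encodeJava8LocalDate
val back  = LocalDate.of(cas.getYear, cas.getMonth, cas.getDay)                                    // decodeJava8LocalDate
assert(back == java8)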
Example 69
Source File: ArrayEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync import java.sql.Timestamp import java.time.LocalDate import java.util.Date import io.getquill.PostgresJAsyncContext import io.getquill.context.sql.encoding.ArrayEncoding import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } trait ArrayEncoders extends ArrayEncoding { self: PostgresJAsyncContext[_] => implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col] implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayRawEncoder[BigDecimal, Col] implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col] implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col] implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col] implicit def arrayIntEncoder[Col <: Seq[Index]]: Encoder[Col] = arrayRawEncoder[Index, Col] implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col] implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col] implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col] implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, Col](d => Timestamp.from(d.toInstant)) implicit def arrayJodaDateTimeEncoder[Col <: Seq[JodaDateTime]]: Encoder[Col] = arrayEncoder[JodaDateTime, Col](_.toLocalDateTime) implicit def arrayJodaLocalDateTimeEncoder[Col <: Seq[JodaLocalDateTime]]: Encoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col] implicit def arrayJodaLocalDateEncoder[Col <: Seq[JodaLocalDate]]: Encoder[Col] = arrayRawEncoder[JodaLocalDate, Col] implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](encodeLocalDate.f) def arrayEncoder[T, Col <: Seq[T]](mapper: T => Any): Encoder[Col] = encoder[Col]((col: Col) => col.toIndexedSeq.map(mapper).mkString("{", ",", "}"), SqlTypes.ARRAY) def arrayRawEncoder[T, Col <: Seq[T]]: Encoder[Col] = arrayEncoder[T, Col](identity) }
Example 70
Source File: ArrayDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync import java.time.LocalDate import java.util import java.util.Date import io.getquill.PostgresJAsyncContext import io.getquill.context.sql.encoding.ArrayEncoding import io.getquill.util.Messages.fail import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime } import scala.reflect.ClassTag import scala.collection.compat._ import scala.jdk.CollectionConverters._ trait ArrayDecoders extends ArrayEncoding { self: PostgresJAsyncContext[_] => implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawEncoder[String, Col] implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[java.math.BigDecimal, BigDecimal, Col](BigDecimal.javaBigDecimal2bigDecimal) implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawEncoder[Boolean, Col] implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayDecoder[Short, Byte, Col](_.toByte) implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawEncoder[Short, Col] implicit def arrayIntDecoder[Col <: Seq[Index]](implicit bf: CBF[Index, Col]): Decoder[Col] = arrayRawEncoder[Index, Col] implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawEncoder[Long, Col] implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayDecoder[Double, Float, Col](_.toFloat) implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawEncoder[Double, Col] implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, Date, Col](_.toDate) implicit def arrayJodaDateTimeDecoder[Col <: Seq[JodaDateTime]](implicit bf: CBF[JodaDateTime, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, JodaDateTime, Col](_.toDateTime) implicit def arrayJodaLocalDateTimeDecoder[Col <: Seq[JodaLocalDateTime]](implicit bf: CBF[JodaLocalDateTime, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col] implicit def arrayJodaLocalDateDecoder[Col <: Seq[JodaLocalDate]](implicit bf: CBF[JodaLocalDate, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDate, Col] implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[JodaLocalDate, LocalDate, Col](decodeLocalDate.f) def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], iTag: ClassTag[I], oTag: ClassTag[O]): Decoder[Col] = AsyncDecoder[Col](SqlTypes.ARRAY)(new BaseDecoder[Col] { def apply(index: Index, row: ResultRow): Col = row.get(index) match { case seq: util.ArrayList[_] => seq.asScala.foldLeft(bf.newBuilder) { case (b, x: I) => b += mapper(x) case (_, x) => fail(s"Array at index $index contains element of ${x.getClass.getCanonicalName}, but expected $iTag") }.result() case value => fail( s"Value '$value' at index $index is not an array so it cannot be decoded to collection of $oTag" ) } }) def arrayRawEncoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] = arrayDecoder[T, T, Col](identity) }
Example 71
Source File: MirrorDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.mirror import java.time.LocalDate import java.util.{ Date, UUID } import scala.reflect.ClassTag import io.getquill.context.Context trait MirrorDecoders { this: Context[_, _] => override type PrepareRow = Row override type ResultRow = Row override type Decoder[T] = MirrorDecoder[T] case class MirrorDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] { override def apply(index: Index, row: ResultRow) = decoder(index, row) } def decoder[T: ClassTag]: Decoder[T] = MirrorDecoder((index: Index, row: ResultRow) => row[T](index)) def decoderUnsafe[T]: Decoder[T] = MirrorDecoder((index: Index, row: ResultRow) => row.data(index).asInstanceOf[T]) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = MirrorDecoder((index: Index, row: ResultRow) => mapped.f(d.apply(index, row))) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = MirrorDecoder((index: Index, row: ResultRow) => row[Option[Any]](index) match { case Some(v) => Some(d(0, Row(v))) case None => None }) implicit val stringDecoder: Decoder[String] = decoder[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder[BigDecimal] implicit val booleanDecoder: Decoder[Boolean] = decoder[Boolean] implicit val byteDecoder: Decoder[Byte] = decoder[Byte] implicit val shortDecoder: Decoder[Short] = decoder[Short] implicit val intDecoder: Decoder[Int] = decoder[Int] implicit val longDecoder: Decoder[Long] = decoder[Long] implicit val floatDecoder: Decoder[Float] = decoder[Float] implicit val doubleDecoder: Decoder[Double] = decoder[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder[Array[Byte]] implicit val dateDecoder: Decoder[Date] = decoder[Date] implicit val localDateDecoder: Decoder[LocalDate] = decoder[LocalDate] implicit val uuidDecoder: Decoder[UUID] = decoder[UUID] }
Example 72
Source File: MirrorEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.mirror import java.time.LocalDate import java.util.{ Date, UUID } import io.getquill.context.Context trait MirrorEncoders { this: Context[_, _] => override type PrepareRow = Row override type ResultRow = Row override type Encoder[T] = MirrorEncoder[T] case class MirrorEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index, value, row) } def encoder[T]: Encoder[T] = MirrorEncoder((index: Index, value: T, row: PrepareRow) => row.add(value)) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = MirrorEncoder((index: Index, value: I, row: PrepareRow) => e(index, mapped.f(value), row)) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = MirrorEncoder((index: Index, value: Option[T], row: PrepareRow) => { value match { case None => row.add(None) case Some(v) => row.add(d(index, v, Row()).data.headOption) } }) implicit val stringEncoder: Encoder[String] = encoder[String] implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal] implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean] implicit val byteEncoder: Encoder[Byte] = encoder[Byte] implicit val shortEncoder: Encoder[Short] = encoder[Short] implicit val intEncoder: Encoder[Int] = encoder[Int] implicit val longEncoder: Encoder[Long] = encoder[Long] implicit val floatEncoder: Encoder[Float] = encoder[Float] implicit val doubleEncoder: Encoder[Double] = encoder[Double] implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]] implicit val dateEncoder: Encoder[Date] = encoder[Date] implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate] implicit val uuidEncoder: Encoder[UUID] = encoder[UUID] }
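A small round-trip sketch over the two mirror examples above, assuming the `Row` API (`add`, `apply[T]`) used there; it shows that mirror encoding and decoding are plain in-memory copies with no SQL type handling.

import java.time.LocalDate
import io.getquill.context.mirror.Row

// Encoding appends the value to the Row; decoding reads it back unchanged.
val written = Row().add(LocalDate.of(2019, 1, 1)) // what localDateEncoder does
val read    = written[LocalDate](0)               // what localDateDecoder does
assert(read == LocalDate.of(2019, 1, 1))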
Example 73
Source File: FinagleMysqlEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.finagle.mysql import java.sql.Timestamp import java.time.{ LocalDate, LocalDateTime } import java.util.{ Date, UUID } import com.twitter.finagle.mysql.CanBeParameter._ import com.twitter.finagle.mysql.Parameter.wrap import com.twitter.finagle.mysql._ import io.getquill.FinagleMysqlContext trait FinagleMysqlEncoders { this: FinagleMysqlContext[_] => type Encoder[T] = FinagleMySqlEncoder[T] case class FinagleMySqlEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index, value, row) } def encoder[T](f: T => Parameter): Encoder[T] = FinagleMySqlEncoder((index, value, row) => row :+ f(value)) def encoder[T](implicit cbp: CanBeParameter[T]): Encoder[T] = encoder[T]((v: T) => v: Parameter) private[this] val nullEncoder = encoder((_: Null) => Parameter.NullParameter) implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] = FinagleMySqlEncoder { (index, value, row) => value match { case None => nullEncoder.encoder(index, null, row) case Some(v) => e.encoder(index, v, row) } } implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = FinagleMySqlEncoder(mappedBaseEncoder(mapped, e.encoder)) implicit val stringEncoder: Encoder[String] = encoder[String] implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal] { (value: BigDecimal) => BigDecimalValue(value): Parameter } implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean] implicit val byteEncoder: Encoder[Byte] = encoder[Byte] implicit val shortEncoder: Encoder[Short] = encoder[Short] implicit val intEncoder: Encoder[Int] = encoder[Int] implicit val longEncoder: Encoder[Long] = encoder[Long] implicit val floatEncoder: Encoder[Float] = encoder[Float] implicit val doubleEncoder: Encoder[Double] = encoder[Double] implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]] implicit val dateEncoder: Encoder[Date] = encoder[Date] { (value: Date) => timestampValue(new Timestamp(value.getTime)): Parameter } implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate] { (d: LocalDate) => DateValue(java.sql.Date.valueOf(d)): Parameter } implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder[LocalDateTime] { (d: LocalDateTime) => timestampValue(new Timestamp(d.atZone(injectionTimeZone.toZoneId).toInstant.toEpochMilli)): Parameter } implicit val uuidEncoder: Encoder[UUID] = mappedEncoder(MappedEncoding(_.toString), stringEncoder) }
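Illustrative only: the LocalDate path above boils down to wrapping a java.sql.Date in finagle-mysql's DateValue, which the implicit `Parameter.wrap` then lifts to a `Parameter` (imports mirror the ones in the example).

import java.time.LocalDate
import com.twitter.finagle.mysql._
import com.twitter.finagle.mysql.CanBeParameter._
import com.twitter.finagle.mysql.Parameter.wrap

// Same conversion the localDateEncoder above performs for a single value.
val p: Parameter = DateValue(java.sql.Date.valueOf(LocalDate.of(2020, 5, 17)))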
Example 74
Source File: SqlContext.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.sql import java.time.LocalDate import io.getquill.idiom.{ Idiom => BaseIdiom } import java.util.{ Date, UUID } import io.getquill.context.Context import io.getquill.context.sql.dsl.SqlDsl import io.getquill.NamingStrategy trait SqlContext[Idiom <: BaseIdiom, Naming <: NamingStrategy] extends Context[Idiom, Naming] with SqlDsl { implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] implicit val stringDecoder: Decoder[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] implicit val booleanDecoder: Decoder[Boolean] implicit val byteDecoder: Decoder[Byte] implicit val shortDecoder: Decoder[Short] implicit val intDecoder: Decoder[Int] implicit val longDecoder: Decoder[Long] implicit val floatDecoder: Decoder[Float] implicit val doubleDecoder: Decoder[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] implicit val dateDecoder: Decoder[Date] implicit val localDateDecoder: Decoder[LocalDate] implicit val uuidDecoder: Decoder[UUID] implicit val stringEncoder: Encoder[String] implicit val bigDecimalEncoder: Encoder[BigDecimal] implicit val booleanEncoder: Encoder[Boolean] implicit val byteEncoder: Encoder[Byte] implicit val shortEncoder: Encoder[Short] implicit val intEncoder: Encoder[Int] implicit val longEncoder: Encoder[Long] implicit val floatEncoder: Encoder[Float] implicit val doubleEncoder: Encoder[Double] implicit val byteArrayEncoder: Encoder[Array[Byte]] implicit val dateEncoder: Encoder[Date] implicit val localDateEncoder: Encoder[LocalDate] implicit val uuidEncoder: Encoder[UUID] }
Example 75
Source File: ArrayMirrorEncoding.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.sql.encoding.mirror import java.time.LocalDate import java.util.Date import io.getquill.SqlMirrorContext import io.getquill.context.sql.encoding.ArrayEncoding trait ArrayMirrorEncoding extends ArrayEncoding { this: SqlMirrorContext[_, _] => implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = encoder[Col] implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = encoder[Col] implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = encoder[Col] implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = encoder[Col] implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = encoder[Col] implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = encoder[Col] implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = encoder[Col] implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = encoder[Col] implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = encoder[Col] implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = encoder[Col] implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = encoder[Col] implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = decoderUnsafe[Col] }
Example 76
Source File: ArrayEncoding.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.sql.encoding import java.time.LocalDate import java.util.Date import io.getquill.context.sql.SqlContext import scala.collection.compat._ import scala.language.higherKinds trait ArrayEncoding { self: SqlContext[_, _] => type CBF[T, Col] = Factory[T, Col] implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] implicit def arrayMappedEncoder[I, O, Col[X] <: Seq[X]]( implicit mapped: MappedEncoding[I, O], e: Encoder[Seq[O]] ): Encoder[Col[I]] = { mappedEncoder[Col[I], Seq[O]](MappedEncoding((col: Col[I]) => col.map(mapped.f)), e) } implicit def arrayMappedDecoder[I, O, Col[X] <: Seq[X]]( implicit mapped: MappedEncoding[I, O], d: Decoder[Seq[I]], bf: Factory[O, Col[O]] ): Decoder[Col[O]] = { mappedDecoder[Seq[I], Col[O]](MappedEncoding((col: Seq[I]) => col.foldLeft(bf.newBuilder)((b, x) => b += mapped.f(x)).result), d) } }
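A sketch of the derivation that `arrayMappedEncoder`/`arrayMappedDecoder` above enable: with one element-level MappedEncoding pair in scope, a concrete context that provides Seq[String] codecs also gets codecs for a wrapper element type. The `Tag` type below is hypothetical.

import io.getquill.MappedEncoding

case class Tag(name: String)

implicit val tagToString: MappedEncoding[Tag, String] = MappedEncoding(_.name)
implicit val stringToTag: MappedEncoding[String, Tag] = MappedEncoding(Tag(_))

// In a concrete SqlContext, Encoder[List[Tag]] and Decoder[List[Tag]] now resolve
// through arrayMappedEncoder / arrayMappedDecoder on top of the Seq[String] codecs.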
Example 77
Source File: ArrayMirrorEncodingSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.sql.mirror import java.time.LocalDate import java.util.Date import io.getquill.context.sql.encoding.ArrayEncodingBaseSpec import io.getquill.context.sql.testContext class ArrayMirrorEncodingSpec extends ArrayEncodingBaseSpec { val ctx = testContext import ctx._ val q = quote(query[ArraysTestEntity]) "Support all sql base types and `Seq` implementers" in { val insertStr = ctx.run(q.insert(lift(e))).string val selectStr = ctx.run(q).string insertStr mustEqual "INSERT INTO ArraysTestEntity (texts,decimals,bools,bytes,shorts,ints,longs,floats," + "doubles,timestamps,dates) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" selectStr mustEqual "SELECT x.texts, x.decimals, x.bools, x.bytes, x.shorts, x.ints, x.longs, x.floats, " + "x.doubles, x.timestamps, x.dates FROM ArraysTestEntity x" } "Support Seq encoding basing on MappedEncoding" in { val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity")) val insertStr = ctx.run(wrapQ.insert(lift(wrapE))).string val selectStr = ctx.run(wrapQ).string insertStr mustEqual "INSERT INTO ArraysTestEntity (texts) VALUES (?)" selectStr mustEqual "SELECT x.texts FROM ArraysTestEntity x" } "Provide implicit encoders for raw types" in { implicitly[Encoder[List[String]]] implicitly[Encoder[List[BigDecimal]]] implicitly[Encoder[List[Boolean]]] implicitly[Encoder[List[Byte]]] implicitly[Encoder[List[Short]]] implicitly[Encoder[List[Index]]] implicitly[Encoder[List[Long]]] implicitly[Encoder[List[Float]]] implicitly[Encoder[List[Double]]] implicitly[Encoder[List[Date]]] implicitly[Encoder[List[LocalDate]]] } "Provide implicit decoders for raw types" in { implicitly[Decoder[List[String]]] implicitly[Decoder[List[BigDecimal]]] implicitly[Decoder[List[Boolean]]] implicitly[Decoder[List[Byte]]] implicitly[Decoder[List[Short]]] implicitly[Decoder[List[Index]]] implicitly[Decoder[List[Long]]] implicitly[Decoder[List[Float]]] implicitly[Decoder[List[Double]]] implicitly[Decoder[List[Date]]] implicitly[Decoder[List[LocalDate]]] } }
Example 78
Source File: ArrayEncodingBaseSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.sql.encoding import java.time.LocalDate import java.util.Date import io.getquill.{ MappedEncoding, Spec } import org.scalatest.{ Assertion, BeforeAndAfterEach } trait ArrayEncodingBaseSpec extends Spec with BeforeAndAfterEach { // Support all sql base types and `Seq` implementers case class ArraysTestEntity( texts: List[String], decimals: Seq[BigDecimal], bools: Vector[Boolean], bytes: List[Byte], shorts: IndexedSeq[Short], ints: Seq[Int], longs: Seq[Long], floats: Seq[Float], doubles: Seq[Double], timestamps: Seq[Date], dates: Seq[LocalDate] ) val e = ArraysTestEntity(List("test"), Seq(BigDecimal(2.33)), Vector(true, true), List(1), IndexedSeq(3), Seq(2), Seq(1, 2, 3), Seq(1f, 2f), Seq(4d, 3d), Seq(new Date(System.currentTimeMillis())), Seq(LocalDate.now())) // casting types can be dangerous so we need to ensure that everything is ok def baseEntityDeepCheck(e1: ArraysTestEntity, e2: ArraysTestEntity): Assertion = { e1.texts.head mustBe e2.texts.head e1.decimals.head mustBe e2.decimals.head e1.bools.head mustBe e2.bools.head e1.bytes.head mustBe e2.bytes.head e1.shorts.head mustBe e2.shorts.head e1.ints.head mustBe e2.ints.head e1.longs.head mustBe e2.longs.head e1.floats.head mustBe e2.floats.head e1.doubles.head mustBe e2.doubles.head e1.timestamps.head mustBe e2.timestamps.head e1.dates.head mustBe e2.dates.head } // Support Seq encoding basing on MappedEncoding case class StrWrap(str: String) implicit val strWrapEncode: MappedEncoding[StrWrap, String] = MappedEncoding(_.str) implicit val strWrapDecode: MappedEncoding[String, StrWrap] = MappedEncoding(StrWrap.apply) case class WrapEntity(texts: Seq[StrWrap]) val wrapE = WrapEntity(List("hey", "ho").map(StrWrap.apply)) }
Example 79
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait Encoders {
  this: AsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]
  type EncoderSqlType = SqlTypes.SqlTypes

  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal](SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
}
Example 80
Source File: Decoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.time.{ LocalDate, LocalDateTime } import java.util import java.util.Calendar import scala.math.BigDecimal.javaBigDecimal2bigDecimal trait Decoders { this: JdbcContextBase[_, _] => type Decoder[T] = JdbcDecoder[T] case class JdbcDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] { def apply(index: Index, row: ResultRow) = decoder(index + 1, row) } def decoder[T](d: BaseDecoder[T]): Decoder[T] = JdbcDecoder(d) def decoder[T](f: ResultRow => Index => T): Decoder[T] = decoder((index, row) => f(row)(index)) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = JdbcDecoder(mappedBaseDecoder(mapped, d.decoder)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = JdbcDecoder( (index, row) => { try { // According to the JDBC spec, we first need to read the object before `row.wasNull` works row.getObject(index) if (row.wasNull()) { None } else { Some(d.decoder(index, row)) } } catch { case _: NullPointerException if row.wasNull() => None } } ) implicit val stringDecoder: Decoder[String] = decoder(_.getString) implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder((index, row) => row.getBigDecimal(index)) implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) implicit val intDecoder: Decoder[Int] = decoder(_.getInt) implicit val longDecoder: Decoder[Long] = decoder(_.getLong) implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getBytes) implicit val dateDecoder: Decoder[util.Date] = decoder((index, row) => new util.Date(row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).getTime)) implicit val localDateDecoder: Decoder[LocalDate] = decoder((index, row) => row.getDate(index, Calendar.getInstance(dateTimeZone)).toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder((index, row) => row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).toLocalDateTime) }
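For orientation, this is roughly what the localDateDecoder above does for a single column, written against plain JDBC (column position and time zone are illustrative); note that the decoders shift quill's 0-based index to JDBC's 1-based columns.

import java.sql.ResultSet
import java.time.LocalDate
import java.util.{ Calendar, TimeZone }

def readLocalDate(rs: ResultSet, column: Int): LocalDate =
  rs.getDate(column, Calendar.getInstance(TimeZone.getDefault)).toLocalDate // JDBC columns start at 1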
Example 81
Source File: ArrayEncoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.sql.{ Timestamp, Date => SqlDate } import java.sql.Types._ import java.time.LocalDate import java.util.Date import io.getquill.context.sql.encoding.ArrayEncoding import scala.collection.compat._ trait ArrayEncoders extends ArrayEncoding { self: JdbcContextBase[_, _] => implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col](VARCHAR) implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, Col](parseJdbcType(NUMERIC), _.bigDecimal) implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col](BOOLEAN) implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col](TINYINT) implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col](SMALLINT) implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayRawEncoder[Int, Col](INTEGER) implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col](BIGINT) implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col](FLOAT) implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col](DOUBLE) implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col](TIMESTAMP) implicit def arrayTimestampEncoder[Col <: Seq[Timestamp]]: Encoder[Col] = arrayRawEncoder[Timestamp, Col](TIMESTAMP) implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](parseJdbcType(DATE), SqlDate.valueOf) def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: Int): Encoder[Col] = arrayRawEncoder[T, Col](parseJdbcType(jdbcType)) }
Example 82
Source File: ArrayDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.sql.Timestamp import java.time.LocalDate import java.util.Date import java.sql.{ Date => SqlDate } import java.math.{ BigDecimal => JBigDecimal } import io.getquill.context.sql.encoding.ArrayEncoding import io.getquill.util.Messages.fail import scala.collection.compat._ import scala.reflect.ClassTag trait ArrayDecoders extends ArrayEncoding { self: JdbcContextBase[_, _] => implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawDecoder[String, Col] implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[JBigDecimal, BigDecimal, Col](BigDecimal.apply) implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawDecoder[Boolean, Col] implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayRawDecoder[Byte, Col] implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawDecoder[Short, Col] implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = arrayRawDecoder[Int, Col] implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawDecoder[Long, Col] implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayRawDecoder[Float, Col] implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawDecoder[Double, Col] implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayRawDecoder[Date, Col] implicit def arrayTimestampDecoder[Col <: Seq[Timestamp]](implicit bf: CBF[Timestamp, Col]): Decoder[Col] = arrayRawDecoder[Timestamp, Col] implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[SqlDate, LocalDate, Col](_.toLocalDate) def arrayRawDecoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] = arrayDecoder[T, T, Col](identity) }
Example 83
Source File: Encoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc import java.sql.{ Date, Timestamp, Types } import java.time.{ LocalDate, LocalDateTime } import java.util.{ Calendar, TimeZone } import java.{ sql, util } trait Encoders { this: JdbcContextBase[_, _] => type Encoder[T] = JdbcEncoder[T] protected val dateTimeZone = TimeZone.getDefault case class JdbcEncoder[T](sqlType: Int, encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index + 1, value, row) } def encoder[T](sqlType: Int, f: (Index, T, PrepareRow) => Unit): Encoder[T] = JdbcEncoder(sqlType, (index: Index, value: T, row: PrepareRow) => { f(index, value, row) row }) def encoder[T](sqlType: Int, f: PrepareRow => (Index, T) => Unit): Encoder[T] = encoder(sqlType, (index: Index, value: T, row: PrepareRow) => f(row)(index, value)) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = JdbcEncoder(e.sqlType, mappedBaseEncoder(mapped, e.encoder)) private[this] val nullEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setNull) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = JdbcEncoder( d.sqlType, (index, value, row) => value match { case Some(v) => d.encoder(index, v, row) case None => nullEncoder.encoder(index, d.sqlType, row) } ) implicit val stringEncoder: Encoder[String] = encoder(Types.VARCHAR, _.setString) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(Types.NUMERIC, (index, value, row) => row.setBigDecimal(index, value.bigDecimal)) implicit val byteEncoder: Encoder[Byte] = encoder(Types.TINYINT, _.setByte) implicit val shortEncoder: Encoder[Short] = encoder(Types.SMALLINT, _.setShort) implicit val intEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setInt) implicit val longEncoder: Encoder[Long] = encoder(Types.BIGINT, _.setLong) implicit val floatEncoder: Encoder[Float] = encoder(Types.FLOAT, _.setFloat) implicit val doubleEncoder: Encoder[Double] = encoder(Types.DOUBLE, _.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(Types.VARBINARY, _.setBytes) implicit val dateEncoder: Encoder[util.Date] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, new sql.Timestamp(value.getTime), Calendar.getInstance(dateTimeZone))) implicit val localDateEncoder: Encoder[LocalDate] = encoder(Types.DATE, (index, value, row) => row.setDate(index, Date.valueOf(value), Calendar.getInstance(dateTimeZone))) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, Timestamp.valueOf(value), Calendar.getInstance(dateTimeZone))) }
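A plain-JDBC sketch of what the localDateEncoder above writes through a PreparedStatement (parameter position and time zone are illustrative).

import java.sql.{ Date, PreparedStatement }
import java.time.LocalDate
import java.util.{ Calendar, TimeZone }

def bindLocalDate(ps: PreparedStatement, param: Int, d: LocalDate): Unit =
  ps.setDate(param, Date.valueOf(d), Calendar.getInstance(TimeZone.getDefault)) // DATE with explicit calendar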
Example 84
Source File: ArrayJdbcEncodingSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jdbc.postgres import java.sql.Timestamp import java.time.LocalDate import java.util.UUID import io.getquill.context.sql.encoding.ArrayEncodingBaseSpec import io.getquill.{ Literal, PostgresJdbcContext } class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec { val ctx = testContext import ctx._ val q = quote(query[ArraysTestEntity]) val corrected = e.copy(timestamps = e.timestamps.map(d => new Timestamp(d.getTime))) "Support all sql base types and `Seq` implementers" in { ctx.run(q.insert(lift(corrected))) val actual = ctx.run(q).head actual mustEqual corrected baseEntityDeepCheck(actual, corrected) } "Support Seq encoding basing on MappedEncoding" in { val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity")) ctx.run(wrapQ.insert(lift(wrapE))) ctx.run(wrapQ).head.texts mustBe wrapE.texts } "Timestamps" in { case class Timestamps(timestamps: List[Timestamp]) val tE = Timestamps(List(new Timestamp(System.currentTimeMillis()))) val tQ = quote(querySchema[Timestamps]("ArraysTestEntity")) ctx.run(tQ.insert(lift(tE))) ctx.run(tQ).head.timestamps mustBe tE.timestamps } "Catch invalid decoders" in { val newCtx = new PostgresJdbcContext(Literal, "testPostgresDB") { // avoid transforming from java.sql.Date to java.time.LocalDate override implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[LocalDate, LocalDate, Col](identity) } import newCtx._ newCtx.run(query[ArraysTestEntity].insert(lift(corrected))) intercept[IllegalStateException] { newCtx.run(query[ArraysTestEntity]).head mustBe corrected } newCtx.close() } "Custom decoders/encoders" in { case class Entity(uuids: List[UUID]) val e = Entity(List(UUID.randomUUID(), UUID.randomUUID())) val q = quote(querySchema[Entity]("ArraysTestEntity")) implicit def arrayUUIDEncoder[Col <: Seq[UUID]]: Encoder[Col] = arrayRawEncoder[UUID, Col]("uuid") implicit def arrayUUIDDecoder[Col <: Seq[UUID]](implicit bf: CBF[UUID, Col]): Decoder[Col] = arrayRawDecoder[UUID, Col] ctx.run(q.insert(lift(e))) ctx.run(q).head.uuids mustBe e.uuids } "Arrays in where clause" in { ctx.run(q.insert(lift(corrected))) val actual1 = ctx.run(q.filter(_.texts == lift(List("test")))) val actual2 = ctx.run(q.filter(_.texts == lift(List("test2")))) actual1 mustEqual List(corrected) actual2 mustEqual List() } "empty array on found null" in { case class ArraysTestEntity(texts: Option[List[String]]) ctx.run(query[ArraysTestEntity].insert(lift(ArraysTestEntity(None)))) case class E(texts: List[String]) ctx.run(querySchema[E]("ArraysTestEntity")).headOption.map(_.texts) mustBe Some(Nil) } override protected def beforeEach(): Unit = { ctx.run(q.delete) () } }
Example 85
Source File: DateTimeSpecs.scala From guardrail with MIT License | 5 votes |
package dateTime.server.springMvc.dateTime import java.time.{ LocalDate, OffsetDateTime } import java.util.concurrent.CompletableFuture import org.junit.runner.RunWith import org.mockito.{ ArgumentMatchersSugar, MockitoSugar } import org.scalatest.{ BeforeAndAfterAll, FreeSpec, Matchers } import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.autoconfigure.EnableAutoConfiguration import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc import org.springframework.boot.test.context.SpringBootTest import org.springframework.context.annotation.ComponentScan import org.springframework.http.MediaType import org.springframework.test.context.TestContextManager import org.springframework.test.context.junit4.SpringRunner import org.springframework.test.web.servlet.MockMvc import org.springframework.test.web.servlet.request.MockMvcRequestBuilders.{ asyncDispatch, post, get} import org.springframework.test.web.servlet.result.MockMvcResultHandlers.print import org.springframework.test.web.servlet.result.MockMvcResultMatchers.{ request, status } import spring.test.TestApplication @RunWith(classOf[SpringRunner]) @SpringBootTest(classes = Array(classOf[TestApplication])) @AutoConfigureMockMvc @ComponentScan @EnableAutoConfiguration class DateTimeSpecs extends FreeSpec with Matchers with BeforeAndAfterAll with MockitoSugar with ArgumentMatchersSugar { @Autowired var mvc: MockMvc = _ @Autowired var handlerMock: DateTimeHandler = _ new TestContextManager(this.getClass).prepareTestInstance(this) "test jsre310 stuff" - { "dates everywhere" in { val offsetDtNow = OffsetDateTime.now val localDtNow = LocalDate.now when( handlerMock.getSomething( eqTo(offsetDtNow), eqTo(java.util.Optional.of(offsetDtNow)), eqTo(localDtNow), eqTo(java.util.Optional.of(localDtNow)) ) ).thenReturn(CompletableFuture.completedFuture(DateTimeHandler.GetSomethingResponse.NoContent)) val mvcResult = mvc .perform( get("/foo") .param("dateTime", offsetDtNow.toString) .param("optionalDateTime", offsetDtNow.toString) .param("date", localDtNow.toString) .param("optionalDate", localDtNow.toString) .contentType(MediaType.APPLICATION_JSON) ) .andExpect(request.asyncStarted) .andReturn mvc.perform(asyncDispatch(mvcResult)).andDo(print()).andExpect(status.isNoContent) } } }
Example 86
Source File: DateEncoderTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.record.encoder import java.sql.{Date, Timestamp} import java.time.{Instant, LocalDate, LocalDateTime, LocalTime} import com.sksamuel.avro4s.{AvroSchema, DefaultFieldMapper, Encoder, ImmutableRecord} import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers //noinspection ScalaDeprecation class DateEncoderTest extends AnyFunSuite with Matchers { test("encode LocalTime as TIME-MILLIS") { case class Foo(s: LocalTime) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(LocalTime.of(12, 50, 45))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(46245000000L))) } test("encode LocalDate as DATE") { case class Foo(s: LocalDate) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(LocalDate.of(2018, 9, 10))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784))) } test("encode java.sql.Date as DATE") { case class Foo(s: Date) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(Date.valueOf(LocalDate.of(2018, 9, 10)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Integer.valueOf(17784))) } test("encode LocalDateTime as timestamp-nanos") { case class Foo(s: LocalDateTime) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000000123L))) Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 123009))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739000123009L))) Encoder[Foo].encode(Foo(LocalDateTime.of(2018, 9, 10, 11, 58, 59, 328187943))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1536580739328187943L))) } test("encode Timestamp as TIMESTAMP-MILLIS") { case class Foo(s: Timestamp) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(Timestamp.from(Instant.ofEpochMilli(1538312231000L)))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L))) } test("encode Instant as TIMESTAMP-MILLIS") { case class Foo(s: Instant) val schema = AvroSchema[Foo] Encoder[Foo].encode(Foo(Instant.ofEpochMilli(1538312231000L))) shouldBe ImmutableRecord(schema, Vector(java.lang.Long.valueOf(1538312231000L))) } }
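Outside a test, the same machinery can be used directly. A minimal sketch assuming the avro4s version from the example (the `Event` type is hypothetical): LocalDate fields encode as the Avro `date` logical type, i.e. days since the epoch.

import java.time.LocalDate
import com.sksamuel.avro4s.{ AvroSchema, Encoder }

case class Event(name: String, date: LocalDate)

val schema = AvroSchema[Event]
val record = Encoder[Event].encode(Event("signup", LocalDate.of(2018, 9, 10))) // date field -> 17784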
Example 87
Source File: DateDecoderTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.record.decoder import java.sql.{Date, Timestamp} import java.time.{Instant, LocalDate, LocalDateTime, LocalTime} import com.sksamuel.avro4s.SchemaFor.TimestampNanosLogicalType import com.sksamuel.avro4s.{AvroSchema, Decoder, SchemaFor} import org.apache.avro.generic.GenericData import org.apache.avro.{LogicalTypes, SchemaBuilder} import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers //noinspection ScalaDeprecation class DateDecoderTest extends AnyFunSuite with Matchers { case class WithLocalTime(z: LocalTime) case class WithLocalDate(z: LocalDate) case class WithDate(z: Date) case class WithLocalDateTime(z: LocalDateTime) case class WithTimestamp(z: Timestamp) case class WithInstant(z: Instant) test("decode int to LocalTime") { val schema = AvroSchema[WithLocalTime] val record = new GenericData.Record(schema) record.put("z", 46245000000L) Decoder[WithLocalTime].decode(record) shouldBe WithLocalTime(LocalTime.of(12, 50, 45)) } test("decode int to LocalDate") { val schema = AvroSchema[WithLocalDate] val record = new GenericData.Record(schema) record.put("z", 17784) Decoder[WithLocalDate].decode(record) shouldBe WithLocalDate(LocalDate.of(2018, 9, 10)) } test("decode int to java.sql.Date") { val schema = AvroSchema[WithDate] val record = new GenericData.Record(schema) record.put("z", 17784) Decoder[WithDate].decode(record) shouldBe WithDate(Date.valueOf(LocalDate.of(2018, 9, 10))) } test("decode timestamp-millis to LocalDateTime") { val dateSchema = LogicalTypes.timestampMillis().addToSchema(SchemaBuilder.builder.longType) val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord() val record = new GenericData.Record(schema) record.put("z", 1572707106376L) Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime( LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000000)) } test("decode timestamp-micros to LocalDateTime") { val dateSchema = LogicalTypes.timestampMicros().addToSchema(SchemaBuilder.builder.longType) val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord() val record = new GenericData.Record(schema) record.put("z", 1572707106376001L) Decoder[WithLocalDateTime].withSchema(SchemaFor(schema)).decode(record) shouldBe WithLocalDateTime( LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376001000)) } test("decode timestamp-nanos to LocalDateTime") { val dateSchema = TimestampNanosLogicalType.addToSchema(SchemaBuilder.builder.longType) val schema = SchemaBuilder.record("foo").fields().name("z").`type`(dateSchema).noDefault().endRecord() val record = new GenericData.Record(schema) record.put("z", 1572707106376000002L) Decoder[WithLocalDateTime].decode(record) shouldBe WithLocalDateTime( LocalDateTime.of(2019, 11, 2, 15, 5, 6, 376000002)) } test("decode long to Timestamp") { val schema = AvroSchema[WithTimestamp] val record = new GenericData.Record(schema) record.put("z", 1538312231000L) Decoder[WithTimestamp].decode(record) shouldBe WithTimestamp(new Timestamp(1538312231000L)) } test("decode long to Instant") { val schema = AvroSchema[WithInstant] val record = new GenericData.Record(schema) record.put("z", 1538312231000L) Decoder[WithInstant].decode(record) shouldBe WithInstant(Instant.ofEpochMilli(1538312231000L)) } }
Example 88
Source File: DateSchemaTest.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.schema import java.sql.{Date, Timestamp} import java.time.{Instant, LocalDate, LocalDateTime, LocalTime} import com.sksamuel.avro4s.AvroSchema import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers class DateSchemaTest extends AnyFunSuite with Matchers { test("generate date logical type for LocalDate") { case class LocalDateTest(date: LocalDate) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdate.json")) val schema = AvroSchema[LocalDateTest] schema.toString(true) shouldBe expected.toString(true) } test("generate date logical type for Date") { case class DateTest(date: Date) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/date.json")) val schema = AvroSchema[DateTest] schema.toString(true) shouldBe expected.toString(true) } test("generate time logical type for LocalTime") { case class LocalTimeTest(time: LocalTime) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localtime.json")) val schema = AvroSchema[LocalTimeTest] schema.toString(true) shouldBe expected.toString(true) } test("generate timestamp-nanos for LocalDateTime") { case class LocalDateTimeTest(time: LocalDateTime) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/localdatetime.json")) val schema = AvroSchema[LocalDateTimeTest] schema.toString(true) shouldBe expected.toString(true) } test("generate timestamp-millis logical type for Instant") { case class InstantTest(instant: Instant) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/instant.json")) val schema = AvroSchema[InstantTest] schema.toString(true) shouldBe expected.toString(true) } test("generate timestamp-millis logical type for Timestamp") { case class TimestampTest(ts: Timestamp) val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/timestamp.json")) val schema = AvroSchema[TimestampTest] schema.toString(true) shouldBe expected.toString(true) } }
Example 89
Source File: localdate.scala From cats-time with MIT License | 5 votes |
package io.chrisdavenport.cats.time.instances import cats._ import cats.implicits._ import java.time.LocalDate import java.time.format.DateTimeFormatter import java.time.format.DateTimeFormatter.ISO_LOCAL_DATE trait localdate { final def showLocalDate(formatter: DateTimeFormatter): Show[LocalDate] = Show[String].contramap(_.format(formatter)) implicit final val localdateInstances = new Show[LocalDate] with Order[LocalDate] with Hash[LocalDate]{ override def hash(x: LocalDate): Int = x.hashCode override def compare(x: LocalDate, y: LocalDate): Int = x.compareTo(y) override def show(x: LocalDate): String = x.format(ISO_LOCAL_DATE) } } object localdate extends localdate
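A short usage sketch, not part of the original file, assuming the instances above are on the classpath:

import java.time.LocalDate
import cats.implicits._
import io.chrisdavenport.cats.time.instances.localdate._

object LocalDateInstancesUsage extends App {
  val d1 = LocalDate.of(2020, 1, 1)
  val d2 = LocalDate.of(2020, 6, 30)
  println(d1.show) // "2020-01-01", rendered with ISO_LOCAL_DATE by the Show instance
  println(d1 < d2) // true, via the Order[LocalDate] instance
}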
Example 90
Source File: LoggableSuite.scala From tofu with Apache License 2.0 | 5 votes |
package tofu.logging import java.time.LocalDate import cats.syntax.either._ import cats.syntax.monoid._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import tofu.logging.LoggableSuite.TestInt import tofu.syntax.logRenderer._ import tofu.syntax.loggable._ class LoggableSuite extends AnyFlatSpec with Matchers { implicit val testIntLoggable: Loggable[TestInt] = new Loggable[Int] { def fields[I, V, R, S](a: Int, i: I)(implicit r: LogRenderer[I, V, R, S]): R = i.sub("missing")(r.zero) |+| i.sub("sign")((v: V) => r.coalesce( v => v.whenVal(a < 0)(v.putString("negative")), v => v.whenVal(a > 0)(v.putString("positive")), v ) ) |+| r.sub("value", i)(_.putInt(a.toLong)) def putValue[I, V, R, S](a: Int, v: V)(implicit r: LogRenderer[I, V, R, S]): S = v.putInt(a.toLong) def logShow(a: Int): String = a.toString }.contramap(_.value) def json[A](a: A)(implicit loggable: Loggable[A]) = TethysBuilder(a) "int custom logging" should "be correct" in { json(TestInt(1)) shouldBe """{"missing":null,"sign":"positive","value":1}""" json(TestInt(-1)) shouldBe """{"missing":null,"sign":"negative","value":-1}""" } implicit val testLoggableEither = Loggable.either[String, Int].named("kek") "either custom logging" should "handle left" in { json("lol".asLeft[Int]) shouldBe """{"kek":"lol"}""" } it should "handle right" in { json(1.asRight[String]) shouldBe """{"kek":1}""" } it should "show left correct" in { "lol".asLeft[Int].logShow shouldBe "lol" } it should "show right correct" in { 1.asRight[String].logShow shouldBe "1" } "hide after named" should "hide value in message" in { case class ResponseTime(value: Int) val hideAfterNamed: Loggable[ResponseTime] = Loggable.intLoggable.named("responseTime").hide.contramap(_.value) val namedAfterHide: Loggable[ResponseTime] = Loggable.intLoggable.hide.named("responseTime").contramap(_.value) val sample = ResponseTime(1337) assert(hideAfterNamed.loggedValue(sample).toString.isEmpty) assert(namedAfterHide.loggedValue(sample).toString.isEmpty) } "local date" should "have loggable instance" in { LocalDate.ofYearDay(1999, 256).logShow shouldBe "1999-09-13" } } object LoggableSuite { final case class TestInt(value: Int) }
Example 91
Source File: Merge.scala From tofu with Apache License 2.0 | 5 votes |
package tofu.data package derived import java.time.{Instant, LocalDate, LocalDateTime, ZonedDateTime} import cats.kernel.Semigroup import magnolia.{CaseClass, Magnolia, SealedTrait} import simulacrum.typeclass import derevo.Derivation @typeclass trait Merge[A] { def merge(a: A, b: A): A } trait MergeInstances1 { type Typeclass[A] = Merge[A] def combine[T](caseClass: CaseClass[Typeclass, T]): Typeclass[T] = (a, b) => caseClass.construct(p => p.typeclass.merge(p.dereference(a), p.dereference(b))) def dispatch[T](sealedTrait: SealedTrait[Typeclass, T]): Typeclass[T] = (a, b) => sealedTrait.dispatch(a) { h => if (h.cast.isDefinedAt(b)) h.typeclass.merge(h.cast(a), h.cast(b)) else a } implicit def instance[A]: Merge[A] = macro Magnolia.gen[A] } object Merge extends Derivation[Merge] with MergeInstances1 { implicit def optionInstance[A](implicit m: Merge[A]): Merge[Option[A]] = (ao, bo) => ao.fold(bo)(a => bo.fold(ao)(b => Some(m.merge(a, b)))) implicit def primitiveInstance[A: Primitive]: Merge[A] = (a: A, _: A) => a sealed class Primitive[A] final implicit object primitiveByte extends Primitive[Byte] final implicit object primitiveShort extends Primitive[Short] final implicit object primitiveInt extends Primitive[Int] final implicit object primitiveLong extends Primitive[Long] final implicit object primitiveChar extends Primitive[Char] final implicit object primitiveFloat extends Primitive[Float] final implicit object primitiveDouble extends Primitive[Double] final implicit object primitiveUnit extends Primitive[Unit] final implicit object primitiveBigDecimal extends Primitive[BigDecimal] final implicit object primitiveBigInt extends Primitive[BigInt] final implicit object primitiveLocalDateTime extends Primitive[LocalDateTime] final implicit object primitiveZonedDateTime extends Primitive[ZonedDateTime] final implicit object primitiveLocalDate extends Primitive[LocalDate] final implicit object primitiveInstant extends Primitive[Instant] final implicit object primitiveString extends Primitive[String] } object Merged { trait OpaqueTag extends Any type Base = Any { type MergedOpaque } type Mer[A] <: Base with OpaqueTag def apply[A](value: A): Mer[A] = value.asInstanceOf[Mer[A]] implicit final class MergedOps[A](private val mer: Mer[A]) extends AnyVal { def value: A = mer.asInstanceOf[A] } implicit def mergedSemigroup[A: Merge]: Semigroup[Merged[A]] = (x, y) => apply(Merge[A].merge(x.value, y.value)) }
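A rough usage sketch of the derivation above; the Contact type is hypothetical and not part of the project:

import tofu.data.derived.Merge

final case class Contact(name: String, phone: Option[String], email: Option[String])

object MergeUsage extends App {
  val a = Contact("Ada", Some("555-0100"), None)
  val b = Contact("Ada", None, Some("ada@example.com"))
  // Option fields fall back to the other side; primitive fields keep the left value.
  println(Merge[Contact].merge(a, b)) // Contact(Ada,Some(555-0100),Some(ada@example.com))
}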
Example 92
Source File: QueryStringBindablesSpec.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.web.utils import java.net.URLEncoder import java.time.{LocalDate, LocalDateTime} import gospeak.core.domain.UserRequest import gospeak.libs.scala.domain.{Page, Url} import gospeak.web.testingutils.BaseSpec import gospeak.web.utils.QueryStringBindables._ class QueryStringBindablesSpec extends BaseSpec { describe("QueryStringBindables") { describe("LocalDateTime") { it("should parse and format dates") { val ldt = LocalDateTime.of(2019, 9, 21, 19, 12) val date = "21/09/2019" val dateTime = s"$date 19:12" val dateEncoded = URLEncoder.encode(date, "UTF-8") val dateTimeEncoded = URLEncoder.encode(dateTime, "UTF-8") LocalDateTime.parse(dateTime, dtf1) shouldBe ldt LocalDateTime.parse(dateTimeEncoded, dtf2) shouldBe ldt LocalDate.parse(date, df1).atTime(19, 12) shouldBe ldt LocalDate.parse(dateEncoded, df2).atTime(19, 12) shouldBe ldt ldt.format(df1) shouldBe date } it("should bind & unbind a LocalDateTime when no params") { val ldt = LocalDateTime.of(2019, 9, 21, 0, 0) val date = "21/09/2019" val dateTimeEncoded = URLEncoder.encode(date + " 00:00", "UTF-8") localDateTimeQueryStringBindable.bind("key", Map("key" -> Seq(date))) shouldBe Some(Right(ldt)) localDateTimeQueryStringBindable.unbind("key", ldt) shouldBe s"key=$dateTimeEncoded" } } describe("Page.Params") { it("should bind & unbind a Page.Params when no params") { val params = Page.Params() pageParamsQueryStringBindable.bind("", Map()) shouldBe Some(Right(params)) pageParamsQueryStringBindable.unbind("", params) shouldBe "" } it("should bind & unbind a Page.Params when all params") { val params = buildParams(2, 30, "test", "name") pageParamsQueryStringBindable.bind("", Map( Page.No.key -> Seq("2"), Page.Size.key -> Seq("30"), Page.Search.key -> Seq("test"), Page.OrderBy.key -> Seq("name"))) shouldBe Some(Right(params)) pageParamsQueryStringBindable.unbind("", params) shouldBe s"${Page.No.key}=2&${Page.Size.key}=30&${Page.Search.key}=test&${Page.OrderBy.key}=name" } it("should bind & unbind filters") { val params = Page.Params.defaults.toggleFilter("f1").withFilter("f2", "v2") pageParamsQueryStringBindable.bind("", Map( "f1" -> Seq("true"), "f2" -> Seq("v2"))) shouldBe Some(Right(params)) pageParamsQueryStringBindable.unbind("", params) shouldBe s"f1=true&f2=v2" } } it("should bind & unbind a Url") { val url = Url.from("http://youtube.com").right.get urlQueryStringBindable.bind("key", Map("key" -> Seq("http://youtube.com"))) shouldBe Some(Right(url)) urlQueryStringBindable.unbind("key", url) shouldBe s"key=http%3A%2F%2Fyoutube.com" } it("should bind & unbind a UserRequest.Id") { val id = UserRequest.Id.generate() userRequestIdQueryStringBindable.bind("key", Map("key" -> Seq(id.value))) shouldBe Some(Right(id)) userRequestIdQueryStringBindable.unbind("key", id) shouldBe s"key=${id.value}" } } private def buildParams(no: Int, size: Int, search: String, order: String) = Page.Params(Page.No(no), Page.Size(size), Some(Page.Search(search)), Some(Page.OrderBy(order))) }
Example 93
Source File: Sponsor.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain import java.time.{Instant, LocalDate} import gospeak.core.domain.utils.{Constants, Info} import gospeak.libs.scala.Extensions._ import gospeak.libs.scala.domain.{DataClass, IId, Price, UuidIdBuilder} case class Sponsor(id: Sponsor.Id, group: Group.Id, partner: Partner.Id, pack: SponsorPack.Id, contact: Option[Contact.Id], start: LocalDate, finish: LocalDate, paid: Option[LocalDate], price: Price, info: Info) { def data: Sponsor.Data = Sponsor.Data(this) def isCurrent(now: Instant): Boolean = start.atStartOfDay().toInstant(Constants.defaultZoneId).isBefore(now) && finish.atStartOfDay().toInstant(Constants.defaultZoneId).isAfter(now) } object Sponsor { def apply(group: Group.Id, data: Data, info: Info): Sponsor = new Sponsor(Id.generate(), group, data.partner, data.pack, data.contact, data.start, data.finish, data.paid, data.price, info) final class Id private(value: String) extends DataClass(value) with IId object Id extends UuidIdBuilder[Id]("Sponsor.Id", new Id(_)) final case class Full(sponsor: Sponsor, pack: SponsorPack, partner: Partner, contact: Option[Contact]) { def isCurrent(now: Instant): Boolean = sponsor.isCurrent(now) def id: Id = sponsor.id def start: LocalDate = sponsor.start def finish: LocalDate = sponsor.finish def price: Price = sponsor.price def paid: Option[LocalDate] = sponsor.paid def hasContact(id: Contact.Id): Boolean = contact.exists(_.id == id) } final case class Data(partner: Partner.Id, pack: SponsorPack.Id, contact: Option[Contact.Id], start: LocalDate, finish: LocalDate, paid: Option[LocalDate], price: Price) object Data { def apply(s: Sponsor): Data = new Data(s.partner, s.pack, s.contact, s.start, s.finish, s.paid, s.price) } }
Example 94
Source File: Partner.scala From gospeak with Apache License 2.0 | 5 votes |
package gospeak.core.domain

import java.time.{LocalDate, LocalDateTime}

import gospeak.core.domain.utils.{Info, SocialAccounts}
import gospeak.libs.scala.domain._

final case class Partner(id: Partner.Id,
                         group: Group.Id,
                         slug: Partner.Slug,
                         name: Partner.Name,
                         notes: Markdown, // private infos for the group
                         description: Option[Markdown], // public description
                         logo: Logo,
                         social: SocialAccounts,
                         info: Info) {
  def data: Partner.Data = Partner.Data(this)

  def users: List[User.Id] = info.users
}

object Partner {
  def apply(group: Group.Id, data: Data, info: Info): Partner =
    new Partner(Id.generate(), group, data.slug, data.name, data.notes, data.description, data.logo, data.social, info)

  final class Id private(value: String) extends DataClass(value) with IId

  object Id extends UuidIdBuilder[Id]("Partner.Id", new Id(_)) {
    val empty = new Id("00000000-0000-0000-0000-000000000000")
  }

  final class Slug private(value: String) extends DataClass(value) with ISlug

  object Slug extends SlugBuilder[Slug]("Partner.Slug", new Slug(_))

  final case class Name(value: String) extends AnyVal

  final case class Full(partner: Partner,
                        venueCount: Long,
                        sponsorCount: Long,
                        lastSponsorDate: Option[LocalDate],
                        contactCount: Long,
                        eventCount: Long,
                        lastEventDate: Option[LocalDateTime]) {
    def slug: Slug = partner.slug
    def name: Name = partner.name
    def logo: Logo = partner.logo
    def social: SocialAccounts = partner.social
  }

  final case class Data(slug: Partner.Slug,
                        name: Partner.Name,
                        notes: Markdown,
                        description: Option[Markdown],
                        logo: Logo,
                        social: SocialAccounts)

  object Data {
    def apply(p: Partner): Data = new Data(p.slug, p.name, p.notes, p.description, p.logo, p.social)
  }
}
Example 95
Source File: LdapAuthenticator.scala From asura with MIT License | 5 votes |
package asura.app.api.auth import java.time.{Duration, LocalDate, ZoneId} import java.util.Date import org.ldaptive._ import org.ldaptive.auth.{Authenticator, BindAuthenticationHandler, SearchDnResolver} import org.pac4j.core.context.WebContext import org.pac4j.core.credentials.UsernamePasswordCredentials import org.pac4j.core.profile.CommonProfile import org.pac4j.jwt.config.signature.SecretSignatureConfiguration import org.pac4j.jwt.profile.JwtGenerator import org.pac4j.ldap.profile.service.LdapProfileService import play.api.Configuration object LdapAuthenticator { def apply(configuration: Configuration): LdapProfileService = { val connConfig = new ConnectionConfig() connConfig.setConnectTimeout(Duration.ofMillis(configuration.get[Long]("asura.ldap.connectionTimeout"))) connConfig.setResponseTimeout(Duration.ofMillis(configuration.get[Long]("asura.ldap.responseTimeout"))) connConfig.setLdapUrl(configuration.get[String]("asura.ldap.url")) connConfig.setConnectionInitializer(new BindConnectionInitializer(configuration.get[String]("asura.ldap.bindDn"), new Credential(configuration.get[String]("asura.ldap.password")))) val connFactory = new DefaultConnectionFactory(connConfig) val handler = new BindAuthenticationHandler(connFactory) val dnResolver = new SearchDnResolver(connFactory) dnResolver.setBaseDn(configuration.get[String]("asura.ldap.searchbase")) dnResolver.setSubtreeSearch(true) dnResolver.setUserFilter(configuration.get[String]("asura.ldap.userFilter")) val authenticator = new Authenticator() authenticator.setDnResolver(dnResolver) authenticator.setAuthenticationHandler(handler) new CustomLdapProfileService(configuration, connFactory, authenticator, configuration.get[String]("asura.ldap.searchbase")) } class CustomLdapProfileService( configuration: Configuration, connectionFactory: ConnectionFactory, authenticator: Authenticator, usersDn: String) extends LdapProfileService(connectionFactory, authenticator, usersDn) { this.setIdAttribute(configuration.get[String]("asura.ldap.userIdAttr")) this.setAttributes(s"${configuration.get[String]("asura.ldap.userRealNameAttr")},${configuration.get[String]("asura.ldap.userEmailAttr")}") override def validate(credentials: UsernamePasswordCredentials, context: WebContext): Unit = { super.validate(credentials, context) val jwtGenerator = new JwtGenerator[CommonProfile](new SecretSignatureConfiguration(configuration.get[String]("asura.jwt.secret"))) val tomorrow = LocalDate.now().plusDays(1).atStartOfDay().plusHours(3) jwtGenerator.setExpirationTime(Date.from(tomorrow.atZone(ZoneId.systemDefault()).toInstant())) val profile = credentials.getUserProfile val token = jwtGenerator.generate(profile) profile.addAttribute("token", token) } } }
Example 96
Source File: SimpleTestUsernamePasswordAuthenticator.scala From asura with MIT License | 5 votes |
package asura.app.api.auth import java.time.{LocalDate, ZoneId} import java.util.Date import org.pac4j.core.context.WebContext import org.pac4j.core.credentials.UsernamePasswordCredentials import org.pac4j.core.credentials.authenticator.Authenticator import org.pac4j.core.exception.CredentialsException import org.pac4j.core.profile.CommonProfile import org.pac4j.core.util.{CommonHelper, Pac4jConstants} import org.pac4j.jwt.config.signature.SecretSignatureConfiguration import org.pac4j.jwt.profile.JwtGenerator import play.api.Configuration class SimpleTestUsernamePasswordAuthenticator(configuration: Configuration) extends Authenticator[UsernamePasswordCredentials] { override def validate(credentials: UsernamePasswordCredentials, context: WebContext): Unit = { if (credentials == null) throw new CredentialsException("No credential") val username = credentials.getUsername val password = credentials.getPassword if (CommonHelper.isBlank(username)) throw new CredentialsException("Username cannot be blank") if (CommonHelper.isBlank(password)) throw new CredentialsException("Password cannot be blank") if (CommonHelper.areNotEquals(username, password)) throw new CredentialsException("Username : '" + username + "' does not match password") val profile = new CommonProfile() profile.setId(username) profile.addAttribute(Pac4jConstants.USERNAME, username) val jwtGenerator = new JwtGenerator[CommonProfile](new SecretSignatureConfiguration(configuration.get[String]("asura.jwt.secret"))) val tomorrow = LocalDate.now().plusDays(1).atStartOfDay() jwtGenerator.setExpirationTime(Date.from(tomorrow.atZone(ZoneId.systemDefault()).toInstant())) val token = jwtGenerator.generate(profile) profile.addAttribute("token", token) credentials.setUserProfile(profile) } }
Example 97
Source File: LogEntry.scala From infinispan-spark with Apache License 2.0 | 5 votes |
package org.infinispan.spark.domain import java.io.{ObjectInput, ObjectOutput} import java.time.LocalDate import java.time.temporal.ChronoUnit import org.infinispan.commons.io.UnsignedNumeric import org.infinispan.commons.marshall._ @SerializeWith(classOf[LogEntrySerializer]) class LogEntry(val date: LocalDate, var opCode: Int, val userAgent: String, val domain: String) class LogEntrySerializer extends Externalizer[LogEntry] { override def writeObject(output: ObjectOutput, obj: LogEntry): Unit = { output.writeObject(obj.date) UnsignedNumeric.writeUnsignedInt(output, obj.opCode) output.writeUTF(obj.userAgent) output.writeUTF(obj.domain) } override def readObject(input: ObjectInput): LogEntry = { val date = input.readObject().asInstanceOf[LocalDate] val opCode = UnsignedNumeric.readUnsignedInt(input) val userAgent = input.readUTF() val domain = input.readUTF() new LogEntry(date, opCode, userAgent, domain) } } object EntryGenerator { val browser = Set("Firefox", "Chrome", "MSIE") val domainNames = Set("no-ip.org", "dnf.it", "google.com", "localhost") def generate(numEntries: Int, errorCondition: LogEntry => Boolean, startDate: LocalDate, endDate: LocalDate) = { val userAgentsIterator = circularIterator(browser) val domainNamesIterator = circularIterator(domainNames) val diffDays = ChronoUnit.DAYS.between(startDate, endDate) val daysPerEntry = diffDays.toFloat / numEntries.toFloat (1 to numEntries).map { i => val browser = userAgentsIterator.next() val domain = domainNamesIterator.next() val dateTime = startDate.plusDays(Math.floor(daysPerEntry * i).toInt) val entry = new LogEntry(dateTime, 0, browser, domain) val op = if (errorCondition(entry)) 500 else 200 entry.opCode = op entry }.toList } private def circularIterator[T](s: Set[T]) = Iterator.continually(s).flatten }
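An illustrative invocation of the generator above (the dates and error condition are made up):

import java.time.LocalDate
import org.infinispan.spark.domain.EntryGenerator

object LogEntryUsage extends App {
  val entries = EntryGenerator.generate(
    numEntries = 100,
    errorCondition = _.domain == "localhost",
    startDate = LocalDate.of(2020, 1, 1),
    endDate = LocalDate.of(2020, 3, 31))
  // Entries whose domain matched the error condition get opCode 500 (here 25 of 100).
  println(entries.count(_.opCode == 500))
}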
Example 98
Source File: Implicits.scala From scala-cass with MIT License | 5 votes |
package com.weather.scalacass.jdk8 import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder } import com.weather.scalacass.CassFormatDecoderVersionSpecific.codecCassFormatDecoder import CassFormatEncoder.sameTypeCassFormatEncoder import java.time.{ Instant, LocalDate, LocalTime, ZonedDateTime } import com.datastax.driver.core.{ Cluster, DataType } import com.google.common.reflect.TypeToken object Implicits { implicit val timeEncoder: CassFormatEncoder[LocalTime] = sameTypeCassFormatEncoder(DataType.time) implicit val timeDecoder: CassFormatDecoder[LocalTime] = codecCassFormatDecoder(TypeToken.of(classOf[LocalTime])) implicit val dateEncoder: CassFormatEncoder[LocalDate] = sameTypeCassFormatEncoder(DataType.date) implicit val dateDecoder: CassFormatDecoder[LocalDate] = codecCassFormatDecoder(TypeToken.of(classOf[LocalDate])) implicit val instantEncoder: CassFormatEncoder[Instant] = sameTypeCassFormatEncoder(DataType.timestamp) implicit val instantDecoder: CassFormatDecoder[Instant] = codecCassFormatDecoder(TypeToken.of(classOf[Instant])) implicit def zonedDateTimeEncoder(implicit cluster: Cluster): CassFormatEncoder[ZonedDateTime] = sameTypeCassFormatEncoder(cluster.getMetadata.newTupleType(DataType.timestamp, DataType.varchar)) implicit val zonedDateTimeDecoder: CassFormatDecoder[ZonedDateTime] = codecCassFormatDecoder(TypeToken.of(classOf[ZonedDateTime])) }
Example 99
Source File: LighthouseConfigurationParser.scala From lighthouse with Apache License 2.0 | 5 votes |
package be.dataminded.lighthouse.config import java.time.LocalDate import be.dataminded.lighthouse.datalake.Datalake import org.apache.spark.SparkConf import scopt.OptionParser class LighthouseConfigurationParser extends OptionParser[LighthouseConfiguration]("lighthouse") { override def showUsageOnError: Boolean = true opt[LocalDate]('d', "localdate") .action((localDate, config) => config.copy(localDate = localDate)) .text("The localdate for which the job has to run") .optional() opt[String]('e', "environment") .action((environment, config) => config.copy(environment = environment)) .withFallback(() => fallbackEnvironment()) .validate(item => if (item.nonEmpty) success else failure("The configured environment for Lighthouse is empty")) .required() override def parse(args: Seq[String], init: LighthouseConfiguration): Option[LighthouseConfiguration] = { super.parse(args, init) match { case success @ Some(config) => System.setProperty(Datalake.PropertyName, config.environment) success case None => None } } private def fallbackEnvironment(): String = { Option(System.getProperty(Datalake.PropertyName)) .orElse(Option(System.getenv("LIGHTHOUSE_ENVIRONMENT"))) .getOrElse(new SparkConf().get(Datalake.PropertyName, "")) } } case class LighthouseConfiguration(localDate: LocalDate = LocalDate.now(), environment: String = "test")
Example 100
Source File: package.scala From lighthouse with Apache License 2.0 | 5 votes |
package be.dataminded.lighthouse import java.time.LocalDate import java.time.format.DateTimeFormatter import be.dataminded.lighthouse.common.DateTimeFormatters import scopt.Read import scopt.Read.reads import scala.util.{Failure, Success, Try} package object config { implicit val LocalDateSupport: Read[LocalDate] = reads { timestamp => tryParseLocalDate(timestamp) match { case Success(localDate) => localDate case Failure(e) => throw new IllegalArgumentException(s"The given timestamp: [$timestamp] could not be parsed", e) } } private def tryParseLocalDate(timestamp: String): Try[LocalDate] = { Try { LocalDate.parse(timestamp, DateTimeFormatter.ISO_LOCAL_DATE) } recover { case _ => LocalDate.parse(timestamp, DateTimeFormatters.SimpleDateFormat) } } }
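A minimal sketch, not from the project, showing the Read[LocalDate] instance above being resolved and applied:

import java.time.LocalDate
import be.dataminded.lighthouse.config._
import scopt.Read

object LocalDateReadUsage extends App {
  val read = implicitly[Read[LocalDate]]
  // ISO date is tried first, then the fallback SimpleDateFormat pattern.
  println(read.reads("2020-06-15")) // 2020-06-15
}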
Example 101
Source File: FileSystemDataLinkTest.scala From lighthouse with Apache License 2.0 | 5 votes |
package be.dataminded.lighthouse.datalake import java.time.LocalDate import java.time.Month.DECEMBER import be.dataminded.lighthouse.Models import be.dataminded.lighthouse.spark.Csv import be.dataminded.lighthouse.testing.SparkFunSuite import better.files._ import org.apache.spark.sql.types._ import org.scalatest.BeforeAndAfter import org.scalatest.matchers.should.Matchers class FileSystemDataLinkTest extends SparkFunSuite with Matchers with BeforeAndAfter { import spark.implicits._ val customerPath: String = Resource.getUrl("customers.csv").getPath() val ordersPath: String = Resource.getUrl("orders.csv").getPath() val options = Map("header" -> "true") test("A FileSystemDataLink can read a DataFrame from a local file") { val link = new FileSystemDataLink(path = customerPath, format = Csv, options = options) link.read().count should equal(3) } test("A FileSystemDataLink can read a DataSet from a local file") { val link = new FileSystemDataLink(path = customerPath, format = Csv, options = options) val dataset = link.readAs[Models.RawCustomer]() dataset.count should equal(3) } test("A FileSystemDataLink can leverage a specified schema") { val schema = Option( StructType( StructField("id", ByteType) :: StructField("firstName", StringType, nullable = true) :: StructField("lastName", StringType, nullable = true) :: StructField("yearOfBirth", ShortType, nullable = true) :: Nil ) ) val link = new FileSystemDataLink(path = customerPath, format = Csv, options = options, schema = schema) val dataset = link.read() dataset.schema should equal(schema.get) } test("A FileSystemDataLink can be used to write a DataFrame") { val link = new FileSystemDataLink(path = "./target/output/orc") link.write(Seq(Models.RawCustomer("1", "Pascal", "Knapen", "1982")).toDF()) link.read().count should equal(1) } test("A FileSystemDataLink can be used to write a DataSet") { val link = new FileSystemDataLink(path = "./target/output/orc") link.write(Seq(Models.RawCustomer("1", "Pascal", "Knapen", "1982")).toDS()) link.read().count should equal(1) } test("A snapshot of FileSystemDataLink can be used to write a DataFrame for a specific date") { val link = new FileSystemDataLink(path = "./target/output/orc").snapshotOf(LocalDate.of(1982, DECEMBER, 21)) link.write(Seq(Models.RawCustomer("1", "Pascal", "Knapen", "1982")).toDF()) "./target/output/orc/1982/12/21".toFile.exists should equal(true) link.read().count should equal(1) } test("A snapshot of FileSystemDataLink can be used to write a DataSet for a specific date") { val link = new FileSystemDataLink(path = "./target/output/orc").snapshotOf(LocalDate.of(1982, DECEMBER, 21)) link.write(Seq(Models.RawCustomer("1", "Pascal", "Knapen", "1982")).toDS()) "./target/output/orc/1982/12/21".toFile.exists should equal(true) link.read().count should equal(1) } after { file"target/output".delete(swallowIOExceptions = true) } }
Example 102
Source File: Executor.scala From neo4j-spark-connector with Apache License 2.0 | 5 votes |
package org.neo4j.spark import java.time.{LocalDate, LocalDateTime, OffsetTime, ZoneOffset, ZonedDateTime} import java.util import java.sql.Timestamp import org.apache.spark.SparkContext import org.apache.spark.sql.Row import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema import org.apache.spark.sql.catalyst.util.DateTimeUtils import org.apache.spark.sql.types.StructType import org.neo4j.spark.dataframe.CypherTypes import org.neo4j.spark.utils.{Neo4jSessionAwareIterator, Neo4jUtils} import scala.collection.JavaConverters._ object Executor { def convert(value: AnyRef): Any = value match { case it: util.Collection[_] => it.toArray() case m: java.util.Map[_,_] => m.asScala case _ => Neo4jUtils.convert(value) } def toJava(parameters: Map[String, Any]): java.util.Map[String, Object] = { parameters.mapValues(toJava).asJava } private def toJava(x: Any): AnyRef = x match { case y: Seq[_] => y.asJava case _ => x.asInstanceOf[AnyRef] } val EMPTY = Array.empty[Any] val EMPTY_RESULT = new CypherResult(new StructType(), Iterator.empty) class CypherResult(val schema: StructType, val rows: Iterator[Array[Any]]) { def sparkRows: Iterator[Row] = rows.map(row => new GenericRowWithSchema(row, schema)) def fields = schema.fieldNames } def execute(sc: SparkContext, query: String, parameters: Map[String, AnyRef]): CypherResult = { execute(Neo4jConfig(sc.getConf), query, parameters) } private def rows(result: Iterator[_]) = { var i = 0 while (result.hasNext) i = i + 1 i } def execute(config: Neo4jConfig, query: String, parameters: Map[String, Any], write: Boolean = false): CypherResult = { val result = new Neo4jSessionAwareIterator(config, query, toJava(parameters), write) if (!result.hasNext) { return EMPTY_RESULT } val peek = result.peek() val keyCount = peek.size() if (keyCount == 0) { return new CypherResult(new StructType(), Array.fill[Array[Any]](rows(result))(EMPTY).toIterator) } val keys = peek.keys().asScala val fields = keys.map(k => (k, peek.get(k).`type`())).map(keyType => CypherTypes.field(keyType)) val schema = StructType(fields) val it = result.map(record => { val row = new Array[Any](keyCount) var i = 0 while (i < keyCount) { val value = convert(record.get(i).asObject()) row.update(i, value) i = i + 1 } row }) new CypherResult(schema, it) } }
Example 103
Source File: ParameterMappers.scala From neotypes with MIT License | 5 votes |
package neotypes package implicits.mappers import java.time.{Duration, LocalDate, LocalDateTime, LocalTime, Period, OffsetDateTime, OffsetTime, ZonedDateTime} import java.util.UUID import mappers.ParameterMapper import org.neo4j.driver.v1.Value import org.neo4j.driver.v1.types.{IsoDuration, Point} import scala.collection.Iterable import scala.jdk.CollectionConverters._ trait ParameterMappers { implicit final val BooleanParameterMapper: ParameterMapper[Boolean] = ParameterMapper.fromCast(Boolean.box) implicit final val ByteArrayParameterMapper: ParameterMapper[Array[Byte]] = ParameterMapper.identity implicit final val DoubleParameterMapper: ParameterMapper[Double] = ParameterMapper.fromCast(Double.box) implicit final val DurationParameterMapper: ParameterMapper[Duration] = ParameterMapper.identity implicit final val FloatParameterMapper: ParameterMapper[Float] = ParameterMapper.fromCast(Float.box) implicit final val IntParameterMapper: ParameterMapper[Int] = ParameterMapper.fromCast(Int.box) implicit final val IsoDurationParameterMapper: ParameterMapper[IsoDuration] = ParameterMapper.identity implicit final val LocalDateParameterMapper: ParameterMapper[LocalDate] = ParameterMapper.identity implicit final val LocalDateTimeParameterMapper: ParameterMapper[LocalDateTime] = ParameterMapper.identity implicit final val LocalTimeParameterMapper: ParameterMapper[LocalTime] = ParameterMapper.identity implicit final val LongParameterMapper: ParameterMapper[Long] = ParameterMapper.fromCast(Long.box) implicit final val OffsetDateTimeParameterMapper: ParameterMapper[OffsetDateTime] = ParameterMapper.identity implicit final val OffsetTimeParameterMapper: ParameterMapper[OffsetTime] = ParameterMapper.identity implicit final val PeriodParameterMapper: ParameterMapper[Period] = ParameterMapper.identity implicit final val PointParameterMapper: ParameterMapper[Point] = ParameterMapper.identity implicit final val StringParameterMapper: ParameterMapper[String] = ParameterMapper.identity implicit final val UUIDParameterMapper: ParameterMapper[UUID] = ParameterMapper[String].contramap(_.toString) implicit final val ValueParameterMapper: ParameterMapper[Value] = ParameterMapper.identity implicit final val ZonedDateTimeParameterMapper: ParameterMapper[ZonedDateTime] = ParameterMapper.identity private final def iterableParameterMapper[T](mapper: ParameterMapper[T]): ParameterMapper[Iterable[T]] = ParameterMapper.fromCast { col => col.iterator.map(v => mapper.toQueryParam(v).underlying).asJava } implicit final def collectionParameterMapper[T, C[_]](implicit mapper: ParameterMapper[T], ev: C[T] <:< Iterable[T]): ParameterMapper[C[T]] = iterableParameterMapper(mapper).contramap(ev) private final def iterableMapParameterMapper[V](mapper: ParameterMapper[V]): ParameterMapper[Iterable[(String, V)]] = ParameterMapper.fromCast { col => col.iterator.map { case (key, v) => key -> mapper.toQueryParam(v).underlying }.toMap.asJava } implicit final def mapParameterMapper[V, M[_, _]](implicit mapper: ParameterMapper[V], ev: M[String, V] <:< Iterable[(String, V)]): ParameterMapper[M[String, V]] = iterableMapParameterMapper(mapper).contramap(ev) implicit final def optionAnyRefParameterMapper[T](implicit mapper: ParameterMapper[T]): ParameterMapper[Option[T]] = ParameterMapper.fromCast { optional => optional.map(v => mapper.toQueryParam(v).underlying).orNull } }
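A brief, illustrative sketch of turning a LocalDate into a query parameter with the mappers above (mixing the trait in to bring the implicits into scope):

import java.time.LocalDate
import neotypes.mappers.ParameterMapper

object LocalDateParameterUsage extends neotypes.implicits.mappers.ParameterMappers {
  // LocalDate values are handed to the driver untouched (identity mapper above).
  val param = ParameterMapper[LocalDate].toQueryParam(LocalDate.of(2019, 11, 2))
}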
Example 104
Source File: BloombergFieldValueFn.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.bloomberg

import java.time.format.DateTimeFormatter
import java.time.{LocalDate, OffsetTime, ZoneOffset}

import com.bloomberglp.blpapi.{Datetime, Element}

import scala.collection.JavaConverters._

// The extracted snippet starts mid-way through this object: the `object BloombergFieldValueFn`
// header, the `datetimeFormatter` value and the first cases of the `apply(element: Element)`
// pattern match are not included. The remainder of that match and the date helpers follow.

    // case <pattern not included in the snippet> =>
      // For SEQUENCE, iterate through each element and we need case (map, e) instead of `element` of existing sequence element.
      //element.elementIterator().asScala.foldLeft(new java.util.LinkedHashMap[String, Any]) { case (map, `element`) =>
      element.elementIterator().asScala.foldLeft(new java.util.LinkedHashMap[String, Any]) { case (map, e) =>
        map.put(e.name().toString, BloombergFieldValueFn(e))
        map
      } //needs to be a java map because of json serialization

    case _ =>
      if (element.isArray) {
        (0 until element.numValues()).map { i =>
          BloombergFieldValueFn(element.getValueAsElement(i))
        }.asJava
      } else {
        element.toString
      }
  }

  def offsetDateTime(dt: Datetime): String = {
    val offsetSeconds = if (dt.hasParts(Datetime.TIME_ZONE_OFFSET)) {
      dt.timezoneOffsetMinutes() * 60
    } else {
      0
    }
    val offset = ZoneOffset.ofTotalSeconds(offsetSeconds)
    OffsetTime.of(dt.hour(), dt.minute(), dt.second(), dt.nanosecond(), offset).toString
  }

  def localDate(dt: Datetime): String = {
    LocalDate.of(dt.year(), dt.month(), dt.dayOfMonth()).format(datetimeFormatter)
  }
}
Example 105
Source File: Rfc3339Util.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.play.controllers

import java.time.format.{ DateTimeFormatter, DateTimeParseException }
import java.time.{ LocalDate, ZoneId, ZonedDateTime }

object Rfc3339Util {

  private val fullDate = DateTimeFormatter.ofPattern("yyyy-MM-dd")
  private val shortDateTime = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssZ")
  private val shortDTWithTicks = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")
  private val fullDTWithTicks = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSS'Z'")
  private val dateTime = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSZ")

  def parseDateTime(datestring: String): ZonedDateTime =
    if (datestring.endsWith("Z") || datestring.endsWith("z")) parseFull(datestring)
    else parseParts(datestring)

  def parseDate(datestring: String): LocalDate =
    LocalDate.parse(datestring, fullDate)

  def writeDate(date: LocalDate): String = fullDate.format(date)

  def writeDateTime(date: ZonedDateTime): String = dateTime.format(date)

  private def parseParts(datestring: String): ZonedDateTime = {
    //step one, split off the timezone.
    val sepChar = if (datestring.indexOf('+') > 0) '+' else '-'
    val firstpart = datestring.substring(0, datestring.lastIndexOf(sepChar.toInt))
    val secondpart = datestring.substring(datestring.lastIndexOf(sepChar.toInt))
    //step two, remove the colon from the timezone offset
    val thirdpart = secondpart.substring(0, secondpart.indexOf(':')) + secondpart.substring(secondpart.indexOf(':') + 1)
    val dstring = firstpart + thirdpart
    try {
      ZonedDateTime.parse(dstring, shortDateTime)
    } catch {
      case pe: DateTimeParseException => ZonedDateTime.parse(dstring, dateTime)
    }
  }

  private def parseFull(datestring: String): ZonedDateTime = {
    val z = ZoneId.systemDefault()
    try {
      ZonedDateTime.parse(datestring, shortDTWithTicks.withZone(z))
    } catch {
      case p: DateTimeParseException => ZonedDateTime.parse(datestring, fullDTWithTicks.withZone(z))
    }
  }
}
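A small usage sketch of the helpers above (not part of the original file):

import java.time.LocalDate
import de.zalando.play.controllers.Rfc3339Util

object Rfc3339Usage extends App {
  val date: LocalDate = Rfc3339Util.parseDate("2018-09-10")
  println(Rfc3339Util.writeDate(date))                       // 2018-09-10
  println(Rfc3339Util.parseDateTime("2018-09-10T11:58:59Z")) // ZonedDateTime in the JVM's default zone
}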
Example 106
Source File: string_formats_yaml.scala From api-first-hand with MIT License | 5 votes |
package string_formats.yaml import play.api.mvc.{Action, Controller} import play.api.data.validation.Constraint import de.zalando.play.controllers._ import PlayBodyParsing._ import PlayValidations._ import de.zalando.play.controllers.Base64String import Base64String._ import java.time.ZonedDateTime import java.util.UUID import java.time.LocalDate import de.zalando.play.controllers.BinaryString import BinaryString._ // ----- constraints and wrapper validations ----- class GetBase64OptConstraints(override val instance: String) extends ValidationBase[String] { override val reference = "⌿paths⌿/⌿get⌿base64⌿Opt" override def constraints: Seq[Constraint[String]] = Seq() } class GetBase64OptValidator(instance: Base64String) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿base64⌿Opt" override val validators = Seq(new GetBase64OptConstraints(instance)) } class GetPetIdConstraints(override val instance: String) extends ValidationBase[String] { override val reference = "⌿paths⌿/⌿get⌿petId" override def constraints: Seq[Constraint[String]] = Seq() } class GetPetIdValidator(instance: BinaryString) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿petId" override val validators = Seq(new GetPetIdConstraints(instance)) } class GetDate_timeOptConstraints(override val instance: ZonedDateTime) extends ValidationBase[ZonedDateTime] { override val reference = "⌿paths⌿/⌿get⌿date_time⌿Opt" override def constraints: Seq[Constraint[ZonedDateTime]] = Seq() } class GetDate_timeOptValidator(instance: ZonedDateTime) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿date_time⌿Opt" override val validators = Seq(new GetDate_timeOptConstraints(instance)) } class GetUuidOptConstraints(override val instance: UUID) extends ValidationBase[UUID] { override val reference = "⌿paths⌿/⌿get⌿uuid⌿Opt" override def constraints: Seq[Constraint[UUID]] = Seq() } class GetUuidOptValidator(instance: UUID) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿uuid⌿Opt" override val validators = Seq(new GetUuidOptConstraints(instance)) } class GetDateOptConstraints(override val instance: LocalDate) extends ValidationBase[LocalDate] { override val reference = "⌿paths⌿/⌿get⌿date⌿Opt" override def constraints: Seq[Constraint[LocalDate]] = Seq() } class GetDateOptValidator(instance: LocalDate) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿date⌿Opt" override val validators = Seq(new GetDateOptConstraints(instance)) } // ----- complex type validators ----- // ----- option delegating validators ----- class GetBase64Validator(instance: GetBase64) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿base64" override val validators = instance.toSeq.map { new GetBase64OptValidator(_) } } class GetDate_timeValidator(instance: GetDate_time) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿date_time" override val validators = instance.toSeq.map { new GetDate_timeOptValidator(_) } } class GetUuidValidator(instance: GetUuid) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿uuid" override val validators = instance.toSeq.map { new GetUuidOptValidator(_) } } class GetDateValidator(instance: GetDate) extends RecursiveValidator { override val reference = "⌿paths⌿/⌿get⌿date" override val validators = instance.toSeq.map { new GetDateOptValidator(_) } } // ----- array delegating validators ----- // ----- catch all simple validators ----- // ----- composite validators ----- // ----- call validations ----- class 
GetValidator(date_time: GetDate_time, date: GetDate, base64: GetBase64, uuid: GetUuid, petId: BinaryString) extends RecursiveValidator { override val reference = "⌿paths⌿⌿get" override val validators = Seq( new GetDate_timeValidator(date_time), new GetDateValidator(date), new GetBase64Validator(base64), new GetUuidValidator(uuid), new GetPetIdValidator(petId) ) }
Example 107
Source File: CustomDerivations.scala From zio-config with Apache License 2.0 | 5 votes |
package zio.config.examples.magnolia

import java.time.{ LocalDate, ZonedDateTime }

import scala.util.Try

import zio.config._
import zio.config.magnolia.DeriveConfigDescriptor._
import zio.config.typesafe._

object CustomDerivations extends App {
  case class AppConfig(jobName: String, details: Option[Detail], s3Path: S3Path)

  case class Detail(containerId: String, executionTime: Either[ZonedDateTime, LocalDate])

  case class S3Path(s: String)

  object S3Path {
    // For some reason you decided to check if the string inside s3Path is empty or not while writing back as well
    // If this implicit doesn't exist, zio-config-magnolia falls back to its default behaviour
    // and finds out an instance for S3Path as it is a simple case class.
    implicit val descriptorOfS3Path: Descriptor[S3Path] =
      Descriptor[String]
        .xmapEither(
          s => validateS3Path(s).toRight(s"Invalid s3 path: ${s}"),
          value => validateS3Path(value.s).map(_.s).toRight("Cannot write. Invalid S3 path.")
        )

    private def validateS3Path(s3Path: String): Option[S3Path] =
      if (s3Path.startsWith("s3://")) Some(S3Path(s3Path)) else None
  }

  // Good to keep implicit derivations within companion objects.
  // Preferable to give descriptions to enrich error reporting of zio-config.
  object Detail

  val config =
    """
    jobName : "spark"
    s3Path : "s3://path"
    details : {
      containerId : abcdefg
      executionTime: "2020-06-20T17:15:23.601712+10:00[Australia/Sydney]"
    }
    """

  // Custom derivation for zoned date time. Since zonedDateTime is external,
  // we couldn't have a companion object to place this implicit, and hence placed
  // globally for the automatic derivation to work.
  implicit val descriptorOfZonedDateTime: Descriptor[ZonedDateTime] =
    Descriptor[String]
      .xmapEitherELeftPartial(
        x => Try(ZonedDateTime.parse(x)).toEither
      )(_.toString)(_.getMessage) ?? "time in zoned date time"

  val appConfigDesc = descriptor[AppConfig]

  val source = TypesafeConfigSource.fromHoconString(config) match {
    case Right(a) => a
    case Left(_)  => throw new Exception("bad hocon string")
  }

  val s = read(appConfigDesc from source)

  assert(
    s == Right(
      AppConfig(
        "spark",
        Some(Detail("abcdefg", Left(ZonedDateTime.parse("2020-06-20T17:15:23.601712+10:00[Australia/Sydney]")))),
        S3Path("s3://path")
      )
    )
  )
}
Example 108
Source File: ExecutorService.scala From sparkplug with MIT License | 5 votes |
package springnz.sparkplug.executor import java.net.{ URLDecoder, URLEncoder } import java.time.LocalDate import akka.actor._ import com.typesafe.config.ConfigFactory import springnz.sparkplug.core._ import springnz.sparkplug.util.Logging import scala.util.{ Properties, Try } object Constants { val defaultAkkaRemoteConfigSection = "akkaRemote" val actorSystemName = "sparkplugExecutorSystem" val brokerActorName = "sparkplugRequestBroker" } object ExecutorService extends Logging { import Constants._ lazy val defaultRemoteAkkaConfig = ConfigFactory.load.getConfig(s"sparkplug.$defaultAkkaRemoteConfigSection") // TODO: proper command line parsing to allow richer config options def main(args: Array[String]): Unit = { if (args.length < 4) throw new IllegalArgumentException(s"Expected at least 4 arguments to ExecutorService. Args = : ${args.toList}") val appName = args(1) val sparkClientPath = args(3) log.info(s"Starting Sparkplug ExecutorService: SparkClient = $sparkClientPath: ${LocalDate.now()}") val remoteConfig = if (args.length == 6) { val urlEncodedConfig = args(5) val configString = URLDecoder.decode(urlEncodedConfig, "UTF-8") val config = ConfigFactory.parseString(configString) log.info(s"Using akka remote config:\n$configString") config } else { log.info(s"Using default akka remote config from config section 'sparkplug.$defaultAkkaRemoteConfigSection'") defaultRemoteAkkaConfig } import scala.collection.JavaConversions._ def env = System.getenv().toMap log.debug(s"Environment:\n $env") val system = ActorSystem(actorSystemName, remoteConfig) val executorService = new ExecutorService(appName) executorService.start(system, sparkClientPath) log.info("Terminating the remote application.") } } class ExecutorService(appName: String, brokerName: String = Constants.brokerActorName) extends LongLivedExecutor with Logging { // Note that the SparkConf inherits all its settings from spark-submit override val configurer: Configurer = new LocalConfigurer(appName, Properties.envOrNone("SPARK_MASTER"), None) def start(system: ActorSystem, sparkClientPath: String): Try[Unit] = { val actorOperation = SparkOperation[Unit] { implicit sparkContext ⇒ def postStopAction() = { log.info("Cancelling any jobs (if any are running).") sparkContext.cancelAllJobs() log.info("Stopping Spark context.") sparkContext.stop() } log.info("Creating requestBroker for ExecutorService.") system.actorOf(Props(new RequestBroker(sparkClientPath, postStopAction)), name = brokerName) } log.info("Executing container operation (everything happens inside this method).") val result = execute(actorOperation) log.info("Finished executing container operation (everything happens inside this method).") result } }
Example 109
Source File: FFunctorExample.scala From scala_typeclassopedia with Creative Commons Attribution Share Alike 4.0 International | 5 votes |
package functorfunctor import java.time.LocalDate import educational.types.Id.Id object FFunctorExample { sealed trait CardType case object Visa extends CardType case object AmEx extends CardType case object Mastercard extends CardType object V1 { case class Form(email: String, cardType: CardType, cardNumber: String, cardExpiry: LocalDate) case class DraftForm(email: Option[String], cardType: Option[CardType], cardNumber: Option[String], cardExpiry: Option[LocalDate]) def toForm: DraftForm => Option[Form] = { case DraftForm(Some(email), Some(cardType), Some(cardNumber), Some(cardExpiry)) => Some(Form(email, cardType, cardNumber, cardExpiry)) case _ => None } } object V2 { case class FormTemplate[F[_]](email: F[String], cardType: F[CardType], cardNumber: F[String], cardExpiry: F[LocalDate]) type Form = FormTemplate[Id] type DraftForm = FormTemplate[Option] def toForm: FormTemplate[Option] => Option[Form] = ??? } }
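One way V2.toForm could be filled in, sketched here for illustration with a local Id alias and a simplified FormTemplate (the project's educational.types.Id and CardType field are left out):

import java.time.LocalDate

object FormTemplateSketch {
  type Id[A] = A

  case class FormTemplate[F[_]](email: F[String], cardNumber: F[String], cardExpiry: F[LocalDate])

  // Collapse a draft (all fields optional) into a complete form, if every field is present.
  def toForm(draft: FormTemplate[Option]): Option[FormTemplate[Id]] =
    for {
      email  <- draft.email
      number <- draft.cardNumber
      expiry <- draft.cardExpiry
    } yield FormTemplate[Id](email, number, expiry)
}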
Example 110
Source File: JavaInstances.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.schemas.instances import java.util import java.time.LocalDate import com.spotify.scio.IsJavaBean import com.spotify.scio.schemas.{ArrayType, MapType, RawRecord, Schema, Type} import org.apache.beam.sdk.schemas.JavaBeanSchema import org.apache.beam.sdk.schemas.Schema.{FieldType, LogicalType} import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils import scala.reflect.ClassTag trait JavaInstances { implicit val jByteSchema: Type[java.lang.Byte] = Type[java.lang.Byte](FieldType.BYTE) implicit val jBytesSchema: Type[Array[java.lang.Byte]] = Type[Array[java.lang.Byte]](FieldType.BYTES) implicit val jShortSchema: Type[java.lang.Short] = Type[java.lang.Short](FieldType.INT16) implicit val jIntegerSchema: Type[java.lang.Integer] = Type[java.lang.Integer](FieldType.INT32) implicit val jLongSchema: Type[java.lang.Long] = Type[java.lang.Long](FieldType.INT64) implicit val jFloatSchema: Type[java.lang.Float] = Type[java.lang.Float](FieldType.FLOAT) implicit val jDoubleSchema: Type[java.lang.Double] = Type[java.lang.Double](FieldType.DOUBLE) implicit val jBigDecimalSchema: Type[java.math.BigDecimal] = Type[java.math.BigDecimal](FieldType.DECIMAL) implicit val jBooleanSchema: Type[java.lang.Boolean] = Type[java.lang.Boolean](FieldType.BOOLEAN) implicit def jListSchema[T](implicit s: Schema[T]): Schema[java.util.List[T]] = ArrayType(s, identity, identity) implicit def jArrayListSchema[T](implicit s: Schema[T]): Schema[java.util.ArrayList[T]] = ArrayType(s, identity, l => new util.ArrayList[T](l)) implicit def jMapSchema[K, V](implicit ks: Schema[K], vs: Schema[V] ): Schema[java.util.Map[K, V]] = MapType(ks, vs, identity, identity) implicit def javaBeanSchema[T: IsJavaBean: ClassTag]: RawRecord[T] = RawRecord[T](new JavaBeanSchema()) implicit def javaEnumSchema[T <: java.lang.Enum[T]: ClassTag]: Schema[T] = Type[T](FieldType.logicalType(new LogicalType[T, String] { private val clazz = scala.reflect.classTag[T].runtimeClass.asInstanceOf[Class[T]] private val className = clazz.getCanonicalName override def getIdentifier: String = className override def getBaseType: FieldType = FieldType.STRING override def toBaseType(input: T): String = input.name() override def toInputType(base: String): T = java.lang.Enum.valueOf[T](clazz, base) override def toString: String = s"EnumLogicalType($className, String)" override def getArgumentType: FieldType = FieldType.STRING })) implicit def jLocalDate: Type[LocalDate] = Type(CalciteUtils.DATE) } private[schemas] object JavaInstances extends JavaInstances
Example 111
Source File: TweetSearchParameters.scala From twitter4s with Apache License 2.0 | 5 votes |
package com.danielasfregola.twitter4s.http.clients.rest.search.parameters import java.time.LocalDate import com.danielasfregola.twitter4s.entities.GeoCode import com.danielasfregola.twitter4s.entities.enums.Language.Language import com.danielasfregola.twitter4s.entities.enums.ResultType.ResultType import com.danielasfregola.twitter4s.entities.enums.TweetMode.TweetMode import com.danielasfregola.twitter4s.http.marshalling.Parameters private[twitter4s] final case class TweetSearchParameters(q: String, count: Int, include_entities: Boolean, result_type: ResultType, geocode: Option[GeoCode], lang: Option[Language], locale: Option[String], until: Option[LocalDate], since_id: Option[Long], max_id: Option[Long], callback: Option[String], tweet_mode: TweetMode) extends Parameters
Example 112
Source File: Invoice.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common.domain import java.time.LocalDate case class Invoice(customer: Customer, issueDate: LocalDate, dueDate: LocalDate, lineItems: Vector[LineItem], status: InvoiceStatus, paid: BigDecimal) { def setCustomer(name: String, email: String): Invoice = copy(customer = Customer(name, email)) def setDates(newIssueDate: LocalDate, newDueDate: LocalDate): Invoice = copy(issueDate = newIssueDate, dueDate = newDueDate) def addLineItem(description: String, quantity: BigDecimal, price: BigDecimal): Invoice = { val lineItem = LineItem(description, quantity, price) copy(lineItems = lineItems :+ lineItem) } def removeLineItem(index: Int): Invoice = { val before = lineItems.take(index) val after = lineItems.drop(index + 1) copy(lineItems = before ++ after) } def pay(amount: BigDecimal): Invoice = { val newStatus = if (amount == balance) InvoiceStatus.Paid else status copy(paid = paid + amount, status = newStatus) } def delete: Invoice = copy(status = InvoiceStatus.Deleted) def hasLineItem(index: Int): Boolean = lineItems.indices contains index def total: BigDecimal = lineItems.foldLeft[BigDecimal](0)(_ + _.total) def balance: BigDecimal = total - paid } object Invoice { val Draft = Invoice( customer = Customer.Empty, issueDate = LocalDate.MIN, dueDate = LocalDate.MAX, lineItems = Vector.empty, status = InvoiceStatus.New, paid = 0) } case class Customer(name: String, email: String) object Customer { val Empty = Customer("", "") } case class LineItem(description: String, quantity: BigDecimal, price: BigDecimal) { def total: BigDecimal = quantity * price } sealed trait InvoiceStatus object InvoiceStatus { case object New extends InvoiceStatus case object Paid extends InvoiceStatus case object Deleted extends InvoiceStatus }
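An illustrative sequence of calls against the model above (the customer and amounts are invented):

import java.time.LocalDate
import org.amitayh.invoices.common.domain.Invoice

object InvoiceUsage extends App {
  val invoice = Invoice.Draft
    .setCustomer("Jane Doe", "jane@example.com")
    .setDates(LocalDate.of(2020, 1, 1), LocalDate.of(2020, 1, 31))
    .addLineItem("Consulting", quantity = 2, price = 150)
    .pay(100)
  println(invoice.total)   // 300
  println(invoice.balance) // 200
  println(invoice.status)  // New (only a payment covering the full balance marks it Paid)
}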
Example 113
Source File: Event.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common.domain import java.time.{Instant, LocalDate} import java.util.UUID case class Event(version: Int, timestamp: Instant, commandId: UUID, payload: Event.Payload) object Event { sealed trait Payload { def apply(invoice: Invoice): Invoice = invoice } case class InvoiceCreated(customerName: String, customerEmail: String, issueDate: LocalDate, dueDate: LocalDate) extends Payload { override def apply(invoice: Invoice): Invoice = invoice .setCustomer(customerName, customerEmail) .setDates(issueDate, dueDate) } case class LineItemAdded(description: String, quantity: BigDecimal, price: BigDecimal) extends Payload { override def apply(invoice: Invoice): Invoice = invoice.addLineItem(description, quantity, price) } case class LineItemRemoved(index: Int) extends Payload { override def apply(invoice: Invoice): Invoice = invoice.removeLineItem(index) } case class PaymentReceived(amount: BigDecimal) extends Payload { override def apply(invoice: Invoice): Invoice = invoice.pay(amount) } case class InvoiceDeleted() extends Payload { override def apply(invoice: Invoice): Invoice = invoice.delete } case class InvoiceSentToCustomer() extends Payload }
Example 114
Source File: Command.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common.domain import java.time.{Instant, LocalDate} import java.util.UUID import scala.collection.immutable.Seq case class Command(originId: UUID, commandId: UUID, expectedVersion: Option[Int], payload: Command.Payload) { def apply(timestamp: Instant, snapshot: InvoiceSnapshot): CommandResult = { val outcome = snapshot .validateVersion(expectedVersion) .flatMap(payload(_)) .fold( CommandResult.Failure, success(timestamp, snapshot, _)) CommandResult(originId, commandId, outcome) } private def success(timestamp: Instant, snapshot: InvoiceSnapshot, payloads: Seq[Event.Payload]): CommandResult.Outcome = { payloads.foldLeft(CommandResult.Success(snapshot)) { (acc, payload) => acc.update(timestamp, commandId, payload) } } } object Command { type Result = Either[InvoiceError, Seq[Event.Payload]] sealed trait Payload { def apply(invoice: Invoice): Result } case class CreateInvoice(customerName: String, customerEmail: String, issueDate: LocalDate, dueDate: LocalDate, lineItems: List[LineItem]) extends Payload { override def apply(invoice: Invoice): Result = { val createdEvent = Event.InvoiceCreated(customerName, customerEmail, issueDate, dueDate) val lineItemEvents = lineItems.map(toLineItemEvent) success(createdEvent :: lineItemEvents) } private def toLineItemEvent(lineItem: LineItem): Event.Payload = Event.LineItemAdded( description = lineItem.description, quantity = lineItem.quantity, price = lineItem.price) } case class AddLineItem(description: String, quantity: Double, price: Double) extends Payload { override def apply(invoice: Invoice): Result = success(Event.LineItemAdded(description, quantity, price)) } case class RemoveLineItem(index: Int) extends Payload { override def apply(invoice: Invoice): Result = { if (invoice.hasLineItem(index)) success(Event.LineItemRemoved(index)) else failure(LineItemDoesNotExist(index)) } } case class PayInvoice() extends Payload { override def apply(invoice: Invoice): Result = success(Event.PaymentReceived(invoice.total)) } case class DeleteInvoice() extends Payload { override def apply(invoice: Invoice): Result = success(Event.InvoiceDeleted()) } private def success(events: Event.Payload*): Result = success(events.toList) private def success(events: List[Event.Payload]): Result = Right(events) private def failure(error: InvoiceError): Result = Left(error) }
Example 115
Source File: AppRouter.scala From full-scala-stack with Apache License 2.0 | 5 votes |
package routes import java.time.LocalDate import components.AbstractComponent import japgolly.scalajs.react.ReactMouseEventFrom import japgolly.scalajs.react.extra.router.StaticDsl.RouteB import japgolly.scalajs.react.extra.router._ import japgolly.scalajs.react.vdom.html_<^._ import org.scalajs.dom.raw.HTMLAnchorElement import pages.MainPage import typingsJapgolly.semanticDashUiDashReact.components._ import typingsJapgolly.semanticDashUiDashReact.distCommonjsCollectionsMenuMenuItemMod._ object AppRouter extends AbstractComponent { case class State() sealed trait AppPageData case object MainPageData extends AppPageData private def setEH(c: RouterCtl[AppPageData], target: AppPageData) = { (event: ReactMouseEventFrom[HTMLAnchorElement], data: MenuItemProps) => c.setEH(target)(event) } private def layout(page: RouterCtl[AppPageData], resolution: Resolution[AppPageData]) = { assert(page != null) <.div( ^.height := 100.pct, <.div( ^.height := 100.pct, ^.className := "full height", ^.display := "flex", ^.flexDirection := "row", <.div( ^.height := 100.pct, ^.className := "no-print", ^.flex := "0 0 auto", ^.position := "relative", Menu(vertical = true)( MenuItem( active = resolution.page == MainPageData, onClick = { (event: ReactMouseEventFrom[HTMLAnchorElement], data: MenuItemProps) => page.setEH(MainPageData)(event) } )("Main Page") ) ), <.div(^.flex := "1 1 auto", resolution.render()) ) ) } private val config: RouterConfig[AppPageData] = RouterConfigDsl[AppPageData].buildConfig { dsl => import dsl._ val seqInt = new RouteB[Seq[Int]]( regex = "(-?[\\d,]+)", matchGroups = 1, parse = { groups => Some(groups(0).split(",").map(_.toInt)) }, build = _.mkString(",") ) val dateRange = new RouteB[(LocalDate, LocalDate)]( regex = "\\((.*),(.*)\\)", matchGroups = 2, parse = { groups => Some((LocalDate.parse(groups(0)), LocalDate.parse(groups(1)))) }, build = { tuple => s"(${tuple._1.toString},${tuple._2.toString})" } ) (trimSlashes | staticRoute("#mainPage", MainPageData) ~> renderR(ctrl => MainPage())) .notFound(redirectToPage(MainPageData)(Redirect.Replace)) .renderWith(layout) } private val baseUrl: BaseUrl = BaseUrl.fromWindowOrigin_/ val router: Router[AppPageData] = Router.apply(baseUrl, config) }
Example 116
Source File: KnowledgeGraph.scala From Scalaprof with GNU General Public License v2.0 | 5 votes |
package edu.neu.coe.scala.spark.graphx

import java.time._
import java.time.LocalDate

import org.apache.spark._
import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD

abstract class Knowledge[A](what: String, term: String, value: A, lang: String = "en")

case class Concept(term: String, value: String, lang: String = "en")
  extends Knowledge[String]("concept", term, value, lang)

case class NamedEntity(term: String, value: String, what: String, lang: String = "en")
  extends Knowledge[String](what, term, value, lang)

case class Quantity[A](term: String, value: A, lang: String = "en")
  extends Knowledge[String]("quantity", term, value.toString, lang)

case class Relationship(which: String, tense: String)

case class Triple(from: Long, to: Long, relationship: Relationship)

object KnowledgeGraph extends App {
  implicit val time: LocalDate = LocalDate.now

  val conf = new SparkConf().setAppName("news")
  val sc = new SparkContext(conf)

  def createGraph(knowledge: List[Knowledge[String]],
                  triples: List[Triple]): Graph[Knowledge[String], Relationship] = {
    // Vertex ids are the 0-based positions of the knowledge items,
    // matching the indices used by the triples below
    val vertices: List[(VertexId, Knowledge[String])] =
      knowledge.zipWithIndex.map { case (k, i) => (i.toLong, k) }
    val nodes: RDD[(VertexId, Knowledge[String])] = sc.parallelize(vertices.toArray)
    val edges = for (r <- triples) yield Edge(r.from, r.to, r.relationship)
    val arcs: RDD[Edge[Relationship]] = sc.parallelize(edges.toArray)
    Graph(nodes, arcs)
  }

  val knowledgeDoc1: List[Knowledge[String]] = List(
    Quantity[Instant]("November 10th", Instant.parse("2015-11-10T10:15:30.00Z")),
    NamedEntity("Senator John Kerry", "John Kerry, U.S. Senator and Secretary of State", "person"),
    NamedEntity("President Vladimir Putin", "Vladimir Putin, President of Russia", "person"),
    NamedEntity("Syria", "Syria", "nation"),
    Concept("invitation to meet", "meet"),
    Concept("subject", "the subject of"),
    Quantity[Instant]("December 1st", Instant.parse("2015-12-01T10:15:30.00Z")),
    NamedEntity("Washington", "Washington D.C.", "city")
  )

  val triplesDoc1 = List(
    Triple(4, 0, Relationship("timestamp", "past")),
    Triple(4, 2, Relationship("invitee", "future")),
    Triple(4, 1, Relationship("invited", "past")),
    Triple(1, 4, Relationship("make", "past")),
    Triple(2, 1, Relationship("meet with", "future")),
    Triple(4, 6, Relationship("timestamp", "future")),
    Triple(4, 5, Relationship("propose", "past")),
    Triple(4, 7, Relationship("at", "future")),
    Triple(5, 3, Relationship("is", "future"))
  )

  val doc1 = createGraph(knowledgeDoc1, triplesDoc1)
  doc1.triplets.collect.foreach(println)
}
Example 117
Source File: ParameterConversions.scala From scruid with Apache License 2.0 | 5 votes |
package ing.wbaa.druid.sql

import java.sql.Timestamp
import java.time.{ Instant, LocalDate, LocalDateTime }

import scala.language.implicitConversions

import ing.wbaa.druid.{ DruidConfig, SQLQueryParameter, SQLQueryParameterType }

trait ParameterConversions {
  implicit def char2Param(v: Char): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Char, v.toString)

  implicit def string2Param(v: String): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Varchar, v)

  implicit def byte2Param(v: Byte): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Tinyint, v.toString)

  implicit def short2Param(v: Short): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Smallint, v.toString)

  implicit def int2Param(v: Int): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Integer, v.toString)

  implicit def long2Param(v: Long): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Bigint, v.toString)

  implicit def float2Param(v: Float): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Float, v.toString)

  implicit def double2Param(v: Double): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Double, v.toString)

  implicit def boolean2Param(v: Boolean): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Boolean, v.toString)

  implicit def localDate2Param(v: LocalDate)(
      implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Date, v.format(config.FormatterDate))

  implicit def localDateTime2Param(v: LocalDateTime)(
      implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, v.format(config.FormatterDateTime))

  implicit def timestamp2Param(v: Timestamp)(
      implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp,
                      config.FormatterDateTime.format(v.toInstant))

  implicit def instant2Param(v: Instant)(
      implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v))
}
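A rough sketch of how the LocalDate conversion above might be exercised; the object name and the date value are illustrative, and only types referenced in the file above (DruidConfig, SQLQueryParameter) are assumed.

import java.time.LocalDate

import ing.wbaa.druid.{ DruidConfig, SQLQueryParameter }
import ing.wbaa.druid.sql.ParameterConversions

object ParameterConversionsExample extends ParameterConversions {
  implicit val config: DruidConfig = DruidConfig.DefaultConfig

  // localDate2Param converts the LocalDate into a Date-typed SQL query parameter,
  // formatted with the configured FormatterDate
  val dateParam: SQLQueryParameter = LocalDate.parse("2020-01-01")
}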