org.joda.time.Duration Scala Examples
The following examples show how to use org.joda.time.Duration.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
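Before the examples, here is a minimal self-contained sketch of the Duration API that recurs throughout this page (factory methods, the millisecond constructor, ISO-8601 parsing, and Instant arithmetic); the values are illustrative only.

import org.joda.time.{Duration, Instant}

object DurationBasics extends App {
  // A joda Duration is an exact length of time, backed by a millisecond count.
  val fiveSeconds = Duration.standardSeconds(5) // PT5S
  val alsoFiveSeconds = new Duration(5000L)     // the constructor takes milliseconds
  val parsed = Duration.parse("PT0.1S")         // ISO-8601 string, 100 ms

  println(fiveSeconds == alsoFiveSeconds)        // true
  println(Duration.standardMinutes(1).getMillis) // 60000
  println(parsed.getMillis)                      // 100

  // Durations combine naturally with Instants.
  println(new Instant(0).plus(Duration.standardMinutes(1))) // 1970-01-01T00:01:00.000Z
}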
Example 1
Source File: TriggerExampleTest.scala From scio with Apache License 2.0
package com.spotify.scio.examples.cookbook

import com.spotify.scio.testing._
import org.joda.time.{Duration, Instant}

class TriggerExampleTest extends PipelineSpec {
  // #TriggerExampleTest_example
  "TriggerExample.extractFlowInfo" should "work" in {
    val data = Seq(
      "01/01/2010 00:00:00,1108302,94,E,ML,36,100,29,0.0065,66,9,1,0.001,74.8,1,9,3,0.0028,71,1,9," +
        "12,0.0099,67.4,1,9,13,0.0121,99.0,1,,,,,0,,,,,0,,,,,0,,,,,0",
      "01/01/2010 00:00:00," +
        "1100333,5,N,FR,9,0,39,,,9,,,,0,,,,,0,,,,,0,,,,,0,,,,,0,,,,,0,,,,,0,,,,"
    )
    runWithContext { sc =>
      val r = TriggerExample.extractFlowInfo(sc.parallelize(data))
      r should haveSize(1)
      r should containSingleValue(("94", 29))
    }
  }
  // #TriggerExampleTest_example

  "TriggerExample.totalFlow" should "work" in {
    val data = Seq(
      (
        "01/01/2010 00:00:00,1108302,5,W,ML,36,100,30,0.0065,66,9,1,0.001," +
          "74.8,1,9,3,0.0028,71,1,9,12,0.0099,87.4,1,9,13,0.0121,99.0,1,,,,,0,,,,,0,,,,,0,,," +
          ",,0",
        new Instant(60000)
      ),
      (
        "01/01/2010 00:00:00,1108302,110,E,ML,36,100,40,0.0065,66,9,1,0.001," +
          "74.8,1,9,3,0.0028,71,1,9,12,0.0099,67.4,1,9,13,0.0121,99.0,1,,,,,0,,,,,0,,,,,0,,," +
          ",,0",
        new Instant(1)
      ),
      (
        "01/01/2010 00:00:00,1108302,110,E,ML,36,100,50,0.0065,66,9,1," +
          "0.001,74.8,1,9,3,0.0028,71,1,9,12,0.0099,97.4,1,9,13,0.0121,50.0,1,,,,,0,,,,,0" +
          ",,,,,0,,,,,0",
        new Instant(1)
      )
    )
    val expected = Seq(
      TriggerExample.Record(
        "default", "5", 30, 1, "[1970-01-01T00:01:00.000Z..1970-01-01T00:02:00.000Z)",
        true, true, "ON_TIME", new Instant(1), new Instant(1)
      ),
      TriggerExample.Record(
        "default", "110", 90, 2, "[1970-01-01T00:00:00.000Z..1970-01-01T00:01:00.000Z)",
        true, true, "ON_TIME", new Instant(1), new Instant(1)
      )
    )
    runWithContext { sc =>
      val flowInfo = TriggerExample
        .extractFlowInfo(sc.parallelizeTimestamped(data))
        .withFixedWindows(Duration.standardMinutes(1))
      val r = TriggerExample
        .totalFlow(flowInfo, "default")
        .map(_.copy(event_time = new Instant(1), processing_time = new Instant(1)))
      r should containInAnyOrder(expected)
    }
  }
}
Example 2
Source File: TimeBasedAggregatorTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.aggregators

import com.salesforce.op.features.FeatureBuilder
import com.salesforce.op.features.types._
import com.salesforce.op.stages.FeatureGeneratorStage
import com.salesforce.op.test.TestCommon
import org.joda.time.Duration
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class TimeBasedAggregatorTest extends FlatSpec with TestCommon {

  private val data = Seq(
    TimeBasedTest(100L, 1.0, "a", Map("a" -> "a")),
    TimeBasedTest(200L, 2.0, "b", Map("b" -> "b")),
    TimeBasedTest(300L, 3.0, "c", Map("c" -> "c")),
    TimeBasedTest(400L, 4.0, "d", Map("d" -> "d")),
    TimeBasedTest(500L, 5.0, "e", Map("e" -> "e")),
    TimeBasedTest(600L, 6.0, "f", Map("f" -> "f"))
  )

  private val timeExt = Option((d: TimeBasedTest) => d.time)

  Spec[LastAggregator[_]] should "return the most recent event" in {
    val feature = FeatureBuilder.Real[TimeBasedTest].extract(_.real.toRealNN)
      .aggregate(LastReal).asPredictor
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(data, timeExt, CutOffTime.NoCutoff())
    extracted shouldBe Real(Some(6.0))
  }

  it should "return the most recent event within the time window" in {
    val feature = FeatureBuilder.Text[TimeBasedTest].extract(_.string.toText)
      .aggregate(LastText).asResponse
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(data, timeExt, CutOffTime.UnixEpoch(300L),
      responseWindow = Option(new Duration(201L)))
    extracted shouldBe Text(Some("e"))
  }

  it should "return the feature type empty value when no events are passed in" in {
    val feature = FeatureBuilder.TextMap[TimeBasedTest].extract(_.map.toTextMap)
      .aggregate(LastTextMap).asPredictor
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(Seq(), timeExt, CutOffTime.NoCutoff())
    extracted shouldBe TextMap.empty
  }

  Spec[FirstAggregator[_]] should "return the first event" in {
    val feature = FeatureBuilder.TextAreaMap[TimeBasedTest].extract(_.map.toTextAreaMap)
      .aggregate(FirstTextAreaMap).asResponse
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(data, timeExt, CutOffTime.UnixEpoch(301L))
    extracted shouldBe TextAreaMap(Map("d" -> "d"))
  }

  it should "return the first event within the time window" in {
    val feature = FeatureBuilder.Currency[TimeBasedTest].extract(_.real.toCurrency)
      .aggregate(FirstCurrency).asPredictor
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(data, timeExt, CutOffTime.UnixEpoch(400L),
      predictorWindow = Option(new Duration(201L)))
    extracted shouldBe Currency(Some(2.0))
  }

  it should "return the feature type empty value when no events are passed in" in {
    val feature = FeatureBuilder.State[TimeBasedTest].extract(_.string.toState)
      .aggregate(FirstState).asPredictor
    val aggregator = feature.originStage.asInstanceOf[FeatureGeneratorStage[TimeBasedTest, _]].featureAggregator
    val extracted = aggregator.extract(Seq(), timeExt, CutOffTime.NoCutoff())
    extracted shouldBe State.empty
  }
}

case class TimeBasedTest(time: Long, real: Double, string: String, map: Map[String, String])
Example 3
Source File: PassengerFeaturesTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.test

import com.salesforce.op.aggregators.MaxReal
import com.salesforce.op.features.types._
import com.salesforce.op.features.{FeatureBuilder, OPFeature}
import org.joda.time.Duration
import PassengerFeaturesTest._

trait PassengerFeaturesTest {

  val age = FeatureBuilder.Real[Passenger].extract(new AgeExtract).aggregate(MaxReal).asPredictor
  val gender = FeatureBuilder.MultiPickList[Passenger].extract(new GenderAsMultiPickListExtract).asPredictor
  val genderPL = FeatureBuilder.PickList[Passenger].extract(new GenderAsPickListExtract).asPredictor
  val height = FeatureBuilder.RealNN[Passenger].extract(new HeightToRealNNExtract)
    .window(Duration.millis(300)).asPredictor
  val heightNoWindow = FeatureBuilder.Real[Passenger].extract(new HeightToRealExtract).asPredictor
  val weight = FeatureBuilder.Real[Passenger].extract(new WeightToRealExtract).asPredictor
  val description = FeatureBuilder.Text[Passenger].extract(new DescriptionExtract).asPredictor
  val boarded = FeatureBuilder.DateList[Passenger].extract(new BoardedToDateListExtract).asPredictor
  val stringMap = FeatureBuilder.TextMap[Passenger].extract(new StringMapExtract).asPredictor
  val numericMap = FeatureBuilder.RealMap[Passenger].extract(new NumericMapExtract).asPredictor
  val booleanMap = FeatureBuilder.BinaryMap[Passenger].extract(new BooleanMapExtract).asPredictor
  val survived = FeatureBuilder.Binary[Passenger].extract(new SurvivedExtract).asResponse
  val boardedTime = FeatureBuilder.Date[Passenger].extract(new BoardedToDateExtract).asPredictor
  val boardedTimeAsDateTime = FeatureBuilder.DateTime[Passenger].extract(new BoardedToDateTimeExtract).asPredictor

  val rawFeatures: Array[OPFeature] = Array(
    survived, age, gender, height, weight, description, boarded, stringMap, numericMap, booleanMap
  )
}

object PassengerFeaturesTest {

  class GenderAsMultiPickListExtract extends Function1[Passenger, MultiPickList] with Serializable {
    def apply(p: Passenger): MultiPickList = Set(p.getGender).toMultiPickList
  }
  class GenderAsPickListExtract extends Function1[Passenger, PickList] with Serializable {
    def apply(p: Passenger): PickList = p.getGender.toPickList
  }
  class HeightToRealNNExtract extends Function1[Passenger, RealNN] with Serializable {
    def apply(p: Passenger): RealNN = Option(p.getHeight).map(_.toDouble).toRealNN(0.0)
  }
  class HeightToRealExtract extends Function1[Passenger, Real] with Serializable {
    def apply(p: Passenger): Real = p.getHeight.toReal
  }
  class WeightToRealExtract extends Function1[Passenger, Real] with Serializable {
    def apply(p: Passenger): Real = p.getWeight.toReal
  }
  class DescriptionExtract extends Function1[Passenger, Text] with Serializable {
    def apply(p: Passenger): Text = p.getDescription.toText
  }
  class BoardedToDateListExtract extends Function1[Passenger, DateList] with Serializable {
    def apply(p: Passenger): DateList = Seq(p.getBoarded.toLong).toDateList
  }
  class BoardedToDateExtract extends Function1[Passenger, Date] with Serializable {
    def apply(p: Passenger): Date = p.getBoarded.toLong.toDate
  }
  class BoardedToDateTimeExtract extends Function1[Passenger, DateTime] with Serializable {
    def apply(p: Passenger): DateTime = p.getBoarded.toLong.toDateTime
  }
  class SurvivedExtract extends Function1[Passenger, Binary] with Serializable {
    def apply(p: Passenger): Binary = Option(p.getSurvived).map(_ == 1).toBinary
  }
  class StringMapExtract extends Function1[Passenger, TextMap] with Serializable {
    def apply(p: Passenger): TextMap = p.getStringMap.toTextMap
  }
  class NumericMapExtract extends Function1[Passenger, RealMap] with Serializable {
    def apply(p: Passenger): RealMap = p.getNumericMap.toRealMap
  }
  class BooleanMapExtract extends Function1[Passenger, BinaryMap] with Serializable {
    def apply(p: Passenger): BinaryMap = p.getBooleanMap.toBinaryMap
  }
  class AgeExtract extends Function1[Passenger, Real] with Serializable {
    def apply(p: Passenger): Real = p.getAge.toReal
  }
}
Example 4
Source File: JoinedReadersTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.readers

import com.salesforce.op.aggregators.CutOffTime
import com.salesforce.op.test._
import org.joda.time.{DateTimeConstants, Duration}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}

@RunWith(classOf[JUnitRunner])
class JoinedReadersTest extends FlatSpec with PassengerSparkFixtureTest {

  val sparkReader = DataReaders.Aggregate.csv[SparkExample](
    path = Some("../test-data/SparkExample.csv"),
    schema = SparkExample.getClassSchema.toString,
    key = _.getLabel.toString,
    aggregateParams = AggregateParams(None, CutOffTime.NoCutoff())
  )

  val passengerReader = DataReaders.Conditional.avro[Passenger](
    path = Some(passengerAvroPath), // Path should be optional so can also pass in as a parameter
    key = _.getPassengerId.toString, // Entity to score
    conditionalParams = ConditionalParams(
      timeStampFn = _.getRecordDate.toLong, // Record field which defines the date for the rest of the columns
      targetCondition = _.getBoarded >= 1471046600, // Function to figure out if target event has occurred
      responseWindow = None, // How many days after target event to include in response aggregation
      predictorWindow = None, // How many days before target event to include in predictor aggregation
      timeStampToKeep = TimeStampToKeep.Min
    )
  )

  Spec[JoinedReader[_, _]] should "take any kind of reader as the leftmost input" in {
    profileReader.innerJoin(sparkReader) shouldBe a[JoinedDataReader[_, _]]
    dataReader.outerJoin(sparkReader) shouldBe a[JoinedDataReader[_, _]]
    passengerReader.leftOuterJoin(sparkReader) shouldBe a[JoinedDataReader[_, _]]
  }

  it should "allow simple readers for right inputs" in {
    sparkReader.innerJoin(profileReader).joinType shouldBe JoinTypes.Inner
    sparkReader.outerJoin(profileReader).joinType shouldBe JoinTypes.Outer
    sparkReader.leftOuterJoin(profileReader).joinType shouldBe JoinTypes.LeftOuter
  }

  it should "have all subreaders correctly ordered" in {
    val joinedReader = profileReader.innerJoin(sparkReader).outerJoin(dataReader)
    joinedReader.subReaders should contain theSameElementsAs Seq(profileReader, sparkReader, dataReader)
  }

  it should "correctly set leftKey in left outer and inner joins" in {
    dataReader.leftOuterJoin(sparkReader, joinKeys = JoinKeys(leftKey = "id")).joinKeys.leftKey shouldBe "id"
    dataReader.innerJoin(sparkReader, joinKeys = JoinKeys(leftKey = "id")).joinKeys.leftKey shouldBe "id"
  }

  it should "throw an error if you try to perform a self join" in {
    a[IllegalArgumentException] should be thrownBy {
      dataReader.innerJoin(dataReader)
    }
  }

  it should "throw an error if you try to use the same reader twice" in {
    a[IllegalArgumentException] should be thrownBy {
      dataReader.innerJoin(sparkReader).innerJoin(dataReader)
    }
  }

  it should "throw an error if you try to read the same data type twice with different readers" in {
    a[IllegalArgumentException] should be thrownBy {
      passengerReader.innerJoin(sparkReader).outerJoin(dataReader)
    }
  }

  it should "throw an error if you try to use an invalid key combination" in {
    a[RuntimeException] should be thrownBy {
      dataReader.innerJoin(sparkReader, joinKeys = JoinKeys(resultKey = DataFrameFieldNames.KeyFieldName))
        .generateDataFrame(Array.empty)
    }
  }

  it should "produce a JoinedAggregateDataReader when withSecondaryAggregation is called" in {
    val joinedReader = profileReader.innerJoin(sparkReader)
    val timeFilter = TimeBasedFilter(
      condition = new TimeColumn(boardedTime),
      primary = new TimeColumn(boardedTime),
      timeWindow = Duration.standardDays(DateTimeConstants.DAYS_PER_WEEK)
    )
    joinedReader.withSecondaryAggregation(timeFilter) shouldBe a[JoinedAggregateDataReader[_, _]]
  }
}
Example 5
Source File: FeatureAsserts.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.test

import com.salesforce.op.aggregators.{Event, MonoidAggregatorDefaults}
import com.salesforce.op.features.FeatureLike
import com.salesforce.op.features.types.FeatureType
import com.salesforce.op.stages.FeatureGeneratorStage
import com.twitter.algebird.MonoidAggregator
import org.joda.time.Duration
import org.scalatest.Matchers

import scala.reflect.runtime.universe.WeakTypeTag

// NOTE: the enclosing declaration was dropped from the original listing; an
// `object FeatureAsserts extends Matchers` wrapper is assumed here so the snippet compiles.
object FeatureAsserts extends Matchers {

  def assertFeature[I, O <: FeatureType](f: FeatureLike[O])(
    in: I,
    out: O,
    name: String,
    isResponse: Boolean = false,
    aggregator: WeakTypeTag[O] => MonoidAggregator[Event[O], _, O] =
      (wtt: WeakTypeTag[O]) => MonoidAggregatorDefaults.aggregatorOf[O](wtt),
    aggregateWindow: Option[Duration] = None
  )(implicit tti: WeakTypeTag[I], wtt: WeakTypeTag[O]): Unit = {
    f.name shouldBe name
    f.isResponse shouldBe isResponse
    f.parents shouldBe Nil
    f.uid.startsWith(wtt.tpe.dealias.toString.split("\\.").last) shouldBe true
    f.wtt.tpe =:= wtt.tpe shouldBe true
    f.isRaw shouldBe true
    f.typeName shouldBe wtt.tpe.typeSymbol.fullName
    f.originStage shouldBe a[FeatureGeneratorStage[_, _ <: FeatureType]]
    val fg = f.originStage.asInstanceOf[FeatureGeneratorStage[I, O]]
    fg.tti shouldBe tti
    fg.aggregator shouldBe aggregator(wtt)
    fg.extractFn(in) shouldBe out
    fg.extractSource.nonEmpty shouldBe true // TODO we should eval the code here: eval(fg.extractSource)(in)
    fg.getOutputFeatureName shouldBe name
    fg.outputIsResponse shouldBe isResponse
    fg.aggregateWindow shouldBe aggregateWindow
    fg.uid.startsWith(classOf[FeatureGeneratorStage[I, O]].getSimpleName) shouldBe true
  }
}
Example 6
Source File: JsonFormatsTest.scala From courscala with Apache License 2.0
package org.coursera.common.jsonformat

import org.coursera.common.collection.Enum
import org.coursera.common.collection.EnumSymbol
import org.coursera.common.stringkey.StringKey
import org.coursera.common.stringkey.StringKeyFormat
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
import org.joda.time.Duration
import org.joda.time.Instant
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Format
import play.api.libs.json.JsNumber
import play.api.libs.json.JsString
import play.api.libs.json.JsSuccess
import play.api.libs.json.Json

class JsonFormatsTest extends AssertionsForJUnit {

  import JsonFormatsTest._

  @Test
  def stringKey(): Unit = {
    val id = TestId(2, "test")
    val idString = StringKey.stringify(id)
    assert(JsString(idString) === Json.toJson(id))
    assert(JsSuccess(id) === Json.fromJson[TestId](JsString(idString)))
    assert(JsString(s"invalid stuff $idString").validate[TestId].isError)
  }

  @Test
  def enums(): Unit = {
    assertResult(Color.Amber)(JsString("Amber").as[Color])
    assertResult(JsString("Green"))(Json.toJson(Color.Green))
  }

  @Test
  def instant(): Unit = {
    import JsonFormats.Implicits.instantFormat
    val testInstant = new Instant(137)
    assertResult(JsNumber(137))(Json.toJson(testInstant))
    assertResult(Some(testInstant))(Json.parse("137").asOpt[Instant])
  }

  @Test
  def duration(): Unit = {
    import JsonFormats.Implicits.durationFormat
    val testDuration = Duration.millis(137L)
    assertResult(JsNumber(137))(Json.toJson(testDuration))
    assertResult(Some(testDuration))(Json.parse("137").asOpt[Duration])
  }

  @Test
  def dateTime(): Unit = {
    import JsonFormats.Implicits.dateTimeFormat
    val testDatetime = new DateTime(2010, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)
    assertResult(JsNumber(1262304000000L))(Json.toJson(testDatetime))
    assertResult(Some(testDatetime))(Json.parse("1262304000000").asOpt[DateTime].map(_.withZone(DateTimeZone.UTC)))
  }
}

object JsonFormatsTest {

  case class TestId(part1: Int, part2: String)

  object TestId {
    implicit val stringKeyFormat: StringKeyFormat[TestId] =
      StringKeyFormat.caseClassFormat((apply _).tupled, unapply)
    implicit val format: Format[TestId] = JsonFormats.stringKeyFormat[TestId]
  }

  sealed trait Color extends EnumSymbol

  object Color extends Enum[Color] {
    case object Red extends Color
    case object Amber extends Color
    case object Green extends Color

    implicit val format: Format[Color] = JsonFormats.enumFormat(Color)
  }
}
Example 7
Source File: RefreshingSideInputExample.scala From scio with Apache License 2.0
// Example: Demonstrates a streaming job with periodically refreshing side input
// Usage:
// `sbt "scio-examples/runMain com.spotify.scio.examples.extra.RefreshingSideInputExample
// --project=[PROJECT] --runner=[RUNNER] --zone=[ZONE] --input=[PUBSUB_SUBSCRIPTION]"`
package com.spotify.scio.examples.extra

import com.spotify.scio._
import com.spotify.scio.values.WindowOptions
import org.apache.beam.sdk.io.GenerateSequence
import org.apache.beam.sdk.options.StreamingOptions
import org.apache.beam.sdk.transforms.windowing.Window.ClosingBehavior
import org.apache.beam.sdk.transforms.windowing.{AfterPane, Repeatedly}
import org.apache.beam.sdk.values.WindowingStrategy.AccumulationMode
import org.joda.time.{Duration, Instant}
import org.slf4j.LoggerFactory

import scala.util.{Random, Success, Try}

object RefreshingSideInputExample {
  case class LotteryTicket(numbers: Seq[Int])
  case class LotteryResult(
    eventTime: Instant,
    processTime: Instant,
    isWinner: Boolean,
    ticket: Seq[Int],
    winningNumbers: Seq[Int]
  )

  private lazy val logger = LoggerFactory.getLogger(this.getClass)

  private val ticketSize = 5

  def main(cmdlineArgs: Array[String]): Unit = {
    val (sc, args) = ContextAndArgs(cmdlineArgs)
    sc.optionsAs[StreamingOptions].setStreaming(true)

    // An unbounded input that produces a sequence of 5 randomly generated winning lottery numbers,
    // refreshed every 10 seconds. Materialized as a singleton `SideInput`.
    val winningLotteryNumbers = sc
      .customInput(
        "winningLotteryNumbers",
        GenerateSequence
          .from(0)
          .withRate(1, Duration.standardSeconds(10))
      )
      .withFixedWindows(
        duration = Duration.standardSeconds(10),
        offset = Duration.ZERO,
        options = WindowOptions(
          trigger = Repeatedly.forever(AfterPane.elementCountAtLeast(1)),
          accumulationMode = AccumulationMode.DISCARDING_FIRED_PANES,
          closingBehavior = ClosingBehavior.FIRE_IF_NON_EMPTY,
          allowedLateness = Duration.standardSeconds(0)
        )
      )
      .map(_ => Seq.fill(ticketSize)(Random.nextInt(100)))
      // A default is needed in case an empty pane is fired
      .asSingletonSideInput(Seq.fill(ticketSize)(-1))

    // Sample PubSub topic modeling lottery tickets as a comma-separated list of numbers.
    // For example, a message might contain the string "10,7,3,1,9"
    sc.pubsubTopic[String](args("input"))
      .flatMap(toLotteryTicket)
      .withFixedWindows(Duration.standardSeconds(5))
      .withTimestamp
      .withSideInputs(winningLotteryNumbers)
      .map { case ((lotteryTicket, eventTime), side) =>
        val currentWinningNumbers = side(winningLotteryNumbers)

        val isWinner = lotteryTicket.numbers == currentWinningNumbers
        val result = LotteryResult(
          eventTime,
          Instant.now(),
          isWinner,
          lotteryTicket.numbers,
          currentWinningNumbers
        )
        logger.info(s"Lottery result: $result")
      } // Can save output to PubSub, BigQuery, etc.

    sc.run()
    ()
  }

  private def toLotteryTicket(message: String): Option[LotteryTicket] =
    Try(LotteryTicket(message.split(",").map(_.toInt))) match {
      case Success(s) if s.numbers.size == ticketSize => Some(s)
      case _ =>
        logger.error(s"Malformed message: $message")
        None
    }
}
Example 8
Source File: LeaderBoardTest.scala From scio with Apache License 2.0
package com.spotify.scio.examples.complete.game

import com.spotify.scio.examples.complete.game.UserScore.GameActionInfo
import com.spotify.scio.testing._
import org.apache.beam.sdk.transforms.windowing.IntervalWindow
import org.apache.beam.sdk.values.TimestampedValue
import org.joda.time.{Duration, Instant}

class LeaderBoardTest extends PipelineSpec {
  private val allowedLateness = Duration.standardHours(1)
  private val teamWindowDuration = Duration.standardMinutes(20)
  private val baseTime = new Instant(0)

  case class TestUser(user: String, team: String)
  private val redOne = TestUser("scarlet", "red")
  private val redTwo = TestUser("burgundy", "red")
  private val blueOne = TestUser("navy", "blue")
  private val blueTwo = TestUser("sky", "blue")

  private def event(
    user: TestUser,
    score: Int,
    baseTimeOffset: Duration
  ): TimestampedValue[GameActionInfo] = {
    val t = baseTime.plus(baseTimeOffset)
    TimestampedValue.of(GameActionInfo(user.user, user.team, score, t.getMillis), t)
  }

  "LeaderBoard.calculateTeamScores" should "work with on time elements" in {
    // #LeaderBoardTest_example_1
    val stream = testStreamOf[GameActionInfo]
      // Start at the epoch
      .advanceWatermarkTo(baseTime)
      // add some elements ahead of the watermark
      .addElements(
        event(blueOne, 3, Duration.standardSeconds(3)),
        event(blueOne, 2, Duration.standardMinutes(1)),
        event(redTwo, 3, Duration.standardSeconds(22)),
        event(blueTwo, 5, Duration.standardSeconds(3))
      )
      // #LeaderBoardTest_example_1
      // #LeaderBoardTest_example_2
      // The watermark advances slightly, but not past the end of the window
      .advanceWatermarkTo(baseTime.plus(Duration.standardMinutes(3)))
      .addElements(
        event(redOne, 1, Duration.standardMinutes(4)),
        event(blueOne, 2, Duration.standardSeconds(270))
      )
      // The window should close and emit an ON_TIME pane
      .advanceWatermarkToInfinity
    // #LeaderBoardTest_example_2

    // #LeaderBoardTest_example_3
    runWithContext { sc =>
      val teamScores =
        LeaderBoard.calculateTeamScores(sc.testStream(stream), teamWindowDuration, allowedLateness)

      val window = new IntervalWindow(baseTime, teamWindowDuration)
      teamScores should inOnTimePane(window) {
        containInAnyOrder(Seq((blueOne.team, 12), (redOne.team, 4)))
      }
    }
    // #LeaderBoardTest_example_3
  }
}
Example 9
Source File: AutoCompleteTest.scala From scio with Apache License 2.0
package com.spotify.scio.examples.complete

import com.spotify.scio.testing._
import org.joda.time.{Duration, Instant}

class AutoCompleteTest extends PipelineSpec {
  "AutoComplete" should "work" in {
    val data = Seq(
      "apple", "apple", "apricot", "banana",
      "blackberry", "blackberry", "blackberry",
      "blueberry", "blueberry", "cherry"
    )
    val expected = Seq(
      ("a", Map("apple" -> 2L, "apricot" -> 1L)),
      ("ap", Map("apple" -> 2L, "apricot" -> 1L)),
      ("b", Map("blackberry" -> 3L, "blueberry" -> 2L)),
      ("ba", Map("banana" -> 1L)),
      ("bl", Map("blackberry" -> 3L, "blueberry" -> 2L)),
      ("c", Map("cherry" -> 1L)),
      ("ch", Map("cherry" -> 1L))
    )
    runWithContext { sc =>
      val in = sc.parallelize(data)
      for (recursive <- Seq(true, false)) {
        val r = AutoComplete
          .computeTopCompletions(in, 2, recursive)
          .filter(_._1.length <= 2)
          .mapValues(_.toMap)
        r should containInAnyOrder(expected)
      }
    }
  }

  it should "work with tiny input" in {
    val data = Seq("x", "x", "x", "xy", "xy", "xyz")
    val expected = Seq(
      ("x", Map("x" -> 3L, "xy" -> 2L)),
      ("xy", Map("xy" -> 2L, "xyz" -> 1L)),
      ("xyz", Map("xyz" -> 1L))
    )
    runWithContext { sc =>
      val in = sc.parallelize(data)
      for (recursive <- Seq(true, false)) {
        val r = AutoComplete
          .computeTopCompletions(in, 2, recursive)
          .mapValues(_.toMap)
        r should containInAnyOrder(expected)
      }
    }
  }

  it should "work with windowed input" in {
    val data = Seq(
      ("xA", new Instant(1)),
      ("xA", new Instant(1)),
      ("xB", new Instant(1)),
      ("xB", new Instant(2)),
      ("xB", new Instant(2))
    )
    val expected = Seq(
      // window [0, 2)
      ("x", Map("xA" -> 2L, "xB" -> 1L)),
      ("xA", Map("xA" -> 2L)),
      ("xB", Map("xB" -> 1L)),
      // window [1, 3)
      ("x", Map("xA" -> 2L, "xB" -> 3L)),
      ("xA", Map("xA" -> 2L)),
      ("xB", Map("xB" -> 3L)),
      // window [2, 3)
      ("x", Map("xB" -> 2L)),
      ("xB", Map("xB" -> 2L))
    )
    runWithContext { sc =>
      val in = sc.parallelizeTimestamped(data).withSlidingWindows(new Duration(2))
      for (recursive <- Seq(true, false)) {
        val r = AutoComplete
          .computeTopCompletions(in, 2, recursive)
          .mapValues(_.toMap)
        r should containInAnyOrder(expected)
      }
    }
  }
}
Example 10
Source File: WindowedWordCountTest.scala From scio with Apache License 2.0
package com.spotify.scio.examples.extra

import com.spotify.scio.io.TextIO
import com.spotify.scio.testing._
import org.joda.time.{Duration, Instant}

class WindowedWordCountTest extends PipelineSpec {
  private val baseTime = new Instant(0)

  "WindowedWordCount" should "work" in {
    JobTest[com.spotify.scio.examples.WindowedWordCount.type]
      .args(
        "--input=input.txt",
        s"--windowSize=PT0.1S", // 100 ms, in ISO-8601 standard used by Joda for Duration parsing
        "--outputGlobalWindow=true",
        "--output=output.txt"
      )
      .inputStream(
        TextIO("input.txt"),
        testStreamOf[String]
          .advanceWatermarkTo(baseTime)
          .addElements("a b b c")
          .advanceWatermarkTo(baseTime.plus(Duration.millis(150)))
          .addElements("b e")
          .advanceWatermarkToInfinity()
      )
      .output(TextIO("output.txt"))(
        _ should containInAnyOrder(Seq("(a,1)", "(b,2)", "(b,1)", "(c,1)", "(e,1)"))
      )
      .run()
  }
}
Example 11
Source File: SlidingWindowGeneratorTest.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.test.generators.composite

import be.cetic.tsimulus.config.GeneratorFormat
import org.scalatest.{FlatSpec, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.composite.SlidingWindowGenerator
import org.joda.time.{Duration, LocalDateTime}

class SlidingWindowGeneratorTest extends FlatSpec with Matchers {
  val source =
    """
      |{
      |  "name": "window-generator",
      |  "type": "window",
      |  "aggregator": "sum",
      |  "n": 5,
      |  "window-length" : 5000,
      |  "generator": "daily-generator"
      |}
    """.stripMargin

  "A Sliding Window generator" should "be correctly read from a json document" in {
    val generator = SlidingWindowGenerator(source.parseJson)

    generator.name shouldBe Some("window-generator")
    generator.aggregator shouldBe "sum"
    generator.duration shouldBe new Duration(5000)
    generator.generator shouldBe Left("daily-generator")
  }

  it should "be extracted from the global extractor without any error" in {
    noException should be thrownBy GeneratorFormat.read(source.parseJson)
  }

  it should "be correctly extracted from the global extractor" in {
    GeneratorFormat.read(source.parseJson) shouldBe SlidingWindowGenerator(source.parseJson)
  }

  it should "be correctly exported to a json document" in {
    val generator = new SlidingWindowGenerator(
      Some("window-generator"),
      "sum",
      Left("daily-generator"),
      5,
      new Duration(5000)
    )
    generator shouldBe SlidingWindowGenerator(generator.toJson)
  }

  it should "have a correct textual representation" in {
    val generator = new SlidingWindowGenerator(
      Some("window-generator"),
      "sum",
      Left("daily-generator"),
      5,
      new Duration(5000)
    )
    generator.toString shouldBe """SlidingWindow(Some(window-generator), sum, Left(daily-generator), PT5S)"""
  }
}
Example 12
Source File: ElasticsearchIO.scala From scio with Apache License 2.0
package com.spotify.scio.elasticsearch

import java.lang.{Iterable => JIterable}

import com.spotify.scio.values.SCollection
import com.spotify.scio.ScioContext
import com.spotify.scio.io.{EmptyTap, EmptyTapOf, ScioIO, Tap}
import org.elasticsearch.action.DocWriteRequest
import org.apache.beam.sdk.io.{elasticsearch => beam}
import org.apache.beam.sdk.io.elasticsearch.ElasticsearchIO.Write.BulkExecutionException
import org.apache.beam.sdk.transforms.SerializableFunction
import org.joda.time.Duration

import scala.jdk.CollectionConverters._

final case class ElasticsearchIO[T](esOptions: ElasticsearchOptions) extends ScioIO[T] {
  override type ReadP = Nothing
  override type WriteP = ElasticsearchIO.WriteParam[T]
  override val tapT = EmptyTapOf[T]

  override protected def read(sc: ScioContext, params: ReadP): SCollection[T] =
    throw new UnsupportedOperationException("Can't read from Elasticsearch")

  override protected def write(data: SCollection[T], params: WriteP): Tap[Nothing] = {
    val shards = if (params.numOfShards > 0) {
      params.numOfShards
    } else {
      esOptions.nodes.size
    }

    data.applyInternal(
      beam.ElasticsearchIO.Write
        .withNodes(esOptions.nodes.toArray)
        .withFunction(new SerializableFunction[T, JIterable[DocWriteRequest[_]]]() {
          override def apply(t: T): JIterable[DocWriteRequest[_]] = params.f(t).asJava
        })
        .withFlushInterval(params.flushInterval)
        .withNumOfShard(shards)
        .withMaxBulkRequestSize(params.maxBulkRequestSize)
        .withMaxRetries(params.retry.maxRetries)
        .withRetryPause(params.retry.retryPause)
        .withError(new beam.ThrowingConsumer[BulkExecutionException] {
          override def accept(t: BulkExecutionException): Unit = params.errorFn(t)
        })
    )
    EmptyTap
  }

  override def tap(params: ReadP): Tap[Nothing] = EmptyTap
}

object ElasticsearchIO {
  object WriteParam {
    private[elasticsearch] val DefaultErrorFn: BulkExecutionException => Unit = m => throw m
    private[elasticsearch] val DefaultFlushInterval = Duration.standardSeconds(1)
    private[elasticsearch] val DefaultNumShards = 0
    private[elasticsearch] val DefaultMaxBulkRequestSize = 3000
    private[elasticsearch] val DefaultMaxRetries = 3
    private[elasticsearch] val DefaultRetryPause = Duration.millis(35000)
    private[elasticsearch] val DefaultRetryConfig = RetryConfig(
      maxRetries = WriteParam.DefaultMaxRetries,
      retryPause = WriteParam.DefaultRetryPause
    )
  }

  final case class WriteParam[T] private (
    f: T => Iterable[DocWriteRequest[_]],
    errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn,
    flushInterval: Duration = WriteParam.DefaultFlushInterval,
    numOfShards: Long = WriteParam.DefaultNumShards,
    maxBulkRequestSize: Int = WriteParam.DefaultMaxBulkRequestSize,
    retry: RetryConfig = WriteParam.DefaultRetryConfig
  )

  final case class RetryConfig(maxRetries: Int, retryPause: Duration)
}
Example 13
Source File: package.scala From scio with Apache License 2.0
package com.spotify.scio

import com.spotify.scio.coders.Coder
import com.spotify.scio.elasticsearch.ElasticsearchIO.{RetryConfig, WriteParam}
import com.spotify.scio.io.ClosedTap
import com.spotify.scio.values.SCollection
import org.apache.beam.sdk.io.elasticsearch.ElasticsearchIO.Write.BulkExecutionException
import org.apache.http.HttpHost
import org.elasticsearch.action.DocWriteRequest
import org.joda.time.Duration

// NOTE: the enclosing package object and implicit class were dropped from the original
// listing; the wrapper below is a reconstruction so the method has its `self`/`T` context.
package object elasticsearch {
  implicit class ElasticsearchSCollection[T](private val self: SCollection[T]) {

    def saveAsElasticsearch(
      esOptions: ElasticsearchOptions,
      flushInterval: Duration = WriteParam.DefaultFlushInterval,
      numOfShards: Long = WriteParam.DefaultNumShards,
      maxBulkRequestSize: Int = WriteParam.DefaultMaxBulkRequestSize,
      errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn,
      retry: RetryConfig = WriteParam.DefaultRetryConfig
    )(f: T => Iterable[DocWriteRequest[_]])(implicit coder: Coder[T]): ClosedTap[Nothing] = {
      val param = WriteParam(f, errorFn, flushInterval, numOfShards, maxBulkRequestSize, retry)
      self.write(ElasticsearchIO[T](esOptions))(param)
    }
  }
}
Example 14
Source File: ElasticsearchIO.scala From scio with Apache License 2.0
package com.spotify.scio.elasticsearch

import java.lang.{Iterable => JIterable}

import com.spotify.scio.values.SCollection
import com.spotify.scio.ScioContext
import com.spotify.scio.io.{EmptyTap, EmptyTapOf, ScioIO, Tap}
import org.elasticsearch.action.DocWriteRequest
import org.apache.beam.sdk.io.{elasticsearch => beam}
import org.apache.beam.sdk.io.elasticsearch.ElasticsearchIO.Write.BulkExecutionException
import org.apache.beam.sdk.transforms.SerializableFunction
import org.joda.time.Duration

import scala.jdk.CollectionConverters._

final case class ElasticsearchIO[T](esOptions: ElasticsearchOptions) extends ScioIO[T] {
  override type ReadP = Nothing
  override type WriteP = ElasticsearchIO.WriteParam[T]
  override val tapT = EmptyTapOf[T]

  override protected def read(sc: ScioContext, params: ReadP): SCollection[T] =
    throw new UnsupportedOperationException("Can't read from Elasticsearch")

  override protected def write(data: SCollection[T], params: WriteP): Tap[Nothing] = {
    val shards = if (params.numOfShards > 0) {
      params.numOfShards
    } else {
      esOptions.servers.size
    }

    data.applyInternal(
      beam.ElasticsearchIO.Write
        .withClusterName(esOptions.clusterName)
        .withServers(esOptions.servers.toArray)
        .withFunction(new SerializableFunction[T, JIterable[DocWriteRequest[_]]]() {
          override def apply(t: T): JIterable[DocWriteRequest[_]] = params.f(t).asJava
        })
        .withFlushInterval(params.flushInterval)
        .withNumOfShard(shards)
        .withMaxBulkRequestSize(params.maxBulkRequestSize)
        .withMaxRetries(params.retry.maxRetries)
        .withRetryPause(params.retry.retryPause)
        .withError(new beam.ThrowingConsumer[BulkExecutionException] {
          override def accept(t: BulkExecutionException): Unit = params.errorFn(t)
        })
    )
    EmptyTap
  }

  override def tap(params: ReadP): Tap[Nothing] = EmptyTap
}

object ElasticsearchIO {
  object WriteParam {
    private[elasticsearch] val DefaultErrorFn: BulkExecutionException => Unit = m => throw m
    private[elasticsearch] val DefaultFlushInterval = Duration.standardSeconds(1)
    private[elasticsearch] val DefaultNumShards = 0
    private[elasticsearch] val DefaultMaxBulkRequestSize = 3000
    private[elasticsearch] val DefaultMaxRetries = 3
    private[elasticsearch] val DefaultRetryPause = Duration.millis(35000)
    private[elasticsearch] val DefaultRetryConfig = RetryConfig(
      maxRetries = WriteParam.DefaultMaxRetries,
      retryPause = WriteParam.DefaultRetryPause
    )
  }

  final case class WriteParam[T] private (
    f: T => Iterable[DocWriteRequest[_]],
    errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn,
    flushInterval: Duration = WriteParam.DefaultFlushInterval,
    numOfShards: Long = WriteParam.DefaultNumShards,
    maxBulkRequestSize: Int = WriteParam.DefaultMaxBulkRequestSize,
    retry: RetryConfig = WriteParam.DefaultRetryConfig
  )

  final case class RetryConfig(maxRetries: Int, retryPause: Duration)
}
Example 15
Source File: package.scala From scio with Apache License 2.0
package com.spotify.scio

import java.net.InetSocketAddress

import com.spotify.scio.io.ClosedTap
import com.spotify.scio.values.SCollection
import com.spotify.scio.coders.Coder
import com.spotify.scio.elasticsearch.ElasticsearchIO.{RetryConfig, WriteParam}
import org.apache.beam.sdk.io.elasticsearch.ElasticsearchIO.Write.BulkExecutionException
import org.elasticsearch.action.DocWriteRequest
import org.joda.time.Duration

// NOTE: the enclosing package object and implicit class were dropped from the original
// listing; the wrapper below is a reconstruction so the method has its `self`/`T` context.
package object elasticsearch {
  implicit class ElasticsearchSCollection[T](private val self: SCollection[T]) {

    def saveAsElasticsearch(
      esOptions: ElasticsearchOptions,
      flushInterval: Duration = WriteParam.DefaultFlushInterval,
      numOfShards: Long = WriteParam.DefaultNumShards,
      maxBulkRequestSize: Int = WriteParam.DefaultMaxBulkRequestSize,
      errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn,
      retry: RetryConfig = WriteParam.DefaultRetryConfig
    )(f: T => Iterable[DocWriteRequest[_]])(implicit coder: Coder[T]): ClosedTap[Nothing] = {
      val param = WriteParam(f, errorFn, flushInterval, numOfShards, maxBulkRequestSize, retry)
      self.write(ElasticsearchIO[T](esOptions))(param)
    }
  }
}
Example 16
Source File: SCollectionSyntax.scala From scio with Apache License 2.0
package com.spotify.scio.bigtable.syntax

import com.google.bigtable.v2._
import com.google.cloud.bigtable.config.BigtableOptions
import com.google.protobuf.ByteString
import com.spotify.scio.coders.Coder
import com.spotify.scio.io.ClosedTap
import com.spotify.scio.values.SCollection
import org.joda.time.Duration
import com.spotify.scio.bigtable.BigtableWrite

// NOTE: the class header was dropped from the original listing; it is reconstructed
// here from the `new SCollectionMutationOps[T](sc)` call in the trait below.
class SCollectionMutationOps[T <: Mutation](private val self: SCollection[(ByteString, Iterable[T])]) {

  def saveAsBigtable(
    bigtableOptions: BigtableOptions,
    tableId: String,
    numOfShards: Int,
    flushInterval: Duration = BigtableWrite.Bulk.DefaultFlushInterval
  )(implicit coder: Coder[T]): ClosedTap[Nothing] =
    self.write(BigtableWrite[T](bigtableOptions, tableId))(
      BigtableWrite.Bulk(numOfShards, flushInterval)
    )
}

trait SCollectionSyntax {
  implicit def bigtableMutationOps[T <: Mutation](
    sc: SCollection[(ByteString, Iterable[T])]
  ): SCollectionMutationOps[T] = new SCollectionMutationOps[T](sc)
}
Example 17
Source File: PerformanceReporting.scala From Scala-High-Performance-Programming with MIT License
package highperfscala.clientreports.views

import org.joda.time.{Duration, Instant, Interval}

object PerformanceReporting {

  def trend(
    now: () => Instant,
    findOrders: (Interval, Ticker) => List[Order],
    findExecutions: (Interval, Ticker) => List[Execution],
    request: GenerateTradingPerformanceTrend): List[TradingPerformanceTrend] = {

    def periodPnL(duration: Duration): Map[Ticker, PeriodPnL] = {
      val currentTime = now()
      val interval = new Interval(currentTime.minus(duration), currentTime)
      (for {
        ticker <- request.tickers
        orders = findOrders(interval, ticker)
        executions = findExecutions(interval, ticker)
        idToExecPrice = executions.groupBy(_.id).mapValues(es =>
          Price.average(es.map(_.price)))
        signedExecutionPrices = for {
          o <- orders
          if o.clientId == request.clientId
          price <- idToExecPrice.get(o.id).map(p => o match {
            case _: BuyOrder => Price(p.value * -1)
            case _: SellOrder => p
          }).toList
        } yield price
        trend = signedExecutionPrices.foldLeft(PnL.zero) {
          case (pnl, p) => PnL(pnl.value + p.value)
        } match {
          case p if p.value >= PnL.zero.value => PeriodPositive
          case _ => PeriodNegative
        }
      } yield ticker -> trend).toMap
    }

    val tickerToLastHour = periodPnL(Duration.standardHours(1)).mapValues {
      case PeriodPositive => LastHourPositive
      case PeriodNegative => LastHourNegative
    }
    val tickerToLastDay = periodPnL(Duration.standardDays(1)).mapValues {
      case PeriodPositive => LastDayPositive
      case PeriodNegative => LastDayNegative
    }
    val tickerToLastSevenDays = periodPnL(Duration.standardDays(7)).mapValues {
      case PeriodPositive => LastSevenDayPositive
      case PeriodNegative => LastSevenDayNegative
    }

    tickerToLastHour.zip(tickerToLastDay).zip(tickerToLastSevenDays).map({
      case (((t, lastHour), (_, lastDay)), (_, lastSevenDays)) =>
        TradingPerformanceTrend(t, lastHour, lastDay, lastSevenDays)
    }).toList
  }
}
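The periodPnL helper above derives its lookback Interval from a Duration; the following standalone, REPL-style sketch (with illustrative values) isolates just that Interval arithmetic.

import org.joda.time.{Duration, Instant, Interval}

val now = Instant.now()
// Same construction as periodPnL: an interval covering the last hour.
val lastHour = new Interval(now.minus(Duration.standardHours(1)), now)
println(lastHour.toDuration.getStandardMinutes) // 60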
Example 18
Source File: ViewPerformanceReporting.scala From Scala-High-Performance-Programming with MIT License
package highperfscala.clientreports.views

import org.joda.time.{Duration, Instant, Interval}

object ViewPerformanceReporting {

  def trend(
    now: () => Instant,
    findOrders: (Interval, Ticker) => List[Order],
    findExecutions: (Interval, Ticker) => List[Execution],
    request: GenerateTradingPerformanceTrend): List[TradingPerformanceTrend] = {

    def periodPnL(duration: Duration): Map[Ticker, PeriodPnL] = {
      val currentTime = now()
      val interval = new Interval(currentTime.minus(duration), currentTime)
      (for {
        ticker <- request.tickers
        orders = findOrders(interval, ticker)
        executions = findExecutions(interval, ticker)
        idToExecPrice = executions.groupBy(_.id).mapValues(es =>
          Price.average(es.map(_.price)))
        signedExecutionPrices = for {
          o <- orders.view
          if o.clientId == request.clientId
          price <- idToExecPrice.get(o.id).map(p => o match {
            case _: BuyOrder => Price(p.value * -1)
            case _: SellOrder => p
          }).toList
        } yield price
        trend = signedExecutionPrices.foldLeft(PnL.zero) {
          case (pnl, p) => PnL(pnl.value + p.value)
        } match {
          case p if p.value >= PnL.zero.value => PeriodPositive
          case _ => PeriodNegative
        }
      } yield ticker -> trend).toMap
    }

    val tickerToLastHour = periodPnL(Duration.standardHours(1)).mapValues {
      case PeriodPositive => LastHourPositive
      case PeriodNegative => LastHourNegative
    }
    val tickerToLastDay = periodPnL(Duration.standardDays(1)).mapValues {
      case PeriodPositive => LastDayPositive
      case PeriodNegative => LastDayNegative
    }
    val tickerToLastSevenDays = periodPnL(Duration.standardDays(7)).mapValues {
      case PeriodPositive => LastSevenDayPositive
      case PeriodNegative => LastSevenDayNegative
    }

    tickerToLastHour.zip(tickerToLastDay).zip(tickerToLastSevenDays).map({
      case (((t, lastHour), (_, lastDay)), (_, lastSevenDays)) =>
        TradingPerformanceTrend(t, lastHour, lastDay, lastSevenDays)
    }).toList
  }
}
Example 19
Source File: ARMAGenerator.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.config.ARMAModel
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.primary.{ARMA, RandomWalkTimeSeries}
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json._

import scala.util.Random

class ARMAGenerator(name: Option[String],
                    val model: ARMAModel,
                    val origin: LocalDateTime,
                    val timestep: Duration) extends Generator[Double](name, "arma") {

  implicit val armaModelFormat = jsonFormat5(ARMAModel)

  override def timeseries(generators: String => Generator[Any]) =
    RandomWalkTimeSeries(
      ARMA(
        model.phi.getOrElse(Seq()).toArray,
        model.theta.getOrElse(Seq()).toArray,
        model.std,
        model.c,
        model.seed.getOrElse(Random.nextLong())
      ),
      origin,
      timestep
    )

  override def toString = "ARMAGenerator(" + model + ", " + origin + ", " + timestep + ")"

  override def equals(o: Any) = o match {
    case that: ARMAGenerator =>
      that.name == this.name &&
        that.model == this.model &&
        that.timestep == this.timestep &&
        that.origin == this.origin
    case _ => false
  }

  override def toJson: JsValue = {
    val t = Map(
      "type" -> `type`.toJson,
      "origin" -> origin.toJson,
      "model" -> model.toJson,
      "timestep" -> timestep.toJson
    )

    new JsObject(
      name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
    )
  }
}

object ARMAGenerator extends DefaultJsonProtocol with TimeToJson {
  implicit val armaModelFormat = jsonFormat5(ARMAModel)

  def apply(json: JsValue): ARMAGenerator = {
    val fields = json.asJsObject.fields

    val name = fields.get("name")
      .map(f => f match {
        case JsString(x) => x
      })
    val model = fields("model").convertTo[ARMAModel]
    val origin = fields("origin").convertTo[LocalDateTime]
    val timestep = fields("timestep").convertTo[Duration]

    new ARMAGenerator(name, model, origin, timestep)
  }
}
Example 20
Source File: WindowedWordCount.scala From beam-scala-examples with Apache License 2.0
package org.apache.beam.examples

import java.util.concurrent.ThreadLocalRandom

import org.apache.beam.sdk.Pipeline
import org.apache.beam.sdk.io.TextIO
import org.apache.beam.sdk.options._
import org.apache.beam.sdk.transforms.DoFn.ProcessElement
import org.apache.beam.sdk.transforms.windowing.{FixedWindows, Window}
import org.apache.beam.sdk.transforms.{Count, DoFn, MapElements, ParDo}
import org.joda.time.{Duration, Instant}

object WindowedWordCount {

  def main(args: Array[String]): Unit = {
    val options = PipelineOptionsFactory
      .fromArgs(args: _*)
      .withValidation()
      .as(classOf[WindowedWordCountOptions])

    val minTimestamp = new Instant(options.getMinTimestampMillis)
    val maxTimestamp = new Instant(options.getMaxTimestampMillis)

    val pipeline = Pipeline.create(options)

    pipeline.apply("ReadFiles", TextIO.read().from(options.getInputFile))
      .apply(ParDo.of(new AddTimestampFn(minTimestamp, maxTimestamp)))
      .apply(Window.into[String](FixedWindows.of(Duration.standardMinutes(options.getWindowSize))))
      .apply(ParDo.of(new ExtractWords))
      .apply(Count.perElement())
      .apply(MapElements.via(new FormatResult))
      .apply("WriteWords", TextIO.write()
        .to(options.getOutput)
        .withWindowedWrites()
        .withNumShards(options.getNumShards))

    pipeline.run().waitUntilFinish()
  }
}

// ======================================= Options =============================================

trait WindowedWordCountOptions extends WordCountOptions {
  @Description("Fixed window duration, in minutes")
  @Default.Long(1)
  def getWindowSize: Long
  def setWindowSize(value: Long): Unit

  @Description("Minimum randomly assigned timestamp, in milliseconds-since-epoch")
  @Default.InstanceFactory(classOf[DefaultToCurrentSystemTime])
  def getMinTimestampMillis: Long
  def setMinTimestampMillis(value: Long): Unit

  @Description("Maximum randomly assigned timestamp, in milliseconds-since-epoch")
  @Default.InstanceFactory(classOf[DefaultToMinTimestampPlusOneHour])
  def getMaxTimestampMillis: Long
  def setMaxTimestampMillis(value: Long): Unit

  @Description("Fixed number of shards to produce per window, or null for runner-chosen sharding")
  @Default.Integer(1)
  def getNumShards: Integer
  def setNumShards(numShards: Integer): Unit
}

// ======================================== UDFs ================================================

class AddTimestampFn(minTimestamp: Instant, maxTimestamp: Instant) extends DoFn[String, String] {
  @ProcessElement
  def processElement(c: ProcessContext): Unit = {
    val randomTS = new Instant(
      ThreadLocalRandom.current.nextLong(minTimestamp.getMillis, maxTimestamp.getMillis))
    c.outputWithTimestamp(c.element(), new Instant(randomTS))
  }
}

// ====================================== Defaults ==============================================

class DefaultToCurrentSystemTime extends DefaultValueFactory[Long] {
  override def create(options: PipelineOptions) = {
    System.currentTimeMillis()
  }
}

class DefaultToMinTimestampPlusOneHour extends DefaultValueFactory[Long] {
  override def create(options: PipelineOptions): Long = {
    options.as(classOf[WindowedWordCountOptions])
      .getMinTimestampMillis + Duration.standardHours(1).getMillis
  }
}
Example 21
Source File: package.scala From pureconfig with Mozilla Public License 2.0
package pureconfig.module

import org.joda.time.{ DateTimeZone, Duration, Instant, Interval }
import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter }
import pureconfig.{ ConfigConvert, ConfigReader }
import pureconfig.ConfigConvert.{ catchReadError, viaNonEmptyString }

package object joda {
  implicit def instantConfigConvert: ConfigConvert[Instant] =
    ConfigConvert[Long].xmap(new Instant(_), _.getMillis)

  implicit def intervalConfigConvert: ConfigConvert[Interval] =
    viaNonEmptyString[Interval](
      catchReadError(Interval.parseWithOffset), _.toString)

  implicit def durationConfigConvert: ConfigConvert[Duration] =
    viaNonEmptyString[Duration](
      catchReadError(Duration.parse), _.toString)

  implicit def dateTimeFormatterConfigConvert: ConfigReader[DateTimeFormatter] =
    ConfigReader.fromNonEmptyString[DateTimeFormatter](
      catchReadError(DateTimeFormat.forPattern))

  implicit def dateTimeZoneConfigConvert: ConfigConvert[DateTimeZone] =
    viaNonEmptyString[DateTimeZone](
      catchReadError(DateTimeZone.forID), _.getID)
}
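To show these converters in action, here is a hypothetical sketch of loading a joda Duration from configuration. It assumes pureconfig's generic auto-derivation is on the classpath, and the exact load API (loadConfig vs. ConfigSource) varies by pureconfig version.

import com.typesafe.config.ConfigFactory
import org.joda.time.Duration
import pureconfig.generic.auto._
import pureconfig.module.joda._

case class AppConf(timeout: Duration)

// durationConfigConvert delegates to Duration.parse, so ISO-8601 strings work.
val conf = pureconfig.loadConfig[AppConf](ConfigFactory.parseString("""timeout = "PT5S""""))
println(conf) // Right(AppConf(PT5S))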
Example 22
Source File: DateTimeDifferenceTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.dt

import be.cetic.tsimulus.timeseries.TimeSeries
import org.joda.time.{DateTimeZone, LocalDateTime, Duration}

class DateTimeDifferenceTimeSeries(a: TimeSeries[LocalDateTime], b: TimeSeries[LocalDateTime])
  extends TimeSeries[Duration]
{
  override def compute(times: Stream[LocalDateTime]): Stream[(LocalDateTime, Option[Duration])] =
  {
    val z = a.compute(times) zip b.compute(times)

    z.map(s => {
      if (s._1._2.isEmpty || s._2._2.isEmpty) (s._1._1, None)
      else (s._1._1, Some(new Duration(
        s._1._2.get.toDateTime(DateTimeZone.UTC),
        s._2._2.get.toDateTime(DateTimeZone.UTC))))
    })
  }

  override def compute(time: LocalDateTime): Option[Duration] =
  {
    val aTime = a.compute(time)
    val bTime = b.compute(time)

    if (aTime.isEmpty || bTime.isEmpty) None
    else Some(new Duration(aTime.get.toDateTime(DateTimeZone.UTC), bTime.get.toDateTime(DateTimeZone.UTC)))
  }
}
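The UTC conversion in compute is the crux of this class; here is a minimal REPL-style sketch of the same expression with illustrative dates.

import org.joda.time.{DateTimeZone, Duration, LocalDateTime}

val a = new LocalDateTime(2020, 1, 1, 0, 0, 0)
val b = new LocalDateTime(2020, 1, 1, 0, 0, 42)
// Interpret both LocalDateTimes in UTC and take the exact difference.
val d = new Duration(a.toDateTime(DateTimeZone.UTC), b.toDateTime(DateTimeZone.UTC))
println(d) // PT42S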
Example 23
Source File: TransitionTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.composite

import be.cetic.tsimulus.timeseries.TimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.{Duration, LocalDateTime}

case class TransitionTimeSeries[T](first: TimeSeries[T],
                                   second: TimeSeries[T],
                                   time: LocalDateTime,
                                   transition: Option[(Duration, (T, T, Double) => T)])
  extends TimeSeries[T]
{
  override def compute(times: Stream[LocalDateTime]): Stream[(LocalDateTime, Option[T])] =
  {
    val vFirst = first.compute(times)
    val vSecond = second.compute(times)

    (vFirst zip vSecond).map { case (s1, s2) => {
      val t = s1._1
      val v1 = s1._2
      val v2 = s2._2

      (t, process(t, v1, v2))
    }}
  }

  override def compute(time: LocalDateTime): Option[T] =
    process(time, first.compute(time), second.compute(time))

  private def process(t: LocalDateTime, v1: Option[T], v2: Option[T]): Option[T] =
  {
    if (t <= time) v1
    else
    {
      transition match {
        case None => v2
        case Some((duration, f)) => {
          if (t > time + duration) v2
          else // Real mixing
          {
            if (v1.isEmpty) v2
            else if (v2.isEmpty) v1
            else {
              val ratio = new Duration(
                time.toDateTime(DateTimeZone.UTC),
                t.toDateTime(DateTimeZone.UTC)
              ).getMillis / duration.getMillis.toDouble
              Some(f(v1.get, v2.get, ratio))
            }
          }
        }
      }
    }
  }
}
Example 24
Source File: SlidingWindowTimeSeries.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.timeseries.composite

import be.cetic.tsimulus.Utils
import be.cetic.tsimulus.timeseries.{IndependantTimeSeries, TimeSeries}
import org.joda.time.{Duration, LocalDateTime}
import com.github.nscala_time.time.Imports._

case class SlidingWindowTimeSeries[T](base: TimeSeries[T],
                                      duration: Duration,
                                      n: Int,
                                      aggregator: Seq[(Duration, T)] => Option[T])
  extends IndependantTimeSeries[T]
{
  override def compute(time: LocalDateTime): Option[T] =
  {
    val start = time - duration
    val dates = Utils.sampling(start, time, n)
    val values = base.compute(dates).map(v => v match {
      case (l: LocalDateTime, Some(x)) =>
        Some((new Duration(l.toDateTime(DateTimeZone.UTC), time.toDateTime(DateTimeZone.UTC)), x))
      case (l: LocalDateTime, None) => None
    }).flatten.toSeq

    aggregator(values)
  }
}
Example 25
Source File: Series.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.config

import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}

case class Series[T](name: String, generator: Either[String, Generator[Any]], frequency: Duration)
  extends TimeToJson
{
  def toJson: JsValue = {
    val _generator = generator match {
      case Left(s) => s.toJson
      case Right(g) => g.toJson
    }

    new JsObject(Map(
      "name" -> name.toJson,
      "generator" -> _generator,
      "frequency" -> frequency.toJson
    ))
  }
}

object Series extends TimeToJson {
  def apply[T](value: JsValue): Series[T] = {
    val fields = value.asJsObject.fields

    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val frequency = fields("frequency").convertTo[Duration]
    val name = fields("name").convertTo[String]

    Series(name, generator, frequency)
  }
}
Example 26
Source File: DateTimeDifferenceGenerator.scala From TSimulus with Apache License 2.0
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.DateTimeDifferenceTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

class DateTimeDifferenceGenerator(name: Option[String],
                                  val a: Either[String, Generator[LocalDateTime]],
                                  val b: Either[String, Generator[LocalDateTime]])
  extends Generator[Duration](name, "dt::diff")
{
  override def timeseries(generators: String => Generator[Any]) =
  {
    val aTS = Model.generator(generators)(a).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
    val bTS = Model.generator(generators)(b).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]

    new DateTimeDifferenceTimeSeries(aTS, bTS)
  }

  override def toString = s"DateTimeDifferenceGenerator(${a}, ${b})"

  override def equals(o: Any) = o match {
    case that: DateTimeDifferenceGenerator =>
      that.name == this.name && this.a == that.a && this.b == that.b
    case _ => false
  }

  override def toJson: JsValue = {
    val t = Map(
      "type" -> `type`.toJson,
      "a" -> either2json(a),
      "b" -> either2json(b)
    )

    new JsObject(
      name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
    )
  }
}

object DateTimeDifferenceGenerator extends DefaultJsonProtocol with TimeToJson {
  def apply(name: Option[String], a: String, b: String) =
    new DateTimeDifferenceGenerator(name, Left(a), Left(b))
  def apply(name: Option[String], a: String, b: Generator[LocalDateTime]) =
    new DateTimeDifferenceGenerator(name, Left(a), Right(b))
  def apply(name: Option[String], a: Generator[LocalDateTime], b: String) =
    new DateTimeDifferenceGenerator(name, Right(a), Left(b))
  def apply(name: Option[String], a: Generator[LocalDateTime], b: Generator[LocalDateTime]) =
    new DateTimeDifferenceGenerator(name, Right(a), Right(b))

  def apply(json: JsValue): DateTimeDifferenceGenerator =
  {
    val fields = json.asJsObject.fields
    val name = fields.get("name")
      .map(f => f match {
        case JsString(x) => x
      })

    val a = fields("a") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
    }

    val b = fields("b") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
    }

    new DateTimeDifferenceGenerator(name, a, b)
  }
}
Example 29
Source File: YearGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.YearTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._

class YearGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "year") {
  override def timeseries(generators: String => Generator[Any]) = {
    val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
    new YearTimeSeries(ts)
  }

  override def toString = "YearGenerator()"

  override def equals(o: Any) = o match {
    case that: YearGenerator => (that.name == this.name) && (that.base == this.base)
    case _ => false
  }

  override def toJson: JsValue = {
    val t = Map(
      "type" -> `type`.toJson,
      "base" -> either2json(base)
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object YearGenerator extends DefaultJsonProtocol with TimeToJson {
  def apply(name: Option[String], base: String) = new YearGenerator(name, Left(base))
  def apply(name: Option[String], base: Generator[LocalDateTime]) = new YearGenerator(name, Right(base))

  def apply(json: JsValue): YearGenerator = {
    val fields = json.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }

    val base = fields("base") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
    }

    new YearGenerator(name, base)
  }
}
Example 30
Source File: GaussianNoiseGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.config.ARMAModel
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.primary.GaussianNoiseTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._

import scala.util.Random

class GaussianNoiseGenerator(name: Option[String], val seed: Int, val std: Double) extends Generator[Double](name, "gaussian") {
  override def timeseries(generators: String => Generator[Any]) = GaussianNoiseTimeSeries(seed, std)

  override def toString = "GaussianNoise(" + seed + ", " + std + ")"

  override def equals(o: Any) = o match {
    case that: GaussianNoiseGenerator =>
      that.name == this.name &&
      that.seed == this.seed &&
      Math.abs(that.std - this.std) < 0.0001
    case _ => false
  }

  override def toJson: JsValue = {
    val t = Map(
      "type" -> `type`.toJson,
      "seed" -> seed.toJson,
      "std" -> std.toJson
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object GaussianNoiseGenerator extends DefaultJsonProtocol with TimeToJson {
  def apply(json: JsValue): GaussianNoiseGenerator = {
    val fields = json.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val seed = fields("seed").convertTo[Int]
    val std = fields("std").convertTo[Double]

    new GaussianNoiseGenerator(name, seed, std)
  }
}
Example 31
Source File: EventStatsServlet.scala From spark-streaming-demo with Apache License 2.0 | 5 votes |
package com.datastax.examples.meetup

import org.joda.time.{DateTimeZone, DateTime, Duration}
import org.scalatra.scalate.ScalateSupport
import org.scalatra.{CorsSupport, ScalatraServlet}

import scala.concurrent.Await
import scala.concurrent.duration._

import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json._

class EventStatsServlet() extends ScalatraServlet with CorsSupport with JacksonJsonSupport with ScalateSupport {
  protected implicit val jsonFormats: Formats = DefaultFormats

  before() {
    contentType = formats("json")
  }

  options("/*") {
    response.setHeader("Access-Control-Allow-Headers", request.getHeader("Access-Control-Request-Headers"))
  }

  get("/trending") {
    val time = new DateTime(DateTimeZone.UTC)

    // Scan 5-second intervals within the past minute and
    // stop as soon as the first non-empty result is found.
    val result = (for (i <- Stream range (0, 12); v = getTrendingTopics(i, time); if v.nonEmpty) yield v).headOption

    // Order topics by count in descending order and take the top 20.
    result.map(r => r.toIndexedSeq.sortBy(_._2).reverse.take(20))
  }

  get("/countries") {
    val attendeesByCountry = Event.dimensions("attending", "ALL")

    Await.result(attendeesByCountry, 5 seconds)
      .map { case (a, b) => Map("code" -> a.toUpperCase, "value" -> b) }
  }

  get("/") {
    contentType = "text/html"
    layoutTemplate("dashboard.ssp")
  }

  // Round t to the nearest multiple of d.
  def roundDateTime(t: DateTime, d: Duration) = {
    t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis)
  }

  def getTrendingTopics(i: Int, time: DateTime) = {
    val t = roundDateTime(time minusSeconds 5 * i, Duration.standardSeconds(5))
    val trendingTopics = Event.dimensions("trending", "S" + t.toString("yyyyMMddHHmmss"))

    Await.result(trendingTopics, 5 seconds)
  }
}
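roundDateTime snaps a timestamp to the nearest multiple of the given duration, which is how getTrendingTopics aligns its lookups on 5-second bucket keys. A standalone check of the rounding arithmetic, with the helper copied out of the servlet for illustration:

import org.joda.time.{DateTime, DateTimeZone, Duration}

object RoundDemo extends App {
  def roundDateTime(t: DateTime, d: Duration) =
    t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis)

  // 00:00:07 is closer to 00:00:05 than to 00:00:10.
  val t = new DateTime(2020, 1, 1, 0, 0, 7, DateTimeZone.UTC)
  println(roundDateTime(t, Duration.standardSeconds(5)))  // 2020-01-01T00:00:05.000Z
}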
Example 32
Source File: TimeShiftGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.composite.TimeShiftTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}

class TimeShiftGenerator(
  name: Option[String],
  val generator: Either[String, Generator[Any]],
  val shift: Duration) extends Generator[Any](name, "time-shift") with TimeToJson {

  override def timeseries(generators: (String) => Generator[Any]) = {
    val ts = Model.generator(generators)(generator).timeseries(generators)
    TimeShiftTimeSeries(ts, shift)
  }

  override def toString = "TimeShift(" + name + ", " + generator + ", " + shift.getMillis + ")"

  override def equals(o: Any) = o match {
    // The shifted generator must take part in the comparison; the original
    // version only compared the name and the shift.
    case that: TimeShiftGenerator =>
      that.name == this.name &&
      that.generator == this.generator &&
      that.shift == this.shift
    case _ => false
  }

  override def toJson: JsValue = {
    var t = Map(
      "generator" -> either2json(generator),
      "shift" -> DurationFormat.write(shift),
      "type" -> `type`.toJson
    )

    if (name.isDefined) t = t.updated("name", name.toJson)

    new JsObject(t)
  }
}

object TimeShiftGenerator extends DefaultJsonProtocol with TimeToJson {
  def apply(value: JsValue): TimeShiftGenerator = {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map(_.convertTo[String])

    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }

    val shift = fields("shift").convertTo[Duration]

    new TimeShiftGenerator(name, generator, shift)
  }
}
Example 33
Source File: BinaryTransitionGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.TransitionTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{JsObject, JsString, JsValue, _}

class BinaryTransitionGenerator(
  name: Option[String],
  val first: Either[String, Generator[Any]],
  val second: Either[String, Generator[Any]],
  val time: LocalDateTime) extends Generator[Boolean](name, "binary-transition") {

  override def timeseries(generators: (String) => Generator[Any]) = {
    val firstBase = Model.generator(generators)(first).timeseries(generators) match {
      case t: TimeSeries[Boolean] => t
    }

    val secondBase = Model.generator(generators)(second).timeseries(generators) match {
      case t: TimeSeries[Boolean] => t
    }

    TransitionTimeSeries[Boolean](firstBase, secondBase, time, None)
  }

  override def toString = "BinaryTransitionGenerator(" + name + "," + first + "," + second + "," + time + ")"

  override def equals(o: Any) = o match {
    case that: BinaryTransitionGenerator =>
      that.name == this.name &&
      that.first == this.first &&
      that.second == this.second &&
      that.time == this.time
    case _ => false
  }

  override def toJson: JsValue = {
    val _first = either2json(first)
    val _second = either2json(second)

    var t = Map(
      "type" -> `type`.toJson,
      "first" -> _first,
      "second" -> _second,
      "time" -> time.toJson
    )

    if (name.isDefined) t = t.updated("name", name.get.toJson)

    new JsObject(t)
  }
}

object BinaryTransitionGenerator extends TimeToJson {
  def apply(value: JsValue): BinaryTransitionGenerator = {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }

    val first = fields("first") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }

    val second = fields("second") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }

    val time = fields("time").convertTo[LocalDateTime]

    new BinaryTransitionGenerator(name, first, second, time)
  }
}
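A hypothetical configuration document for this generator, following the same conventions as the other readers (the two generator names are placeholders, and the timestamp uses the "YYYY-MM-dd HH:mm:ss.SSS" pattern accepted by TimeToJson):

import be.cetic.tsimulus.generators.composite.BinaryTransitionGenerator
import spray.json._

val source =
  """
    |{
    |  "type": "binary-transition",
    |  "first": "before-generator",
    |  "second": "after-generator",
    |  "time": "2020-01-01 00:00:00.000"
    |}
  """.stripMargin

// Switches from "before-generator" to "after-generator" at the given instant.
val generator = BinaryTransitionGenerator(source.parseJson)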
Example 34
Source File: TimeToJson.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators

import com.github.nscala_time.time.Imports._
import org.joda.time.format.DateTimeFormatterBuilder
import org.joda.time.{Duration, LocalDateTime, LocalTime}
import spray.json.{JsString, JsValue, RootJsonFormat, _}

trait TimeToJson extends DefaultJsonProtocol {
  val dtf = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss.SSS")
  val ttf = DateTimeFormat.forPattern("HH:mm:ss.SSS")

  // Accept timestamps with or without a millisecond part.
  val datetimeFormatter = {
    val parsers = Array(
      DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss.SSS").getParser,
      DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss").getParser
    )
    new DateTimeFormatterBuilder().append(null, parsers).toFormatter()
  }

  val timeFormatter = {
    val parsers = Array(
      DateTimeFormat.forPattern("HH:mm:ss.SSS").getParser,
      DateTimeFormat.forPattern("HH:mm:ss").getParser
    )
    new DateTimeFormatterBuilder().append(null, parsers).toFormatter()
  }

  implicit object LocalDateTimeJsonFormat extends RootJsonFormat[LocalDateTime] {
    def write(d: LocalDateTime) = JsString(dtf.print(d))
    def read(value: JsValue) = value match {
      case JsString(s) => datetimeFormatter.parseLocalDateTime(s)
      case unrecognized => deserializationError(s"unknown LocalDateTime object: $unrecognized")
    }
  }

  implicit object LocalTimeJsonFormat extends RootJsonFormat[LocalTime] {
    def write(t: LocalTime) = JsString(ttf.print(t))
    def read(value: JsValue) = value match {
      case JsString(s) => timeFormatter.parseLocalTime(s)
      case unknown => deserializationError(s"unknown LocalTime object: $unknown")
    }
  }

  // Durations are represented by their length in milliseconds.
  implicit object DurationFormat extends RootJsonFormat[Duration] {
    def write(d: Duration) = d.getMillis.toJson
    def read(value: JsValue) = value match {
      case JsNumber(n) => new Duration(n.toLong)
      case unknown => deserializationError(s"unknown Duration object: $unknown")
    }
  }

  def either2json(element: Either[String, Generator[Any]]) = element match {
    case Left(s) => s.toJson
    case Right(g) => g.toJson
  }
}
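Durations are serialized as plain millisecond counts, so a round-trip is easy to verify. A minimal sketch that mixes the trait into a throwaway object just to bring DurationFormat into scope:

import be.cetic.tsimulus.generators.TimeToJson
import org.joda.time.Duration
import spray.json._

object DurationJsonDemo extends App with TimeToJson {
  val d = Duration.standardMinutes(1)
  val js = DurationFormat.write(d)       // JsNumber(60000)
  println(DurationFormat.read(js) == d)  // true
}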
Example 35
Source File: DateTimeDifferenceTimeSeriesTest.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.test.timeseries.dt

import be.cetic.tsimulus.test.RTSTest
import be.cetic.tsimulus.timeseries.dt.DateTimeDifferenceTimeSeries
import be.cetic.tsimulus.timeseries.primary.NowTimeSeries
import org.joda.time.Duration
import org.scalatest.{FlatSpec, Inspectors, Matchers}

class DateTimeDifferenceTimeSeriesTest extends FlatSpec with Matchers with Inspectors with RTSTest {
  "A DateTimeDifference time series" should "produce a duration of 0 if both base time series are equal" in {
    // The original match expression produced a Boolean that was never asserted;
    // its result must be fed to a matcher for the test to actually check anything.
    val isZero = new DateTimeDifferenceTimeSeries(NowTimeSeries(), NowTimeSeries()).compute(fixedDate) match {
      case Some(x) => x == Duration.ZERO
      case _ => false
    }

    isZero shouldBe true
  }
}
Example 36
Source File: SeriesReaderTest.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.test

import be.cetic.tsimulus.config.Series
import org.joda.time.Duration
import org.scalatest.{FlatSpec, Matchers}
import spray.json._

class SeriesReaderTest extends FlatSpec with Matchers {
  val seriesSource =
    """
      |{
      |  "name": "myName",
      |  "generator": "daily-generator",
      |  "frequency": 60000
      |}
    """.stripMargin

  "A series" should "be correctly read from a json document" in {
    val document = seriesSource.parseJson
    val series = Series(document)

    series.name shouldBe "myName"
    series.generator shouldBe Left("daily-generator")
    series.frequency shouldBe new Duration(60000)
  }

  it should "be correctly exported to a json document" in {
    val series = Series[Any]("myName", Left("daily-generator"), new Duration(60000))
    series shouldBe Series(series.toJson)
  }
}
Example 37
Source File: GaussianNoiseGeneratorTest.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.test.generators.primary

import be.cetic.tsimulus.config.{ARMAModel, GeneratorFormat}
import be.cetic.tsimulus.generators.primary.{ARMAGenerator, GaussianNoiseGenerator}
import org.joda.time.{Duration, LocalDateTime}
import org.scalatest.{FlatSpec, Matchers}
import spray.json._

class GaussianNoiseGeneratorTest extends FlatSpec with Matchers {
  val source =
    """
      |{
      |  "name": "generator",
      |  "type": "gaussian",
      |  "seed": 42,
      |  "std": 0.5
      |}
    """.stripMargin

  "A Gaussian noise generator" should "be correctly read from a json document" in {
    val generator = GaussianNoiseGenerator(source.parseJson)

    generator.name shouldBe Some("generator")
    generator.`type` shouldBe "gaussian"
    generator.seed shouldBe 42
    generator.std should equal (0.5 +- 0.0001)
  }

  it should "be extracted from the global extractor without any error" in {
    noException should be thrownBy GeneratorFormat.read(source.parseJson)
  }

  it should "be correctly extracted from the global extractor" in {
    GeneratorFormat.read(source.parseJson) shouldBe GaussianNoiseGenerator(source.parseJson)
  }

  it should "be correctly exported to a json document" in {
    val generator = new GaussianNoiseGenerator(Some("generator"), 42, 0.5F)
    generator shouldBe GaussianNoiseGenerator(generator.toJson)
  }
}
Example 38
Source File: ARMAGeneratorTest.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.test.generators.primary

import be.cetic.tsimulus.config.{ARMAModel, GeneratorFormat}
import org.joda.time.{Duration, LocalDateTime}
import org.scalatest.{FlatSpec, Matchers}
import spray.json._
import be.cetic.tsimulus.generators.primary.ARMAGenerator

class ARMAGeneratorTest extends FlatSpec with Matchers {
  val source =
    """
      |{
      |  "name": "g3",
      |  "type": "arma",
      |  "model": {
      |    "phi": [1, 2, 3],
      |    "theta": [4, 3, 2, 1],
      |    "std": 0.5,
      |    "c": 4.2,
      |    "seed": 1809
      |  },
      |  "timestep": 180000,
      |  "origin": "2016-01-01 12:34:56.789"
      |}
    """.stripMargin

  "An ARMA generator" should "be correctly read from a json document" in {
    val generator = ARMAGenerator(source.parseJson)

    generator.name shouldBe Some("g3")
    generator.`type` shouldBe "arma"
    generator.timestep shouldBe new Duration(180000)
  }

  it should "be extracted from the global extractor without any error" in {
    noException should be thrownBy GeneratorFormat.read(source.parseJson)
  }

  it should "be correctly extracted from the global extractor" in {
    GeneratorFormat.read(source.parseJson) shouldBe ARMAGenerator(source.parseJson)
  }

  it should "be correctly exported to a json document" in {
    val generator = new ARMAGenerator(
      Some("g3"),
      ARMAModel(Some(Seq(1, 2, 3)), Some(Seq(4, 3, 2, 1)), 0.5, 4.2, Some(1809)),
      new LocalDateTime(2016, 1, 2, 12, 34, 56, 789),
      new Duration(180000)
    )

    generator shouldBe ARMAGenerator(generator.toJson)
  }
}
Example 39
Source File: TimeShiftGeneratorTest.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.test.generators.composite

import be.cetic.tsimulus.config.GeneratorFormat
import org.joda.time.Duration
import spray.json._
import be.cetic.tsimulus.generators.composite.TimeShiftGenerator
import org.scalatest.{FlatSpec, Matchers}

class TimeShiftGeneratorTest extends FlatSpec with Matchers {
  val source =
    """
      |{
      |  "name": "time-shifted-generator",
      |  "type": "time-shift",
      |  "generator": "daily-generator",
      |  "shift": -8000
      |}
    """.stripMargin

  "A time shifted generator" should "be correctly read from a json document" in {
    val generator = TimeShiftGenerator(source.parseJson)

    generator.name shouldBe Some("time-shifted-generator")
    generator.generator shouldBe Left("daily-generator")
    generator.shift shouldBe new Duration(-8000)
  }

  it should "be extracted from the global extractor without any error" in {
    noException should be thrownBy GeneratorFormat.read(source.parseJson)
  }

  it should "be correctly extracted from the global extractor" in {
    GeneratorFormat.read(source.parseJson) shouldBe TimeShiftGenerator(source.parseJson)
  }

  it should "be correctly exported to a json document" in {
    val generator = new TimeShiftGenerator(Some("time-shifted-generator"), Left("daily-generator"), new Duration(-8000))
    generator shouldBe TimeShiftGenerator(generator.toJson)
  }

  it should "have a correct textual representation" in {
    val generator = new TimeShiftGenerator(Some("time-shifted-generator"), Left("daily-generator"), new Duration(-8000))
    generator.toString shouldBe """TimeShift(Some(time-shifted-generator), Left(daily-generator), -8000)"""
  }
}