org.joda.time.Period Scala Examples

The following examples show how to use org.joda.time.Period. Each example names its original project and source file so you can follow it back to the full codebase.
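Before diving in, it helps to recall the core Period API the examples lean on: a Period is a field-based span of time (years, months, days, hours, ...) that can be added to a DateTime and printed with a formatter. A minimal sketch:

import org.joda.time.{DateTime, Period}
import org.joda.time.format.PeriodFormat

val week = Period.days(7)                         // field-based span: exactly 7 calendar days
val nextWeek = new DateTime().plus(week)          // shift an instant by the period
val pretty = PeriodFormat.getDefault.print(week)  // "7 days"
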
Example 1
Source File: Utils.scala    From spark-datetime   with Apache License 2.0
package org.sparklinedata.spark.dateTime

import org.joda.time.{DateTime, Period, Interval}

import scala.collection.mutable.ArrayBuffer


object Utils {

  def intervalToSeq(i : Interval, p : Period) : Seq[DateTime] = {
    import com.github.nscala_time.time.Imports._
    val b = new ArrayBuffer[DateTime]
    var d = i.getStart
    while(d < i.getEnd) {
      b += d
      d = d + p
    }
    b.toSeq
  }
} 
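A hypothetical call, to make the contract explicit: the result is start-inclusive and end-exclusive, stepping by the given period.

import org.joda.time.{DateTime, Interval, Period}

val january = new Interval(new DateTime(2020, 1, 1, 0, 0), new DateTime(2020, 2, 1, 0, 0))
val days = Utils.intervalToSeq(january, Period.days(1))
// days.size == 31; the end instant itself is excluded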
Example 2
Source File: ReceiptRendererTest.scala    From apple-of-my-iap   with MIT License
package com.meetup.iap.receipt

import com.meetup.iap.AppleApi
import AppleApi.{ReceiptResponse, ReceiptInfo}

import org.scalatest.{Matchers, PropSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.joda.time.{Period, DateTime}

class ReceiptRendererTest extends PropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("Renderer should produce valid dates") {
    val purchaseDate = new DateTime().withMillis(0).toDate
    val expiresDate = new DateTime().withMillis(0).plus(Period.days(7)).toDate
    val cancellationDate = new DateTime().withMillis(0).plus(Period.days(3)).toDate

    println(s"Orig purchaseDate: $purchaseDate, $expiresDate, $cancellationDate")

    val transactionId = "10022345304"
    val receiptInfo = ReceiptInfo(
          purchaseDate,
          transactionId,
          transactionId,
          purchaseDate,
          expiresDate,
          "123943451",
          isTrialPeriod = false,
          isInIntroOfferPeriod = None,
          Some(cancellationDate),
          1)

    val json = ReceiptRenderer(ReceiptResponse(None, List(receiptInfo)))
    val response = AppleApi.parseResponse(json)

    response.latestInfo.isDefined should equal (true)
    response.latestInfo.map { info =>
      info.purchaseDate should equal (purchaseDate)
      info.expiresDate should equal (expiresDate)

      info.cancellationDate.isDefined should equal (true)
      info.cancellationDate.map(_ should equal (cancellationDate))
    }
  }
} 
Example 3
Source File: ReceiptGenerator.scala    From apple-of-my-iap   with MIT License
package com.meetup.iap.receipt

import com.meetup.iap.AppleApi
import com.meetup.iap.Plan
import AppleApi.{ReceiptResponse, ReceiptInfo}
import org.joda.time.{DateTime, Period}

object ReceiptGenerator {

  def genEncoding(plan: Plan,
                  existingEncodings: Set[String]): String = {
    def helper: String = {
      val id = java.util.UUID.randomUUID.toString.split("-")
      val id1 = id(0)
      val id2 = id(1)
      val receipt = s"${plan.name}_$id1-$id2"

      if(existingEncodings.contains(receipt)) helper
      else receipt
    }

    helper
  }

  def apply(plan: Plan,
            receiptOrSub: Either[String, Subscription] ): (String, ReceiptInfo) = {

    val purchaseDateTime = new DateTime()
    val purchaseDate = purchaseDateTime.toDate
    val productId = plan.productId
    val transactionId = s"$productId-$purchaseDateTime"
    val expiresDate = calculateEndDate(purchaseDateTime, plan.billInterval, plan.billIntervalUnit).toDate

    val (origPurchaseDate, origTransId, receiptToken) = receiptOrSub match {
      case Left(receipt) =>
        (purchaseDate, transactionId, receipt)
      case Right(subscription) =>
        val orig = subscription.originalReceiptInfo
        val origReceipt = subscription.receiptTokenMap.getOrElse(orig.transactionId, "ERROR_no_receipt_token_found")
        val id = subscription.receiptsList.size
        (orig.purchaseDate, orig.transactionId, f"$origReceipt-${id}%03d")
    }

    (receiptToken, ReceiptInfo(
                    origPurchaseDate,
                    origTransId,
                    transactionId,
                    purchaseDate,
                    expiresDate,
                    productId,
                    cancellationDate = None,
                    isTrialPeriod = false,
                    isInIntroOfferPeriod = None,
                    quantity = 1))
  }

  def apply(sub: Subscription): ReceiptResponse = {
    ReceiptResponse(
      sub.latestReceiptToken,
      sub.receiptsList,
      sub.status)
  }

  def calculateEndDate(startDate: DateTime, interval: Int, intervalUnit: String): DateTime = {
    startDate.plus(getPeriod(intervalUnit, interval))
  }

  private def getPeriod(interval: String, length: Int): Period = {
    interval match {
      case "seconds" => Period.seconds(length)
      case "minutes" => Period.minutes(length)
      case "hours"   => Period.hours(length)
      case "days"    => Period.days(length)
      case "weeks"   => Period.weeks(length)
      case "months"  => Period.months(length)
      case "years"   => Period.years(length)
      case _         => throw new IllegalStateException(s"Could not create period for interval: $interval")
    }
  }
} 
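calculateEndDate and getPeriod can be exercised on their own; a small sketch with made-up values, showing that Joda's month arithmetic clamps the day where needed:

import org.joda.time.DateTime

val start = new DateTime(2020, 1, 31, 0, 0)
ReceiptGenerator.calculateEndDate(start, 1, "months")   // 2020-02-29T00:00 (leap year, day clamped)
ReceiptGenerator.calculateEndDate(start, 45, "seconds") // 2020-01-31T00:00:45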
Example 4
Source File: ConditionEvaluator.scala    From cave   with MIT License
package com.cave.metrics.data.evaluator

import com.cave.metrics.data.influxdb.InfluxClientFactory
import com.cave.metrics.data.{MetricCheckRequest, MetricData, MetricDataBulk}
import org.joda.time.{DateTime, Period}

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}

class ConditionEvaluator(clusterName: Option[String], databaseName: String, request: MetricCheckRequest) extends AbstractEvaluator(request.condition) {

  def evaluate(clientFactory: InfluxClientFactory)(implicit ec: ExecutionContext): Future[Option[MetricDataBulk]] = {

    val fetcher = new DataFetcher(clientFactory)
    val step = Period.minutes(request.interval)
    val dateRange = Iterator.iterate(request.start)(_.plus(step)).takeWhile(!_.isAfter(request.end))

    def evaluateOnce(rangeStartDate: DateTime): Future[Option[MetricData]] = {
      val rangeEndDate = rangeStartDate.plus(step).minusSeconds(1)

      evaluateRange(clusterName, databaseName, rangeEndDate)(fetcher, ec) map {
        case util.Success(value) =>
          Some(MetricData(rangeStartDate, if (value) 1.0 else 0.0))
        case _ => None
      }
    }

    // If the result of one evaluation is None, it means the metric does not exist!
    // In that case, there's no point in evaluating any other dates in the range.
    evaluateOnce(dateRange.next()) flatMap {
      case Some(value) =>
        val results = dateRange map evaluateOnce
        Future.sequence(results) map(seq => Some(MetricDataBulk(value +: seq.flatten.toSeq)))

      case None =>
        Future.successful(None)
    }
  }

  override def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, metricName, metricTags, repeats, delay, end)(ec)

  override def getData(clusterName: Option[String], databaseName: String, metricName: String,
                       metricTags: Map[String, String], duration: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, metricName, metricTags, duration, end)(ec)

  override def getData(clusterName: Option[String], databaseName: String, agg: AggregatedSource, repeats: Int, delay: FiniteDuration, end: DateTime)
                      (implicit fetcher: DataFetcher, ec: ExecutionContext): Future[Option[List[Double]]] =
    fetcher.fetchData(clusterName, databaseName, Aggregator.toInflux(agg.aggregator),
      agg.duration, agg.metricSource.metric, agg.metricSource.tags, repeats, delay, end)(ec)
} 
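The date-range construction above is a handy pattern on its own: Iterator.iterate walks from start to end lazily in fixed Period steps, without materialising the whole range. In isolation, with arbitrary values:

import org.joda.time.{DateTime, Period}

val start = new DateTime(2020, 1, 1, 0, 0)
val end = start.plusHours(1)
val step = Period.minutes(15)
val range = Iterator.iterate(start)(_.plus(step)).takeWhile(!_.isAfter(end))
range.toSeq // 5 instants: 00:00, 00:15, 00:30, 00:45, 01:00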
Example 5
Source File: GraphDataSpec.scala    From cave   with MIT License
package controllers

import org.joda.time.{DateTime, Period}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

class GraphDataSpec extends FlatSpec with Matchers with BeforeAndAfter {

  "Metric Time Range" should "find the first data point timestamp" in {
    val graphs = new Graphs
    val now = new DateTime()

    val period = Period.minutes(3)
    val firstResultTimestampFromDb = now.minusDays(1)
    val startTimeRange = now.minusDays(2)

    val start = graphs.findStartDate(firstResultTimestampFromDb, startTimeRange, period)

    println(s"FIRST RESULT TIME=      $firstResultTimestampFromDb")
    println(s"START TIME RANGE=       $startTimeRange")
    println(s"CALCULATED START TIME=  $start")

    assert(firstResultTimestampFromDb.toDate.getTime >= startTimeRange.toDate.getTime, "FIRST RESULT Start time is NOT after expected Start time range")
    assert(firstResultTimestampFromDb.toDate.getTime >= start.toDate.getTime, "FIRST RESULT Start time is NOT after expected CALCULATED Start time")

    assert(start.toDate.getTime >= startTimeRange.toDate.getTime, "Start time is NOT after expected Start time range")
    assert(start.minus(period).toDate.getTime <= startTimeRange.toDate.getTime, "Start time MINUS PERIOD is NOT before expected Start time")

  }

  it should "find the first data point timestamp when the first DB result timestamp is the same as the beginning of the time range" in {
    val graphs = new Graphs
    val now = new DateTime()

    val period = Period.minutes(3)
    val firstResultTimestampFromDb = now.minusDays(1)
    val startTimeRange = firstResultTimestampFromDb

    val start = graphs.findStartDate(firstResultTimestampFromDb, startTimeRange, period)

    println(s"FIRST RESULT TIME=      $firstResultTimestampFromDb")
    println(s"START TIME RANGE=       $startTimeRange")
    println(s"CALCULATED START TIME=  $start")

    assert(firstResultTimestampFromDb.toDate.getTime >= startTimeRange.toDate.getTime, "FIRST RESULT Start time is NOT after expected Start time range")
    assert(firstResultTimestampFromDb.toDate.getTime >= start.toDate.getTime, "FIRST RESULT Start time is NOT after expected CALCULATED Start time")

    assert(start.toDate.getTime >= startTimeRange.toDate.getTime, "Start time is NOT after expected Start time range")
    assert(start.minus(period).toDate.getTime <= startTimeRange.toDate.getTime, "Start time MINUS PERIOD is NOT before expected Start time")
    assert(start.isEqual(startTimeRange))
  }

  it should "find the first expected result time when the first result date is the same as the start" in {
    val graphs = new Graphs
    val now = new DateTime()

    val period = Period.minutes(3)
    val firstResultTimestampFromDb = now.minusDays(1)
    val startTimeRange = firstResultTimestampFromDb.plus(period)

    val start = graphs.findStartDate(firstResultTimestampFromDb, startTimeRange, period)

    println(s"FIRST RESULT TIME=      $firstResultTimestampFromDb")
    println(s"START TIME RANGE=       $startTimeRange")
    println(s"CALCULATED START TIME=  $start")

    assert(firstResultTimestampFromDb.toDate.getTime >= start.toDate.getTime, "FIRST RESULT Start time is NOT after expected CALCULATED Start time")
    assert(start.minus(period).toDate.getTime <= startTimeRange.toDate.getTime, "Start time MINUS PERIOD is NOT before expected Start time")
    assert(start.isEqual(firstResultTimestampFromDb))
  }

} 
Example 6
Source File: TwitterBatchTimely.scala    From Mastering-Spark-for-Data-Science   with MIT License
package io.gzet.timeseries

import java.sql.Timestamp

import com.cloudera.sparkts.{DateTimeIndex, TimeSeriesRDD}
import io.gzet.timeseries.timely.MetricImplicits._
import io.gzet.timeseries.timely.TimelyImplicits._
import io.gzet.timeseries.twitter.Twitter._
import io.gzet.utils.spark.accumulo.AccumuloConfig
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.joda.time.{DateTime, Minutes, Period}

object TwitterBatchTimely extends SimpleConfig {

  case class Observation(
                          hashtag: String,
                          time: Timestamp,
                          count: Double
                        )

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("Twitter Extractor")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val twitterJsonRDD = sc.textFile("file:///Users/antoine/CHAPTER/twitter-trump", 500)
    val tweetRDD = twitterJsonRDD mapPartitions analyzeJson cache()

    // Publish metrics to Timely
    tweetRDD.count()
    tweetRDD.countByState.publish()
    tweetRDD.sentimentByState.publish()

    // Read metrics from Timely
    val conf = AccumuloConfig("GZET", "alice", "alice", "localhost:2181")
    val metricsRDD = sc.timely(conf, Some("io.gzet.count"))

    val minDate = metricsRDD.map(_.time).min()
    val maxDate = metricsRDD.map(_.time).max()

    class TwitterFrequency(val minutes: Int) extends com.cloudera.sparkts.PeriodFrequency(Period.minutes(minutes)) {
      def difference(dt1: DateTime, dt2: DateTime): Int = Minutes.minutesBetween(dt1, dt2).getMinutes / minutes
      override def toString: String = s"minutes $minutes"
    }

    val dtIndex = DateTimeIndex.uniform(minDate, maxDate, new TwitterFrequency(1))

    val metricsDF = metricsRDD.filter({
      metric =>
        metric.tags.keys.toSet.contains("tag")
    }).flatMap({
      metric =>
        metric.tags map {
          case (k, v) =>
            ((v, roundFloorMinute(metric.time, 1)), metric.value)
        }
    }).reduceByKey(_+_).map({
      case ((metric, time), sentiment) =>
        Observation(metric, new Timestamp(time), sentiment)
    }).toDF()

    val tsRDD = TimeSeriesRDD.timeSeriesRDDFromObservations(dtIndex, metricsDF, "time", "hashtag", "count").filter(_._2.toArray.exists(!_.isNaN))

  }

  def roundFloorMinute(time: Long, windowMinutes: Int) = {
    val dt = new DateTime(time)
    dt.withMinuteOfHour((dt.getMinuteOfHour / windowMinutes) * windowMinutes).minuteOfDay().roundFloorCopy().toDate.getTime
  }

} 
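roundFloorMinute floors an epoch timestamp to the start of its minute window; an illustrative call (default time zone):

import org.joda.time.DateTime

val t = new DateTime(2020, 1, 1, 12, 34, 56).getMillis
new DateTime(TwitterBatchTimely.roundFloorMinute(t, 5)) // 2020-01-01T12:30:00.000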
Example 7
Source File: JodaLocalDateGenerators.scala    From scalacheck-ops   with Apache License 2.0
package org.scalacheck.ops.time.joda

import org.joda.time.chrono.ISOChronology
import org.joda.time.{Chronology, LocalDate, Period, ReadablePeriod}
import org.scalacheck.Gen
import org.scalacheck.ops.time.AbstractTimeGenerators

sealed trait JodaLocalDateGenerators extends AbstractTimeGenerators {
  override type InstantType = LocalDate
  override type DurationType = ReadablePeriod
  override type ParamsType = Chronology

  override def defaultParams: Chronology = ISOChronology.getInstanceUTC

  override val defaultRange: ReadablePeriod = Period.years(1)

  override protected[time] def now(implicit params: Chronology): LocalDate = LocalDate.now(params)

  override protected[time] def addToCeil(
    instant: LocalDate,
    duration: ReadablePeriod
  )(implicit params: Chronology): LocalDate = {
    instant plus duration
  }

  override protected[time] def subtractToFloor(
    instant: LocalDate,
    duration: ReadablePeriod
  )(implicit params: Chronology): LocalDate = {
    instant minus duration
  }

  override def between(start: LocalDate, end: LocalDate)(implicit params: Chronology): Gen[LocalDate] = {
    val startYear = start.getYear
    val endYear = end.getYear
    for {
      year <- Gen.choose(startYear, endYear)
      monthOfYear <- {
        val lo = if (year == startYear) start.getMonthOfYear else params.monthOfYear.getMinimumValue
        val hi = if (year == endYear) end.getMonthOfYear else params.monthOfYear.getMaximumValue
        Gen.choose(lo, hi)
      }
      dayOfMonth <- {
        val lo = if (year == startYear && monthOfYear == start.getMonthOfYear) start.getDayOfMonth
          else params.dayOfMonth.getMinimumValue
        // cap at the actual length of this month rather than the global maximum
        val hi = if (year == endYear && monthOfYear == end.getMonthOfYear) end.getDayOfMonth
          else new LocalDate(year, monthOfYear, 1, params).dayOfMonth.getMaximumValue
        Gen.choose(lo, hi)
      }
    } yield new LocalDate(year, monthOfYear, dayOfMonth, params)
  }
}

object JodaLocalDateGenerators extends JodaLocalDateGenerators 
Example 8
Source File: DruidEventBeam.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.druid.util

import com.metamx.tranquility.beam.{Beam, ClusteredBeamTuning}
import com.metamx.tranquility.druid.{DruidBeams, DruidLocation, DruidRollup, SpecificDruidDimensions}
import com.metamx.tranquility.spark.BeamFactory
import com.metamx.tranquility.typeclass.Timestamper
import io.druid.data.input.impl.TimestampSpec
import org.apache.curator.framework.CuratorFrameworkFactory
import org.apache.curator.retry.BoundedExponentialBackoffRetry
import org.joda.time.{DateTime, DateTimeZone, Period}

import com.paypal.gimel.druid.conf.DruidClientConfiguration


object DruidEventBeam {

  // Minimal reconstruction: the original object declaration was truncated in
  // this listing. makeBeam below calls init(config) before touching
  // BeamInstance, so druidConfig is set exactly once.
  private var druidConfig: DruidClientConfiguration = _

  def init(config: DruidClientConfiguration): Unit = {
    if (druidConfig == null) {
      druidConfig = config
    }
  }

  // Assumed Timestamper (also elided in the listing): tells Tranquility where
  // each event's ISO timestamp lives.
  implicit val timestamper: Timestamper[Map[String, Any]] =
    new Timestamper[Map[String, Any]] {
      override def timestamp(event: Map[String, Any]): DateTime =
        new DateTime(event(druidConfig.timestamp_field), DateTimeZone.UTC)
    }

  lazy val BeamInstance: Beam[Map[String, Any]] = {

    // Tranquility uses ZooKeeper (through Curator framework) for coordination.
    val curator = CuratorFrameworkFactory.newClient(
      druidConfig.zookeeper,
      new BoundedExponentialBackoffRetry(100, 3000, 5)
    )
    curator.start()

    // Transforms List[DruidDimensions] from the DruidClientConfiguration to List[String]
    val dimensions = druidConfig
      .dimensions
      .map(_.name)

    // Transforms List[DruidMetrics] from the DruidClientConfiguration to List[AggregatorFactory]
    val aggregators = druidConfig
      .metrics
      .map(_.getAggregator)

    // Building a Druid Beam
    DruidBeams
      .builder()
      .curator(curator)
      .discoveryPath(druidConfig.discoveryPath)
      .location(DruidLocation.create(druidConfig.indexService, druidConfig.datasource))
      .rollup(DruidRollup(SpecificDruidDimensions(dimensions),
        aggregators, DruidUtility.fetchQueryGranularity(druidConfig.queryGranularity)))
      .tuning(
        ClusteredBeamTuning (
          segmentGranularity = druidConfig.segmentGranularity,
          windowPeriod = new Period(druidConfig.windowPeriod),
          partitions = druidConfig.numPartitions, replicants = druidConfig.numReplicants
        )
      )
      .timestampSpec(new TimestampSpec(druidConfig.timestamp_field, "iso", null))
      .buildBeam()
  }
}

class DruidEventBeam(config: DruidClientConfiguration) extends BeamFactory[Map[String, Any]] {
  // Return a singleton, so the same connection is shared across all tasks in the same JVM.
  def makeBeam: Beam[Map[String, Any]] = {
    DruidEventBeam.init(config)
    DruidEventBeam.BeamInstance
  }
} 
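With the factory defined, an RDD of event maps can be pushed to Druid through Tranquility's BeamRDD syntax; a sketch under that assumption (events and config are placeholders):

import com.metamx.tranquility.spark.BeamRDD._
import org.apache.spark.rdd.RDD

def publish(events: RDD[Map[String, Any]], config: DruidClientConfiguration): Unit =
  events.propagate(new DruidEventBeam(config)) // every task in the JVM reuses the shared beam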
Example 9
Source File: TimeFormats.scala    From shellbase   with Apache License 2.0
package com.sumologic.shellbase.timeutil

import org.joda.time.format.{PeriodFormatter, PeriodFormatterBuilder}
import org.joda.time.{Period, PeriodType}
import org.slf4j.LoggerFactory

import scala.util.control.NonFatal

object TimeFormats {

  private val _logger = LoggerFactory.getLogger(getClass)

  private val tersePeriodFormatter = new PeriodFormatterBuilder().
    appendYears().appendSuffix("y").
    appendDays().appendSuffix("d").
    appendHours().appendSuffix("h").
    appendMinutes().appendSuffix("m").
    appendSeconds().appendSuffix("s").
    toFormatter

  private val compactPeriodFormatterWithMs = new PeriodFormatterBuilder().
    appendDays().appendSuffix("d ").
    appendHours().appendSuffix("h ").
    appendMinutes().appendSuffix("m ").
    appendSeconds().appendSuffix("s ").
    appendMillis().appendSuffix("ms ").
    toFormatter

  val errorString = "<error>"

  private def format(formatter: PeriodFormatter, period: Long) = {
    formatter.print(new Period(period, PeriodType.yearDayTime()).normalizedStandard(PeriodType.dayTime()))
  }

  def formatWithMillis(period: Long): String = if (period == 0) "0" else format(compactPeriodFormatterWithMs, period).trim

  def formatAsTersePeriod(period: Long): String = {
    try {
      if (period == 0) {
        "0"
      } else if (period < 1000) {
        formatWithMillis(period)
      } else {
        format(tersePeriodFormatter, period)
      }
    } catch {
      case NonFatal(e) =>
        _logger.error(s"Failed to parse period: $period", e)
        errorString
    }
  }

  def parseTersePeriod(period: String): Option[Long] = {
    if (period == null || period.trim.isEmpty) {
      return None
    }

    try {
      Some(tersePeriodFormatter.parsePeriod(period).toStandardDuration.getMillis)
    } catch {
      case iae: IllegalArgumentException => {
        try {
          Some(period.toLong)
        } catch {
          case nfe: NumberFormatException => None
        }
      }
    }
  }

} 
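For illustration, the two directions round-trip (90061000 ms is one day, one hour, one minute and one second):

TimeFormats.formatAsTersePeriod(90061000L)   // "1d1h1m1s"
TimeFormats.parseTersePeriod("1d1h1m1s")     // Some(90061000)
TimeFormats.parseTersePeriod("90061000")     // Some(90061000), plain-millis fallback
TimeFormats.parseTersePeriod("not a period") // None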
Example 10
Source File: TimeUtils.scala    From mimir   with Apache License 2.0
package mimir.util

import com.github.nscala_time.time.Imports._
import mimir.algebra._
import org.joda.time.{DateTime, Seconds, Days, Period}


object TimeUtils {

  def getDaysBetween(start: PrimitiveValue, end: PrimitiveValue): Long = 
  {
    val startDT = start.asDateTime
    val endDT = end.asDateTime
    if(startDT.isBefore(endDT))
      Days.daysBetween(startDT, endDT).getDays()
    else
      Days.daysBetween(endDT, startDT).getDays()
  }

  def getSecondsBetween(start: PrimitiveValue, end: PrimitiveValue): Long = 
  {
    val startDT = start.asDateTime
    val endDT = end.asDateTime
    if(startDT.isBefore(endDT))
      Seconds.secondsBetween(startDT, endDT).getSeconds()
    else
      Seconds.secondsBetween(endDT, startDT).getSeconds()
  }

  
} 
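Days.daysBetween and Seconds.secondsBetween are signed, which is what the branches above normalise away; a quick illustration:

import org.joda.time.{DateTime, Days}

val a = new DateTime(2020, 1, 1, 0, 0)
val b = new DateTime(2020, 1, 11, 0, 0)
Days.daysBetween(a, b).getDays // 10
Days.daysBetween(b, a).getDays // -10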
Example 11
Source File: Calculate.scala    From nisp-frontend   with Apache License 2.0
package uk.gov.hmrc.nisp.utils

import org.joda.time.{LocalDate, Period}
import uk.gov.hmrc.nisp.models.{SPChartModel, StatePensionAmount}

object Calculate {

  def calculateChartWidths(current: StatePensionAmount, forecast: StatePensionAmount, personalMaximum: StatePensionAmount): (SPChartModel, SPChartModel, SPChartModel) = {
    // scalastyle:off magic.number
    if (personalMaximum.weeklyAmount > forecast.weeklyAmount) {
      val currentChart = SPChartModel((current.weeklyAmount / personalMaximum.weeklyAmount * 100).toInt.max(Constants.chartWidthMinimum), current)
      val forecastChart = SPChartModel((forecast.weeklyAmount / personalMaximum.weeklyAmount * 100).toInt.max(Constants.chartWidthMinimum), forecast)
      val personalMaxChart = SPChartModel(100, personalMaximum)
      (currentChart, forecastChart, personalMaxChart)
    } else {
      if (forecast.weeklyAmount > current.weeklyAmount) {
        val currentPercentage = (current.weeklyAmount / forecast.weeklyAmount * 100).toInt
        val currentChart = SPChartModel(currentPercentage.max(Constants.chartWidthMinimum), current)
        val forecastChart = SPChartModel(100, forecast)
        (currentChart, forecastChart, forecastChart)
      } else {
        val currentChart = SPChartModel(100, current)
        val forecastChart = SPChartModel((forecast.weeklyAmount / current.weeklyAmount * 100).toInt, forecast)
        (currentChart, forecastChart, forecastChart)
      }
    }
  }

  def calculateAge(dateOfBirth: LocalDate, currentDate: LocalDate): Int = {
    new Period(dateOfBirth, currentDate).getYears
  }

} 
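calculateAge counts whole years between the two dates, so a birthday that has not yet arrived is excluded; for example:

import org.joda.time.LocalDate

Calculate.calculateAge(new LocalDate(1990, 6, 15), new LocalDate(2020, 6, 14)) // 29
Calculate.calculateAge(new LocalDate(1990, 6, 15), new LocalDate(2020, 6, 15)) // 30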
Example 12
Source File: AppRFMClient.scala    From frees-rpc-workshop   with Apache License 2.0
package scalaexchange
package app

import cats.effect.IO
import freestyle.rpc.protocol.Empty
import freestyle.asyncCatsEffect.implicits._
import freestyle.rpc.client.implicits._
import monix.reactive.Observable

import scala.concurrent.Await
import scala.concurrent.duration._
import scalaexchange.services.protocol._

object AppRFMClient extends Implicits {

  def main(args: Array[String]): Unit = {

    implicit val rfmClient: RFMAnalysisService.Client[IO] =
      RFMAnalysisService.client[IO](channel)

    val (segments: IO[SegmentList], stream: Observable[UserEvent], ack: IO[Ack]) =
      (
        rfmClient.segments(Empty),
        rfmClient.userEvents(Empty),
        rfmClient.orderStream(ordersStreamObs)
      )

    println(s"Segments: \n${segments.unsafeRunSync().list.mkString("\n")}\n")
    println(s"Client Streaming: \n${ack.unsafeRunSync()}\n")
    Await.ready(
      stream
        .map { u =>
          println(u)
          u
        }
        .completedL
        .runAsync,
      Duration.Inf)
  }

  private[this] def ordersStreamObs: Observable[Order] = {
    val orderList: List[Order] = (1 to 1000).map { customerId =>
      import com.fortysevendeg.scalacheck.datetime.GenDateTime
      import org.joda.time.{DateTime, Period}
      import org.scalacheck._
      import com.fortysevendeg.scalacheck.datetime.instances.joda.jodaForPeriod

      (for {
        date    <- GenDateTime.genDateTimeWithinRange(DateTime.parse("2017-12-01"), Period.days(22))
        orderId <- Gen.uuid
        total   <- Gen.choose[Int](5, 200)
      } yield
        Order(
          customerId,
          CustomerData(date.toString, orderId.toString, total)
        )).sample.get
    }.toList

    Observable.fromIterable(orderList)
  }

} 
Example 13
Source File: UserEventProducer.scala    From frees-rpc-workshop   with Apache License 2.0
package scalaexchange
package datagenerator

import com.fortysevendeg.scalacheck.datetime.GenDateTime
import com.fortysevendeg.scalacheck.datetime.instances.joda.jodaForPeriod
import monix.execution.Cancelable
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import org.joda.time.{DateTime, Period}

import scala.concurrent.duration.FiniteDuration


class UserEventProducer(interval: FiniteDuration) extends Observable[UserEvent] {

  override def unsafeSubscribeFn(subscriber: Subscriber[UserEvent]): Cancelable = {

    val userEventRandom: Observable[UserEvent] =
      Observable
        .fromStateAction(eventsGen)(Nil)
        .flatMap { a =>
          Observable.now(a).delaySubscription(interval)
        }

    userEventRandom.drop(1).unsafeSubscribeFn(subscriber)
  }

  private[this] def eventsGen(initialState: List[UserEvent]): (UserEvent, List[UserEvent]) = {

    import org.scalacheck._

    val dataGen: Arbitrary[UserEvent] = Arbitrary {
      import Gen._
      for {
        id        <- choose(1, 100)
        eventType <- Gen.oneOf(List(ProcessedCheckout, UnprocessedCheckout, Login))
        date      <- GenDateTime.genDateTimeWithinRange(DateTime.parse("2017-12-01"), Period.days(22))
      } yield UserEvent(id, eventType, date.toString())
    }

    val newEvent: UserEvent = dataGen.arbitrary.sample
      .getOrElse(throw DataGenerationException("Exception creating new random event"))

    (newEvent, initialState :+ newEvent)
  }
} 
Example 14
Source File: Duration.scala    From donut   with MIT License
package report.donut.gherkin.model

import org.joda.time.format.PeriodFormatterBuilder
import org.joda.time.{DateTime, Period}


case class Duration(duration: Long, durationStr: String)

object Duration {

  def apply(duration:Long): Duration = {
    Duration(duration, calculateTotalDurationStr(duration))
  }

  def calculateTotalDuration(durations: List[Long]) = durations.sum

  def calculateTotalDurationStr(durations: List[Long]): String = formatDuration(calculateTotalDuration(durations))
  def calculateTotalDurationStr(duration: Long): String = formatDuration(duration)

  def formatDuration(duration: Long) = {
    val formatter = new PeriodFormatterBuilder()
      .appendDays()
      .appendSuffix(" day", " days")
      .appendSeparator(" and ")
      .appendHours()
      .appendSuffix(" hour", " hours")
      .appendSeparator(" and ")
      .appendMinutes()
      .appendSuffix(" min", " mins")
      .appendSeparator(" and ")
      .appendSeconds()
      .appendSuffix(" sec", " secs")
      .appendSeparator(" and ")
      .appendMillis()
      .appendSuffix(" ms", " ms")
      .toFormatter
    formatter.print(new Period(0, duration / 1000000))
  }

  def calculateDurationFrom2Strings(startDateTime: String, endDateTime: String): Long = {
    val start = DateTime.parse(startDateTime).getMillis
    val end = DateTime.parse(endDateTime).getMillis
    (end - start) * 1000000
  }
} 
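Durations here are carried as nanoseconds (hence the / 1000000 before building the Period and the * 1000000 when diffing timestamps); an illustrative value:

Duration(90061000L * 1000000L).durationStr
// "1 day and 1 hour and 1 min and 1 sec"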
Example 15
Source File: FavouriteSongSuite.scala    From PureCSV   with Apache License 2.0
import org.apache.spark.SparkContext
import org.joda.time.Period
import org.scalatest.{Matchers, FunSuite}
import purecsv.example.favouritesong.FavouriteSong
import purecsv.example.favouritesong.FavouriteSong.periodStringConverter
import purecsv.safe.converter.StringConverter


class FavouriteSongSuite extends FunSuite with Matchers {

  test("JodaTime Period can be read from String") {
    val period = StringConverter[Period].from("P0000-00-00T00:04:53")
    period.getYears   should be (0)
    period.getMonths  should be (0)
    period.getDays    should be (0)
    period.getHours   should be (0)
    period.getMinutes should be (4)
    period.getSeconds should be (53)
  }

  test("FavouriteSong returns the songs with highest " +
       "like per artist and the number of broken records") {

    // 3 records with 1 broken (Human Nature)
    val rawRecords = Seq(
        "Billie Jean,Michael Jackson,Thriller,P0000-00-00T00:04:53,6430000"
      , "Human Nature,Michael Jackson,Thriller,P012,"
      , "Thriller,Michael Jackson,Thriller,P0000-00-00T00:05:59,5700000"
      )

    val sc = new SparkContext("local[2]", "test favourite song")
    val rawSongs = sc.parallelize(rawRecords)
    val result = FavouriteSong(rawSongs)

    // one record is broken
    result.brokenRecords should be (1)

    // the top song for Michael Jackson is Billie Jean
    result.artistToBestSong should contain theSameElementsAs(Seq("Michael Jackson" -> "Billie Jean"))
  }

} 
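The "P0000-00-00T00:04:53" strings use Joda's alternate-extended ISO period layout, so a plausible shape for the imported periodStringConverter (an assumption; the project's actual implementation is not shown in this listing) is ISOPeriodFormat-based:

import org.joda.time.format.ISOPeriodFormat

val parsed = ISOPeriodFormat.alternateExtended().parsePeriod("P0000-00-00T00:04:53")
parsed.getMinutes // 4
parsed.getSeconds // 53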
Example 16
Source File: FavouriteSong.scala    From PureCSV   with Apache License 2.0
package purecsv.example.favouritesong

import au.com.bytecode.opencsv.CSVParser
import org.apache.spark.Accumulator
import org.apache.spark.AccumulatorParam.LongAccumulatorParam
import org.apache.spark.rdd.RDD
import org.joda.time.Period
import org.joda.time.format.ISOPeriodFormat
import purecsv.safe._
import purecsv.safe.converter.{RawFieldsConverter, StringConverter}

import scala.util.{Failure, Success, Try}


case class Song(title: String, artist: String, album: String, length: Period, likes: Int)


object FavouriteSong {

  // Minimal reconstruction of the truncated object header. The original also
  // defines the implicit periodStringConverter (imported by the test suite)
  // and an apply(RDD) entry point; both are elided in this listing.
  private val csvParser = new CSVParser()

  def tryParseSong(brokenAcc: Accumulator[Long], str: String): Option[Song] = {
    // we try to parse the record raw events and then, if successful, we try to convert
    // the raw fields to a Song instance
    val errorOrSong = Try(csvParser.parseLine(str)).flatMap(rawFields => RawFieldsConverter[Song].tryFrom(rawFields))

    errorOrSong match {
      case Success(song) => Some(song)
      case Failure(error) => { brokenAcc += 1; None }
    }
  }

} 
Example 17
Source File: KustoAzureFsSetupCache.scala    From azure-kusto-spark   with Apache License 2.0
package com.microsoft.kusto.spark.utils
import org.joda.time.{DateTime, DateTimeZone, Period}
import scala.collection.mutable.Map

private[kusto] object KustoAzureFsSetupCache {
  private var storageAccountKeyMap: Map[String, String] = Map.empty[String,String]
  private var storageSasMap: Map[String, String] = Map.empty[String,String]
  private var nativeAzureFsSet = false
  private var lastRefresh: DateTime = new DateTime(DateTimeZone.UTC)

  // Return 'true' iff the entry exists in the cache. If it doesn't, or differs - update the cache
  // now is typically 'new DateTime(DateTimeZone.UTC)'
  def updateAndGetPrevStorageAccountAccess(account: String, secret: String, now: DateTime): Boolean = {
    var secretCached = storageAccountKeyMap.getOrElse(account, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageAccountKeyMap.remove(account)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageAccountKeyMap.put(account, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevSas(container: String, account: String, secret: String, now: DateTime): Boolean = {
    val key = container + "." + account
    var secretCached = storageSasMap.getOrElse(key, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageSasMap.remove(key)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageSasMap.put(key, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevNativeAzureFs(now: DateTime): Boolean = {
    if (nativeAzureFsSet || checkIfRefreshNeeded(now)) true else {
      nativeAzureFsSet = true
      false
    }
  }

  private[kusto] def checkIfRefreshNeeded(utcNow: DateTime) = {
    // Measure forward from the last refresh; with the arguments reversed the
    // period is negative and the threshold would never trigger.
    new Period(lastRefresh, utcNow).getMinutes > KustoConstants.sparkSettingsRefreshMinutes
  }
} 
Example 18
Source File: ContainerProvider.scala    From azure-kusto-spark   with Apache License 2.0
package com.microsoft.kusto.spark.utils

import com.microsoft.azure.kusto.data.Client
import com.microsoft.kusto.spark.utils.CslCommandsGenerator.generateCreateTmpStorageCommand
import com.microsoft.kusto.spark.utils.{KustoDataSourceUtils => KDSU}

import org.joda.time.{DateTime, DateTimeZone, Period}

import scala.collection.JavaConverters._

class ContainerProvider[A](val dmClient: Client, val clusterAlias: String, val command: String, cacheEntryCreator: ContainerAndSas => A) {
  private var roundRobinIdx = 0
  private var storageUris: Seq[A] = Seq.empty
  private var lastRefresh: DateTime = new DateTime(DateTimeZone.UTC)
  private val myName = this.getClass.getSimpleName

  def getContainer: A = {
    // Refresh if storageExpiryMinutes have passed since last refresh for this cluster as SAS should be valid for at least 120 minutes
    if (storageUris.isEmpty ||
      new Period(lastRefresh, new DateTime(DateTimeZone.UTC)).getMinutes > KustoConstants.storageExpiryMinutes) {
      refresh
    } else {
      roundRobinIdx = (roundRobinIdx + 1) % storageUris.size
      storageUris(roundRobinIdx)
    }
  }

  def getAllContainers: Seq[A] = {
    if (storageUris.isEmpty ||
      new Period(lastRefresh, new DateTime(DateTimeZone.UTC)).getMinutes > KustoConstants.storageExpiryMinutes) {
      refresh
    }
    storageUris
  }

  private def refresh: A = {
    val res = dmClient.execute(command)
    val storage = res.getPrimaryResults.getData.asScala.map(row => {
      val parts = row.get(0).toString.split('?')
      cacheEntryCreator(ContainerAndSas(parts(0), '?' + parts(1)))
    })

    if (storage.isEmpty) {
      KDSU.reportExceptionAndThrow(myName, new RuntimeException("Failed to allocate temporary storage"), "writing to Kusto", clusterAlias)
    }

    lastRefresh = new DateTime(DateTimeZone.UTC)
    storageUris = scala.util.Random.shuffle(storage)
    roundRobinIdx = 0
    storageUris(roundRobinIdx)
  }
} 
Example 19
Source File: TimeUnit.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.time

import org.joda.time.Period

sealed trait TimeUnit {
  val labels: Array[String]
  val mainLabel: String

  def forValue(value: Int): Duration =
    this match {
      case TimeUnit.Total =>
        TotalDuration
      case unit: MultiTimeUnit =>
        MultiDuration(value, unit)
    }
}

abstract class MultiTimeUnit(override val labels: Array[String], override val mainLabel: String) extends TimeUnit {
  val asPeriod: Period
  lazy protected[time] val standardMillis: Long = asPeriod.toStandardDuration.getMillis
}


object TimeUnit {

  private lazy val allUnits =
    Seq(Second, Minute, Hour, Day, Week, Month, Quarter, Year, Total)

  case object Second extends MultiTimeUnit(Array("s", "second", "seconds"), "second") {
    override val asPeriod: Period = Period.seconds(1)
  }

  case object Minute extends MultiTimeUnit(Array("m", "minute", "minutes"), "minute") {
    override val asPeriod: Period = Period.minutes(1)
  }

  case object Hour extends MultiTimeUnit(Array("h", "hour", "hours"), "hour") {
    override val asPeriod: Period = Period.hours(1)
  }

  case object Day extends MultiTimeUnit(Array("d", "day", "days"), "day") {
    override val asPeriod: Period = Period.days(1)
  }

  case object Week extends MultiTimeUnit(Array("w", "week", "weeks"), "week") {
    override val asPeriod: Period = Period.weeks(1)
  }

  case object Month extends MultiTimeUnit(Array("M", "month", "months"), "month") {
    override val asPeriod: Period = Period.months(1)
  }

  case object Quarter extends MultiTimeUnit(Array("q", "quarter"), "quarter") {
    override val asPeriod: Period = Period.months(3)
  }

  case object Year extends MultiTimeUnit(Array("y", "year"), "year") {
    override val asPeriod: Period = Period.years(1)
  }

  case object Total extends TimeUnit {
    override val labels: Array[String] = Array("t", "total")
    override val mainLabel: String = "total"
  }


  def lookup(label: String): TimeUnit = allUnits
    .find(_.labels.contains(label))
    .getOrElse(throw new IllegalArgumentException(s"Invalid label $label for time unit."))

  def apply(period: Period): Option[TimeUnit] = period.toString match {
    case "PT1S" => Some(TimeUnit.Second)
    case "PT1M" => Some(TimeUnit.Minute)
    case "PT1H" => Some(TimeUnit.Hour)
    case "P1D"  => Some(TimeUnit.Day)
    case "P1W"  => Some(TimeUnit.Week)
    case "P1M"  => Some(TimeUnit.Month)
    case "P3M"  => Some(TimeUnit.Quarter)
    case "P1Y"  => Some(TimeUnit.Year)
    case _      => None
  }

} 
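Lookup and construction compose as follows (illustrative values):

import org.joda.time.Period

TimeUnit.lookup("minutes").forValue(5) // MultiDuration(5, Minute)
TimeUnit(Period.minutes(1))            // Some(Minute), via the "PT1M" match
TimeUnit(Period.minutes(5))            // None: only single-unit periods map back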
Example 20
Source File: SddfPipeContext.scala    From sddf   with GNU General Public License v3.0
package de.unihamburg.vsis.sddf.pipe.context

import org.apache.spark.rdd.RDD
import org.joda.time.Period

import de.unihamburg.vsis.sddf.visualisation.ModelRouter
import de.unihamburg.vsis.sddf.visualisation.logger.ModelRouterLogging

class SddfPipeContext(
    val name: String = "Unnamed Pipeline",
    modelRouter: ModelRouter = ModelRouterLogging)
  extends AbstractPipeContext(modelRouter)
  with CorpusContext
  with GoldstandardContext
  with ResultContext {
  
  var runtime: Option[Period] = None
  var filepath: Option[String] = None
      
  val persistedRDDs = new scala.collection.mutable.HashMap[String, RDD[_]]()
  
}