akka.stream.actor.ActorPublisher Scala Examples

The following examples show how to use akka.stream.actor.ActorPublisher. Each example notes the project and source file it was taken from.
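All of these publishers follow the same contract: elements are buffered (or dropped) while there is no downstream demand, emitted with onNext while totalDemand > 0, and the actor stops on Cancel. Below is a minimal sketch of that pattern; the Item element type is hypothetical and not taken from any of the projects that follow.

import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.{Cancel, Request}

// Hypothetical element type, used only for this sketch.
case class Item(value: Int)

class ItemPublisher extends ActorPublisher[Item] {
  var buffer = Vector.empty[Item]

  def receive = {
    case item: Item =>
      buffer :+= item
      deliver()
    case Request(_) => deliver()          // downstream signalled demand
    case Cancel     => context.stop(self) // downstream cancelled
  }

  def deliver(): Unit =
    if (isActive && totalDemand > 0) {
      // totalDemand is a Long; cap it before converting to Int for splitAt
      val (use, keep) = buffer.splitAt(math.min(totalDemand, Int.MaxValue).toInt)
      buffer = keep
      use.foreach(onNext)
    }
}

// Materializing the publisher yields its ActorRef, which can then be fed elements:
// val ref = Source.actorPublisher[Item](Props(new ItemPublisher)).to(sink).run()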
Example 1
Source File: MemoryEventStore.scala    From sangria-subscriptions-example   with Apache License 2.0
package generic

import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.{Cancel, Request}

class MemoryEventStore extends ActorPublisher[Event] {
  import MemoryEventStore._

  // in-memory event storage
  var events = Vector.empty[Event]

  var eventBuffer = Vector.empty[Event]

  def receive = {
    case AddEvent(event) if eventBuffer.size >= MaxBufferCapacity ⇒
      sender() ! OverCapacity(event)

    case LatestEventVersion(id) ⇒
      val entityEvents = events.filter(_.id == id)

      if (entityEvents.nonEmpty)
        sender() ! Some(entityEvents.maxBy(_.version).version)
      else
        sender() ! None

    case AddEvent(event) ⇒
      val entityEvents = events.filter(_.id == event.id)

      if (entityEvents.isEmpty) {
        addEvent(event)
        sender() ! EventAdded(event)
      } else {
        val latestEvent = entityEvents.maxBy(_.version)

        if (latestEvent.version == event.version - 1) {
          addEvent(event)
          sender() ! EventAdded(event)
        } else {
          sender() ! ConcurrentModification(event, latestEvent.version)
        }
      }

    case Request(_) ⇒ deliverEvents()
    case Cancel ⇒ context.stop(self)
  }

  def addEvent(event: Event) = {
    events = events :+ event
    eventBuffer = eventBuffer :+ event

    deliverEvents()
  }

  def deliverEvents(): Unit = {
    if (isActive && totalDemand > 0) {
      // totalDemand is a Long, so cap it at Int.MaxValue before converting for splitAt
      val (use, keep) = eventBuffer.splitAt(math.min(totalDemand, Int.MaxValue).toInt)

      eventBuffer = keep

      use foreach onNext
    }
  }
}

object MemoryEventStore {
  case class AddEvent(event: Event)
  case class LatestEventVersion(id: String)

  case class EventAdded(event: Event)
  case class OverCapacity(event: Event)
  case class ConcurrentModification(event: Event, latestVersion: Long)

  val MaxBufferCapacity = 1000
} 
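A possible way to wire the store into a running stream, assuming the code is run from the generic package so Event and MemoryEventStore are in scope; the system name, the console sink, and the event value are assumptions:

import akka.actor.{ActorSystem, Props}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}

implicit val system = ActorSystem("events")
implicit val materializer = ActorMaterializer()

// Materializing yields the store's ActorRef; AddEvent messages sent to it are
// answered with EventAdded/ConcurrentModification/OverCapacity, and accepted
// events flow downstream.
val store = Source.actorPublisher[Event](Props(new MemoryEventStore))
  .to(Sink.foreach(println))
  .run()

// store ! MemoryEventStore.AddEvent(event) // `event: Event` is an assumption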
Example 2
Source File: CamelActorPublisher.scala    From reactive-activemq   with Apache License 2.0
package akka.stream.integration
package camel

import akka.actor.{ ActorLogging, ActorRef, Props }
import akka.camel.{ CamelMessage, Consumer }
import akka.event.LoggingReceive
import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.Cancel
import akka.stream.scaladsl.Source

class CamelActorPublisher(val endpointUri: String) extends Consumer with ActorPublisher[(ActorRef, CamelMessage)] with ActorLogging {
  override val autoAck: Boolean = false

  override def receive: Receive = LoggingReceive {
    case _: CamelMessage if totalDemand == 0 =>
      sender() ! akka.actor.Status.Failure(new IllegalStateException("No demand for new messages"))

    case msg: CamelMessage => onNext((sender(), msg))

    case Cancel            => context stop self
  }
}

class CamelActorPublisherWithExtractor[A: CamelMessageExtractor](val endpointUri: String) extends Consumer with ActorPublisher[(ActorRef, A)] with ActorLogging {
  override val autoAck: Boolean = false

  override def receive: Receive = LoggingReceive {
    case _: CamelMessage if totalDemand == 0 =>
      sender() ! akka.actor.Status.Failure(new IllegalStateException("No demand for new messages"))

    case msg: CamelMessage =>
      try {
        onNext((sender(), implicitly[CamelMessageExtractor[A]].extract(msg)))
      } catch {
        case t: Throwable =>
          log.error(t, "Removing message from the broker because of error while extracting the message")
          sender() ! akka.camel.Ack
      }

    case Cancel => context stop self
  }
}

object CamelActorPublisher {
  def fromEndpointUri(endpointUri: String): Source[AckRefTup[CamelMessage], ActorRef] =
    Source.actorPublisher[AckRefTup[CamelMessage]](Props(new CamelActorPublisher(endpointUri)))

  def fromEndpointUriWithExtractor[A: CamelMessageExtractor](endpointUri: String): Source[AckRefTup[A], ActorRef] =
    Source.actorPublisher[AckRefTup[A]](Props(new CamelActorPublisherWithExtractor(endpointUri)))
} 
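A sketch of consuming such a source, assuming the project's camel package is imported, AckRefTup[A] is an alias for (ActorRef, A), and a Camel ActiveMQ endpoint is configured; the queue name is an assumption:

import akka.actor.ActorSystem
import akka.camel.Ack
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink

implicit val system = ActorSystem("camel")
implicit val materializer = ActorMaterializer()

// autoAck is disabled in the publisher, so each message must be acknowledged
// explicitly via the Camel sender paired with it.
CamelActorPublisher
  .fromEndpointUri("activemq:queue:Test")
  .runWith(Sink.foreach { case (camelSender, _) =>
    camelSender ! Ack
  })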
Example 3
Source File: MetricFlow.scala    From akka-visualmailbox   with Apache License 2.0
package de.aktey.akka.visualmailbox

import akka.actor.{ActorRef, Props}
import akka.stream.actor.ActorPublisher
import akka.stream.scaladsl.Source

import scala.annotation.tailrec

object MetricFlow {

  // subscriber as flow source
  // that registers itself with a router
  class MetricsSubscriber(router: ActorRef) extends ActorPublisher[VisualMailboxMetric] {

    import akka.stream.actor.ActorPublisherMessage._

    val MaxBufferSize = 100
    var buf = Vector.empty[VisualMailboxMetric]

    router ! self

    def receive = {
      case metric: VisualMailboxMetric if buf.size == MaxBufferSize => // buffer full: drop the metric
      case metric: VisualMailboxMetric =>
        if (buf.isEmpty && totalDemand > 0)
          onNext(metric)
        else {
          buf :+= metric
          deliverBuf()
        }
      case Request(_) =>
        deliverBuf()
      case Cancel =>
        context.stop(self)
    }

    @tailrec
    private def deliverBuf(): Unit =
      if (totalDemand > 0) {
        if (totalDemand <= Int.MaxValue) {
          val (use, keep) = buf.splitAt(totalDemand.toInt)
          buf = keep
          use foreach onNext
        } else {
          val (use, keep) = buf.splitAt(Int.MaxValue)
          buf = keep
          use foreach onNext
          deliverBuf()
        }
      }
  }

  object MetricsSubscriber {
    def props(router: ActorRef) = Props(new MetricsSubscriber(router))
  }

  def metricSource(router: ActorRef): Source[String, ActorRef] =
    Source.actorPublisher[VisualMailboxMetric](MetricsSubscriber.props(router)).map {
      case VisualMailboxMetric(sender, receiver, receiverMailBoxSize, meassureTimeMillies) =>
        s"""{
            |  "sender": "$sender",
            |  "receiver": "$receiver",
            |  "receiverMailBoxSize": $receiverMailBoxSize,
            |  "meassureTimeMillies": $meassureTimeMillies
            |}""".stripMargin
    }
} 
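A possible way to run the source, assuming a metric router ActorRef is already available; the system name and the console sink are assumptions:

import akka.actor.{ActorRef, ActorSystem}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink

implicit val system = ActorSystem("visualmailbox")
implicit val materializer = ActorMaterializer()

// The subscriber registers itself with the router on startup (router ! self),
// so materializing the source is enough to start receiving metrics as JSON strings.
def runMetrics(router: ActorRef) =
  MetricFlow.metricSource(router).runWith(Sink.foreach(println))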
Example 4
Source File: UnfoldPullerAsync.scala    From akka-stream-extensions   with Apache License 2.0
package com.mfglabs.stream.internals.source

import akka.pattern.pipe
import akka.actor.{Props, Status, ActorLogging}
import akka.stream.actor.ActorPublisher

import scala.concurrent.Future

class UnfoldPullerAsync[A, B](zero: => B)(f: B => Future[(Option[A], Option[B])]) extends ActorPublisher[A] with ActorLogging {
  import akka.stream.actor.ActorPublisherMessage._
  implicit val ec = context.dispatcher

  def receive = waitingForDownstreamReq(zero)

  case object Pull

  def waitingForDownstreamReq(s: B): Receive = {
    case Request(_) | Pull =>
      if (totalDemand > 0 && isActive) {
        f(s).pipeTo(self)
        context.become(waitingForFut(s))
      }

    case Cancel => context.stop(self)
  }

  def waitingForFut(s: B): Receive = {
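    // the piped future has completed: optionally emit an element,
    // then either continue with the next state or complete the stream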
    case (maybeA: Option[A], maybeB: Option[B]) =>
      maybeA.foreach(onNext)
      maybeB match {
        case Some(b) =>
          if (totalDemand > 0) self ! Pull
          context.become(waitingForDownstreamReq(b))
        case None =>
          onComplete()
      }

    case Request(_) | Pull => // ignoring until we receive the future response

    case Status.Failure(err) =>
      context.become(waitingForDownstreamReq(s))
      onError(err)

    case Cancel => context.stop(self)
  }

}

object UnfoldPullerAsync {
  def props[A, B](zero: => B)(f: B => Future[(Option[A], Option[B])]) = Props(new UnfoldPullerAsync[A, B](zero)(f))
} 
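For example, a sketch of an infinite counter source built with this puller; the stream setup is an assumption:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}

import scala.concurrent.Future

implicit val system = ActorSystem("unfold")
implicit val materializer = ActorMaterializer()

// Emits 0, 1, 2, ...; returning None as the next state would complete the stream.
val counter = Source.actorPublisher[Int](
  UnfoldPullerAsync.props[Int, Int](0)(n => Future.successful((Some(n), Some(n + 1)))))

counter.take(5).runWith(Sink.foreach(println)) // prints 0 to 4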
Example 5
Source File: UnicomplexActorPublisher.scala    From squbs   with Apache License 2.0
package org.squbs.stream

import java.lang.Boolean

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.stream.actor.{ActorPublisher, ActorPublisherMessage}
import akka.stream.javadsl
import akka.stream.scaladsl.Source
import org.squbs.stream.TriggerEvent._
import org.squbs.unicomplex.{Active, Stopping, _}

final class UnicomplexActorPublisher extends ActorPublisher[LifecycleState] {

  override def receive = {
    case ActorPublisherMessage.Request(_) =>
      Unicomplex() ! SystemState
      Unicomplex() ! ObtainLifecycleEvents()
    case ActorPublisherMessage.Cancel | ActorPublisherMessage.SubscriptionTimeoutExceeded =>
      context.stop(self)
    case SystemState => Unicomplex() ! SystemState
    case element: LifecycleState if demand_? => onNext(element)
  }

  private def demand_? : Boolean = totalDemand > 0
}

case class LifecycleManaged[T, M]() {
  val trigger = Source.actorPublisher[LifecycleState](Props.create(classOf[UnicomplexActorPublisher]))
    .collect {
      case Active => ENABLE
      case Stopping => DISABLE
    }

  val source = (in: Source[T, M]) => new Trigger(eagerComplete = true).source(in, trigger)

  // for Java
  def source(in: javadsl.Source[T, M]): javadsl.Source[T, akka.japi.Pair[M, ActorRef]] = source(in.asScala)
    .mapMaterializedValue {
      case (m1, m2) => akka.japi.Pair(m1, m2)
    }.asJava
} 
Example 6
Source File: TweetPublisher.scala    From intro-to-dcos   with Apache License 2.0
package de.codecentric.dcos_intro

import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.{Cancel, Request}

class TweetPublisher extends ActorPublisher[Tweet] {
  override def receive: Receive = {
    case t: Tweet => {
      if (isActive && totalDemand > 0) {
        onNext(t)
      }
    }
    case Cancel => context.stop(self)
    case Request(_) => {}

  }
} 
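A sketch of attaching the publisher to a sink, assuming it is run from the de.codecentric.dcos_intro package so Tweet is in scope. Note that tweets arriving while there is no downstream demand are simply dropped; the system name, sink, and tweet value are assumptions:

import akka.actor.{ActorSystem, Props}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}

implicit val system = ActorSystem("tweets")
implicit val materializer = ActorMaterializer()

// Materializing yields the publisher's ActorRef; Tweet instances sent to it
// are pushed downstream whenever demand is available.
val publisherRef = Source.actorPublisher[Tweet](Props(new TweetPublisher))
  .to(Sink.foreach(println))
  .run()

// publisherRef ! tweet // `tweet: Tweet` is an assumption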
Example 7
Source File: LogApiController.scala    From vamp   with Apache License 2.0
package io.vamp.operation.controller

import java.time.{ OffsetDateTime, ZoneId }
import java.util.Date

import akka.actor.{ ActorRef, Props }
import akka.stream.actor.ActorPublisher
import akka.stream.actor.ActorPublisherMessage.{ Cancel, Request }
import akka.stream.scaladsl.Source
import ch.qos.logback.classic.spi.ILoggingEvent
import akka.http.scaladsl.model.sse.ServerSentEvent
import io.vamp.common.Namespace
import io.vamp.common.akka._
import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import org.json4s.native.Serialization._

import scala.concurrent.duration.FiniteDuration

case class LogEvent(logger: String, level: String, message: String, timestamp: OffsetDateTime)

trait LogApiController extends AbstractController {

  private val eventType = "log"

  def sourceLog(level: String, logger: Option[String], keepAlivePeriod: FiniteDuration)(implicit namespace: Namespace): Source[ServerSentEvent, ActorRef] = {
    Source.actorPublisher[ServerSentEvent](Props(new ActorPublisher[ServerSentEvent] {
      def receive: Receive = {
        case Request(_) ⇒ openLogStream(self, level, logger, { event ⇒
          ServerSentEvent(write(encode(event))(SerializationFormat(OffsetDateTimeSerializer)), eventType)
        })
        case Cancel                                  ⇒ closeLogStream(self)
        case sse: ServerSentEvent if totalDemand > 0 ⇒ onNext(sse)
        case _                                       ⇒
      }

    })).keepAlive(keepAlivePeriod, () ⇒ ServerSentEvent.heartbeat)
  }

  def openLogStream(to: ActorRef, level: String, logger: Option[String], encoder: (ILoggingEvent) ⇒ AnyRef)(implicit namespace: Namespace): Unit = {
    LogPublisherHub.subscribe(to, level, logger, encoder)
  }

  def closeLogStream(to: ActorRef): Unit = LogPublisherHub.unsubscribe(to)

  def encode(loggingEvent: ILoggingEvent) = LogEvent(
    loggingEvent.getLoggerName,
    loggingEvent.getLevel.toString,
    loggingEvent.getFormattedMessage,
    OffsetDateTime.ofInstant(new Date(loggingEvent.getTimeStamp).toInstant, ZoneId.of("UTC"))
  )
}
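One way such a source might be exposed over HTTP, assuming Akka HTTP's SSE marshalling is on the classpath; the route, log level, and keep-alive period are assumptions, not part of Vamp:

import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import io.vamp.common.Namespace

import scala.concurrent.duration._

// `controller` is a hypothetical LogApiController instance.
def logRoute(controller: LogApiController)(implicit namespace: Namespace): Route =
  path("logs") {
    get {
      complete(controller.sourceLog("INFO", None, 10.seconds))
    }
  }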