akka.stream.testkit.scaladsl.TestSink Scala Examples
The following examples show how to use akka.stream.testkit.scaladsl.TestSink.
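Before the project extracts, a minimal, self-contained sketch of the pattern they all share may help: running a stream into TestSink.probe materializes a TestSubscriber.Probe, on which the test explicitly signals demand with request and then asserts the emitted elements. (The object and system names below are illustrative only, not taken from any example on this page.)

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink

object TestSinkSketch extends App {
  implicit val system = ActorSystem("testsink-sketch")
  implicit val materializer = ActorMaterializer()

  // Running a source into TestSink.probe materializes a TestSubscriber.Probe
  // that the test drives directly: demand first, then assertions.
  Source(1 to 3)
    .map(_ * 2)
    .runWith(TestSink.probe[Int])
    .request(3)          // signal demand; without it nothing flows
    .expectNext(2, 4, 6) // assert the elements arrive in order
    .expectComplete()    // upstream completes after the last element

  system.terminate()
}

Because the probe only pulls elements when request is called, these tests can assert back-pressure behaviour (for example with expectNoMessage) as well as the elements themselves.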
Example 1
Source File: CassandraQueryJournalOverrideSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query

import akka.actor.ExtendedActorSystem
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.TestTaggingActor.Ack
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, TestTaggingActor }
import akka.persistence.query.{ PersistenceQuery, ReadJournalProvider }
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.{ Config, ConfigFactory }

import scala.concurrent.duration._

class JournalOverride(as: ExtendedActorSystem, config: Config, configPath: String)
    extends CassandraReadJournal(as, config, configPath) {
  override private[akka] def mapEvent(pr: PersistentRepr) =
    PersistentRepr("cat", pr.sequenceNr, pr.persistenceId, pr.manifest, pr.deleted, pr.sender, pr.writerUuid)
}

class JournalOverrideProvider(as: ExtendedActorSystem, config: Config, configPath: String) extends ReadJournalProvider {
  override def scaladslReadJournal() = new JournalOverride(as, config, configPath)
  override def javadslReadJournal() = null
}

object CassandraQueryJournalOverrideSpec {
  val config = ConfigFactory.parseString("""
      akka.persistence.cassandra.query {
        class = "akka.persistence.cassandra.query.JournalOverrideProvider"
      }
    """.stripMargin).withFallback(CassandraLifecycle.config)
}

class CassandraQueryJournalOverrideSpec extends CassandraSpec(CassandraQueryJournalOverrideSpec.config) {

  lazy val journal =
    PersistenceQuery(system).readJournalFor[JournalOverride](CassandraReadJournal.Identifier)

  "Cassandra query journal override" must {
    "map events" in {
      val pid = "p1"
      val p1 = system.actorOf(TestTaggingActor.props(pid))
      p1 ! "not a cat"
      expectMsg(Ack)

      val currentEvents = journal.currentEventsByPersistenceId(pid, 0, Long.MaxValue)
      val currentProbe = currentEvents.map(_.event.toString).runWith(TestSink.probe[String])
      currentProbe.request(2)
      currentProbe.expectNext("cat")
      currentProbe.expectComplete()

      val liveEvents = journal.eventsByPersistenceId(pid, 0, Long.MaxValue)
      val liveProbe = liveEvents.map(_.event.toString).runWith(TestSink.probe[String])
      liveProbe.request(2)
      liveProbe.expectNext("cat")
      liveProbe.expectNoMessage(100.millis)
      liveProbe.cancel()

      val internalEvents = journal.eventsByPersistenceIdWithControl(pid, 0, Long.MaxValue, None)
      val internalProbe = internalEvents.map(_.event.toString).runWith(TestSink.probe[String])
      internalProbe.request(2)
      internalProbe.expectNext("cat")
      // the original snippet mistakenly reused the already-cancelled liveProbe for these two calls
      internalProbe.expectNoMessage(100.millis)
      internalProbe.cancel()
    }
  }
}
Example 2
Source File: GitHubSpec.scala From akka-api-gateway-example with MIT License
package jp.co.dzl.example.akka.api.service

import akka.actor.ActorSystem
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{ HttpMethods, HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Source }
import akka.stream.testkit.scaladsl.TestSink
import org.scalamock.scalatest.MockFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ BeforeAndAfterAll, FlatSpec, Matchers }

import scala.concurrent.Await
import scala.concurrent.duration.Duration

class GitHubSpec extends FlatSpec with Matchers with ScalaFutures with BeforeAndAfterAll with MockFactory {
  implicit val system = ActorSystem("github-spec")
  implicit val executor = system.dispatcher
  implicit val materializer = ActorMaterializer()

  override protected def afterAll: Unit = {
    Await.result(system.terminate(), Duration.Inf)
  }

  "#from" should "merge original headers to github request" in {
    val github = new GitHubImpl("127.0.0.1", 8000, 5, mock[HttpClient])
    val request = HttpRequest(HttpMethods.GET, "/")
      .addHeader(RawHeader("host", "dummy"))
      .addHeader(RawHeader("timeout-access", "dummy"))

    val result = Source.single(HttpRequest(HttpMethods.GET, "/v1/github/users/xxxxxx"))
      .via(github.from(request))
      .runWith(TestSink.probe[HttpRequest])
      .request(1)
      .expectNext()

    result.headers.filter(_.lowercaseName() == "host") shouldBe empty
    result.headers.filter(_.lowercaseName() == "timeout-access") shouldBe empty
    result.headers.filter(_.lowercaseName() == "x-forwarded-host") shouldNot be(empty)
  }

  "#send" should "connect using http client" in {
    val httpResponse = HttpResponse()
    val httpClient = mock[HttpClient]
    (httpClient.connectionHttps _).expects(*, *, *).returning(Flow[HttpRequest].map(_ => httpResponse))

    val github = new GitHubImpl("127.0.0.1", 8000, 5, httpClient)
    val result = Source.single(HttpRequest(HttpMethods.GET, "/"))
      .via(github.send)
      .runWith(TestSink.probe[HttpResponse])
      .request(1)
      .expectNext()

    result shouldBe httpResponse
  }
}
Example 3
Source File: ActiveMqTestSpec.scala From reactive-activemq with Apache License 2.0
package akka.stream.integration
package activemq

import akka.NotUsed
import akka.actor.ActorRef
import akka.stream.integration.PersonDomain.Person
import akka.stream.scaladsl.{ Flow, Keep }
import akka.stream.testkit.scaladsl.{ TestSink, TestSource }
import akka.stream.testkit.{ TestPublisher, TestSubscriber }
import akka.testkit.TestActor.AutoPilot
import akka.testkit.TestProbe
import JsonCamelMessageExtractor._
import JsonCamelMessageBuilder._

import scala.util.{ Failure, Success, Try }

// The scraped snippet lost the enclosing class declaration and its test cases;
// only this auto-pilot helper survived. The wrapper below is reconstructed
// (class name taken from the file name) so the remaining code compiles.
class ActiveMqTestSpec extends TestSpec {

  implicit def function1ToAutoPilot[S, T](f: S => T): AutoPilot = new AutoPilot {
    override def run(sender: ActorRef, msg: Any): AutoPilot = msg match {
      case s: S =>
        val tryT: Try[T] = Try(f(s))
        tryT match {
          case Success(t) =>
            sender ! t
            function1ToAutoPilot(f)
          case Failure(f) =>
            fail(s"Failed to apply supplied function to received message: $s", f)
        }
      case _ =>
        fail(s"Received message is not of the required type: $msg")
    }
  }
}
Example 4
Source File: BakerServiceSpec.scala From Learn-Scala-Programming with MIT License
package ch15

import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import ch15.model.{RawCookies, ReadyCookies}
import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.ServiceTest
import org.scalatest.{AsyncWordSpec, Matchers}

class BakerServiceSpec extends AsyncWordSpec with Matchers {

  "The BakerService" should {
    "bake cookies" in ServiceTest.withServer(ServiceTest.defaultSetup) { ctx =>
      new BakerApplication(ctx) with LocalServiceLocator
    } { server =>
      implicit val as: Materializer = server.materializer
      val input: Source[RawCookies, NotUsed] =
        Source(List(RawCookies(10), RawCookies(10), RawCookies(10)))
          .concat(Source.maybe)

      val client = server.serviceClient.implement[BakerService]

      client.bake.invoke(input).map { output =>
        val probe = output.runWith(TestSink.probe(server.actorSystem))
        probe.request(10)
        probe.expectNext(ReadyCookies(12))
        probe.expectNext(ReadyCookies(12))
        // the oven is not full for the remaining 6, so no third batch is emitted
        probe.cancel
        succeed
      }
    }
  }
}
Example 5
Source File: TestSpec.scala From akka-http-test with Apache License 2.0
package com.github.dnvriend

import akka.actor.{ ActorRef, ActorSystem, PoisonPill }
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest._
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.inject.BindingKey
import play.api.test.WsTestClient

import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.ClassTag
import scala.util.Try

class TestSpec extends FlatSpec
    with Matchers
    with GivenWhenThen
    with OptionValues
    with TryValues
    with ScalaFutures
    with WsTestClient
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Eventually
    with GuiceOneServerPerSuite {

  def getComponent[A: ClassTag] = app.injector.instanceOf[A]

  def getAnnotatedComponent[A](name: String)(implicit ct: ClassTag[A]): A =
    app.injector.instanceOf[A](BindingKey(ct.runtimeClass.asInstanceOf[Class[A]]).qualifiedWith(name))

  // set the port number of the HTTP server
  override lazy val port: Int = 8080
  implicit val timeout: Timeout = 10.seconds
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 30.seconds, interval = 300.millis)
  implicit val system: ActorSystem = getComponent[ActorSystem]
  implicit val ec: ExecutionContext = getComponent[ExecutionContext]
  implicit val mat: Materializer = getComponent[Materializer]

  // ================================== Supporting Operations ====================================
  implicit class PimpedByteArray(self: Array[Byte]) {
    def getString: String = new String(self)
  }

  implicit class PimpedFuture[T](self: Future[T]) {
    def toTry: Try[T] = Try(self.futureValue)
  }

  implicit class SourceOps[A](src: Source[A, _]) {
    def testProbe(f: TestSubscriber.Probe[A] => Unit): Unit =
      f(src.runWith(TestSink.probe(system)))
  }

  def killActors(actors: ActorRef*): Unit = {
    val tp = TestProbe()
    actors.foreach { (actor: ActorRef) =>
      tp watch actor
      actor ! PoisonPill
      tp.expectTerminated(actor)
    }
  }

  override protected def beforeEach(): Unit = {}
}
Example 6
Source File: PublishServiceSpec.scala From lagom with Apache License 2.0
package docs.scaladsl.mb

import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.lightbend.lagom.scaladsl.server.LagomApplication
import com.lightbend.lagom.scaladsl.server.LagomApplicationContext
import com.lightbend.lagom.scaladsl.server.LagomServer
import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.ServiceTest
import com.lightbend.lagom.scaladsl.testkit.TestTopicComponents
import play.api.libs.ws.ahc.AhcWSComponents
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec
import akka.NotUsed
import akka.Done
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.TestSubscriber.Probe

abstract class PublishApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with AhcWSComponents {
  override lazy val lagomServer = serverFor[service.PublishService](new service.PublishServiceImpl())
}

package service {
  import com.lightbend.lagom.scaladsl.api.Service
  import com.lightbend.lagom.scaladsl.broker.TopicProducer

  object PublishService {
    val TOPIC_NAME = "events"
  }

  trait PublishService extends Service {
    final override def descriptor = {
      import Service._
      named("brokerdocs")
        .withTopics(topic(PublishService.TOPIC_NAME, events))
        .withAutoAcl(true)
    }

    def events(): Topic[PubMessage]
  }

  case class PubMessage(message: String)

  object PubMessage {
    import play.api.libs.json.Format
    import play.api.libs.json.Json
    implicit val format: Format[PubMessage] = Json.format[PubMessage]
  }

  class PublishServiceImpl() extends PublishService {
    override def events(): Topic[PubMessage] =
      TopicProducer.singleStreamWithOffset { offset =>
        Source((1 to 10)).map(i => (PubMessage(s"msg $i"), offset))
      }
  }
}

class PublishServiceSpec extends AsyncWordSpec with Matchers {
  import service._

  //#topic-test-publishing-into-a-topic
  "The PublishService" should {
    "publish events on the topic" in ServiceTest.withServer(ServiceTest.defaultSetup) { ctx =>
      new PublishApplication(ctx) with LocalServiceLocator with TestTopicComponents
    } { server =>
      implicit val system = server.actorSystem
      implicit val mat = server.materializer

      val client: PublishService = server.serviceClient.implement[PublishService]
      val source = client.events().subscribe.atMostOnceSource
      source
        .runWith(TestSink.probe[PubMessage])
        .request(1)
        .expectNext should ===(PubMessage("msg 1"))
    }
  }
  //#topic-test-publishing-into-a-topic
}
Example 7
Source File: TopicPublishingSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.scaladsl.testkit

import akka.persistence.query.Offset
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.lightbend.lagom.scaladsl.broker.TopicProducer
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceComponents
import com.lightbend.lagom.scaladsl.playjson.EmptyJsonSerializerRegistry
import com.lightbend.lagom.scaladsl.server.LagomApplication
import com.lightbend.lagom.scaladsl.server.LagomApplicationContext
import com.lightbend.lagom.scaladsl.server.LagomServer
import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.services.AlphaEvent
import com.lightbend.lagom.scaladsl.testkit.services.AlphaService
import play.api.libs.ws.ahc.AhcWSComponents
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

abstract class AlphaApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with CassandraPersistenceComponents
    with TestTopicComponents
    with AhcWSComponents {
  override lazy val lagomServer = serverFor[AlphaService](new AlphaServiceImpl())
  override lazy val jsonSerializerRegistry = EmptyJsonSerializerRegistry
}

class AlphaServiceImpl extends AlphaService {
  override def messages: Topic[AlphaEvent] =
    TopicProducer.singleStreamWithOffset { offset =>
      val events = (1 to 10).filter(_ % 2 == 0).map(AlphaEvent.apply)
      Source(events).map(event => (event, Offset.sequence(event.message / 2)))
    }
}

class TopicPublishingSpec extends AsyncWordSpec with Matchers {
  "The AlphaService" should {
    "publish events on alpha topic" in ServiceTest.withServer(ServiceTest.defaultSetup.withCluster()) { ctx =>
      new AlphaApplication(ctx) with LocalServiceLocator
    } { server =>
      implicit val system = server.actorSystem
      implicit val mat = server.materializer

      val client: AlphaService = server.serviceClient.implement[AlphaService]
      val source = client.messages.subscribe.atMostOnceSource
      source
        .runWith(TestSink.probe[AlphaEvent])
        .request(1)
        .expectNext should ===(AlphaEvent(2))
    }
  }
}
Example 8
Source File: PersistenceIdsQuerySpec.scala From akka-persistence-couchbase with Apache License 2.0
package akka.persistence.couchbase.scaladsl

import akka.persistence.couchbase.TestActor
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestProbe

import scala.concurrent.duration._

class PersistenceIdsQuerySpec extends AbstractCouchbaseSpec("PersistenceIdsQuerySpec") {

  "currentPersistenceIds" must {
    "work" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      val pa1 = system.actorOf(TestActor.props("p1"))
      pa1 ! "p1-evt-1"
      senderProbe.expectMsg("p1-evt-1-done")

      val pa2 = system.actorOf(TestActor.props("p2"))
      pa2 ! "p2-evt-1"
      senderProbe.expectMsg("p2-evt-1-done")

      awaitAssert(
        {
          val probe: TestSubscriber.Probe[String] = queries.currentPersistenceIds().runWith(TestSink.probe)
          probe.requestNext("p1")
          probe.requestNext("p2")
          probe.expectComplete()
        },
        readOurOwnWritesTimeout
      )
    }
  }

  "live persistenceIds" must {
    "show new persistence ids" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      val queryProbe: TestSubscriber.Probe[String] = queries.persistenceIds().runWith(TestSink.probe)
      queryProbe.request(10)

      val pa3 = system.actorOf(TestActor.props("p3"))
      pa3 ! "p3-evt-1"
      senderProbe.expectMsg("p3-evt-1-done")
      awaitAssert({ queryProbe.expectNext("p3") }, 5.seconds)

      val pa4 = system.actorOf(TestActor.props("p4"))
      pa4 ! "p4-evt-1"
      senderProbe.expectMsg("p4-evt-1-done")

      // we shouldn't see p3 again
      queryProbe.expectNext("p4")
      // also not after p4 (it could come out of order)
      queryProbe.expectNoMessage(noMsgTimeout)
    }
  }
}
Example 9
Source File: StreamSpec.scala From akka-stream-eventsourcing with Apache License 2.0
package com.github.krasserm.ases

import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Keep}
import akka.stream.testkit.scaladsl.{TestSink, TestSource}
import akka.stream.testkit.{TestPublisher, TestSubscriber}
import akka.testkit.TestKit
import org.scalatest.{BeforeAndAfterAll, Suite}

import scala.collection.immutable.Seq

trait StreamSpec extends BeforeAndAfterAll { this: TestKit with Suite =>
  implicit val materializer = ActorMaterializer()

  val emitterId = "emitter"

  override def afterAll(): Unit = {
    materializer.shutdown()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  def probes[I, O, M](flow: Flow[I, O, M]): (TestPublisher.Probe[I], TestSubscriber.Probe[O]) =
    TestSource.probe[I].viaMat(flow)(Keep.left).toMat(TestSink.probe[O])(Keep.both).run()

  def durables[A](emitted: Seq[Emitted[A]], offset: Int = 0): Seq[Durable[A]] =
    emitted.zipWithIndex.map { case (e, i) => e.durable(i + offset) }
}
Example 10
Source File: PubSubSourceIT.scala From akka-cloudpubsub with Apache License 2.0
package com.qubit.pubsub.akka

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.{ActorMaterializer, Attributes, Graph, SourceShape}
import com.google.common.base.Charsets
import com.qubit.pubsub.PubSubIntegrationTest
import com.qubit.pubsub.akka.attributes.{PubSubClientAttribute, PubSubStageBufferSizeAttribute}
import com.qubit.pubsub.client.PubSubMessage
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}

import scala.concurrent.Await
import scala.concurrent.duration._

class PubSubSourceIT
    extends FunSuite
    with Matchers
    with BeforeAndAfterAll
    with PubSubIntegrationTest {

  implicit val actorSystem = ActorSystem("pubsub-stream-test")
  implicit val materializer = ActorMaterializer()

  override def testName = "pubsubsource"

  override def beforeAll(): Unit = {
    Await.ready(client.createTopic(testTopic), timeout)
    Await.ready(client.createSubscription(testSubscription, testTopic), timeout)
  }

  override def afterAll(): Unit = {
    actorSystem.terminate()
    Await.ready(client.deleteSubscription(testSubscription), timeout)
    Await.ready(client.deleteTopic(testTopic), timeout)
  }

  test("PubSubSource success") {
    val data = Range(0, 100)
      .map(i => s"msg$i".getBytes(Charsets.UTF_8))
      .map(PubSubMessage(_))
    Await.ready(client.publish(testTopic, data), timeout)

    val sourceGraph: Graph[SourceShape[PubSubMessage], NotUsed] =
      new PubSubSource(testSubscription, 1.millisecond)
    val sourceAttributes = Attributes(
      List(PubSubClientAttribute(client), PubSubStageBufferSizeAttribute(30)))
    val pubsubSource = Source.fromGraph(sourceGraph).withAttributes(sourceAttributes)

    val msgList = pubsubSource
      .runWith(TestSink.probe[PubSubMessage])
      .request(100)
      .expectNextN(100)

    msgList should not be (null)
    msgList should have size (100)
    msgList
      .map(m => new String(m.payload, Charsets.UTF_8))
      .forall(_.startsWith("msg")) should be(true)
  }
}
Example 11
Source File: CassandraReadJournalSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query.javadsl

import akka.persistence.cassandra.query.{ javadsl, scaladsl, TestActor }
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec }
import akka.persistence.journal.{ Tagged, WriteEventAdapter }
import akka.persistence.query.{ Offset, PersistenceQuery }
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._

object CassandraReadJournalSpec {
  val config = ConfigFactory.parseString(s"""
    akka.actor.serialize-messages=off
    akka.persistence.cassandra.query.max-buffer-size = 10
    akka.persistence.cassandra.query.refresh-interval = 0.5s
    akka.persistence.cassandra.journal.event-adapters {
      test-tagger = akka.persistence.cassandra.query.javadsl.TestTagger
    }
    akka.persistence.cassandra.journal.event-adapter-bindings = {
      "java.lang.String" = test-tagger
    }
  """).withFallback(CassandraLifecycle.config)
}

class TestTagger extends WriteEventAdapter {
  override def manifest(event: Any): String = ""
  override def toJournal(event: Any): Any = event match {
    case s: String if s.startsWith("a") => Tagged(event, Set("a"))
    case _                              => event
  }
}

class CassandraReadJournalSpec extends CassandraSpec(CassandraReadJournalSpec.config) {

  lazy val javaQueries = PersistenceQuery(system)
    .getReadJournalFor(classOf[javadsl.CassandraReadJournal], scaladsl.CassandraReadJournal.Identifier)

  "Cassandra Read Journal Java API" must {
    "start eventsByPersistenceId query" in {
      val a = system.actorOf(TestActor.props("a"))
      a ! "a-1"
      expectMsg("a-1-done")

      val src = javaQueries.eventsByPersistenceId("a", 0L, Long.MaxValue)
      src.asScala.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("a").cancel()
    }

    "start current eventsByPersistenceId query" in {
      val a = system.actorOf(TestActor.props("b"))
      a ! "b-1"
      expectMsg("b-1-done")

      val src = javaQueries.currentEventsByPersistenceId("b", 0L, Long.MaxValue)
      src.asScala.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("b").expectComplete()
    }

    "start eventsByTag query" in {
      val src = javaQueries.eventsByTag("a", Offset.noOffset)
      src.asScala
        .map(_.persistenceId)
        .runWith(TestSink.probe[Any])
        .request(10)
        .expectNext("a")
        .expectNoMessage(100.millis)
        .cancel()
    }

    "start current eventsByTag query" in {
      val src = javaQueries.currentEventsByTag("a", Offset.noOffset)
      src.asScala.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("a").expectComplete()
    }
  }
}
Example 12
Source File: CassandraReadJournalSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query.scaladsl

import akka.persistence.cassandra.query.TestActor
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec }
import akka.persistence.journal.{ Tagged, WriteEventAdapter }
import akka.persistence.query.NoOffset
import akka.stream.alpakka.cassandra.CassandraMetricsRegistry
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._

object CassandraReadJournalSpec {
  val config = ConfigFactory.parseString(s"""
    akka.actor.serialize-messages=off
    akka.persistence.cassandra.query.max-buffer-size = 10
    akka.persistence.cassandra.query.refresh-interval = 0.5s
    akka.persistence.cassandra.journal.event-adapters {
      test-tagger = akka.persistence.cassandra.query.scaladsl.TestTagger
    }
    akka.persistence.cassandra.journal.event-adapter-bindings = {
      "java.lang.String" = test-tagger
    }
    akka.persistence.cassandra.log-queries = off
  """).withFallback(CassandraLifecycle.config)
}

class TestTagger extends WriteEventAdapter {
  override def manifest(event: Any): String = ""
  override def toJournal(event: Any): Any = event match {
    case s: String if s.startsWith("a") => Tagged(event, Set("a"))
    case _                              => event
  }
}

class CassandraReadJournalSpec extends CassandraSpec(CassandraReadJournalSpec.config) {

  "Cassandra Read Journal Scala API" must {
    "start eventsByPersistenceId query" in {
      val a = system.actorOf(TestActor.props("a"))
      a ! "a-1"
      expectMsg("a-1-done")

      val src = queries.eventsByPersistenceId("a", 0L, Long.MaxValue)
      src.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("a").cancel()
    }

    "start current eventsByPersistenceId query" in {
      val a = system.actorOf(TestActor.props("b"))
      a ! "b-1"
      expectMsg("b-1-done")

      val src = queries.currentEventsByPersistenceId("b", 0L, Long.MaxValue)
      src.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("b").expectComplete()
    }

    // these tests rely on events written in previous tests
    "start eventsByTag query" in {
      val src = queries.eventsByTag("a", NoOffset)
      src
        .map(_.persistenceId)
        .runWith(TestSink.probe[Any])
        .request(10)
        .expectNext("a")
        .expectNoMessage(100.millis)
        .cancel()
    }

    "start current eventsByTag query" in {
      val src = queries.currentEventsByTag("a", NoOffset)
      src.map(_.persistenceId).runWith(TestSink.probe[Any]).request(10).expectNext("a").expectComplete()
    }

    "insert Cassandra metrics to Cassandra Metrics Registry" in {
      val registry = CassandraMetricsRegistry(system).getRegistry
      val snapshots =
        registry.getNames.toArray().filter(value => value.toString.startsWith("akka.persistence.cassandra"))
      snapshots.length should be > 0
    }
  }
}
Example 13
Source File: EventsByTagPubsubSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query

import java.time.{ LocalDate, ZoneOffset }

import akka.cluster.Cluster
import akka.persistence.cassandra.CassandraSpec
import akka.persistence.cassandra.journal.JournalSettings
import akka.persistence.query.{ EventEnvelope, NoOffset }
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._

object EventsByTagPubsubSpec {
  val today = LocalDate.now(ZoneOffset.UTC)

  val config = ConfigFactory.parseString(s"""
    akka.actor.provider = "akka.cluster.ClusterActorRefProvider"
    akka.actor.serialize-messages = off
    akka.actor.serialize-creators = off
    akka.remote.netty.tcp.port = 0
    akka.remote.artery.canonical.port = 0
    akka.remote.netty.tcp.hostname = "127.0.0.1"
    akka.persistence.cassandra {
      query.refresh-interval = 10s
      events-by-tag {
        pubsub-notification = on
        flush-interval = 0ms
        eventual-consistency-delay = 0s
      }
    }
  """).withFallback(EventsByTagSpec.config)
}

class EventsByTagPubsubSpec extends CassandraSpec(EventsByTagPubsubSpec.config) {

  val journalSettings = new JournalSettings(system, system.settings.config.getConfig("akka.persistence.cassandra"))

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    Cluster(system).join(Cluster(system).selfAddress)
  }

  "Cassandra query getEventsByTag when running clustered with pubsub enabled" must {
    "present new events to an ongoing getEventsByTag stream long before polling would kick in" in {
      val actor = system.actorOf(TestActor.props("EventsByTagPubsubSpec_a"))

      val blackSrc = queries.eventsByTag(tag = "black", offset = NoOffset)
      val probe = blackSrc.runWith(TestSink.probe[Any])
      probe.request(2)
      probe.expectNoMessage(300.millis)

      actor ! "a black car"
      probe.within(5.seconds) { // long before refresh-interval, which is 10s
        probe.expectNextPF { case e @ EventEnvelope(_, _, _, "a black car") => e }
      }
    }
  }
}
Example 14
Source File: ProgressSourceSpec.scala From eventuate with Apache License 2.0
package com.rbmhtechnology.eventuate.adapter.stream

import akka.actor.ActorSystem
import akka.pattern.AskTimeoutException
import akka.stream.scaladsl._
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import akka.stream._
import akka.testkit._
import com.rbmhtechnology.eventuate.ReplicationProtocol._
import com.rbmhtechnology.eventuate.VectorTime
import com.typesafe.config.ConfigFactory
import org.scalatest._

object ProgressSourceSpec {
  val SrcLogId = "A"

  val config = ConfigFactory.parseString("eventuate.log.read-timeout = 500ms")
}

class ProgressSourceSpec extends TestKit(ActorSystem("test", ProgressSourceSpec.config))
    with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach {
  import ProgressSourceSpec._

  implicit val materializer: Materializer = ActorMaterializer()

  private var log: TestProbe = _
  private var snk: TestSubscriber.Probe[Long] = _

  override def beforeEach(): Unit = {
    log = TestProbe()
    snk = Source.fromGraph(ProgressSource(SrcLogId, log.ref)).toMat(TestSink.probe[Long])(Keep.right).run()
  }

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  "A ProgressSource" must {
    "complete after emitting a stored replication progress" in {
      snk.request(1)
      log.expectMsg(GetReplicationProgress(SrcLogId))
      log.sender() ! GetReplicationProgressSuccess(SrcLogId, 17, VectorTime.Zero)
      snk.expectNext(17)
      snk.expectComplete()
    }
    "fail if replication progress reading fails" in {
      snk.request(1)
      log.expectMsg(GetReplicationProgress(SrcLogId))
      log.sender() ! GetReplicationProgressFailure(TestException)
      snk.expectError(TestException)
    }
    "fail on replication progress reading timeout" in {
      snk.request(1)
      log.expectMsg(GetReplicationProgress(SrcLogId))
      snk.expectError should be(an[AskTimeoutException])
    }
  }
}
Example 15
Source File: EventsByPersistenceIdFastForwardSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query

import java.util.UUID

import akka.persistence.PersistentRepr
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec }
import akka.stream.scaladsl.Keep
import akka.stream.testkit.scaladsl.TestSink
import com.typesafe.config.ConfigFactory
import org.scalatest.time.{ Milliseconds, Seconds, Span }

object EventsByPersistenceIdFastForwardSpec {

  // separate from EventsByPersistenceIdWithControlSpec since it needs the refreshing enabled
  val config = ConfigFactory.parseString(s"""
    akka.persistence.cassandra.journal.keyspace=EventsByPersistenceIdFastForwardSpec
    akka.persistence.cassandra.query.refresh-interval = 250ms
    akka.persistence.cassandra.query.max-result-size-query = 2
    akka.persistence.cassandra.journal.target-partition-size = 15
  """).withFallback(CassandraLifecycle.config)
}

class EventsByPersistenceIdFastForwardSpec
    extends CassandraSpec(EventsByPersistenceIdFastForwardSpec.config)
    with DirectWriting {

  override implicit val patience = PatienceConfig(timeout = Span(5, Seconds), interval = Span(100, Milliseconds))

  "be able to fast forward when currently looking for missing sequence number" in {
    val w1 = UUID.randomUUID().toString
    val evt1 = PersistentRepr("e-1", 1L, "f", "", writerUuid = w1)
    writeTestEvent(evt1)

    val src = queries.eventsByPersistenceIdWithControl("f", 0L, Long.MaxValue)
    val (futureControl, probe) = src.map(_.event).toMat(TestSink.probe[Any])(Keep.both).run()
    val control = futureControl.futureValue
    probe.request(5)

    val evt3 = PersistentRepr("e-3", 3L, "f", "", writerUuid = w1)
    writeTestEvent(evt3)

    probe.expectNext("e-1")

    system.log.debug("Sleeping for query to go into look-for-missing-seqnr-mode")
    Thread.sleep(2000)

    // then we fast forward past the gap
    control.fastForward(3L)
    probe.expectNext("e-3")

    val evt2 = PersistentRepr("e-2", 2L, "f", "", writerUuid = w1)
    val evt4 = PersistentRepr("e-4", 4L, "f", "", writerUuid = w1)
    writeTestEvent(evt2)
    writeTestEvent(evt4)
    probe.expectNext("e-4")

    probe.cancel()
  }
}
Example 16
Source File: EventsByTagStressSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra

import akka.persistence.cassandra.query.TestActor
import akka.persistence.cassandra.query._
import akka.persistence.journal.Tagged
import akka.persistence.query.NoOffset
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink

import scala.collection.immutable
import scala.concurrent.Future

class EventsByTagStressSpec extends CassandraSpec(s"""
  akka.persistence.cassandra {
    events-by-tag {
      max-message-batch-size = 25
    }
  }
""") {

  implicit val ec = system.dispatcher

  val writers = 10
  val readers = 20
  val messages = 5000

  "EventsByTag" must {
    "work under load" in {
      val pas = (0 until writers).map { i =>
        system.actorOf(TestActor.props(s"pid$i"))
      }

      val eventsByTagQueries: immutable.Seq[(Int, TestSubscriber.Probe[(String, Int)])] =
        (0 until readers).map { i =>
          val probe = queryJournal
            .eventsByTag("all", NoOffset)
            .map(i => {
              (i.persistenceId, i.event.asInstanceOf[Int])
            })
            .runWith(TestSink.probe)
          (i, probe)
        }

      system.log.info("Started events by tag queries")

      val writes: Future[Unit] = Future {
        system.log.info("Sending messages")
        (0 until messages).foreach { i =>
          pas.foreach(ref => {
            ref ! Tagged(i, Set("all"))
            expectMsg(s"$i-done")
          })
        }
        system.log.info("Sent messages")
      }
      writes.onComplete(result => system.log.info("{}", result))

      system.log.info("Reading messages")
      var latestValues: Map[(Int, String), Int] = Map.empty.withDefault(_ => -1)
      (0 until messages).foreach { _ =>
        (0 until writers).foreach { _ =>
          eventsByTagQueries.foreach {
            case (probeNr, probe) =>
              // should be in order per persistence id per probe
              val (pid, msg) = probe.requestNext()
              latestValues((probeNr, pid)) shouldEqual (msg - 1)
              latestValues += (probeNr, pid) -> msg
          }
        }
      }
      system.log.info("Received all messages {}", latestValues)
    }
  }
}
Example 17
Source File: EventsByTagCrashSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra

import akka.NotUsed
import akka.persistence.cassandra.TestTaggingActor.{ Ack, Crash }
import akka.persistence.query.{ EventEnvelope, NoOffset }
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink

import scala.concurrent.duration._

class EventsByTagCrashSpec extends CassandraSpec(EventsByTagRestartSpec.config) {

  val waitTime = 100.milliseconds

  "EventsByTag" must {
    "should handle crashes of the persistent actor" in {
      // crash the actor many times, persist 5 events each time
      val crashEvery = 5
      val crashNr = 20
      val msgs = crashEvery * crashNr
      val p2 = system.actorOf(TestTaggingActor.props("p2", Set("blue")))
      (1 to msgs).foreach { cn =>
        if (cn % crashEvery == 0) {
          p2 ! Crash
        }
        val msg = s"msg $cn"
        p2 ! msg
        expectMsg(Ack)
      }
      val blueTags: Source[EventEnvelope, NotUsed] = queryJournal.eventsByTag(tag = "blue", offset = NoOffset)
      val tagProbe = blueTags.runWith(TestSink.probe[EventEnvelope](system))
      (1L to msgs).foreach { m =>
        val expected = s"msg $m"
        tagProbe.request(1)
        tagProbe.expectNext().event shouldEqual expected
      }
      tagProbe.expectNoMessage(250.millis)
      tagProbe.cancel()
    }
  }
}
Example 18
Source File: MapInitAndLastTests.scala From CM-Well with Apache License 2.0
package cmwell.util.streams.test

import akka.stream._
import akka.stream.scaladsl.{GraphDSL, RunnableGraph, Source}
import akka.stream.testkit.scaladsl.{TestSink, TestSource}
import akka.stream.testkit.TestPublisher.{Probe => SrcProbe}
import akka.stream.testkit.TestSubscriber.{Probe => SnkProbe}
import cmwell.util.stream.MapInitAndLast

import scala.concurrent.duration.DurationInt

class MapInitAndLastTests extends StreamSpec {

  def generateGraph[In](): (SrcProbe[In], SnkProbe[(In, Boolean)]) = {
    val src = TestSource.probe[In]
    val snk = TestSink.probe[(In, Boolean)]
    RunnableGraph
      .fromGraph(GraphDSL.create(src, snk)((a, b) => (a, b)) { implicit b =>
        { (s1, s2) =>
          {
            import GraphDSL.Implicits._
            val mial = b.add(new MapInitAndLast[In, (In, Boolean)](_ -> false, _ -> true))
            s1 ~> mial ~> s2
            ClosedShape
          }
        }
      })
      .run()
  }

  describe("MapInitAndLast Stage") {
    it("should buffer a single element") {
      val (src, snk) = generateGraph[Int]()
      snk.request(99)
      src.sendNext(1)
      snk.expectNoMessage(300.millis)
      src.sendComplete()
      snk.expectNext((1, true))
      snk.expectComplete()
    }

    it("should treat last element differently") {
      val (src, snk) = generateGraph[Int]()
      snk.request(99)
      src.sendNext(1)
      snk.expectNoMessage(300.millis)
      src.sendNext(2)
      snk.expectNext((1, false))
      src.sendNext(3)
      snk.expectNext((2, false))
      src.sendComplete()
      snk.expectNext((3, true))
      snk.expectComplete()
    }

    it("should propagate back-pressure") {
      val (src, snk) = generateGraph[Int]()
      snk.ensureSubscription()
      src.sendNext(1)
      snk.expectNoMessage(300.millis)
      src.sendNext(1)
      snk.expectNoMessage(300.millis)
      src.sendComplete()
      snk.expectNoMessage(300.millis)
      snk.request(1)
      snk.expectNext((1, false))
      snk.request(1)
      snk.expectNext((1, true))
      snk.expectComplete()
    }
  }
}
Example 19
Source File: GroupChunkerSpec.scala From CM-Well with Apache License 2.0
package cmwell.tools.data.utils.chunkers

import akka.stream.scaladsl.Keep
import akka.stream.testkit.scaladsl.{TestSink, TestSource}
import akka.util.ByteString
import cmwell.tools.data.helpers.BaseStreamSpec

import scala.concurrent.duration._

class GroupSpecAutoFusingOn extends { val autoFusing = true } with GroupChunkerSpec
class GroupSpecAutoFusingOff extends { val autoFusing = false } with GroupChunkerSpec

trait GroupChunkerSpec extends BaseStreamSpec {
  "GroupChunker" should "emit elements when new group has arrived" in {
    val (pub, sub) = TestSource.probe[String]
      .map(x => ByteString(x.toString))
      .via(GroupChunker(b => ByteString(b.size), 2.seconds)) // group byte-strings by size
      .map(_.map(_.utf8String))
      .toMat(TestSink.probe[Seq[String]])(Keep.both)
      .run()

    sub.request(100)

    pub.sendNext("hello")
    pub.sendNext("world")
    pub.sendNext("nba")
    pub.sendNext("ibm")
    pub.sendNext("what")
    pub.sendNext("is")
    pub.sendNext("life")
    pub.sendComplete()

    sub.expectNext(Seq("hello", "world"))
    sub.expectNext(Seq("nba", "ibm"))
    sub.expectNext(Seq("what"))
    sub.expectNext(Seq("is"))
    sub.expectNext(Seq("life"))
    sub.expectComplete()
  }

  it should "emit elements when time threshold has reached" in {
    val (pub, sub) = TestSource.probe[String]
      .map(x => ByteString(x.toString))
      .via(GroupChunker(b => ByteString(b.size), 2.seconds)) // group byte-strings by size
      .map(_.map(_.utf8String))
      .toMat(TestSink.probe[Seq[String]])(Keep.both)
      .run()

    sub.request(4)

    pub.sendNext("one")
    sub.expectNext(Seq("one"))

    pub.sendNext("two")
    sub.expectNext(Seq("two"))

    pub.sendNext("four")
    pub.sendNext("five")
    pub.sendComplete()
    sub.expectNext(Seq("four", "five"))
    sub.expectComplete()
  }
}
Example 20
Source File: LagSim.scala From kafka-lag-exporter with Apache License 2.0
package com.lightbend.kafkalagexporter.integration

import akka.actor.Cancellable
import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.{Behavior, PostStop}
import akka.kafka.{CommitterSettings, Subscriptions}
import akka.kafka.scaladsl.{Committer, Consumer}
import akka.kafka.testkit.scaladsl.KafkaSpec
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.Keep
import akka.stream.testkit.scaladsl.TestSink
import org.scalatest.concurrent.ScalaFutures

import scala.concurrent.Await
import scala.concurrent.duration._

trait LagSim extends KafkaSpec with ScalaFutures {
  private implicit val patience: PatienceConfig = PatienceConfig(30.seconds, 1.second)

  class LagSimulator(topic: String, group: String) {
    private var offset: Int = 0
    private val committerSettings = CommitterSettings(system).withMaxBatch(1).withParallelism(1)

    private lazy val (consumerControl, consumerProbe) = Consumer
      .committableSource(consumerDefaults.withGroupId(group), Subscriptions.topics(topic))
      .buffer(size = 1, OverflowStrategy.backpressure)
      .map { elem =>
        log.debug("Committing elem with offset: {}", elem.committableOffset.partitionOffset)
        elem.committableOffset.commitScaladsl()
      }
      .toMat(TestSink.probe)(Keep.both)
      .run()

    def produceElements(num: Int): Unit = {
      Await.result(produce(topic, offset to (offset + num)), remainingOrDefault)
      offset += num + 1
    }

    // TODO: Replace this with regular Kafka Consumer for more fine-grained control over committing
    def consumeElements(num: Int): Unit = {
      consumerProbe
        .request(num)
        .expectNextN(num)
    }

    def shutdown(): Unit = {
      consumerControl.shutdown().futureValue
      consumerProbe.cancel()
    }
  }

  sealed trait Simulator
  case class Tick(produce: Int, consume: Int) extends Simulator

  def lagSimActor(simulator: LagSimulator,
                  scheduledTick: Cancellable = Cancellable.alreadyCancelled): Behavior[Simulator] =
    Behaviors.receive[Simulator] {
      case (context, tick @ Tick(produce, consume)) =>
        simulator.produceElements(produce)
        simulator.consumeElements(consume)
        lagSimActor(simulator, context.scheduleOnce(1 second, context.self, tick))
    } receiveSignal {
      case (_, PostStop) =>
        simulator.shutdown()
        scheduledTick.cancel()
        Behaviors.same
    }
}
Example 21
Source File: AllPersistenceIdsSpec.scala From akka-persistence-redis with Apache License 2.0
package akka.persistence.query.journal.redis

import scala.concurrent.duration._

import akka.persistence.query.PersistenceQuery
import akka.persistence.query.scaladsl.PersistenceIdsQuery
import akka.stream.ActorMaterializer
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.AkkaSpec
import akka.testkit.ImplicitSender

object AllPersistenceIdsSpec {
  val config = """
    akka.loglevel = INFO
    akka.persistence.journal.plugin = "akka-persistence-redis.journal"
    akka.test.single-expect-default = 10s
    """
}

class AllPersistenceIdsSpec extends AkkaSpec(AllPersistenceIdsSpec.config) with Cleanup with ImplicitSender {

  implicit val mat = ActorMaterializer()(system)

  val queries = PersistenceQuery(system).readJournalFor[ScalaReadJournal](RedisReadJournal.Identifier)

  "Redis query AllPersistenceIds" must {
    "implement standard AllPersistenceIdsQuery" in {
      queries.isInstanceOf[PersistenceIdsQuery] should ===(true)
    }

    "find existing persistenceIds" in {
      system.actorOf(TestActor.props("a")) ! "a1"
      expectMsg("a1-done")
      system.actorOf(TestActor.props("b")) ! "b1"
      expectMsg("b1-done")
      system.actorOf(TestActor.props("c")) ! "c1"
      expectMsg("c1-done")

      val src = queries.currentPersistenceIds()
      val probe = src.runWith(TestSink.probe[String])
      probe.within(10.seconds) {
        probe.request(5)
          .expectNextUnordered("a", "b", "c")
          .expectComplete()
      }
    }

    "find new persistenceIds" in {
      // a, b, c created by previous step
      system.actorOf(TestActor.props("d")) ! "d1"
      expectMsg("d1-done")

      val src = queries.persistenceIds()
      val probe = src.runWith(TestSink.probe[String])
      probe.within(10.seconds) {
        probe.request(5)
          .expectNextUnorderedN(List("a", "b", "c", "d"))

        system.actorOf(TestActor.props("e")) ! "e1"
        probe.expectNext("e")

        val more = (1 to 100).map("f" + _)
        more.foreach { p =>
          system.actorOf(TestActor.props(p)) ! p
        }

        probe.request(100)
        probe.expectNextUnorderedN(more)
      }
    }
  }
}
Example 22
Source File: UnicomplexActorPublisherSpec.scala From squbs with Apache License 2.0
package org.squbs.stream

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Keep
import akka.stream.testkit.scaladsl.{TestSink, TestSource}
import akka.testkit.TestKit
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest._
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex._

import scala.concurrent.duration._

object UnicomplexActorPublisherSpec {
  val myConfig: Config = ConfigFactory.parseString(
    """
      | squbs.actorsystem-name = UnicomplexActorPublisherSpec
    """.stripMargin)

  val boot = UnicomplexBoot(myConfig).createUsing((name, config) => ActorSystem(name, config))
    .scanResources("/")
    .initExtensions
    .start()
}

final class UnicomplexActorPublisherSpec extends TestKit(UnicomplexActorPublisherSpec.boot.actorSystem)
    with FlatSpecLike with Matchers with BeforeAndAfterAll {

  implicit val materializer = ActorMaterializer()

  val duration = 10.second

  val in = TestSource.probe[String]

  // expose probe port(s)
  val ((pubIn, pubTrigger), sub) = LifecycleManaged().source(in).toMat(TestSink.probe[String](system))(Keep.both).run()

  override def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }

  "UnicomplexTrigger" should "activate flow by unicomplex" in {
    // send 2 elements to in
    pubIn.sendNext("1")
    pubIn.sendNext("2")
    sub.request(2)
    sub.expectNext(duration, "1")
    sub.expectNext("2")

    // re-send Active to unicomplex trigger, flow continues
    sub.request(2)
    sub.expectNoMessage(remainingOrDefault)
    pubTrigger ! SystemState
    pubIn.sendNext("3")
    pubIn.sendNext("4")
    sub.expectNext("3", "4")
  }
}
Example 23
Source File: ActorFlowSpec.scala From akka-ddd-cqrs-es-example with MIT License
package com.github.j5ik2o.bank.infrastrucuture.akka

import akka.actor.{ ActorSystem, Props }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestKit
import com.github.j5ik2o.bank.infrastrucuture.akka.dsl.ActorFlow
import org.scalatest.FreeSpecLike

class ActorFlowSpec extends TestKit(ActorSystem("ActorFlowSpec")) with FreeSpecLike {

  implicit val mat = ActorMaterializer()

  "ActorFlow" - {
    "should be able to send message via stream" in {
      val props = Props(FlowActor[String]({ case (subscriber, x) => subscriber ! x }))
      val flowActor = system.actorOf(props)
      val sinkProbe =
        Source.single("TEST").via(ActorFlow[String, String](flowActor)).runWith(TestSink.probe)
      sinkProbe.request(1).expectNext("TEST")
    }
    "should be able to error handling" in {
      val props = Props(FlowActor[String]({ case (_, x) => throw new Exception("message = " + x) }))
      val flowActor = system.actorOf(props)
      val sinkProbe =
        Source.single("TEST").via(ActorFlow[String, String](flowActor)).runWith(TestSink.probe)
      sinkProbe.request(1).expectError()
    }
  }
}
Example 24
Source File: ActorSourceSpec.scala From akka-ddd-cqrs-es-example with MIT License
package com.github.j5ik2o.bank.infrastrucuture.akka

import akka.actor.{ ActorSystem, Props }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Keep
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestKit
import com.github.j5ik2o.bank.infrastrucuture.akka.dsl.ActorSource
import org.scalatest.FreeSpecLike
import org.scalatest.concurrent.ScalaFutures

class ActorSourceSpec extends TestKit(ActorSystem("ActorSourceSpec")) with FreeSpecLike with ScalaFutures {

  implicit val mat = ActorMaterializer()

  "ActorSource" - {
    "should be able to send message via stream" in {
      val props = Props(SourceActor[String]({ case (subscriber, msg) => subscriber ! msg }))
      val (sourceRefFuture, sinkProbe) = ActorSource[String](props).toMat(TestSink.probe)(Keep.both).run()
      sourceRefFuture.futureValue ! "TEST"
      sinkProbe.request(1).expectNext("TEST")
    }
    "should be able to error handling" in {
      val props = Props(SourceActor[String]({ case (_, x) => throw new Exception(s"message = $x") }))
      val (sourceRefFuture, sinkProbe) = ActorSource[String](props).toMat(TestSink.probe)(Keep.both).run()
      sourceRefFuture.futureValue ! "TEST"
      sinkProbe.request(1).expectError()
    }
  }
}
Example 25
Source File: ParsingStageSpec.scala From akka-xml-parser with Apache License 2.0
package uk.gov.hmrc.akka.xml

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.testkit.scaladsl.TestSource

class ParsingStageSpec extends FlatSpec {

  def createStream(instructions: Seq[XMLInstruction], validationMaxSize: Option[Int] = None) = {
    val as = ActorSystem("CompleteChunkSpec")
    val am = ActorMaterializer()(as)
    val source = TestSource.probe[ParsingData](as)
    val sink = TestSink.probe[(ByteString, Set[XMLElement])](as)
    val chunk = ParsingStage.parser(instructions)
    //source.via(chunk).alsoTo(Sink.foreach(a => println(">> " + a._1.decodeString("UTF-8") + " | " + a._2))).toMat(sink)(Keep.both).run()(am)
    source.via(chunk).toMat(sink)(Keep.both).run()(am)
  }

  def getEmptyResult(in: String): (ByteString, Set[XMLElement]) = {
    (ByteString(in), Set.empty[XMLElement])
  }

  it should "Extract XMLInstruction from a xml broken into pieces (even xml tags are broken up)" in {
    val idHeader = XMLExtract(List("xml", "header", "id"))
    val aaHeader = XMLExtract(List("xml", "header", "aa"))
    //This is our entire test xml: <xml><header><id>Joska</id><aa>Pista</aa><bb>Miska</bb><cc/><dd/></header></xml>
    //The test xml
    val (pub, sub) = createStream(Seq(idHeader, aaHeader))
    sub.request(10)
    pub.sendNext(ParsingData(ByteString("<xml><hea"), Set.empty, 5))
    sub.expectNext(getEmptyResult("<xml><hea"))
    pub.sendNext(ParsingData(ByteString("der><id>Jo"), Set.empty, 19))
    sub.expectNext(getEmptyResult("der><id>Jo"))
    pub.sendNext(ParsingData(ByteString("ska</i"), Set.empty, 26))
    sub.expectNext(getEmptyResult("ska</i"))
    pub.sendNext(ParsingData(ByteString("d><aa>Pista</a"), Set.empty, 32))
    sub.expectNext((ByteString("d><aa>Pista</a"), Set(XMLElement(List("xml", "header", "id"), Map(), Some("Joska")))))
    pub.sendNext(ParsingData(ByteString("a><bb>Mis"), Set.empty, 38))
    sub.expectNext((ByteString("a><bb>Mis"), Set(XMLElement(List("xml", "header", "aa"), Map(), Some("Pista")))))
    pub.sendNext(ParsingData(ByteString("ka</bb><cc/"), Set.empty, 50))
    sub.expectNext(getEmptyResult("ka</bb><cc/"))
    pub.sendNext(ParsingData(ByteString("><dd"), Set.empty, 57))
    sub.expectNext(getEmptyResult("><dd"))
    pub.sendNext(ParsingData(ByteString("/></header></xml>"), Set.empty, 57))
    sub.expectNext(getEmptyResult("/></header></xml>"))
    pub.sendComplete()
  }
}
Example 26
Source File: CompleteChunkSpec.scala From akka-xml-parser with Apache License 2.0
package uk.gov.hmrc.akka.xml

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.testkit.scaladsl.TestSource

class CompleteChunkSpec extends FlatSpec {

  def createStream() = {
    val as = ActorSystem("CompleteChunkSpec")
    val am = ActorMaterializer()(as)
    val source = TestSource.probe[ByteString](as)
    val sink = TestSink.probe[ParsingData](as)
    val chunk = CompleteChunkStage.parser()
    //source.map(a => {println("<< " + a.decodeString("UTF-8"));a}).via(chunk).alsoTo(Sink.foreach(a => println(">> " + a))).toMat(sink)(Keep.both).run()(am) //Use for debugging
    source.via(chunk).toMat(sink)(Keep.both).run()(am)
  }

  it should "only let whole xml tags through" in {
    //This is our entire test xml: <xml><header><id>Joska</id><aa>Pista</aa><bb>Miska</bb></header></xml>
    val (pub, sub) = createStream()
    sub.request(20)
    pub.sendNext(ByteString("<xml><hea"))
    sub.expectNext(ParsingData(ByteString("<xml>"), Set.empty, 5))
    pub.sendNext(ByteString("der><id>Jo"))
    sub.expectNext(ParsingData(ByteString("<header><id>Jo"), Set.empty, 19))
    pub.sendNext(ByteString("ska</i"))
    sub.expectNext(ParsingData(ByteString("ska"), Set.empty, 22))
    pub.sendNext(ByteString("d><aa>Pista</a"))
    sub.expectNext(ParsingData(ByteString("</id><aa>Pista"), Set(), 36))
    pub.sendNext(ByteString("a><bb>Mis"))
    sub.expectNext(ParsingData(ByteString("</aa><bb>Mis"), Set(), 48))
    pub.sendNext(ByteString("ka</bb></he"))
    sub.expectNext(ParsingData(ByteString("ka</bb>"), Set(), 55))
    pub.sendNext(ByteString("ader></xml>"))
    sub.expectNext(ParsingData(ByteString("</header></xml>"), Set(), 70))
    pub.sendComplete()
    sub.expectNext(ParsingData(ByteString.empty, Set(XMLElement(List(), Map("Stream Size" -> "70"), Some("Stream Size"))), 70))
    sub.expectComplete()
  }
}
Example 27
Source File: TakeStageTest.scala From intro-to-akka-streams with Apache License 2.0
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class TakeStageTest extends TestSpec {

  "Take" should "emit only 'n' number of elements and then complete" in {
    withIterator() { src ⇒
      src.take(3)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2)
        .expectComplete()
    }
  }
}
Example 28
Source File: SignalDispatcherTest.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.akkastreams.dispatcher

import java.lang

import akka.stream.scaladsl.Sink
import akka.stream.testkit.scaladsl.TestSink
import com.daml.ledger.api.testing.utils.AkkaBeforeAndAfterAll
import org.awaitility.Awaitility.await
import org.awaitility.Duration
import org.scalatest.concurrent.{AsyncTimeLimitedTests, ScaledTimeSpans}
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._
import org.scalatest.{FutureOutcome, Matchers, fixture}

class SignalDispatcherTest
    extends fixture.AsyncWordSpec
    with Matchers
    with AkkaBeforeAndAfterAll
    with ScaledTimeSpans
    with AsyncTimeLimitedTests {

  "SignalDispatcher" should {
    "send a signal on subscription if requested" in { sut =>
      sut.subscribe(true).runWith(Sink.head).map(_ => succeed)
    }

    "not send a signal on subscription if not requested" in { sut =>
      val s = sut.subscribe(false).runWith(TestSink.probe[SignalDispatcher.Signal])
      s.request(1L)
      s.expectNoMessage(1.second)
      succeed
    }

    "output a signal when it arrives" in { sut =>
      val result = sut.subscribe(false).runWith(Sink.head).map(_ => succeed)
      sut.signal()
      result
    }

    "output multiple signals when they arrive" in { sut =>
      val count = 10
      val result = sut.subscribe(false).take(count.toLong).runWith(Sink.seq).map(_ => succeed)
      1.to(count).foreach(_ => sut.signal)
      result
    }

    "remove queues from its state when the stream terminates behind them" in { sut =>
      val s = sut.subscribe(true).runWith(TestSink.probe[SignalDispatcher.Signal])
      s.request(1L)
      s.expectNext(SignalDispatcher.Signal)
      sut.getRunningState should have size 1L
      s.cancel()
      await("Cancellation handling")
        .atMost(Duration.TEN_SECONDS)
        .until(() => new lang.Boolean(sut.getRunningState.isEmpty))
      sut.getRunningState shouldBe empty
    }

    "remove queues from its state when closed" in { sut =>
      val s = sut.subscribe(true).runWith(TestSink.probe[SignalDispatcher.Signal])
      s.request(1L)
      s.expectNext(SignalDispatcher.Signal)
      sut.getRunningState should have size 1L
      sut.close()
      assertThrows[IllegalStateException](sut.getRunningState)
      assertThrows[IllegalStateException](sut.signal())
      s.expectComplete()
      succeed
    }
  }

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
    test.apply(SignalDispatcher())

  override type FixtureParam = SignalDispatcher

  override def timeLimit: Span = scaled(10.seconds)
}
Example 29
Source File: AkkaStreamUtils.scala From akka-serialization-test with Apache License 2.0
package com.github.dnvriend

import akka.NotUsed
import akka.stream.scaladsl.Source
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink

import scala.collection.immutable.Seq
import scala.concurrent.duration._

trait AkkaStreamUtils { _: TestSpec ⇒
  implicit class SourceOps[A, M](src: Source[A, M]) {
    def testProbe(f: TestSubscriber.Probe[A] ⇒ Unit): Unit = {
      val tp = src.runWith(TestSink.probe(system))
      tp.within(10.seconds)(f(tp))
    }
  }

  def withIteratorSrc[T](start: Int = 0)(f: Source[Int, NotUsed] ⇒ Unit): Unit =
    f(Source.fromIterator(() ⇒ Iterator from start))

  def fromCollectionProbe[A](xs: Seq[A])(f: TestSubscriber.Probe[A] ⇒ Unit): Unit =
    f(Source(xs).runWith(TestSink.probe(system)))
}
Example 30
Source File: TestSpec.scala From intro-to-akka-streams with Apache License 2.0
package com.github.dnvriend.streams

import akka.NotUsed
import akka.actor.{ ActorRef, ActorSystem, PoisonPill }
import akka.event.{ Logging, LoggingAdapter }
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestProbe
import akka.util.Timeout
import com.github.dnvriend.streams.util.ClasspathResources
import org.scalatest._
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.inject.BindingKey
import play.api.libs.json.{ Format, Json }
import play.api.test.WsTestClient

import scala.collection.immutable._
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.ClassTag
import scala.util.Try

object Person {
  implicit val format: Format[Person] = Json.format[Person]
}

final case class Person(firstName: String, age: Int)

class TestSpec extends FlatSpec
    with Matchers
    with GivenWhenThen
    with OptionValues
    with TryValues
    with ScalaFutures
    with WsTestClient
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Eventually
    with ClasspathResources
    with GuiceOneServerPerSuite {

  def getComponent[A: ClassTag] = app.injector.instanceOf[A]

  def getNamedComponent[A](name: String)(implicit ct: ClassTag[A]): A =
    app.injector.instanceOf[A](BindingKey(ct.runtimeClass.asInstanceOf[Class[A]]).qualifiedWith(name))

  // set the port number of the HTTP server
  override lazy val port: Int = 8081
  implicit val timeout: Timeout = 1.second
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 30.seconds, interval = 300.millis)
  implicit val system: ActorSystem = getComponent[ActorSystem]
  implicit val ec: ExecutionContext = getComponent[ExecutionContext]
  implicit val mat: Materializer = getComponent[Materializer]
  val log: LoggingAdapter = Logging(system, this.getClass)

  // ================================== Supporting Operations ====================================
  def id: String = java.util.UUID.randomUUID().toString

  implicit class FutureToTry[T](f: Future[T]) {
    def toTry: Try[T] = Try(f.futureValue)
  }

  implicit class SourceOps[A](src: Source[A, NotUsed]) {
    def testProbe(f: TestSubscriber.Probe[A] ⇒ Unit): Unit =
      f(src.runWith(TestSink.probe(system)))
  }

  def withIterator[T](start: Int = 0)(f: Source[Int, NotUsed] ⇒ T): T =
    f(Source.fromIterator(() ⇒ Iterator from start))

  def fromCollection[A](xs: Iterable[A])(f: TestSubscriber.Probe[A] ⇒ Unit): Unit =
    f(Source(xs).runWith(TestSink.probe(system)))

  def killActors(refs: ActorRef*): Unit = {
    val tp = TestProbe()
    refs.foreach { ref ⇒
      tp watch ref
      tp.send(ref, PoisonPill)
      tp.expectTerminated(ref)
    }
  }
}
Example 31
Source File: FailureTest.scala From intro-to-akka-streams with Apache License 2.0
package com.github.dnvriend.streams.failure

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

import scala.concurrent.Future

class FailureTest extends TestSpec {
  def failedFuture: Any ⇒ Future[Nothing] = (_: Any) ⇒ Future.failed(new Throwable("Failure"))

  it should "fail a stream with a Future.failed" in {
    withIterator() { src ⇒
      src.take(5)
        .mapAsync(1)(failedFuture)
        .runWith(TestSink.probe[Seq[Int]])
        .request(Integer.MAX_VALUE)
        .expectError()
    }
  }

  it should "fail the resulting future with a Future.failed" in {
    withIterator() { src ⇒
      src.take(5).mapAsync(1)(failedFuture).runForeach(_ ⇒ ()).toTry should be a 'failure
    }
  }

  def throwException = (_: Any) ⇒ throw new RuntimeException("Failure")

  it should "fail a stream when throwing an Exception" in {
    withIterator() { src ⇒
      src.take(5)
        .map(throwException)
        .runWith(TestSink.probe[Seq[Int]])
        .request(Integer.MAX_VALUE)
        .expectError()
    }
  }

  it should "fail the resulting future when throwing an Exception" in {
    withIterator() { src ⇒
      src.take(5).mapAsync(1)(throwException).runForeach(_ ⇒ ()).toTry should be a 'failure
    }
  }
}
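Both variants abort the stream by default. For contrast, a minimal sketch of how a supervision strategy changes this behaviour: with a resuming decider the failing elements are simply dropped (a hypothetical extra test, reusing withIterator and throwException from the spec above):

import akka.stream.ActorAttributes.supervisionStrategy
import akka.stream.Supervision

it should "complete the stream when failing elements are dropped by supervision" in {
  withIterator() { src ⇒
    src.take(5)
      .map(throwException)
      .withAttributes(supervisionStrategy(Supervision.resumingDecider))
      .runWith(TestSink.probe[Any])
      .request(Integer.MAX_VALUE)
      .expectComplete() // all five elements failed and were dropped, so nothing is emitted
  }
}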
Example 32
Source File: Ex2CustomMapTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.customstage

import akka.stream.{ Outlet, Inlet, Attributes, FlowShape }
import akka.stream.stage._
import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class Ex2CustomMapTest extends TestSpec {

  "CustomMapStage" should "be implemented with a PushPullStage" in {
    withIterator(1) { src ⇒
      src.transform(() ⇒ new CustomMapStage(_ * 2))
        .take(2)
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectNext(2, 4)
        .expectComplete()
    }
  }

  it should "also be implemented as a GraphStage" in {
    class CustomMapStage[A, B](f: A ⇒ B) extends GraphStage[FlowShape[A, B]] {
      val in = Inlet[A]("Map.in")
      val out = Outlet[B]("Map.out")

      override def shape: FlowShape[A, B] = FlowShape.of(in, out)

      override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
        setHandler(in, new InHandler {
          override def onPush(): Unit = push(out, f(grab(in)))
        })
        setHandler(out, new OutHandler {
          override def onPull(): Unit = pull(in)
        })
      }
    }

    withIterator(1) { src ⇒
      src.via(new CustomMapStage(_ * 2))
        .take(2)
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectNext(2, 4)
        .expectComplete()
    }
  }
}
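The two tests show the migration path for custom operators: transform with a PushPullStage is the older callback-pair API, while the GraphStage rewrite expresses the same map logic with explicit in/out handlers and is plugged in through via. As far as the Akka release history goes, PushPullStage (and Source.transform) were deprecated during the 2.4 series and later removed, so the GraphStage variant is the forward-compatible one.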
Example 33
Source File: Ex4StatefulStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.customstage

import akka.stream.stage._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import com.github.dnvriend.streams.TestSpec

class Ex4StatefulStageTest extends TestSpec {

  // (test description assumed: the original heading was lost in extraction)
  "StatefulStage" should "sum even and uneven numbers, emitting every 10 elements" in {
    withIterator(1) { src ⇒
      src.take(20)
        .transform(() ⇒ new SumEvenAndUnevenNumbersCollector(_ % 10 == 0)) // emit every 10 elements
        .runWith(TestSink.probe[(Int, Int)])
        .request(Int.MaxValue)
        .expectNext((20, 25), (60, 75))
        .expectComplete()
    }
  }

  it should "be implemented as a GraphShape" in {
    // as the StatefulStage will be deprecated, let's look at how to handle state in a GraphShape
    class CustomDuplicatorStage[A]() extends GraphStage[FlowShape[A, A]] {
      val in = Inlet[A]("Duplicator.in")
      val out = Outlet[A]("Duplicator.out")

      override def shape: FlowShape[A, A] = FlowShape.of(in, out)

      override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
        // note: all mutable state must be inside the GraphStageLogic
        var lastElem: Option[A] = None

        setHandler(in, new InHandler {
          override def onPush(): Unit = {
            val elem = grab(in)
            lastElem = Some(elem)
            push(out, elem)
          }

          override def onUpstreamFinish(): Unit = {
            if (lastElem.isDefined) emit(out, lastElem.get)
            complete(out)
          }
        })

        setHandler(out, new OutHandler {
          override def onPull(): Unit = {
            if (lastElem.isDefined) {
              push(out, lastElem.get)
              lastElem = None
            } else {
              pull(in)
            }
          }
        })
      }
    }

    withIterator(1) { src ⇒
      src.take(2)
        .via(new CustomDuplicatorStage)
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectNext(1, 1, 2, 2)
        .expectComplete()
    }
  }
}
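Note the asymmetry in the duplicator's handlers: onPull may call push directly because a pull guarantees downstream demand, but onUpstreamFinish cannot assume demand, so it uses emit(out, lastElem.get), which buffers the element and temporarily replaces the out handler until demand arrives; only then does the complete(out) take effect.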
Example 34
Source File: Ex3CustomFilterTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.customstage

import akka.stream.stage._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import com.github.dnvriend.streams.TestSpec

class Ex3CustomFilterTest extends TestSpec {

  "CustomFilterStage" should "be implemented with a PushPullStage" in {
    withIterator(1) { src ⇒
      src.transform(() ⇒ new CustomFilterStage(_ % 2 == 0))
        .take(5)
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectNext(2, 4, 6, 8, 10)
        .expectComplete()
    }
  }

  it should "also be implemented as a GraphStage" in {
    class CustomFilterStage[A](p: A ⇒ Boolean) extends GraphStage[FlowShape[A, A]] {
      val in = Inlet[A]("Filter.in")
      val out = Outlet[A]("Filter.out")

      override def shape = FlowShape.of(in, out)

      override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
        setHandler(in, new InHandler {
          override def onPush(): Unit = {
            val elem: A = grab(in)
            if (p(elem)) push(out, elem) else pull(in)
          }
        })
        setHandler(out, new OutHandler {
          override def onPull(): Unit = pull(in)
        })
      }
    }

    withIterator(1) { src ⇒
      src.via(new CustomFilterStage(_ % 2 == 0))
        .take(5)
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectNext(2, 4, 6, 8, 10)
        .expectComplete()
    }
  }
}
Example 35
Source File: TakeWhileStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class TakeWhileStageTest extends TestSpec {
  "TakeWhile" should "emit elements while the predicate is true, and complete when the predicate turns false" in {
    withIterator() { src ⇒
      src.takeWhile(_ < 5)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2, 3, 4)
        .expectComplete()
    }
  }
}
Example 36
Source File: FilterStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class FilterStageTest extends TestSpec {
  "Filtering a sequence of numbers for even numbers" should "emit only even numbers" in {
    withIterator() { src ⇒
      src.take(10)
        .filter(_ % 2 == 0)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 2, 4, 6, 8)
        .expectComplete()
    }
  }
}
Example 37
Source File: FoldStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class FoldStageTest extends TestSpec {
  "Fold" should "emit only a single element when the upstream completes" in {
    withIterator() { src ⇒
      src.take(4)
        .fold(0) { (c, _) ⇒ c + 1 }
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(4)
        .expectComplete()
    }
  }
}
Example 38
Source File: DropWhileStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class DropWhileStageTest extends TestSpec {
  "DropWhile" should "discard elements while the predicate is true, then emit all remaining elements" in {
    withIterator() { src ⇒
      src.take(10)
        .dropWhile(_ < 5)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(5, 6, 7, 8, 9)
        .expectComplete()
    }
  }
}
Example 39
Source File: CollectStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class CollectStageTest extends TestSpec {
  it should "emit only elements on which the partial function is defined" in {
    withIterator() { src ⇒
      src.take(10)
        .collect { case e if e < 5 ⇒ e }
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2, 3, 4)
        .expectComplete()
    }
  }

  it should "transform the stream by applying the partial function" in {
    withIterator() { src ⇒
      src.take(10)
        .collect {
          case e if e < 5           ⇒ e.toString
          case e if e >= 5 && e < 8 ⇒ (e * 2).toString
        }
        .runWith(TestSink.probe[String])
        .request(Integer.MAX_VALUE)
        .expectNext("0", "1", "2", "3", "4", "10", "12", "14")
        .expectComplete()
    }
  }

  it should "transform the stream by applying the partial function for each element" in {
    withIterator() { src ⇒
      src.take(10)
        .collect {
          case e if e < 5           ⇒ e.toString
          case e if e >= 5 && e < 8 ⇒ (e * 2).toString
          case _                    ⇒ "UNKNOWN"
        }
        .runWith(TestSink.probe[String])
        .request(Integer.MAX_VALUE)
        .expectNext("0", "1", "2", "3", "4", "10", "12", "14", "UNKNOWN", "UNKNOWN")
        .expectComplete()
    }
  }
}
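Conceptually, collect fuses a filter and a map through one partial function. A minimal equivalence sketch (the names pf and src are illustrative only):

val pf: PartialFunction[Int, String] = { case e if e < 5 ⇒ e.toString }

// collect(pf) keeps only elements where pf is defined and transforms them,
// behaving like the explicit two-stage pipeline:
src.collect(pf) // ≈ src.filter(pf.isDefinedAt).map(pf)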
Example 40
Source File: TrackerImplTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.apiserver.services.tracking

import akka.NotUsed
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Keep, Source, SourceQueueWithComplete}
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import com.daml.ledger.api.testing.utils.{
  AkkaBeforeAndAfterAll,
  IsStatusException,
  TestingException
}
import com.daml.ledger.api.v1.command_service.SubmitAndWaitRequest
import com.daml.ledger.api.v1.commands.Commands
import com.daml.ledger.api.v1.completion.Completion
import com.daml.dec.DirectExecutionContext
import com.google.rpc.status.{Status => RpcStatus}
import io.grpc.Status
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterEach, Matchers, Succeeded, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global

class TrackerImplTest
    extends WordSpec
    with Matchers
    with BeforeAndAfterEach
    with ScalaFutures
    with AkkaBeforeAndAfterAll {

  private var sut: Tracker = _
  private var consumer: TestSubscriber.Probe[NotUsed] = _
  private var queue: SourceQueueWithComplete[TrackerImpl.QueueInput] = _

  private def input(cid: Int) = SubmitAndWaitRequest(Some(Commands(commandId = cid.toString)))

  override protected def beforeEach(): Unit = {
    val (q, sink) = Source
      .queue[TrackerImpl.QueueInput](1, OverflowStrategy.dropNew)
      .map { in =>
        in.context.success(Completion(in.value.getCommands.commandId, Some(RpcStatus())))
        NotUsed
      }
      .toMat(TestSink.probe[NotUsed])(Keep.both)
      .run()
    queue = q
    sut = new TrackerImpl(q)
    consumer = sink
  }

  override protected def afterEach(): Unit = {
    consumer.cancel()
    queue.complete()
  }

  "Tracker Implementation" when {
    "input is submitted, and the queue is available" should {
      "work successfully" in {
        val resultF1 = sut.track(input(1))
        consumer.requestNext()
        val resultF = resultF1.flatMap(_ => sut.track(input(2)))(DirectExecutionContext)
        consumer.requestNext()
        whenReady(resultF)(_ => Succeeded)
      }
    }

    "input is submitted, and the queue is backpressuring" should {
      "return a RESOURCE_EXHAUSTED error" in {
        sut.track(input(1))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.RESOURCE_EXHAUSTED))
      }
    }

    "input is submitted, and the queue has been completed" should {
      "return an ABORTED error" in {
        queue.complete()
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }

    "input is submitted, and the queue has failed" should {
      "return an ABORTED error" in {
        queue.fail(TestingException("The queue fails with this error."))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }
  }
}
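The backpressure case hinges on the queue's construction: a buffer of one with OverflowStrategy.dropNew means that while the first submission is still parked waiting for the consumer, offering a second input is dropped by the queue, which the tracker surfaces as a gRPC RESOURCE_EXHAUSTED status.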
Example 41
Source File: ScanStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class ScanStageTest extends TestSpec {
  "Scan" should "do the same as fold, but also emit each intermediate value downstream" in {
    withIterator() { src ⇒
      src.take(4)
        .scan(0) { (c, _) ⇒ c + 1 }
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2, 3, 4)
        .expectComplete()
    }
  }
}
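Compare this with the FoldStageTest above: for the same four-element input and the same counting function, fold(0) emits one element (the final count, 4) only when upstream completes, while scan(0) emits the zero value plus every intermediate count, so the probe sees five elements (0, 1, 2, 3, 4) for four inputs.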
Example 42
Source File: MapAsyncStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

import scala.concurrent.Future

class MapAsyncStageTest extends TestSpec {
  it should "transform the stream by applying the function to each element" in {
    withIterator() { src ⇒
      src.take(3)
        .mapAsync(1)(e ⇒ Future.successful(e * 2))
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 2, 4)
        .expectComplete()
    }
  }

  it should "emit an Error when the Future completes with a failure" in {
    withIterator() { src ⇒
      src.take(3)
        .mapAsync(1)(_ ⇒ Future.failed(new RuntimeException("")))
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectError()
    }
  }
}
Example 43
Source File: RecoverStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class RecoverStageTest extends TestSpec {
  "Recover" should "forward received elements during normal operation" in {
    withIterator() { src ⇒
      src.take(3)
        .recover { case _: RuntimeException ⇒ 1000 }
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2)
        .expectComplete()
    }
  }

  it should "emit 1000 when the stream fails, recovering the failure with a final element, after which the stream completes" in {
    withIterator() { src ⇒
      src.take(3)
        .collect {
          case 1 ⇒ throw new RuntimeException("Forced exception")
          case e ⇒ e
        }
        .recover { case _: RuntimeException ⇒ 1000 }
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNextUnordered(0, 1000)
        .expectComplete()
    }
  }
}
Example 44
Source File: MapStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class MapStageTest extends TestSpec {
  it should "transform the stream by applying the function to each element" in {
    withIterator() { src ⇒
      src.take(3)
        .map(_ * 2)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 2, 4)
        .expectComplete()
    }
  }

  it should "emit an Error when the map throws an Exception" in {
    withIterator() { src ⇒
      src.take(3)
        .map(_ ⇒ throw new RuntimeException(""))
        .runWith(TestSink.probe[Int])
        .request(Int.MaxValue)
        .expectError()
    }
  }
}
Example 45
Source File: DropStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class DropStageTest extends TestSpec {
  "Drop" should "discard the given number of elements at the beginning of the stream" in {
    withIterator() { src ⇒
      src.take(10)
        .drop(5)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(5, 6, 7, 8, 9)
        .expectComplete()
    }
  }
}
Example 46
Source File: MapConcatTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class MapConcatTest extends TestSpec {
  "MapConcat" should "transform each input element into an 'iterable' of output elements that is then flattened into the output stream" in {
    withIterator() { src ⇒
      src.take(3)
        .mapConcat(e ⇒ List(e, e, e))
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 0, 0, 1, 1, 1, 2, 2, 2)
        .expectComplete()
    }
  }

  it should "flatten two lists" in {
    withIterator() { src ⇒
      src.take(5)
        .grouped(3)
        .mapConcat(identity)
        .runWith(TestSink.probe[Int])
        .request(Integer.MAX_VALUE)
        .expectNext(0, 1, 2, 3, 4)
        .expectComplete()
    }
  }

  it should "split a stream into substreams and concatenate them back into one stream" in {
    // demonstrates the API only: the substreams are printed, not asserted
    withIterator() { src ⇒
      src.take(10)
        .splitWhen(_ < 3)
        .concatSubstreams
        .runForeach(println)
    }
  }
}
Example 47
Source File: GroupedStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.simple

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

class GroupedStageTest extends TestSpec {
  "Grouping a stream of numbers in sequences of three" should "result in two sequences" in {
    withIterator() { src ⇒
      src.take(5)
        .grouped(3)
        .runWith(TestSink.probe[Seq[Int]])
        .request(Integer.MAX_VALUE)
        .expectNext(List(0, 1, 2), List(3, 4))
        .expectComplete()
    }
  }
}
Example 48
Source File: TakeWithinStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.timer

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

import scala.concurrent.duration._

class TakeWithinStageTest extends TestSpec {
  "TakeWithin" should "take elements within the duration window; when the window has passed, the stream completes" in {
    withIterator() { src ⇒
      src.takeWithin(500.millis)
        .map { e ⇒ Thread.sleep(200); e }
        .runWith(TestSink.probe[Int])
        .request(5)
        .expectNext(0, 1, 2, 3, 4)
        .expectComplete()
    }
  }
}
Example 49
Source File: MapAsyncStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.async

import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

import scala.concurrent.Future

class MapAsyncStageTest extends TestSpec {
  "MapAsync" should "transform the stream by applying the function to each element" in {
    withIterator() { src ⇒
      src.take(3)
        .mapAsync(2)(num ⇒ Future(num * 2))
        .runWith(TestSink.probe[Int])
        .request(4)
        .expectNext(0, 2, 4)
        .expectComplete()
    }
  }
}
Example 50
Source File: MapAsyncUnorderedStageTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.stage.async

import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink
import com.github.dnvriend.streams.TestSpec

import scala.concurrent.Future

class MapAsyncUnorderedStageTest extends TestSpec {
  "MapAsyncUnordered" should "transform the stream by applying the function to each element" in {
    withIterator() { src ⇒
      src.take(10)
        .mapAsyncUnordered(4)(num ⇒ Future(num * 2))
        .runWith(TestSink.probe[Int])
        .request(11)
        .expectNextUnordered(0, 2, 4, 6, 8, 10, 12, 14, 16, 18)
        .expectComplete()
    }
  }
}
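The contrast with the ordered MapAsyncStageTest above is the ordering guarantee: mapAsync buffers completed futures so that results are emitted in upstream order even with parallelism greater than one, whereas mapAsyncUnordered emits each result as soon as its future completes, which is why this probe has to use expectNextUnordered.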
Example 51
Source File: BatchWriteStageSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.stream

import akka.actor._
import akka.stream._
import akka.stream.scaladsl.Keep
import akka.stream.testkit._
import akka.stream.testkit.scaladsl.{ TestSink, TestSource }
import akka.pattern
import akka.testkit._
import com.rbmhtechnology.eventuate.DurableEvent
import org.scalatest._

import scala.collection.immutable.Seq
import scala.concurrent._
import scala.concurrent.duration._
import scala.util.Random

class BatchWriteStageSpec extends TestKit(ActorSystem("test"))
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with BeforeAndAfterEach {

  import BatchWriteStage.BatchWriter

  private val settings: DurableEventWriterSettings =
    new DurableEventWriterSettings(system.settings.config)

  implicit val materializer: Materializer = ActorMaterializer()

  private var src: TestPublisher.Probe[Seq[DurableEvent]] = _
  private var snk: TestSubscriber.Probe[Seq[DurableEvent]] = _

  override def beforeEach(): Unit = {
    val probes = TestSource.probe[Seq[DurableEvent]]
      .via(new BatchWriteStage(ec => writer(ec)))
      .toMat(TestSink.probe[Seq[DurableEvent]])(Keep.both)
      .run()

    src = probes._1
    snk = probes._2
  }

  override def afterEach(): Unit = {
    snk.cancel()
  }

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  private def random: Int =
    Random.nextInt(100)

  private def writer(implicit ec: ExecutionContext): BatchWriter = events =>
    if (events.exists(_.payload == "boom")) Future(throw TestException)
    else pattern.after(random.millis, system.scheduler)(Future(events))

  "A BatchWriterStage" must {
    "write batches sequentially" in {
      val b1 = Seq("a", "b", "c").map(DurableEvent(_))
      val b2 = Seq("d", "e", "f").map(DurableEvent(_))
      val b3 = Seq("g", "h", "i").map(DurableEvent(_))

      snk.request(3)
      src.sendNext(b1)
      src.sendNext(b2)
      src.sendNext(b3)

      snk.expectNext() should be(b1)
      snk.expectNext() should be(b2)
      snk.expectNext() should be(b3)
    }
    "fail if the batch writer fails" in {
      val b = Seq("a", "boom", "c").map(DurableEvent(_))

      snk.request(3)
      src.sendNext(b)

      snk.expectError(TestException)
    }
  }
}
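The first test is what gives the spec its point: each write completes after a random delay of up to 100 ms, so if BatchWriteStage issued writes concurrently, batches could overtake one another and the ordered expectNext assertions would be flaky; they pass deterministically only because the stage starts the next write after the previous one finishes. (TestException is assumed to be defined elsewhere in the eventuate test sources.)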