akka.testkit.TestProbe Scala Examples
The following examples show how to use akka.testkit.TestProbe.
The original project and source file for each example are noted in the line above it.
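Before the project examples, here is a minimal, self-contained sketch of the typical TestProbe workflow: create a probe inside a TestKit-based spec, exchange messages with the actor under test, and assert on what the probe receives. The ParrotActor and the spec around it are illustrative assumptions written for this page, not code from any of the projects below; only the akka-testkit calls themselves (TestProbe(), probe.send, probe.expectMsg, probe.expectNoMsg) are the standard API.

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._

// Hypothetical actor under test: replies to every message by sending it back to the sender.
class ParrotActor extends Actor {
  def receive: Receive = {
    case msg => sender() ! msg
  }
}

class ParrotActorSpec(_system: ActorSystem) extends TestKit(_system)
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("ParrotActorSpec"))

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  "A ParrotActor" must {
    "echo messages back to the probe that sent them" in {
      val probe = TestProbe()
      val parrot = system.actorOf(Props(new ParrotActor), "parrot")

      // Sending through the probe makes probe.ref the sender,
      // so the reply arrives at the probe and can be asserted on.
      probe.send(parrot, "ping")
      probe.expectMsg(1.second, "ping")

      // Negative assertion: nothing else should arrive within the window.
      probe.expectNoMsg(100.millis)
    }
  }
}

The same pattern, a probe acting as sender or subscriber plus expectMsg/expectNoMsg-style assertions, recurs throughout the examples below.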
Example 1
Source File: CurrentPersistenceIdsQuerySourceTest.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend.spark.sstreaming

import java.util.UUID
import java.util.concurrent.atomic.AtomicLong

import akka.actor.{ ActorRef, Props }
import akka.persistence.PersistentActor
import akka.testkit.TestProbe
import com.github.dnvriend.TestSpec
import com.github.dnvriend.spark.datasources.SparkImplicits._
import com.github.dnvriend.spark.datasources.person.Person
import org.apache.spark.sql.streaming.{ OutputMode, ProcessingTime }
import org.scalatest.Ignore

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.language.implicitConversions

object PersonActor {
  final case class BlogPost(id: Long, text: String)
}

class PersonActor(val persistenceId: String, schedule: Boolean)(implicit ec: ExecutionContext) extends PersistentActor {
  val counter = new AtomicLong()
  def ping() = context.system.scheduler.scheduleOnce(200.millis, self, "persist")
  def randomId: String = UUID.randomUUID.toString
  override val receiveRecover: Receive = PartialFunction.empty
  override val receiveCommand: Receive = {
    case "persist" =>
      persist(Person(counter.incrementAndGet(), s"foo-$randomId", 20)) { _ =>
        sender() ! "ack"
      }
      if (schedule) ping()
  }
  if (schedule) ping()
}

@Ignore
class CurrentPersistenceIdsQuerySourceTest extends TestSpec {
  def withPersistentActor(pid: String = randomId, schedule: Boolean = false)(f: ActorRef => TestProbe => Unit): Unit = {
    val tp = TestProbe()
    val ref = system.actorOf(Props(new PersonActor(pid, schedule)))
    try f(ref)(tp) finally killActors(ref)
  }

  it should "query read journal" in withSparkSession { spark =>
    withPersistentActor() { ref => tp =>
      tp.send(ref, "persist")
      tp.expectMsg("ack")

      val jdbcReadJournal = spark.readStream
        .currentPersistenceIds("jdbc-read-journal")

      jdbcReadJournal.printSchema()

      println("Is the query streaming: " + jdbcReadJournal.isStreaming)
      println("Are there any streaming queries? " + spark.streams.active.isEmpty)

      val query = jdbcReadJournal
        .writeStream
        .format("console")
        .trigger(ProcessingTime(1.seconds))
        .queryName("consoleStream")
        .outputMode(OutputMode.Append())
        .start()

      query.awaitTermination(10.seconds)
    }
  }
}
Example 2
Source File: MinMaxActorSpec.scala From coral with Apache License 2.0
package io.coral.actors.transform

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._

@RunWith(classOf[JUnitRunner])
class MinMaxActorSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  implicit val timeout = Timeout(100.millis)
  implicit val formats = org.json4s.DefaultFormats
  implicit val injector = new DefaultModule(system.settings.config)

  def this() = this(ActorSystem("ZscoreActorSpec"))

  override def afterAll() {
    TestKit.shutdownActorSystem(system)
  }

  "A MinMaxActor" must {
    val createJson = parse(
      """{ "type": "minmax", "params": { "field": "field1", "min": 10.0, "max": 13.5 }}"""
        .stripMargin).asInstanceOf[JObject]

    implicit val injector = new DefaultModule(system.settings.config)
    val props = CoralActorFactory.getProps(createJson).get
    val threshold = TestActorRef[MinMaxActor](props)

    // subscribe the testprobe for emitting
    val probe = TestProbe()
    threshold.underlyingActor.emitTargets += probe.ref

    "Emit the minimum when lower than the min" in {
      val json = parse( """{"field1": 7 }""").asInstanceOf[JObject]
      threshold ! json
      probe.expectMsg(parse( """{ "field1": 10.0 }"""))
    }

    "Emit the maximum when higher than the max" in {
      val json = parse( """{"field1": 15.3 }""").asInstanceOf[JObject]
      threshold ! json
      probe.expectMsg(parse( """{"field1": 13.5 }"""))
    }

    "Emit the value itself when between the min and the max" in {
      val json = parse( """{"field1": 11.7 }""").asInstanceOf[JObject]
      threshold ! json
      probe.expectMsg(parse( """{"field1": 11.7 }"""))
    }

    "Emit object unchanged when key is not present in triggering json" in {
      val json = parse( """{"otherfield": 15.3 }""").asInstanceOf[JObject]
      threshold ! json
      probe.expectMsg(parse( """{"otherfield": 15.3 }"""))
    }
  }
}
Example 3
Source File: LinearRegressionActorSpec.scala From coral with Apache License 2.0
package io.coral.actors.transform

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestProbe, TestActorRef, ImplicitSender, TestKit}
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import akka.util.Timeout
import org.json4s.native.Serialization.write
import scala.concurrent.duration._

class LinearRegressionActorSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {
  def this() = this(ActorSystem("LinearRegressionActorSpec"))

  implicit val timeout = Timeout(100.millis)
  implicit val injector = new DefaultModule(system.settings.config)

  override def afterAll() {
    TestKit.shutdownActorSystem(system)
  }

  def createLinearRegressionActor(intercept: Double, weights: Map[String, Double]) = {
    implicit val formats = DefaultFormats
    val str =
      s"""{ "type":"linearregression",
         |"params": { "intercept": $intercept,
         |"weights": ${write(weights)}
         |}}""".stripMargin

    val createJson = parse(str).asInstanceOf[JObject]
    val props = CoralActorFactory.getProps(createJson).get
    val actorTestRef = TestActorRef[LinearRegressionActor](props)

    val probe = TestProbe()
    actorTestRef.underlyingActor.emitTargets += probe.ref
    (actorTestRef, probe)
  }

  "LinearRegressionActor" should {
    "Instantiate from companion object" in {
      val (actor, _) = createLinearRegressionActor(0, Map("salary" -> 2000))
      actor.underlyingActor.intercept should be(0)
      actor.underlyingActor.weights should be(Map("salary" -> 2000))
    }

    "process trigger data when all the features are available even with different order" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 0.1))
      val message = parse( s"""{"salary": 4000, "age": 40}""").asInstanceOf[JObject]
      actor ! message
      probe.expectMsg(parse( s"""{"score": 408.0, "salary": 4000, "age": 40}"""))
    }

    "emit when score is calculated" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("salary" -> 10))
      val message = parse( s"""{"salary": 2000}""").asInstanceOf[JObject]
      actor ! message
      probe.expectMsg(parse( s"""{"score": 20000.0, "salary": 2000}"""))
    }

    "not emit when keys are missing" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 10))
      val message = parse( s"""{"salary": 2000}""").asInstanceOf[JObject]
      actor ! message
      probe.expectNoMsg
    }
  }
}
Example 4
Source File: SampleActorSpec.scala From coral with Apache License 2.0
package io.coral.actors.transform

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import akka.util.Timeout
import io.coral.lib.{NotSoRandom, Random}
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._
import scala.language.postfixOps

class SampleActorSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll
  with ScalaFutures {

  def this() = this(ActorSystem("SampleActorSpec"))

  override def afterAll() {
    TestKit.shutdownActorSystem(system)
  }

  def arbitrarySampleActor(): SampleActor = {
    val json = parse(
      """{ "type": "sample",
        | "params": { "fraction": 0.707 } }
      """.stripMargin)
    val props = SampleActor(json).get
    TestActorRef[SampleActor](props).underlyingActor
  }

  def notSoRandomSampleActor(fraction: Double, randoms: Double*): SampleActor = {
    val json = parse(
      s"""{ "type": "sample",
         | "params": { "fraction": ${fraction} } }
       """.stripMargin)
    val source = NotSoRandom(randoms: _*)
    val props = Props(classOf[SampleActor], json, Random(source))
    TestActorRef[SampleActor](props).underlyingActor
  }

  implicit val timeout = Timeout(100 millis)

  "A SampleActor" should {
    "Be instantiated with sample fraction" in {
      val json = parse("""{ "type": "sample", "params": { "fraction": 0.5 }}""".stripMargin)
      val props = SampleActor(json).get
      props.actorClass() should be(classOf[SampleActor])
      val actor = TestActorRef[SampleActor](props).underlyingActor
      actor.fraction should be(0.5)
    }

    "Not be instantiated without fraction or percentage" in {
      val json = parse("""{ "type": "sample", "params": { "bla": "blabla" }}""".stripMargin)
      SampleActor(json) should be(None)
    }

    "Be constructible with a io.coral.lib.Random for random boolean stream" in {
      val actor = notSoRandomSampleActor(fraction = 0.5, randoms = 0.1, 0.49, 0.50, 0.51, 0.8, 0.4)
      actor.next() should be(true)
      actor.next() should be(true)
      actor.next() should be(false)
      actor.next() should be(false)
      actor.next() should be(false)
      actor.next() should be(true)
    }

    "Should trigger true or false according to random binomial sequence" in {
      val actor = notSoRandomSampleActor(fraction = 0.7, randoms = 0.8, 0.6)
      val json = parse( """{ "something": "whatever" }""").asInstanceOf[JObject]

      val result1 = actor.simpleEmitTrigger(json)
      result1 should be(Some(JNothing))

      val result2 = actor.simpleEmitTrigger(json)
      result2 should be(Some(json))
    }

    "Should have trigger and emit cooperate" in {
      val actor = notSoRandomSampleActor(fraction = 0.7, randoms = 0.6, 0.8)
      val ref = actor.self
      val json = parse( """{ "something": "whatever" }""").asInstanceOf[JObject]
      val probe = TestProbe()
      actor.emitTargets += probe.ref
      ref ! json
      probe.expectMsg(json)
      ref ! json
      probe.expectNoMsg(100 millis)
    }
  }
}
Example 5
Source File: RequestDSL.scala From twitter4s with Apache License 2.0
package com.danielasfregola.twitter4s.helpers

import java.text.SimpleDateFormat
import java.util.Locale

import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.RawHeader
import akka.testkit.TestProbe
import com.danielasfregola.twitter4s.entities.RateLimit
import org.specs2.specification.AfterEach

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

abstract class RequestDSL extends TestActorSystem with FixturesSupport with AfterEach {

  def after = system.terminate

  private val timeout = 10 seconds

  val headers = List(RawHeader("x-rate-limit-limit", "15"),
                     RawHeader("x-rate-limit-remaining", "14"),
                     RawHeader("x-rate-limit-reset", "1445181993"))

  val rateLimit = {
    val dateFormatter = new SimpleDateFormat("EEE MMM dd HH:mm:ss ZZZZ yyyy", Locale.ENGLISH)
    val resetDate = dateFormatter.parse("Sun Oct 18 15:26:33 +0000 2015").toInstant
    new RateLimit(limit = 15, remaining = 14, reset = resetDate)
  }

  protected val transport = TestProbe()

  def when[T](future: Future[T]): RequestMatcher[T] = new RequestMatcher(future)

  class RequestMatcher[T](future: Future[T]) {
    protected def responder = new Responder(future)

    def expectRequest(req: HttpRequest): Responder[T] = {
      transport.expectMsg(timeout, req)
      responder
    }

    def expectRequest(fn: HttpRequest => Unit) = {
      transport.expectMsgPF(timeout) {
        case req: HttpRequest => fn(req)
      }
      responder
    }
  }

  class Responder[T](future: Future[T]) {
    def respondWith(response: HttpResponse): Await[T] = {
      transport.reply(response)
      new Await(future)
    }

    def respondWith(resourcePath: String): Await[T] =
      respondWith(HttpResponse(StatusCodes.OK,
        entity = HttpEntity(MediaTypes.`application/json`, load(resourcePath))))

    def respondWithRated(resourcePath: String): Await[T] =
      respondWith(
        HttpResponse(StatusCodes.OK,
          headers = headers,
          entity = HttpEntity(MediaTypes.`application/json`, load(resourcePath))))

    def respondWithOk: Await[Unit] = {
      val response =
        HttpResponse(StatusCodes.OK, entity = HttpEntity(MediaTypes.`application/json`, """{"code": "OK"}"""))
      transport.reply(response)
      new Await(Future.successful((): Unit))
    }
  }

  class Await[T](future: Future[T]) {
    private[helpers] val underlyingFuture = future

    def await(implicit duration: FiniteDuration = 20 seconds) =
      Await.result(future, duration)
  }

  implicit def awaitToReqMatcher[T](await: Await[T]) =
    new RequestMatcher(await.underlyingFuture)
}
Example 6
Source File: KafkaProducerActorSpec.scala From scala-kafka-client with MIT License
package cakesolutions.kafka.akka

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import cakesolutions.kafka.{KafkaConsumer, KafkaProducer, KafkaProducerRecord}
import org.apache.kafka.clients.consumer.OffsetResetStrategy
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}

import scala.util.Random

class KafkaProducerActorSpec(system_ : ActorSystem) extends KafkaIntSpec(system_) {

  def this() = this(ActorSystem("KafkaProducerActorSpec"))

  private def randomString: String = Random.alphanumeric.take(5).mkString("")

  val deserializer = new StringDeserializer
  val consumerConf = KafkaConsumer.Conf(
    deserializer, deserializer,
    bootstrapServers = s"localhost:$kafkaPort",
    groupId = "test",
    enableAutoCommit = false,
    autoOffsetReset = OffsetResetStrategy.EARLIEST
  )

  val serializer = new StringSerializer
  val producerConf = KafkaProducer.Conf(serializer, serializer, bootstrapServers = s"localhost:$kafkaPort")

  "KafkaProducerActor" should "write a given batch to Kafka" in {
    val topic = randomString
    val probe = TestProbe()
    val producer = system.actorOf(KafkaProducerActor.props(producerConf))

    val batch: Seq[ProducerRecord[String, String]] = Seq(
      KafkaProducerRecord(topic, "foo"),
      KafkaProducerRecord(topic, "key", "value"),
      KafkaProducerRecord(topic, "bar"))

    val message = ProducerRecords(batch, Some('response))

    probe.send(producer, message)
    probe.expectMsg('response)

    val results = consumeFromTopic(topic, 3, 10000)

    results(0) shouldEqual ((None, "foo"))
    results(1) shouldEqual ((Some("key"), "value"))
    results(2) shouldEqual ((None, "bar"))
  }

  "KafkaProducerActor" should "write a given batch to Kafka, requiring no response" in {
    import scala.concurrent.duration._
    val topic = randomString
    val probe = TestProbe()
    val producer = system.actorOf(KafkaProducerActor.props(producerConf))

    val batch: Seq[ProducerRecord[String, String]] = Seq(
      KafkaProducerRecord(topic, "foo"),
      KafkaProducerRecord(topic, "key", "value"),
      KafkaProducerRecord(topic, "bar")
    )

    val message = ProducerRecords(batch)

    probe.send(producer, message)
    probe.expectNoMessage(3.seconds)

    val results = consumeFromTopic(topic, 3, 10000)

    results(0) shouldEqual ((None, "foo"))
    results(1) shouldEqual ((Some("key"), "value"))
    results(2) shouldEqual ((None, "bar"))
  }

  private def consumeFromTopic(topic: String, expectedNumOfMessages: Int, timeout: Long) =
    kafkaServer.consume(topic, expectedNumOfMessages, timeout, deserializer, deserializer)
}
Example 7
Source File: ClusterInternalsPublisherSpec.scala From lithium with Apache License 2.0
package akka.cluster.swissborg

import akka.actor.ActorSystem
import akka.cluster.ClusterEvent.{ReachabilityChanged, SeenChanged}
import akka.cluster.Reachability
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import com.swissborg.lithium.internals.{LithiumReachabilityChanged, LithiumSeenChanged}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.wordspec.AnyWordSpecLike

import scala.collection.immutable.IndexedSeq
import org.scalatest.matchers.should.Matchers

class ClusterInternalsPublisherSpec
    extends TestKit(ActorSystem("lithium"))
    with ImplicitSender
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll {

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  "ClusterInternalsPublisher" must {
    "convert and publish ReachabilityChanged events" in {
      system.actorOf(ClusterInternalsPublisher.props)

      val probe = TestProbe()
      system.eventStream.subscribe(probe.ref, classOf[LithiumReachabilityChanged])
      system.eventStream.publish(ReachabilityChanged(Reachability(IndexedSeq.empty[Reachability.Record], Map.empty)))

      probe.expectMsgType[LithiumReachabilityChanged]
    }

    "convert and publish SeenChanged events" in {
      system.actorOf(ClusterInternalsPublisher.props)

      val probe = TestProbe()
      system.eventStream.subscribe(probe.ref, classOf[LithiumSeenChanged])
      system.eventStream.publish(SeenChanged(false, Set.empty))

      probe.expectMsg(LithiumSeenChanged(false, Set.empty))
    }
  }
}
Example 8
Source File: BotPluginTestKit.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.test

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import com.sumologic.sumobot.core.model.{IncomingMessage, InstantMessageChannel, OutgoingMessage, UserSender}
import org.scalatest.BeforeAndAfterAll
import slack.models.User

import scala.concurrent.duration.{FiniteDuration, _}

@deprecated("use com.sumologic.sumobot.test.annotated.BotPluginTestKit", "1.0.2")
class BotPluginTestKit(_system: ActorSystem)
  extends TestKit(_system)
  with SumoBotSpec
  with BeforeAndAfterAll {

  protected val outgoingMessageProbe = TestProbe()
  system.eventStream.subscribe(outgoingMessageProbe.ref, classOf[OutgoingMessage])

  protected def confirmOutgoingMessage(test: OutgoingMessage => Unit, timeout: FiniteDuration = 1.second): Unit = {
    outgoingMessageProbe.expectMsgClass(timeout, classOf[OutgoingMessage]) match {
      case msg: OutgoingMessage =>
        test(msg)
    }
  }

  protected def instantMessage(text: String, user: User = mockUser("123", "jshmoe")): IncomingMessage = {
    IncomingMessage(text, true, InstantMessageChannel("125", user), "1527239216000090", sentBy = UserSender(user))
  }

  protected def mockUser(id: String, name: String): User = {
    User(id, name, None, None, None, None, None, None, None, None, None, None, None, None, None, None)
  }

  protected def send(message: IncomingMessage): Unit = {
    system.eventStream.publish(message)
  }

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
}
Example 9
Source File: BotPluginTestKit.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.test.annotated

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import com.sumologic.sumobot.core.model.{IncomingMessage, InstantMessageChannel, OutgoingMessage, UserSender}
import org.junit.runner.RunWith
import org.scalatest.concurrent.Eventually
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import slack.models.User

import scala.concurrent.duration.{FiniteDuration, _}

@RunWith(classOf[JUnitRunner])
abstract class BotPluginTestKit(actorSystem: ActorSystem)
  extends TestKit(actorSystem)
  with WordSpecLike with Eventually with Matchers with BeforeAndAfterAll {

  protected val outgoingMessageProbe = TestProbe()
  system.eventStream.subscribe(outgoingMessageProbe.ref, classOf[OutgoingMessage])

  protected def confirmOutgoingMessage(test: OutgoingMessage => Unit, timeout: FiniteDuration = 1.second): Unit = {
    outgoingMessageProbe.expectMsgClass(timeout, classOf[OutgoingMessage]) match {
      case msg: OutgoingMessage =>
        test(msg)
    }
  }

  protected def instantMessage(text: String, user: User = mockUser("123", "jshmoe")): IncomingMessage = {
    IncomingMessage(text, true, InstantMessageChannel("125", user), "1527239216000090", sentBy = UserSender(user))
  }

  protected def mockUser(id: String, name: String): User = {
    User(id, name, None, None, None, None, None, None, None, None, None, None, None, None, None, None)
  }

  protected def send(message: IncomingMessage): Unit = {
    system.eventStream.publish(message)
  }

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
}
Example 10
Source File: HttpIncomingReceiverTest.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.http_frontend

import java.time.Instant

import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.model.ws.TextMessage
import akka.stream.scaladsl.Source
import akka.testkit.{TestActorRef, TestActors, TestKit, TestProbe}
import com.sumologic.sumobot.core.HttpReceptionist
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.test.SumoBotSpec
import com.sumologic.sumobot.test.annotated.SumoBotTestKit
import org.scalatest.BeforeAndAfterAll

class HttpIncomingReceiverTest
  extends SumoBotTestKit(ActorSystem("HttpIncomingReceiverTest"))
  with BeforeAndAfterAll {

  private val probe = new TestProbe(system)
  system.eventStream.subscribe(probe.ref, classOf[IncomingMessage])

  private val dummyActor = TestActorRef(TestActors.blackholeProps)
  private val httpIncomingReceiver = TestActorRef(new HttpIncomingReceiver(dummyActor))

  "HttpIncomingReceiver" should {
    "publish IncomingMessage" when {
      "received streamed TextMessage" in {
        val msgSource = Source(List("hello"))
        val streamedMsg = TextMessage.Streamed(msgSource)

        httpIncomingReceiver ! streamedMsg
        val result = probe.expectMsgClass(classOf[IncomingMessage])
        result.canonicalText should be ("hello")
        result.addressedToUs should be (true)
        result.channel should be (HttpReceptionist.DefaultSumoBotChannel)
        result.attachments should be (Seq.empty)
        result.sentBy.plainTextReference should be (HttpReceptionist.DefaultClientUser.id)
      }

      "received strict TextMessage" in {
        val strictMsg = TextMessage.Strict("hi!")

        httpIncomingReceiver ! strictMsg

        val result = probe.expectMsgClass(classOf[IncomingMessage])
        result.canonicalText should be ("hi!")
        result.addressedToUs should be (true)
        result.channel should be (HttpReceptionist.DefaultSumoBotChannel)
        result.attachments should be (Seq.empty)
        result.sentBy.plainTextReference should be (HttpReceptionist.DefaultClientUser.id)
      }

      "properly format date" when {
        "sending IncomingMessage" in {
          val strictMsg = TextMessage.Strict("test")

          httpIncomingReceiver ! strictMsg

          val result = probe.expectMsgClass(classOf[IncomingMessage])
          val currentDate = Instant.now().getEpochSecond.toDouble
          val messageDate = result.idTimestamp.toDouble

          messageDate should be (currentDate +- 5.0)
        }
      }
    }

    "stop itself and outcoming actor" when {
      "stream ended" in {
        val outcomingActor = TestActorRef(TestActors.blackholeProps)
        val testProbeOutcoming = TestProbe()
        testProbeOutcoming.watch(outcomingActor)

        val shutdownReceiver = TestActorRef(new HttpIncomingReceiver(outcomingActor))
        val testProbeShutdown = TestProbe()
        testProbeShutdown.watch(shutdownReceiver)

        shutdownReceiver ! HttpIncomingReceiver.StreamEnded

        testProbeOutcoming.expectTerminated(outcomingActor)
        testProbeShutdown.expectTerminated(shutdownReceiver)
      }
    }
  }

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }
}
Example 11
Source File: HttpOutcomingSenderTest.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.http_frontend

import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.model.ws.TextMessage
import akka.testkit.{TestActorRef, TestActors, TestKit, TestProbe}
import com.sumologic.sumobot.core.HttpReceptionist
import com.sumologic.sumobot.core.model.{IncomingMessage, OutgoingMessage}
import com.sumologic.sumobot.test.SumoBotSpec
import com.sumologic.sumobot.test.annotated.SumoBotTestKit
import org.scalatest.BeforeAndAfterAll

class HttpOutcomingSenderTest
  extends SumoBotTestKit(ActorSystem("HttpOutcomingSenderTest"))
  with BeforeAndAfterAll {

  private val probe = new TestProbe(system)
  system.eventStream.subscribe(probe.ref, classOf[TextMessage.Strict])

  private val httpOutcomingSender = TestActorRef(new HttpOutcomingSender(probe.ref))

  "HttpOutcomingSender" should {
    "send TextMessage" when {
      "received OutgoingMessage" in {
        val outgoingMessage = OutgoingMessage(HttpReceptionist.DefaultSumoBotChannel, "hello!")

        system.eventStream.publish(outgoingMessage)

        val result = probe.expectMsgClass(classOf[TextMessage.Strict])
        result.getStrictText should be ("hello!")
      }
    }

    "stop publisher" when {
      "it is stopped" in {
        val dummyActor = TestActorRef(TestActors.blackholeProps)
        val testProbe = TestProbe()
        testProbe.watch(dummyActor)

        val stoppedSender = TestActorRef(new HttpOutcomingSender(dummyActor))
        system.stop(stoppedSender)

        testProbe.expectTerminated(dummyActor)
      }
    }
  }

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }
}
Example 12
Source File: QuartzExtensionTest.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.quartz

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import com.sumologic.sumobot.test.SumoBotSpec
import com.sumologic.sumobot.test.annotated.SumoBotTestKit
import org.quartz.CronExpression

import scala.concurrent.duration._

class QuartzExtensionTest extends SumoBotTestKit(ActorSystem("QuartzExtensionTest")) {

  object TestMessage

  "QuartzExtension" should {
    "allow scheduling jobs using cron" in {
      val quartz = QuartzExtension(system)
      val probe = TestProbe()

      new CronExpression("0 0 8,12,20 ? * MON-FRI")

      // This expression should trigger every second.
      quartz.scheduleMessage("test", "* * * * * ?", probe.ref, TestMessage)

      probe.expectMsg(5.seconds, TestMessage)
    }
  }
}
Example 13
Source File: AliasTest.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.plugins.alias

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestProbe
import com.sumologic.sumobot.brain.InMemoryBrain
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.plugins.BotPlugin.InitializePlugin
import com.sumologic.sumobot.test.annotated.BotPluginTestKit
import org.scalatest.concurrent.PatienceConfiguration.Timeout

import scala.concurrent.duration._
import org.scalatest.{Matchers, WordSpecLike}
import org.scalatest.concurrent.Eventually._

class AliasTest extends BotPluginTestKit(ActorSystem("AliasTest")) {

  val aliasRef = system.actorOf(Props(classOf[Alias]), "alias")
  val brainRef = system.actorOf(Props(classOf[InMemoryBrain]), "brain")
  aliasRef ! InitializePlugin(null, brainRef, null)

  "alias" should {
    "allow aliasing messages to the bot" in {
      send(instantMessage("alias 'foo' to 'bar'"))

      val otherPlugin = TestProbe()
      system.eventStream.subscribe(otherPlugin.ref, classOf[IncomingMessage])

      send(instantMessage("foo"))

      eventually(Timeout(5.seconds)) {
        val messages = otherPlugin.expectMsgAllClassOf(classOf[IncomingMessage])
        messages.foreach(msg => println(msg.canonicalText))
        messages.exists(_.canonicalText == "bar") should be (true)
      }
    }
  }
}
Example 14
Source File: ExecutorSystemLauncherSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.cluster.appmaster

import org.apache.gearpump.cluster.worker.WorkerId

import scala.concurrent.Await
import scala.concurrent.duration._

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestProbe
import com.typesafe.config.ConfigValueFactory
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import org.apache.gearpump.cluster.AppMasterToWorker.LaunchExecutor
import org.apache.gearpump.cluster.TestUtil
import org.apache.gearpump.cluster.WorkerToAppMaster.ExecutorLaunchRejected
import org.apache.gearpump.cluster.appmaster.ExecutorSystemLauncher._
import org.apache.gearpump.cluster.appmaster.ExecutorSystemScheduler.Session
import org.apache.gearpump.cluster.scheduler.Resource
import org.apache.gearpump.util.ActorSystemBooter.{ActorSystemRegistered, RegisterActorSystem}
import org.apache.gearpump.util.Constants

class ExecutorSystemLauncherSpec extends FlatSpec with Matchers with BeforeAndAfterAll {
  implicit var system: ActorSystem = null
  val workerId: WorkerId = WorkerId(0, 0L)
  val appId = 0
  val executorId = 0
  val url = "akka.tcp://[email protected]:3000"
  val session = Session(null, null)
  val launchExecutorSystemTimeout = 3000
  val activeConfig = TestUtil.DEFAULT_CONFIG.
    withValue(Constants.GEARPUMP_START_EXECUTOR_SYSTEM_TIMEOUT_MS,
      ConfigValueFactory.fromAnyRef(launchExecutorSystemTimeout))

  override def beforeAll(): Unit = {
    system = ActorSystem("test", activeConfig)
  }

  override def afterAll(): Unit = {
    system.terminate()
    Await.result(system.whenTerminated, Duration.Inf)
  }

  it should "report success when worker launch the system successfully" in {
    val worker = TestProbe()
    val client = TestProbe()

    val launcher = system.actorOf(Props(new ExecutorSystemLauncher(appId, session)))
    client.watch(launcher)
    client.send(launcher, LaunchExecutorSystem(WorkerInfo(workerId, worker.ref), 0, Resource(1)))

    worker.expectMsgType[LaunchExecutor]
    worker.reply(RegisterActorSystem(url))

    worker.expectMsgType[ActorSystemRegistered]

    client.expectMsgType[LaunchExecutorSystemSuccess]
    client.expectTerminated(launcher)
  }

  it should "report failure when worker refuse to launch the system explicitly" in {
    val worker = TestProbe()
    val client = TestProbe()

    val resource = Resource(4)

    val launcher = system.actorOf(Props(new ExecutorSystemLauncher(appId, session)))
    client.watch(launcher)
    client.send(launcher, LaunchExecutorSystem(WorkerInfo(workerId, worker.ref), 0, resource))

    worker.expectMsgType[LaunchExecutor]
    worker.reply(ExecutorLaunchRejected())

    client.expectMsg(LaunchExecutorSystemRejected(resource, null, session))
    client.expectTerminated(launcher)
  }

  it should "report timeout when trying to start a executor system on worker, " +
    "and worker doesn't response" in {
    val client = TestProbe()
    val worker = TestProbe()
    val launcher = system.actorOf(Props(new ExecutorSystemLauncher(appId, session)))
    client.send(launcher, LaunchExecutorSystem(WorkerInfo(workerId, worker.ref), 0, Resource(1)))
    client.watch(launcher)
    val waitFor = launchExecutorSystemTimeout + 10000
    client.expectMsgType[LaunchExecutorSystemTimeout](waitFor.milliseconds)
    client.expectTerminated(launcher, waitFor.milliseconds)
  }
}
Example 15
Source File: InMemoryKVServiceSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.cluster.appmaster

import akka.actor.Props
import akka.testkit.TestProbe
import com.typesafe.config.Config
import org.apache.gearpump.cluster.master.InMemoryKVService
import org.apache.gearpump.cluster.master.InMemoryKVService._
import org.apache.gearpump.cluster.{MasterHarness, TestUtil}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import scala.concurrent.duration._

class InMemoryKVServiceSpec
  extends FlatSpec with Matchers with BeforeAndAfterEach with MasterHarness {

  override def beforeEach(): Unit = {
    startActorSystem()
  }

  override def afterEach(): Unit = {
    shutdownActorSystem()
  }

  override def config: Config = TestUtil.MASTER_CONFIG

  "KVService" should "get, put, delete correctly" in {
    val system = getActorSystem
    val kvService = system.actorOf(Props(new InMemoryKVService()))
    val group = "group"

    val client = TestProbe()(system)

    client.send(kvService, PutKV(group, "key", 1))
    client.expectMsg(PutKVSuccess)

    client.send(kvService, PutKV(group, "key", 2))
    client.expectMsg(PutKVSuccess)

    client.send(kvService, GetKV(group, "key"))
    client.expectMsg(GetKVSuccess("key", 2))

    client.send(kvService, DeleteKVGroup(group))

    // After DeleteKVGroup, the service no longer accepts Get and Put messages for this group.
    client.send(kvService, GetKV(group, "key"))
    client.expectNoMsg(3.seconds)

    client.send(kvService, PutKV(group, "key", 3))
    client.expectNoMsg(3.seconds)
  }
}
Example 16
Source File: MasterConnectionKeeperSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.cluster.appmaster

import scala.concurrent.Await
import scala.concurrent.duration._

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.TestProbe
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import org.apache.gearpump.cluster.AppMasterToMaster.RegisterAppMaster
import org.apache.gearpump.cluster.MasterToAppMaster.AppMasterRegistered
import org.apache.gearpump.cluster.TestUtil
import org.apache.gearpump.cluster.appmaster.MasterConnectionKeeper.MasterConnectionStatus.{MasterConnected, _}
import org.apache.gearpump.cluster.appmaster.MasterConnectionKeeperSpec.ConnectionKeeperTestEnv
import org.apache.gearpump.cluster.master.MasterProxy.WatchMaster

class MasterConnectionKeeperSpec extends FlatSpec with Matchers with BeforeAndAfterAll {

  implicit var system: ActorSystem = null
  val appId = 0
  val register = RegisterAppMaster(appId, null, null)

  override def beforeAll(): Unit = {
    system = ActorSystem("test", TestUtil.DEFAULT_CONFIG)
  }

  override def afterAll(): Unit = {
    system.terminate()
    Await.result(system.whenTerminated, Duration.Inf)
  }

  private def startMasterConnectionKeeper: ConnectionKeeperTestEnv = {
    val statusChangeSubscriber = TestProbe()
    val master = TestProbe()

    val keeper = system.actorOf(Props(
      new MasterConnectionKeeper(register, master.ref, statusChangeSubscriber.ref)))
    statusChangeSubscriber.watch(keeper)

    master.expectMsgType[WatchMaster]

    // Master is alive, response to RegisterAppMaster
    master.expectMsgType[RegisterAppMaster]
    master.reply(AppMasterRegistered(appId))

    // Notify listener that master is alive
    statusChangeSubscriber.expectMsg(MasterConnected)
    ConnectionKeeperTestEnv(master, keeper, statusChangeSubscriber)
  }

  it should "start correctly and notify listener that master is alive" in {
    startMasterConnectionKeeper
  }

  it should "re-register the appmaster when master is restarted" in {
    import org.apache.gearpump.cluster.master.MasterProxy.MasterRestarted
    val ConnectionKeeperTestEnv(master, keeper, masterChangeListener) = startMasterConnectionKeeper

    // Master is restarted
    master.send(keeper, MasterRestarted)
    master.expectMsgType[RegisterAppMaster]
    master.reply(AppMasterRegistered(appId))
    masterChangeListener.expectMsg(MasterConnected)

    // Recovery from Master restart is transparent to listener
    masterChangeListener.expectNoMsg()
  }

  it should "notify listener and then shutdown itself when master is dead" in {
    val ConnectionKeeperTestEnv(master, keeper, masterChangeListener) = startMasterConnectionKeeper

    // Master is dead
    master.send(keeper, MasterStopped)

    // Keeper should tell the listener that master is stopped before shutting down itself
    masterChangeListener.expectMsg(MasterStopped)
    masterChangeListener.expectTerminated(keeper)
  }

  it should "mark the master as dead when timeout" in {
    val statusChangeSubscriber = TestProbe()
    val master = TestProbe()

    // MasterConnectionKeeper register itself to master by sending RegisterAppMaster
    val keeper = system.actorOf(Props(new MasterConnectionKeeper(register,
      master.ref, statusChangeSubscriber.ref)))

    // Master doesn't reply to keeper,
    statusChangeSubscriber.watch(keeper)

    // Timeout, keeper notify listener, and then make suicide
    statusChangeSubscriber.expectMsg(60.seconds, MasterStopped)
    statusChangeSubscriber.expectTerminated(keeper, 60.seconds)
  }
}

object MasterConnectionKeeperSpec {
  case class ConnectionKeeperTestEnv(
      master: TestProbe, keeper: ActorRef, masterChangeListener: TestProbe)
}
Example 17
Source File: RunningApplicationSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.cluster.client

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import akka.util.Timeout
import org.apache.gearpump.cluster.ClientToMaster.{ResolveAppId, ShutdownApplication}
import org.apache.gearpump.cluster.MasterToClient.{ResolveAppIdResult, ShutdownApplicationResult}
import org.apache.gearpump.cluster.TestUtil
import org.apache.gearpump.cluster.client.RunningApplicationSpec.{MockAskAppMasterRequest, MockAskAppMasterResponse}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global

class RunningApplicationSpec extends FlatSpec with Matchers with BeforeAndAfterAll {
  implicit var system: ActorSystem = _

  override def beforeAll(): Unit = {
    system = ActorSystem("test", TestUtil.DEFAULT_CONFIG)
  }

  override def afterAll(): Unit = {
    system.terminate()
    Await.result(system.whenTerminated, Duration.Inf)
  }

  "RunningApplication" should "be able to shutdown application" in {
    val errorMsg = "mock exception"
    val master = TestProbe()
    val timeout = Timeout(90, TimeUnit.SECONDS)
    val application = new RunningApplication(1, master.ref, timeout)
    Future {
      application.shutDown()
    }
    master.expectMsg(ShutdownApplication(1))
    master.reply(ShutdownApplicationResult(Success(1)))

    val result = Future {
      intercept[Exception] {
        application.shutDown()
      }
    }
    master.expectMsg(ShutdownApplication(1))
    master.reply(ShutdownApplicationResult(Failure(new Exception(errorMsg))))
    val exception = Await.result(result, Duration.Inf)
    assert(exception.getMessage.equals(errorMsg))
  }

  "RunningApplication" should "be able to ask appmaster" in {
    val master = TestProbe()
    val appMaster = TestProbe()
    val appId = 1
    val timeout = Timeout(90, TimeUnit.SECONDS)
    val request = MockAskAppMasterRequest("request")
    val application = new RunningApplication(appId, master.ref, timeout)
    val future = application.askAppMaster[MockAskAppMasterResponse](request)
    master.expectMsg(ResolveAppId(appId))
    master.reply(ResolveAppIdResult(Success(appMaster.ref)))
    appMaster.expectMsg(MockAskAppMasterRequest("request"))
    appMaster.reply(MockAskAppMasterResponse("response"))
    val result = Await.result(future, Duration.Inf)
    assert(result.res.equals("response"))

    // ResolveAppId should not be called multiple times
    val future2 = application.askAppMaster[MockAskAppMasterResponse](request)
    appMaster.expectMsg(MockAskAppMasterRequest("request"))
    appMaster.reply(MockAskAppMasterResponse("response"))
    val result2 = Await.result(future2, Duration.Inf)
    assert(result2.res.equals("response"))
  }
}

object RunningApplicationSpec {
  case class MockAskAppMasterRequest(req: String)

  case class MockAskAppMasterResponse(res: String)
}
Example 18
Source File: AppMasterLauncherSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.cluster.master

import org.apache.gearpump.cluster.worker.WorkerId

import scala.util.Success

import akka.actor._
import akka.testkit.TestProbe
import com.typesafe.config.Config
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import org.apache.gearpump.cluster.AppMasterToMaster.RequestResource
import org.apache.gearpump.cluster.AppMasterToWorker.{LaunchExecutor, ShutdownExecutor}
import org.apache.gearpump.cluster.MasterToAppMaster.ResourceAllocated
import org.apache.gearpump.cluster.MasterToClient.SubmitApplicationResult
import org.apache.gearpump.cluster.WorkerToAppMaster.ExecutorLaunchRejected
import org.apache.gearpump.cluster.scheduler.{Resource, ResourceAllocation, ResourceRequest}
import org.apache.gearpump.cluster.{MasterHarness, TestUtil}
import org.apache.gearpump.util.ActorSystemBooter._

class AppMasterLauncherSpec extends FlatSpec with Matchers
  with BeforeAndAfterEach with MasterHarness {

  override def config: Config = TestUtil.DEFAULT_CONFIG

  val appId = 1
  val executorId = 2
  var master: TestProbe = null
  var client: TestProbe = null
  var worker: TestProbe = null
  var watcher: TestProbe = null
  var appMasterLauncher: ActorRef = null

  override def beforeEach(): Unit = {
    startActorSystem()
    master = createMockMaster()
    client = TestProbe()(getActorSystem)
    worker = TestProbe()(getActorSystem)
    watcher = TestProbe()(getActorSystem)

    appMasterLauncher = getActorSystem.actorOf(AppMasterLauncher.props(appId, executorId,
      TestUtil.dummyApp, None, "username", master.ref, Some(client.ref)))

    watcher watch appMasterLauncher
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))

    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)

    worker.expectMsgType[LaunchExecutor]
  }

  override def afterEach(): Unit = {
    shutdownActorSystem()
  }

  "AppMasterLauncher" should "launch appmaster correctly" in {
    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    worker.reply(ActorCreated(master.ref, "appmaster"))

    client.expectMsg(SubmitApplicationResult(Success(appId)))
    watcher.expectTerminated(appMasterLauncher)
  }

  "AppMasterLauncher" should "reallocate resource if executor launch rejected" in {
    worker.reply(ExecutorLaunchRejected(""))
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))

    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)

    worker.expectMsgType[LaunchExecutor]

    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    worker.reply(CreateActorFailed("", new Exception))
    worker.expectMsgType[ShutdownExecutor]
    assert(client.receiveN(1).head.asInstanceOf[SubmitApplicationResult].appId.isFailure)
    watcher.expectTerminated(appMasterLauncher)
  }
}
Example 19
Source File: TimeOutSchedulerSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.util

import scala.concurrent.duration._

import akka.actor._
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import org.slf4j.Logger

import org.apache.gearpump.cluster.TestUtil

class TimeOutSchedulerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("WorkerSpec", TestUtil.DEFAULT_CONFIG))

  val mockActor = TestProbe()

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "The TimeOutScheduler" should {
    "handle the time out event" in {
      val testActorRef = TestActorRef(Props(classOf[TestActor], mockActor.ref))
      val testActor = testActorRef.underlyingActor.asInstanceOf[TestActor]
      testActor.sendMsgToIgnore()
      mockActor.expectMsg(30.seconds, MessageTimeOut)
    }
  }
}

case object Echo
case object MessageTimeOut

class TestActor(mock: ActorRef) extends Actor with TimeOutScheduler {
  private val LOG: Logger = LogUtil.getLogger(getClass)
  val target = context.actorOf(Props(classOf[EchoActor]))

  override def receive: Receive = {
    case _ =>
  }

  def sendMsgToIgnore(): Unit = {
    sendMsgWithTimeOutCallBack(target, Echo, 2000, sendMsgTimeOut())
  }

  private def sendMsgTimeOut(): Unit = {
    mock ! MessageTimeOut
  }
}

class EchoActor extends Actor {
  override def receive: Receive = {
    case _ =>
  }
}
Example 20
Source File: ActorSystemBooterSpec.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.util

import scala.concurrent.Await
import scala.concurrent.duration.Duration

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.TestProbe
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}

import org.apache.gearpump.cluster.TestUtil
import org.apache.gearpump.util.ActorSystemBooter.{ActorCreated, RegisterActorSystem, _}
import org.apache.gearpump.util.ActorSystemBooterSpec._

class ActorSystemBooterSpec extends FlatSpec with Matchers with MockitoSugar {

  "ActorSystemBooter" should "report its address back" in {
    val boot = bootSystem()
    boot.prob.expectMsgType[RegisterActorSystem]
    boot.shutdown()
  }

  "ActorSystemBooter" should "terminate itself when parent actor dies" in {
    val boot = bootSystem()
    boot.prob.expectMsgType[RegisterActorSystem]

    val dummy = boot.host.actorOf(Props(classOf[Dummy]), "dummy")
    boot.prob.reply(ActorSystemRegistered(boot.prob.ref))
    boot.prob.reply(BindLifeCycle(dummy))
    boot.host.stop(dummy)
    val terminated = retry(5)(boot.bootedSystem.whenTerminated.isCompleted)
    assert(terminated)
    boot.shutdown()
  }

  "ActorSystemBooter" should "create new actor" in {
    val boot = bootSystem()
    boot.prob.expectMsgType[RegisterActorSystem]
    boot.prob.reply(ActorSystemRegistered(boot.prob.ref))
    boot.prob.reply(CreateActor(Props(classOf[AcceptThreeArguments], 1, 2, 3), "three"))
    boot.prob.expectMsgType[ActorCreated]

    boot.prob.reply(CreateActor(Props(classOf[AcceptZeroArguments]), "zero"))
    boot.prob.expectMsgType[ActorCreated]
    boot.shutdown()
  }

  private def bootSystem(): Boot = {
    val booter = ActorSystemBooter(TestUtil.DEFAULT_CONFIG)

    val system = ActorSystem("reportback", TestUtil.DEFAULT_CONFIG)

    val receiver = TestProbe()(system)
    val address = ActorUtil.getFullPath(system, receiver.ref.path)

    val bootSystem = booter.boot("booter", address)

    Boot(system, receiver, bootSystem)
  }

  case class Boot(host: ActorSystem, prob: TestProbe, bootedSystem: ActorSystem) {
    def shutdown(): Unit = {
      host.terminate()
      bootedSystem.terminate()
      Await.result(host.whenTerminated, Duration.Inf)
      Await.result(bootedSystem.whenTerminated, Duration.Inf)
    }
  }

  def retry(seconds: Int)(fn: => Boolean): Boolean = {
    val result = fn
    if (result) {
      result
    } else {
      Thread.sleep(1000)
      retry(seconds - 1)(fn)
    }
  }
}

object ActorSystemBooterSpec {
  class Dummy extends Actor {
    def receive: Receive = {
      case _ =>
    }
  }

  class AcceptZeroArguments extends Actor {
    def receive: Receive = {
      case _ =>
    }
  }

  class AcceptThreeArguments(a: Int, b: Int, c: Int) extends Actor {
    def receive: Receive = {
      case _ =>
    }
  }
}
Example 21
Source File: TestProbeUtil.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump

import scala.language.implicitConversions

import akka.actor.{Actor, Props, Terminated}
import akka.testkit.TestProbe

object TestProbeUtil {
  implicit def toProps(probe: TestProbe): Props = {
    Props(new Actor {
      val probeRef = probe.ref
      context.watch(probeRef)
      def receive: Receive = {
        case Terminated(probeRef) => context.stop(self)
        case x => probeRef.forward(x)
      }
    })
  }
}
Example 22
Source File: ShellClientSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client.socket

import java.util.UUID

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.communication.security.SecurityActorType
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import play.api.libs.json.Json

class ShellClientSpec extends TestKit(ActorSystem("ShellActorSpec"))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  private val SignatureEnabled = true

  describe("ShellClientActor") {
    val socketFactory = mock[SocketFactory]
    val mockActorLoader = mock[ActorLoader]
    val probe : TestProbe = TestProbe()
    when(socketFactory.ShellClient(
      any(classOf[ActorSystem]), any(classOf[ActorRef])
    )).thenReturn(probe.ref)

    val signatureManagerProbe = TestProbe()
    doReturn(system.actorSelection(signatureManagerProbe.ref.path.toString))
      .when(mockActorLoader).load(SecurityActorType.SignatureManager)

    val shellClient = system.actorOf(Props(
      classOf[ShellClient], socketFactory, mockActorLoader, SignatureEnabled
    ))

    describe("send execute request") {
      it("should send execute request") {
        val request = ExecuteRequest(
          "foo", false, true, UserExpressions(), true
        )
        val header = Header(
          UUID.randomUUID().toString, "spark",
          UUID.randomUUID().toString, MessageType.Incoming.ExecuteRequest.toString,
          "5.0"
        )
        val kernelMessage = KernelMessage(
          Seq[Array[Byte]](), "",
          header, HeaderBuilder.empty, Metadata(),
          Json.toJson(request).toString
        )
        shellClient ! kernelMessage

        // Echo back the kernel message sent to have a signature injected
        signatureManagerProbe.expectMsgClass(classOf[KernelMessage])
        signatureManagerProbe.reply(kernelMessage)

        probe.expectMsgClass(classOf[ZMQMessage])
      }
    }
  }
}
Example 23
Source File: HeartbeatClientSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Matchers._
import org.mockito.Mockito._

class HeartbeatClientSpec extends TestKit(ActorSystem("HeartbeatActorSpec"))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("HeartbeatClientActor") {
    val socketFactory = mock[SocketFactory]
    val mockActorLoader = mock[ActorLoader]
    val probe : TestProbe = TestProbe()
    when(socketFactory.HeartbeatClient(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeatClient = system.actorOf(Props(
      classOf[HeartbeatClient], socketFactory, mockActorLoader, true
    ))

    describe("send heartbeat") {
      it("should send ping ZMQMessage") {
        heartbeatClient ! HeartbeatMessage
        probe.expectMsgClass(classOf[ZMQMessage])
      }
    }
  }
}
Example 24
Source File: SparkKernelClientSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import org.apache.toree.comm.{CommCallbacks, CommStorage, CommRegistrar}
import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.execution.ExecuteRequestTuple
import scala.concurrent.duration._
import org.mockito.Mockito._
import org.mockito.Matchers.{eq => mockEq, _}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}

class SparkKernelClientSpec
  extends TestKit(ActorSystem("SparkKernelClientActorSystem"))
  with Matchers with MockitoSugar with FunSpecLike with BeforeAndAfter {

  private val TestTargetName = "some target"

  private var mockActorLoader: ActorLoader = _
  private var mockCommRegistrar: CommRegistrar = _
  private var sparkKernelClient: SparkKernelClient = _
  private var executeRequestProbe: TestProbe = _
  private var shellClientProbe: TestProbe = _

  before {
    mockActorLoader = mock[ActorLoader]
    mockCommRegistrar = mock[CommRegistrar]

    executeRequestProbe = TestProbe()
    when(mockActorLoader.load(MessageType.Incoming.ExecuteRequest))
      .thenReturn(system.actorSelection(executeRequestProbe.ref.path.toString))

    shellClientProbe = TestProbe()
    when(mockActorLoader.load(SocketType.ShellClient))
      .thenReturn(system.actorSelection(shellClientProbe.ref.path.toString))

    sparkKernelClient = new SparkKernelClient(
      mockActorLoader, system, mockCommRegistrar)
  }

  describe("SparkKernelClient") {
    describe("#execute") {
      it("should send an ExecuteRequest message") {
        val func = (x: Any) => println(x)
        sparkKernelClient.execute("val foo = 2")
        executeRequestProbe.expectMsgClass(classOf[ExecuteRequestTuple])
      }
    }
  }
}
Example 25
Source File: CodeCompleteHandlerSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor._
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.Main
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.CompleteRequest
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers}
import org.mockito.Mockito._
import test.utils.MaxAkkaTestTimeout

class CodeCompleteHandlerSpec extends TestKit(
  ActorSystem("CodeCompleteHandlerSpec", None, Some(Main.getClass.getClassLoader))
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar
  with BeforeAndAfter {
  var actorLoader: ActorLoader = _
  var handlerActor: ActorRef = _
  var kernelMessageRelayProbe: TestProbe = _
  var interpreterProbe: TestProbe = _
  var statusDispatchProbe: TestProbe = _

  before {
    actorLoader = mock[ActorLoader]

    handlerActor = system.actorOf(Props(classOf[CodeCompleteHandler], actorLoader))

    kernelMessageRelayProbe = TestProbe()
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(system.actorSelection(kernelMessageRelayProbe.ref.path.toString))

    interpreterProbe = new TestProbe(system)
    when(actorLoader.load(SystemActorType.Interpreter))
      .thenReturn(system.actorSelection(interpreterProbe.ref.path.toString))

    statusDispatchProbe = new TestProbe(system)
    when(actorLoader.load(SystemActorType.StatusDispatch))
      .thenReturn(system.actorSelection(statusDispatchProbe.ref.path.toString))
  }

  def replyToHandlerWithOkAndResult() = {
    val expectedClass = classOf[CompleteRequest]
    interpreterProbe.expectMsgClass(expectedClass)
    interpreterProbe.reply((0, List[String]()))
  }

  def replyToHandlerWithOkAndBadResult() = {
    val expectedClass = classOf[CompleteRequest]
    interpreterProbe.expectMsgClass(expectedClass)
    interpreterProbe.reply("hello")
  }

  describe("CodeCompleteHandler (ActorLoader)") {
    it("should send a CompleteRequest") {
      handlerActor ! MockCompleteRequestKernelMessage
      replyToHandlerWithOkAndResult()
      kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
        case KernelMessage(_, _, header, _, _, _) =>
          header.msg_type == MessageType.Outgoing.CompleteReply.toString
      }
    }

    it("should throw an error for bad JSON") {
      handlerActor ! MockKernelMessageWithBadJSON
      var result = false
      try {
        replyToHandlerWithOkAndResult()
      } catch {
        case t: Throwable => result = true
      }
      result should be (true)
    }

    it("should throw an error for bad code completion") {
      handlerActor ! MockCompleteRequestKernelMessage
      try {
        replyToHandlerWithOkAndBadResult()
      } catch {
        case error: Exception =>
          error.getMessage should be ("Parse error in CodeCompleteHandler")
      }
    }

    it("should send an idle message") {
      handlerActor ! MockCompleteRequestKernelMessage
      replyToHandlerWithOkAndResult()
      statusDispatchProbe.fishForMessage(MaxAkkaTestTimeout) {
        case Tuple2(status, _) =>
          status == KernelStatusType.Idle
      }
    }
  }
}
Example 26
Source File: GenericSocketMessageHandlerSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor.{ActorSystem, Props, ActorRef, ActorSelection}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import test.utils.MaxAkkaTestTimeout

class GenericSocketMessageHandlerSpec extends TestKit(
  ActorSystem(
    "GenericSocketMessageHandlerSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("GenericSocketMessageHandler( ActorLoader, SocketType )") {
    // Create a mock ActorLoader for the Relay we are going to test
    val actorLoader: ActorLoader = mock[ActorLoader]

    // Create a probe for the ActorSelection that the ActorLoader will return
    val selectionProbe: TestProbe = TestProbe()
    val selection: ActorSelection = system.actorSelection(selectionProbe.ref.path.toString)
    when(actorLoader.load(SocketType.Control)).thenReturn(selection)

    // The Relay we are going to be testing against
    val genericHandler: ActorRef = system.actorOf(
      Props(classOf[GenericSocketMessageHandler], actorLoader, SocketType.Control)
    )

    describe("#receive( KernelMessage )") {
      genericHandler ! MockKernelMessage

      it("should send the message to the selected actor"){
        selectionProbe.expectMsg(MaxAkkaTestTimeout, MockKernelMessage)
      }
    }
  }
}
Example 27
Source File: KernelInfoRequestHandlerSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor.{ActorSelection, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.Main
import org.apache.toree.kernel.protocol.v5.content.KernelInfoReply
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5._
import org.mockito.AdditionalMatchers.{not => mockNot}
import org.mockito.Matchers.{eq => mockEq}
import com.typesafe.config.ConfigFactory
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import play.api.libs.json.Json
import test.utils.MaxAkkaTestTimeout

object KernelInfoRequestHandlerSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class KernelInfoRequestHandlerSpec extends TestKit(
  ActorSystem("KernelInfoRequestHandlerSpec",
    ConfigFactory.parseString(KernelInfoRequestHandlerSpec.config),
    Main.getClass.getClassLoader)
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  val actorLoader: ActorLoader = mock[ActorLoader]
  val actor = system.actorOf(Props(classOf[KernelInfoRequestHandler],
    actorLoader, LanguageInfo("test", "1.0.0", Some(".test"))))

  val relayProbe : TestProbe = TestProbe()
  val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path)
  when(actorLoader.load(SystemActorType.KernelMessageRelay))
    .thenReturn(relaySelection)
  when(actorLoader.load(mockNot(mockEq(SystemActorType.KernelMessageRelay))))
    .thenReturn(system.actorSelection(""))

  val header = Header("","","","","")
  val kernelMessage = new KernelMessage(
    Seq[Array[Byte]](), "test message", header, header, Metadata(), "{}"
  )

  describe("Kernel Info Request Handler") {
    it("should return a KernelMessage containing kernel info response") {
      actor ! kernelMessage
      val reply = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage]
      val kernelInfo = Json.parse(reply.contentString).as[KernelInfoReply]
      kernelInfo.implementation should be ("spark")
    }
  }
}
Example 28
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities} import org.apache.toree.kernel.protocol.v5Test._ import Utilities._ import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object ShellSpec { val config =""" akka { loglevel = "WARNING" }""" } class ShellSpec extends TestKit( ActorSystem( "ShellActorSpec", ConfigFactory.parseString(ShellSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Shell") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 29
Source File: IOPubSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import akka.actor.{ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities import org.apache.toree.kernel.protocol.v5Test._ import Utilities._ import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object IOPubSpec { val config =""" akka { loglevel = "WARNING" }""" } class IOPubSpec extends TestKit( ActorSystem("IOPubActorSpec", ConfigFactory.parseString(IOPubSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("IOPubActor") { val socketFactory = mock[SocketFactory] val probe : TestProbe = TestProbe() when(socketFactory.IOPub(any(classOf[ActorSystem]))).thenReturn(probe.ref) val socket = system.actorOf(Props(classOf[IOPub], socketFactory)) // TODO test that the response type changed describe("#receive") { it("should reply with a ZMQMessage") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage socket ! MockKernelMessage probe.expectMsg(MaxAkkaTestTimeout, MockZMQMessage) } } } }
Example 30
Source File: HeartbeatSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object HeartbeatSpec { val config = """ akka { loglevel = "WARNING" }""" } class HeartbeatSpec extends TestKit( ActorSystem( "HeartbeatActorSpec", ConfigFactory.parseString(HeartbeatSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { val SomeMessage: String = "some message" val SomeZMQMessage: ZMQMessage = ZMQMessage(ByteString(SomeMessage.getBytes)) describe("HeartbeatActor") { val socketFactory = mock[SocketFactory] val probe : TestProbe = TestProbe() when(socketFactory.Heartbeat(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref) val heartbeat = system.actorOf(Props(classOf[Heartbeat], socketFactory)) describe("send heartbeat") { it("should receive and send same ZMQMessage") { heartbeat ! SomeZMQMessage probe.expectMsg(MaxAkkaTestTimeout, SomeZMQMessage) } } } }
Example 31
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem} import akka.testkit.{TestProbe, ImplicitSender, TestKit} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities._ import org.apache.toree.kernel.protocol.v5Test._ import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType} import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import com.typesafe.config.ConfigFactory import org.scalatest.mock.MockitoSugar import org.scalatest.{Matchers, FunSpecLike} import org.mockito.Mockito._ import org.mockito.Matchers._ import test.utils.MaxAkkaTestTimeout object StdinSpec { val config =""" akka { loglevel = "WARNING" }""" } class StdinSpec extends TestKit(ActorSystem( "StdinActorSpec", ConfigFactory.parseString(StdinSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Stdin") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 32
Source File: ActorLoaderSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5.{MessageType, SocketType}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.TestProbeProxyActor
import test.utils.MaxAkkaTestTimeout

class ActorLoaderSpec extends TestKit(
  ActorSystem(
    "ActorLoaderSpecSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("ActorLoader"){
    describe("#load( MessageType )"){
      it("should load an ActorSelection that has been loaded into the system"){
        val testProbe: TestProbe = TestProbe()
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
          MessageType.Outgoing.ClearOutput.toString)
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(MessageType.Outgoing.ClearOutput) ! "<Test Message>"
        testProbe.expectMsg("<Test Message>")
      }

      it("should expect no message when there is no actor"){
        val testProbe: TestProbe = TestProbe()
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(MessageType.Outgoing.CompleteReply) ! "<Test Message>"
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
        // This checks whether the message went to the actor's inbox or to the dead letter mailbox
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
          MessageType.Outgoing.CompleteReply.toString)
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
      }
    }

    describe("#load( SocketType )"){
      it("should load an ActorSelection that has been loaded into the system"){
        val testProbe: TestProbe = TestProbe()
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.Shell.toString)
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(SocketType.Shell) ! "<Test Message>"
        testProbe.expectMsg("<Test Message>")
      }

      it("should expect no message when there is no actor"){
        val testProbe: TestProbe = TestProbe()
        val actorLoader: ActorLoader = SimpleActorLoader(system)
        actorLoader.load(SocketType.IOPub) ! "<Test Message>"
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
        // This checks whether the message went to the actor's inbox or to the dead letter mailbox
        system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.IOPub.toString)
        testProbe.expectNoMessage(MaxAkkaTestTimeout)
      }
    }
  }
}
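The negative assertions above lean on expectNoMessage: if nothing is registered under the requested name, the message goes to dead letters and the probe stays silent. A minimal sketch of the same positive/negative pair, assuming an implicit ActorSystem named system is in scope as in the spec above:

import akka.testkit.TestProbe
import scala.concurrent.duration._

val probe = TestProbe()

// A selection that points at nothing: the message ends up in dead letters, not at the probe.
system.actorSelection("/user/not-registered") ! "<Test Message>"
probe.expectNoMessage(300.millis)

// A selection that resolves to the probe's path is delivered as expected.
system.actorSelection(probe.ref.path) ! "<Test Message>"
probe.expectMsg("<Test Message>")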
Example 33
Source File: SimpleActorLoaderSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel import akka.actor.{ActorSelection, ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5.MessageType import org.scalatest.{FunSpecLike, Matchers} import test.utils.TestProbeProxyActor import test.utils.MaxAkkaTestTimeout class SimpleActorLoaderSpec extends TestKit( ActorSystem( "SimpleActorLoaderSpecSystem", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with FunSpecLike with Matchers { describe("SimpleActorLoader") { //val system = ActorSystem("SimpleActorLoaderSystem") val testMessage: String = "Hello Message" describe("#load( MessageType )") { it("should load a MessageType Actor"){ // Create a new test probe to verify our selection works val messageTypeProbe: TestProbe = new TestProbe(system) // Add an actor to the system to send a message to system.actorOf( Props(classOf[TestProbeProxyActor], messageTypeProbe), name = MessageType.Outgoing.ExecuteInput.toString ) // Create the ActorLoader with our test system val actorLoader: SimpleActorLoader = SimpleActorLoader(system) // Get the actor and send it a message val loadedMessageActor: ActorSelection = actorLoader.load(MessageType.Outgoing.ExecuteInput) loadedMessageActor ! testMessage // Assert the probe received the message messageTypeProbe.expectMsg(MaxAkkaTestTimeout, testMessage) } } } }
Example 34
Source File: StatusDispatchSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.dispatch import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.content.KernelStatus import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers} import play.api.libs.json.Json import test.utils.MaxAkkaTestTimeout class StatusDispatchSpec extends TestKit( ActorSystem( "StatusDispatchSystem", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter{ var statusDispatchRef: ActorRef = _ var relayProbe: TestProbe = _ before { // Mock the relay with a probe relayProbe = TestProbe() // Mock the ActorLoader val mockActorLoader: ActorLoader = mock[ActorLoader] when(mockActorLoader.load(SystemActorType.KernelMessageRelay)) .thenReturn(system.actorSelection(relayProbe.ref.path.toString)) statusDispatchRef = system.actorOf(Props(classOf[StatusDispatch],mockActorLoader)) } describe("StatusDispatch") { describe("#receive( KernelStatusType )") { it("should send a status message to the relay") { statusDispatchRef ! KernelStatusType.Busy // Check the kernel message is the correct type val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage] statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString) // Check the status is what we sent val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus] status.execution_state should be (KernelStatusType.Busy.toString) } } describe("#receive( KernelStatusType, Header )") { it("should send a status message to the relay") { val tuple = Tuple2(KernelStatusType.Busy, mock[Header]) statusDispatchRef ! tuple // Check the kernel message is the correct type val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage] statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString) // Check the status is what we sent val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus] status.execution_state should be (KernelStatusType.Busy.toString) } } } }
Example 35
Source File: StreamMethodsSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.api import akka.actor.ActorSystem import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5 import org.apache.toree.kernel.protocol.v5.KernelMessage import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers} import play.api.libs.json.Json import test.utils.MaxAkkaTestTimeout import org.mockito.Mockito._ class StreamMethodsSpec extends TestKit( ActorSystem( "StreamMethodsSpec", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter { private var kernelMessageRelayProbe: TestProbe = _ private var mockParentHeader: v5.ParentHeader = _ private var mockActorLoader: v5.kernel.ActorLoader = _ private var mockKernelMessage: v5.KernelMessage = _ private var streamMethods: StreamMethods = _ before { kernelMessageRelayProbe = TestProbe() mockParentHeader = mock[v5.ParentHeader] mockActorLoader = mock[v5.kernel.ActorLoader] doReturn(system.actorSelection(kernelMessageRelayProbe.ref.path)) .when(mockActorLoader).load(v5.SystemActorType.KernelMessageRelay) mockKernelMessage = mock[v5.KernelMessage] doReturn(mockParentHeader).when(mockKernelMessage).header streamMethods = new StreamMethods(mockActorLoader, mockKernelMessage) } describe("StreamMethods") { describe("#()") { it("should put the header of the given message as the parent header") { val expected = mockKernelMessage.header val actual = streamMethods.kmBuilder.build.parentHeader actual should be (expected) } } describe("#sendAll") { it("should send a message containing all of the given text") { val expected = "some text" streamMethods.sendAll(expected) val outgoingMessage = kernelMessageRelayProbe.receiveOne(MaxAkkaTestTimeout) val kernelMessage = outgoingMessage.asInstanceOf[KernelMessage] val actual = Json.parse(kernelMessage.contentString) .as[v5.content.StreamContent].text actual should be (expected) } } } }
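The doReturn(...).when(...) stubbing above is what lets StreamMethods publish to a TestProbe without knowing about it. A minimal sketch of the same wiring, assuming an implicit ActorSystem named system is in scope; the Loader trait is a hypothetical stand-in for the ActorLoader:

import akka.actor.ActorSelection
import akka.testkit.TestProbe
import org.mockito.Mockito.{doReturn, mock}

// Hypothetical stand-in for the ActorLoader interface.
trait Loader { def load(name: String): ActorSelection }

val relayProbe = TestProbe()
val loader = mock(classOf[Loader])
// Anything that asks the loader for "relay" now transparently talks to the probe.
doReturn(system.actorSelection(relayProbe.ref.path)).when(loader).load("relay")

loader.load("relay") ! "stream text"
relayProbe.expectMsg("stream text")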
Example 36
Source File: DeadLetterTest.scala From 006877 with MIT License | 5 votes |
package aia.channels

import akka.testkit.{ ImplicitSender, TestProbe, TestKit }
import akka.actor.{ PoisonPill, Props, DeadLetter, ActorSystem }
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import java.util.Date

class DeadLetterTest extends TestKit(ActorSystem("DeadLetterTest"))
  with WordSpecLike with BeforeAndAfterAll with MustMatchers
  with ImplicitSender {

  override def afterAll() {
    system.terminate()
  }

  "DeadLetter" must {
    "catch messages sent to deadLetters" in {
      val deadLetterMonitor = TestProbe()
      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new StateEvent(new Date(), "Connected")
      system.deadLetters ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(system.deadLetters)
    }

    "catch deadLetter messages sent to deadLetters" in {
      val deadLetterMonitor = TestProbe()
      val actor = system.actorOf(Props[EchoActor], "echo")

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new Order("me", "Akka in Action", 1)
      val dead = DeadLetter(msg, testActor, actor)
      system.deadLetters ! dead

      deadLetterMonitor.expectMsg(dead)
      system.stop(actor)
    }

    "catch messages sent to terminated Actor" in {
      val deadLetterMonitor = TestProbe()
      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val actor = system.actorOf(Props[EchoActor], "echo")
      actor ! PoisonPill
      val msg = new Order("me", "Akka in Action", 1)
      actor ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(actor)
    }
  }
}
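Subscribing a TestProbe to the event stream, as this test does, works for any published event type, not just DeadLetter. A minimal sketch assuming an implicit ActorSystem named system is in scope:

import akka.actor.DeadLetter
import akka.testkit.TestProbe

val monitor = TestProbe()
system.eventStream.subscribe(monitor.ref, classOf[DeadLetter])

// Anything sent straight to deadLetters is republished as a DeadLetter event.
system.deadLetters ! "lost message"

val dead = monitor.expectMsgType[DeadLetter]
assert(dead.message == "lost message")

system.eventStream.unsubscribe(monitor.ref)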
Example 37
Source File: ThroughputCPUTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.throughput import akka.testkit.TestProbe import akka.actor.{Props, ActorSystem} import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.routing.RoundRobinPool import com.typesafe.config.ConfigFactory import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest} import concurrent.duration._ class ThroughputCPUTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("performance/through") implicit val system = ActorSystem("ThroughputTest", configuration) "System" must { "fails to with cpu" in { val nrWorkers = 40 val nrMessages = nrWorkers * 40 val end = TestProbe() val workers = system.actorOf( RoundRobinPool(nrWorkers).props( Props(new ProcessCPURequest(250 millis, end.ref)).withDispatcher("my-dispatcher")), "Workers-cpu") val startTime = System.currentTimeMillis() for (i <- 0 until nrMessages) { workers ! new SystemMessage(startTime, 0, "") } val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]] val endTime = System.currentTimeMillis() val total = endTime - startTime println("total process time %d Average=%d".format(total, total / nrMessages)) val grouped = msg.groupBy(_.id) grouped.map { case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) }) }.foreach(println(_)) Thread.sleep(1000) system.stop(workers) } } }
Example 38
Source File: ThroughputTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.throughput import akka.testkit.TestProbe import akka.actor.{Props, ActorSystem} import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.routing.RoundRobinPool import com.typesafe.config.ConfigFactory import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest} import concurrent.duration._ class ThroughputTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("performance/through") implicit val system = ActorSystem("ThroughputTest", configuration) "System" must { "fails to perform" in { val nrMessages = 99 val nrWorkers = 3 val statDuration = 2000 millis //((nrMessages * 10)+1000)/4 millis val end = TestProbe() val workers = system.actorOf( RoundRobinPool(nrWorkers).props(Props(new ProcessRequest(1 second, end.ref)).withDispatcher("my-dispatcher")), "Workers") val startTime = System.currentTimeMillis() for (i <- 0 until nrMessages) { workers ! new SystemMessage(startTime, 0, "") } val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]] val endTime = System.currentTimeMillis() val total = endTime - startTime println("total process time %d Average=%d".format(total, total / nrMessages)) val grouped = msg.groupBy(_.id) grouped.map { case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) }) }.foreach(println(_)) Thread.sleep(1000) system.stop(workers) } } }
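Both throughput tests above gather their results with receiveN, which blocks until exactly n messages have arrived (or the max timeout expires) and returns them as a Seq. A minimal sketch, assuming an implicit ActorSystem named system is in scope:

import akka.testkit.TestProbe
import scala.concurrent.duration._

val end = TestProbe()
(1 to 5).foreach(i => end.ref ! s"result-$i")

// One receiveN call replaces five separate expectMsg calls.
val results = end.receiveN(n = 5, max = 3.seconds)
assert(results.size == 5)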
Example 39
Source File: MonitorMailboxTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.monitor import akka.testkit.TestProbe import akka.actor.{ Props, Actor, ActorSystem } import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import concurrent.duration._ import com.typesafe.config.ConfigFactory class MonitorMailboxTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("monitor/mailbox") implicit val system = ActorSystem("MonitorMailboxTest", configuration) override protected def afterAll(): Unit = { system.terminate() super.afterAll() } "mailbox" must { "send statistics with dispatcher" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[MailboxStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second)) .withDispatcher("my-dispatcher"), "monitorActor") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[MailboxStatistics] println(stat) stat.queueSize must be(1) val stat2 = statProbe.expectMsgType[MailboxStatistics] println(stat2) stat2.queueSize must (be(2) or be(1)) val stat3 = statProbe.expectMsgType[MailboxStatistics] println(stat3) stat3.queueSize must (be(3) or be(2)) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } "send statistics with default" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[MailboxStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second)), "monitorActor2") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[MailboxStatistics] stat.queueSize must be(1) val stat2 = statProbe.expectMsgType[MailboxStatistics] stat2.queueSize must (be(2) or be(1)) val stat3 = statProbe.expectMsgType[MailboxStatistics] stat3.queueSize must (be(3) or be(2)) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } } } class ProcessTestActor(serviceTime: Duration) extends Actor { def receive = { case _ => { Thread.sleep(serviceTime.toMillis) } } }
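statProbe.send(testActor, msg), used above, makes the probe the sender of the message, so any reply (or any event keyed on the sender) is attributed to the probe rather than to the test's own actor. A minimal sketch, assuming an implicit ActorSystem named system is in scope; Echo is a hypothetical stand-in:

import akka.actor.{Actor, Props}
import akka.testkit.TestProbe

// Hypothetical actor that answers whoever sent the message.
class Echo extends Actor {
  def receive: Receive = { case msg => sender() ! msg }
}

val probe = TestProbe()
val echo = system.actorOf(Props(new Echo))

// The probe is the sender, so the reply lands in the probe's queue, not in testActor's.
probe.send(echo, "ping")
probe.expectMsg("ping")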
Example 40
Source File: MonitorActorTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.monitor import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.testkit.{ TestProbe, TestKit } import akka.actor.{ Props, ActorSystem } import concurrent.duration._ class MonitorActorTest extends TestKit(ActorSystem("MonitorActorTest")) with WordSpecLike with BeforeAndAfterAll with MustMatchers { "Actor" must { "send statistics" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[ActorStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second) with MonitorActor), "monitorActor") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[ActorStatistics] println(stat) stat.exitTime - stat.entryTime must be(1000L +- 20) val stat2 = statProbe.expectMsgType[ActorStatistics] println(stat2) stat2.exitTime - stat2.entryTime must be(1000L +- 20) val stat3 = statProbe.expectMsgType[ActorStatistics] println(stat3) stat3.exitTime - stat3.entryTime must be(1000L +- 20) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } } }
Example 41
Source File: HashRoutingTest.scala From 006877 with MIT License | 5 votes |
package aia.routing import scala.concurrent.duration._ import akka.actor._ import org.scalatest.{BeforeAndAfterAll, WordSpecLike} import akka.routing._ import akka.routing.ConsistentHashingRouter._ import akka.testkit.{TestProbe, TestKit} class HashRoutingTest extends TestKit(ActorSystem("PerfRoutingTest")) with WordSpecLike with BeforeAndAfterAll { override def afterAll() = { system.terminate() } "The HashRouting" must { "work using mapping" in { val endProbe = TestProbe() def hashMapping: ConsistentHashMapping = { case msg: GatherMessage => msg.id } val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10, hashMapping = hashMapping). props(Props(new SimpleGather(endProbe.ref))), name = "routerMapping") router ! GatherMessageNormalImpl("1", Seq("msg1")) endProbe.expectNoMsg(100.millis) router ! GatherMessageNormalImpl("1", Seq("msg2")) endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2"))) router ! GatherMessageNormalImpl("10", Seq("msg1")) endProbe.expectNoMsg(100.millis) router ! GatherMessageNormalImpl("10", Seq("msg2")) endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2"))) system.stop(router) } "work using messages" in { val endProbe = TestProbe() val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10). props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage") router ! GatherMessageWithHash("1", Seq("msg1")) endProbe.expectNoMsg(100.millis) router ! GatherMessageWithHash("1", Seq("msg2")) endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2"))) router ! GatherMessageWithHash("10", Seq("msg1")) endProbe.expectNoMsg(100.millis) router ! GatherMessageWithHash("10", Seq("msg2")) endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2"))) system.stop(router) } "work using Envelope" in { val endProbe = TestProbe() val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10). props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage") router ! ConsistentHashableEnvelope( message = GatherMessageNormalImpl("1", Seq("msg1")), hashKey = "someHash") endProbe.expectNoMsg(100.millis) router ! ConsistentHashableEnvelope( message = GatherMessageNormalImpl("1", Seq("msg2")), hashKey = "someHash") endProbe.expectMsg(GatherMessageNormalImpl("1",Seq("msg1","msg2"))) router ! ConsistentHashableEnvelope( message = GatherMessageNormalImpl("10", Seq("msg1")), hashKey = "10") endProbe.expectNoMsg(100.millis) router ! ConsistentHashableEnvelope( message = GatherMessageNormalImpl("10", Seq("msg2")), hashKey = "10") endProbe.expectMsg(GatherMessageNormalImpl("10",Seq("msg1","msg2"))) system.stop(router) } "fail without using hash" in { val endProbe = TestProbe() val router = system.actorOf(ConsistentHashingPool(10, virtualNodesFactor = 10). props(Props(new SimpleGather(endProbe.ref))), name = "routerMessage") router ! GatherMessageNormalImpl("1", Seq("msg1")) endProbe.expectNoMsg(100.millis) router ! GatherMessageNormalImpl("1", Seq("msg2")) endProbe.expectNoMsg(1000.millis) system.stop(router) } } }
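In the routing test above the SimpleGather actor is handed endProbe.ref at construction time, which is the simplest way to observe an actor's output without touching its internals. A minimal sketch of the same idea, assuming an implicit ActorSystem named system is in scope; Gatherer is a hypothetical stand-in that pairs up consecutive messages:

import akka.actor.{Actor, ActorRef, Props}
import akka.testkit.TestProbe
import scala.concurrent.duration._

// Hypothetical aggregator: emits a pair to the collector once two messages have arrived.
class Gatherer(collector: ActorRef) extends Actor {
  var pending: Option[String] = None
  def receive: Receive = {
    case s: String if pending.isEmpty => pending = Some(s)
    case s: String =>
      collector ! Seq(pending.get, s)
      pending = None
  }
}

val endProbe = TestProbe()
val gatherer = system.actorOf(Props(new Gatherer(endProbe.ref)))

gatherer ! "msg1"
endProbe.expectNoMsg(100.millis)            // nothing emitted after the first message
gatherer ! "msg2"
endProbe.expectMsg(Seq("msg1", "msg2"))     // the pair arrives on the second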
Example 42
Source File: ClientTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.networking import java.net.{InetSocketAddress, ServerSocket} import akka.actor.ActorSystem import akka.io.{Inet, Tcp} import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import org.bitcoins.core.config.TestNet3 import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.bitcoins.spvnode.messages.control.VersionMessage import org.bitcoins.spvnode.messages.{NetworkPayload, VersionMessage} import org.bitcoins.spvnode.util.BitcoinSpvNodeUtil import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FlatSpecLike, MustMatchers} import scala.concurrent.duration._ import scala.util.Try class ClientTest extends TestKit(ActorSystem("ClientTest")) with FlatSpecLike with MustMatchers with ImplicitSender with BeforeAndAfter with BeforeAndAfterAll with BitcoinSLogger { "Client" must "connect to a node on the bitcoin network, " + "send a version message to a peer on the network and receive a version message back, then close that connection" in { val probe = TestProbe() val client = TestActorRef(Client.props,probe.ref) val remote = new InetSocketAddress(TestNet3.dnsSeeds(0), TestNet3.port) val randomPort = 23521 //random port client ! Tcp.Connect(remote, Some(new InetSocketAddress(randomPort))) //val bound : Tcp.Bound = probe.expectMsgType[Tcp.Bound] val conn : Tcp.Connected = probe.expectMsgType[Tcp.Connected] //make sure the socket is currently bound Try(new ServerSocket(randomPort)).isSuccess must be (false) client ! Tcp.Abort val confirmedClosed = probe.expectMsg(Tcp.Aborted) //make sure the port is now available val boundSocket = Try(new ServerSocket(randomPort)) boundSocket.isSuccess must be (true) boundSocket.get.close() } it must "bind connect to two nodes on one port" in { //NOTE if this test case fails it is more than likely because one of the two dns seeds //below is offline val remote1 = new InetSocketAddress(TestNet3.dnsSeeds(0), TestNet3.port) val remote2 = new InetSocketAddress(TestNet3.dnsSeeds(2), TestNet3.port) val probe1 = TestProbe() val probe2 = TestProbe() val client1 = TestActorRef(Client.props, probe1.ref) val client2 = TestActorRef(Client.props, probe2.ref) val local1 = new InetSocketAddress(TestNet3.port) val options = List(Inet.SO.ReuseAddress(true)) client1 ! Tcp.Connect(remote1,Some(local1),options) probe1.expectMsgType[Tcp.Connected] client1 ! Tcp.Abort val local2 = new InetSocketAddress(TestNet3.port) client2 ! Tcp.Connect(remote2,Some(local2),options) probe2.expectMsgType[Tcp.Connected](5.seconds) client2 ! Tcp.Abort } override def afterAll: Unit = { TestKit.shutdownActorSystem(system) } }
Example 43
Source File: BlockActorTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.networking import akka.actor.ActorSystem import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import org.bitcoins.core.crypto.DoubleSha256Digest import org.bitcoins.core.protocol.blockchain.BlockHeader import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.bitcoins.spvnode.messages.BlockMessage import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FlatSpecLike, MustMatchers} import scala.concurrent.duration.DurationInt class BlockActorTest extends TestKit(ActorSystem("BlockActorTest")) with FlatSpecLike with MustMatchers with ImplicitSender with BeforeAndAfter with BeforeAndAfterAll with BitcoinSLogger { def blockActor = TestActorRef(BlockActor.props,self) val blockHash = DoubleSha256Digest(BitcoinSUtil.flipEndianness("00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206")) "BlockActor" must "be able to send a GetBlocksMessage then receive that block back" in { blockActor ! blockHash val blockMsg = expectMsgType[BlockMessage](10.seconds) blockMsg.block.blockHeader.hash must be (blockHash) } it must "be able to request a block from it's block header" in { val blockHeader = BlockHeader("0100000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000bac8b0fa927c0ac8234287e33c5f74d38d354820e24756ad709d7038fc5f31f020e7494dffff001d03e4b672") blockActor ! blockHeader val blockMsg = expectMsgType[BlockMessage](10.seconds) blockMsg.block.blockHeader.hash must be (blockHash) } override def afterAll = { TestKit.shutdownActorSystem(system) } }
Example 44
Source File: ServiceSpec.scala From mqtt-mongo with MIT License | 5 votes |
package com.izmailoff.mm.service import akka.actor.ActorSystem import akka.testkit.{TestProbe, DefaultTimeout, ImplicitSender, TestKit} import com.izmailoff.mm.config.GlobalAppConfig import com.sandinh.paho.akka.MqttPubSub.{Subscribe, SubscribeAck, Message} import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.concurrent.duration._ import scala.collection.JavaConversions._ class ServiceSpec extends TestKit(ActorSystem("test-mqtt-mongo-system", GlobalAppConfig.config)) with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with TestMqttMongoServiceImpl with TestHelpers { override def afterAll { shutdown() } "Subscription between MQTT Broker and Consumer" should { "get established when consumer is started" in { val mqttBroker = startMqttIntermediary() val probe = TestProbe() val mqttConsumer = startMqttConsumer(probe.ref) probe.expectMsg(Subscribe(testTopic, mqttConsumer)) probe.forward(mqttBroker, Subscribe(testTopic, probe.ref)) probe.expectMsg(SubscribeAck(Subscribe(testTopic, probe.ref))) probe.forward(mqttConsumer, SubscribeAck(Subscribe(testTopic, mqttConsumer))) probe.expectNoMsg() } } "Sending a message to MQTT Broker" should { "forward it to MQTT Consumer and get saved in DB in proper JSON format" in { val collection = getCollectionName(testTopic).head db.getCollection(collection).count() should be(0) val mqttBroker = startMqttIntermediary() val mqttConsumer = startMqttConsumer(mqttBroker) expectNoMsg(1 second) mqttBroker ! new Message(testTopic, "test content".getBytes) mqttBroker ! new Message(testTopic, """{ "field1" : "str val", "field2" : 123 }""".getBytes) expectNoMsg(1 second) db.getCollection(collection).count() should be(2) val allDocsDb = db.getCollection(collection).find().iterator.toList allDocsDb.exists { d => val fields: Map[Any, Any] = d.toMap.toMap fields.size == 2 && fields("payload") == "test content" } should be(true) allDocsDb.exists { d => val fields: Map[Any, Any] = d.toMap.toMap fields.size == 3 && fields("field1") == "str val" && fields("field2") == 123 } should be(true) } } }
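probe.forward, used above to sit between the broker and the consumer, re-sends the last received message while preserving its original sender, so the probe acts as a transparent man-in-the-middle. A minimal sketch, assuming an implicit ActorSystem named system is in scope; Replier is a hypothetical stand-in:

import akka.actor.{Actor, Props}
import akka.testkit.TestProbe

// Hypothetical actor that replies to whoever originally sent the message.
class Replier extends Actor {
  def receive: Receive = { case msg => sender() ! msg }
}

val caller = TestProbe()
val middleman = TestProbe()
val replier = system.actorOf(Props(new Replier))

// caller -> middleman (asserted) -> replier; the reply goes straight back to caller.
caller.send(middleman.ref, "hello")
middleman.expectMsg("hello")
middleman.forward(replier)      // forwards the last message, keeping caller as the sender
caller.expectMsg("hello")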
Example 45
Source File: AwaitPersistenceInit.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.persistence.testkit import java.util.concurrent.TimeUnit import akka.actor.ActorSystem import akka.actor.Props import akka.persistence.PersistentActor import akka.testkit.TestProbe import org.slf4j.LoggerFactory import scala.concurrent.duration._ // A copy of akka.persistence.cassandra.CassandraLifecycle's awaitPersistenceInit. private[lagom] object AwaitPersistenceInit { def awaitPersistenceInit(system: ActorSystem): Unit = { val probe = TestProbe()(system) val log = LoggerFactory.getLogger(getClass) val t0 = System.nanoTime() var n = 0 probe.within(45.seconds) { probe.awaitAssert { n += 1 system.actorOf(Props[AwaitPersistenceInit], "persistenceInit" + n).tell("hello", probe.ref) probe.expectMsg(15.seconds, "hello") log.debug( "awaitPersistenceInit took {} ms {}", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0), system.name ) } } } } private[lagom] class AwaitPersistenceInit extends PersistentActor { def persistenceId: String = self.path.name def receiveRecover: Receive = { case _ => } def receiveCommand: Receive = { case msg => persist(msg) { _ => sender() ! msg context.stop(self) } } }
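probe.within combined with probe.awaitAssert, as above, keeps retrying a block until it stops throwing or the enclosing deadline expires, which is a common way to wait for slow infrastructure to come up. A minimal sketch, assuming an implicit ActorSystem named system is in scope; EventuallyReady simulates a resource that only becomes available after a delay:

import akka.actor.{Actor, Props}
import akka.testkit.TestProbe
import scala.concurrent.duration._

// Hypothetical actor that only answers once its (simulated) backing resource is ready.
class EventuallyReady(readyAt: Long) extends Actor {
  def receive: Receive = {
    case "ping" if System.currentTimeMillis() >= readyAt => sender() ! "pong"
    case _ => // not ready yet: stay silent so the probe times out and retries
  }
}

val probe = TestProbe()
val actor = system.actorOf(Props(new EventuallyReady(System.currentTimeMillis() + 500)))

probe.within(10.seconds) {
  probe.awaitAssert {
    probe.send(actor, "ping")
    probe.expectMsg(200.millis, "pong")   // throws until the actor is ready, ending the retry loop
  }
}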
Example 46
Source File: MultiNodeExpect.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.cluster import akka.Done import akka.actor.ActorRef import akka.actor.ActorSystem import akka.actor.Scheduler import akka.annotation.ApiMayChange import akka.cluster.ddata.DistributedData import akka.cluster.ddata.Flag import akka.cluster.ddata.FlagKey import akka.cluster.ddata.Replicator.Get import akka.cluster.ddata.Replicator.GetSuccess import akka.cluster.ddata.Replicator.ReadLocal import akka.cluster.ddata.Replicator.Update import akka.cluster.ddata.Replicator.UpdateSuccess import akka.cluster.ddata.Replicator.WriteAll import akka.cluster.ddata.Replicator.WriteConsistency import akka.cluster.ddata.SelfUniqueAddress import akka.event.LoggingAdapter import akka.testkit.TestProbe import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.concurrent.duration._ import scala.reflect.ClassTag import akka.pattern.after import akka.pattern.ask import akka.util.Timeout import scala.util.control.NonFatal @ApiMayChange class MultiNodeExpect(probe: TestProbe)(implicit system: ActorSystem) { private implicit val scheduler: Scheduler = system.scheduler private implicit val executionContext: ExecutionContext = system.dispatcher val replicator: ActorRef = DistributedData(system).replicator implicit val node: SelfUniqueAddress = DistributedData(system).selfUniqueAddress def expectMsgType[T](expectationKey: String, max: FiniteDuration)(implicit t: ClassTag[T]): Future[Done] = { val eventualT = () => Future(errorAsRuntime(probe.expectMsgType[T](max))) doExpect(eventualT)(expectationKey, max) } // prevents Errors from turning into BoxedError when using `Future(f)` (where f throws Error) private def errorAsRuntime[T](f: => T): T = { try { f } catch { case NonFatal(t) => throw t case x: Throwable => throw new RuntimeException(x) } } private def doExpect[T](eventualT: () => Future[T])(expectationKey: String, max: FiniteDuration): Future[Done] = { val DataKey: FlagKey = FlagKey(expectationKey) val writeAll: WriteConsistency = WriteAll(max) implicit val timeout: Timeout = Timeout(max) val retryDelay = 3.second val fTimeout = after(max, scheduler)(Future.failed(new RuntimeException(s"timeout $max expired"))) // If the local expectation wins, it must notify others. val fLocalExpect: Future[Done] = eventualT() .map { _ => (replicator ? Update(DataKey, Flag.empty, writeAll)( _.switchOn )).mapTo[UpdateSuccess[Flag]] } .map(_ => Done) // if a remote expectation wins, we can move on. val poll: () => Future[Done] = () => (replicator ? Get(DataKey, ReadLocal)).map { case g @ GetSuccess(DataKey, _) if g.get(DataKey).enabled => Done case _ => throw new RuntimeException("Flag unset") } val fRemoteExpect: Future[Done] = retry( poll, retryDelay, Int.MaxValue // keep retrying, there's a timeout later ) Future .firstCompletedOf( Seq( fLocalExpect, fRemoteExpect, fTimeout ) ) } // From vklang's https://gist.github.com/viktorklang/9414163 def retry[T](op: () => Future[T], delay: FiniteDuration, retries: Int): Future[T] = op().recoverWith { case _ if retries > 0 => after(delay, scheduler)(retry(op, delay, retries - 1)) } }
Example 47
Source File: PersistentEntityTestDriverCompatSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.testkit import scala.collection.JavaConverters._ import scala.concurrent.duration._ import com.lightbend.lagom.javadsl.persistence.TestEntity import akka.testkit.TestProbe import com.lightbend.lagom.internal.javadsl.persistence.PersistentEntityActor import java.util.Optional import com.lightbend.lagom.javadsl.persistence.cassandra.CassandraPersistenceSpec class PersistentEntityTestDriverCompatSpec extends CassandraPersistenceSpec { "PersistentEntityActor and PersistentEntityTestDriver" must { "produce same events and state" in { val probe1 = TestProbe() val p = system.actorOf( PersistentEntityActor.props( "test", Optional.of("1"), () => new TestEntity(system, probe1.ref), Optional.empty(), 10.seconds, "", "" ) ) val probe2 = TestProbe() val driver = new PersistentEntityTestDriver(system, new TestEntity(system, probe2.ref), "1") val commands = List( TestEntity.Get.instance, TestEntity.Add.of("a"), TestEntity.Add.of("b"), TestEntity.Add.of(""), new TestEntity.ChangeMode(TestEntity.Mode.PREPEND), TestEntity.Add.of("C"), new TestEntity.Add("D", 2), TestEntity.Get.instance ) val outcome = driver.run(commands: _*) commands.foreach(p ! _) val replySideEffects = outcome.sideEffects.asScala.collect { case PersistentEntityTestDriver.Reply(msg) => msg } val replies = receiveN(replySideEffects.size) replySideEffects should be(replies) // Add 2 generates 2 events, but only one reply so drop last event when comparing outcome.events.asScala.dropRight(1) should be(replies.collect { case evt: TestEntity.Evt => evt }) outcome.state should be(replies.last) expectNoMessage(200.millis) probe1.expectMsgType[TestEntity.AfterRecovery] probe2.expectMsgType[TestEntity.AfterRecovery] outcome.issues.asScala.toList should be(Nil) } } }
Example 48
Source File: PersistentEntityTestDriverCompatSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.testkit import akka.testkit.TestProbe import com.lightbend.lagom.internal.scaladsl.persistence.PersistentEntityActor import com.lightbend.lagom.scaladsl.persistence.TestEntity import com.lightbend.lagom.scaladsl.persistence.TestEntitySerializerRegistry import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceSpec import scala.concurrent.duration._ class PersistentEntityTestDriverCompatSpec extends CassandraPersistenceSpec(TestEntitySerializerRegistry) { "PersistentEntityActor and PersistentEntityTestDriver" must { "produce same events and state" in { val probe1 = TestProbe() val p = system.actorOf( PersistentEntityActor .props("test", Some("1"), () => new TestEntity(system, Some(probe1.ref)), None, 10.seconds, "", "") ) val probe2 = TestProbe() val driver = new PersistentEntityTestDriver(system, new TestEntity(system, Some(probe2.ref)), "1") val commands = List( TestEntity.Get, TestEntity.Add("a"), TestEntity.Add("b"), TestEntity.Add(""), TestEntity.ChangeMode(TestEntity.Mode.Prepend), TestEntity.Add("C"), TestEntity.Add("D", 2), TestEntity.Get ) val outcome = driver.run(commands: _*) commands.foreach(p ! _) val replySideEffects = outcome.sideEffects.collect { case PersistentEntityTestDriver.Reply(msg) => msg } val replies = receiveN(replySideEffects.size) replySideEffects should be(replies) // Add 2 generates 2 events, but only one reply so drop last event when comparing outcome.events.dropRight(1) should be(replies.collect { case evt: TestEntity.Evt => evt }) outcome.state should be(replies.last) expectNoMessage(200.millis) probe1.expectMsgType[TestEntity.AfterRecovery] probe2.expectMsgType[TestEntity.AfterRecovery] outcome.issues should be(Nil) } } }
Example 49
Source File: HttpContactPointRoutesSpec.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.management.cluster.bootstrap.contactpoint import akka.cluster.{ Cluster, ClusterEvent } import akka.event.NoLogging import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.management.cluster.bootstrap.ClusterBootstrapSettings import akka.testkit.{ SocketUtil, TestProbe } import org.scalatest.concurrent.Eventually import org.scalatest.time.{ Millis, Seconds, Span } import org.scalatest.{ Matchers, WordSpecLike } class HttpContactPointRoutesSpec extends WordSpecLike with Matchers with ScalatestRouteTest with HttpBootstrapJsonProtocol with Eventually { implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(Span(3, Seconds)), interval = scaled(Span(50, Millis))) override def testConfigSource = s""" akka { remote { netty.tcp { hostname = "127.0.0.1" port = ${SocketUtil.temporaryServerAddress("127.0.0.1").getPort} } } } """.stripMargin "Http Bootstrap routes" should { val settings = ClusterBootstrapSettings(system.settings.config, NoLogging) val httpBootstrap = new HttpClusterBootstrapRoutes(settings) "empty list if node is not part of a cluster" in { ClusterBootstrapRequests.bootstrapSeedNodes("") ~> httpBootstrap.routes ~> check { responseAs[String] should include(""""seedNodes":[]""") } } "include seed nodes when part of a cluster" in { val cluster = Cluster(system) cluster.join(cluster.selfAddress) val p = TestProbe() cluster.subscribe(p.ref, ClusterEvent.InitialStateAsEvents, classOf[ClusterEvent.MemberUp]) val up = p.expectMsgType[ClusterEvent.MemberUp] up.member should ===(cluster.selfMember) eventually { ClusterBootstrapRequests.bootstrapSeedNodes("") ~> httpBootstrap.routes ~> check { val response = responseAs[HttpBootstrapJsonProtocol.SeedNodes] response.seedNodes should !==(Set.empty) response.seedNodes.map(_.node) should contain(cluster.selfAddress) } } } } }
Example 50
Source File: PersistenceIdsQuerySpec.scala From akka-persistence-couchbase with Apache License 2.0 | 5 votes |
package akka.persistence.couchbase.scaladsl import akka.persistence.couchbase.TestActor import akka.stream.testkit.TestSubscriber import akka.stream.testkit.scaladsl.TestSink import akka.testkit.TestProbe import scala.concurrent.duration._ class PersistenceIdsQuerySpec extends AbstractCouchbaseSpec("PersistenceIdsQuerySpec") { "currentPersistenceIds" must { "work" in { val senderProbe = TestProbe() implicit val sender = senderProbe.ref val pa1 = system.actorOf(TestActor.props("p1")) pa1 ! "p1-evt-1" senderProbe.expectMsg("p1-evt-1-done") val pa2 = system.actorOf(TestActor.props("p2")) pa2 ! "p2-evt-1" senderProbe.expectMsg("p2-evt-1-done") awaitAssert( { val probe: TestSubscriber.Probe[String] = queries.currentPersistenceIds().runWith(TestSink.probe) probe.requestNext("p1") probe.requestNext("p2") probe.expectComplete() }, readOurOwnWritesTimeout ) } } "live persistenceIds" must { "show new persistence ids" in { val senderProbe = TestProbe() implicit val sender = senderProbe.ref val queryProbe: TestSubscriber.Probe[String] = queries.persistenceIds().runWith(TestSink.probe) queryProbe.request(10) val pa3 = system.actorOf(TestActor.props("p3")) pa3 ! "p3-evt-1" senderProbe.expectMsg("p3-evt-1-done") awaitAssert({ queryProbe.expectNext("p3") }, 5.seconds) val pa4 = system.actorOf(TestActor.props("p4")) pa4 ! "p4-evt-1" senderProbe.expectMsg("p4-evt-1-done") // we shouldn't see p3 again queryProbe.expectNext("p4") // also not after p4 (it could come out of order) queryProbe.expectNoMessage(noMsgTimeout) } } }
Example 51
Source File: AbstractCouchbaseSpec.scala From akka-persistence-couchbase with Apache License 2.0 | 5 votes |
package akka.persistence.couchbase.scaladsl import akka.actor.{ActorRef, ActorSystem} import akka.persistence.couchbase.{CouchbaseBucketSetup, TestActor} import akka.persistence.query.PersistenceQuery import akka.stream.{ActorMaterializer, Materializer} import akka.testkit.{TestKit, TestProbe, WithLogCapturing} import com.typesafe.config.{Config, ConfigFactory} import org.scalatest.concurrent.ScalaFutures import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.concurrent.duration._ abstract class AbstractCouchbaseSpec(testName: String, config: Config) extends TestKit( ActorSystem(testName, config.withFallback(ConfigFactory.load())) ) with WordSpecLike with BeforeAndAfterAll with Matchers with ScalaFutures with CouchbaseBucketSetup with WithLogCapturing { def this(testName: String) = this( testName, ConfigFactory.parseString(""" couchbase-journal.read { page-size = 10 } akka.loggers = ["akka.testkit.SilenceAllTestEventListener"] akka.loglevel=debug """) ) var idCounter = 0 def nextPersistenceId(): String = { idCounter += 1 val id = Integer.toString(idCounter, 24) id.toString } // provides a unique persistence-id per test case and some initial persisted events protected trait Setup { lazy val probe = TestProbe() implicit def sender: ActorRef = probe.ref // note must be a def or lazy val or else it doesn't work (init order) def initialPersistedEvents: Int = 0 def startPersistentActor(initialEvents: Int): (String, ActorRef) = { val pid = nextPersistenceId() system.log.debug("Starting actor with pid {}, and writing {} initial events", pid, initialPersistedEvents) val persistentActor = system.actorOf(TestActor.props(pid)) if (initialEvents > 0) { for (i <- 1 to initialEvents) { persistentActor ! s"$pid-$i" probe.expectMsg(s"$pid-$i-done") } } (pid, persistentActor) } val (pid, persistentActor) = startPersistentActor(initialPersistedEvents) // no guarantee we can immediately read our own writes def readingOurOwnWrites[A](f: => A): A = awaitAssert(f, readOurOwnWritesTimeout, interval = 250.millis) // no need to bombard the db with retries } protected val noMsgTimeout = 100.millis protected val readOurOwnWritesTimeout = 10.seconds override implicit val patienceConfig: PatienceConfig = PatienceConfig(readOurOwnWritesTimeout) implicit val materializer: Materializer = ActorMaterializer() lazy // #read-journal-access val queries: CouchbaseReadJournal = PersistenceQuery(system).readJournalFor[CouchbaseReadJournal](CouchbaseReadJournal.Identifier) // #read-journal-access protected override def afterAll(): Unit = { super.afterAll() shutdown(system) } }
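The Setup trait above exposes probe.ref as the implicit sender, so every plain ! from the test carries the probe as sender and every ack comes back to the probe instead of the TestKit's testActor. A minimal sketch of that trick, assuming an implicit ActorSystem named system is in scope; AckActor is a hypothetical stand-in for the persistent actor:

import akka.actor.{Actor, ActorRef, Props}
import akka.testkit.TestProbe

// Hypothetical actor that acknowledges every message back to its sender.
class AckActor extends Actor {
  def receive: Receive = { case msg => sender() ! s"$msg-done" }
}

val probe = TestProbe()
implicit val sender: ActorRef = probe.ref   // plain ! now carries the probe as the sender

val actor = system.actorOf(Props(new AckActor))
actor ! "evt-1"
probe.expectMsg("evt-1-done")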
Example 52
Source File: CouchbaseSnapshotSpec.scala From akka-persistence-couchbase with Apache License 2.0 | 5 votes |
package akka.persistence.couchbase import akka.actor.{ActorSystem, PoisonPill} import akka.persistence.couchbase.TestActor.{GetLastRecoveredEvent, SaveSnapshot} import akka.stream.ActorMaterializer import akka.testkit.{TestKit, TestProbe, WithLogCapturing} import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike} import scala.concurrent.duration._ class CouchbaseSnapshotSpec extends TestKit( ActorSystem( "CouchbaseSnapshotSpec", ConfigFactory.parseString(""" |akka.loggers = ["akka.testkit.SilenceAllTestEventListener"] """.stripMargin).withFallback(ConfigFactory.load()) ) ) with WordSpecLike with BeforeAndAfterAll with Matchers with CouchbaseBucketSetup with BeforeAndAfterEach with WithLogCapturing { protected override def afterAll(): Unit = { super.afterAll() shutdown(system) } val waitTime = 100.millis implicit val materializer = ActorMaterializer() "entity" should { "recover" in { val senderProbe = TestProbe() implicit val sender = senderProbe.ref { val pa1 = system.actorOf(TestActor.props("p1")) pa1 ! "p1-evt-1" senderProbe.expectMsg("p1-evt-1-done") senderProbe.watch(pa1) pa1 ! PoisonPill senderProbe.expectTerminated(pa1) } { val pa1 = system.actorOf(TestActor.props("p1")) pa1 ! GetLastRecoveredEvent senderProbe.expectMsg("p1-evt-1") } } "recover after snapshot" in { val senderProbe = TestProbe() implicit val sender = senderProbe.ref { val pa1 = system.actorOf(TestActor.props("p2")) pa1 ! "p2-evt-1" senderProbe.expectMsg("p2-evt-1-done") pa1 ! SaveSnapshot senderProbe.expectMsgType[Long] senderProbe.watch(pa1) pa1 ! PoisonPill senderProbe.expectTerminated(pa1) } { val pa1 = system.actorOf(TestActor.props("p2")) pa1 ! GetLastRecoveredEvent senderProbe.expectMsg("p2-evt-1") } } } }
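senderProbe.watch plus expectTerminated, as above, is the standard way to be sure an actor has fully stopped before starting a replacement with the same persistence id. A minimal sketch, assuming an implicit ActorSystem named system is in scope; Worker is a hypothetical stand-in:

import akka.actor.{Actor, PoisonPill, Props}
import akka.testkit.TestProbe

// Hypothetical actor; its behaviour is irrelevant, we only care about its lifecycle.
class Worker extends Actor {
  def receive: Receive = { case _ => }
}

val probe = TestProbe()
val worker = system.actorOf(Props(new Worker))

probe.watch(worker)             // the probe will receive Terminated(worker) when it stops
worker ! PoisonPill
probe.expectTerminated(worker)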
Example 53
Source File: WaitForFundingCreatedInternalStateSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.channel.states.b import akka.testkit.{TestFSMRef, TestProbe} import fr.acinq.bitcoin.{ByteVector32, Satoshi} import fr.acinq.eclair.TestConstants.{Alice, Bob} import fr.acinq.eclair.blockchain.{MakeFundingTxResponse, TestWallet} import fr.acinq.eclair.channel._ import fr.acinq.eclair.channel.states.StateTestsHelperMethods import fr.acinq.eclair.wire._ import fr.acinq.eclair.{TestConstants, TestKitBaseClass} import org.scalatest.Outcome import org.scalatest.funsuite.FixtureAnyFunSuiteLike import scodec.bits.ByteVector import scala.concurrent.duration._ import scala.concurrent.{Future, Promise} class WaitForFundingCreatedInternalStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods { case class FixtureParam(alice: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, alice2blockchain: TestProbe) override def withFixture(test: OneArgTest): Outcome = { val noopWallet = new TestWallet { override def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): Future[MakeFundingTxResponse] = Promise[MakeFundingTxResponse].future // will never be completed } val setup = init(wallet = noopWallet) import setup._ val aliceInit = Init(Alice.channelParams.features) val bobInit = Init(Bob.channelParams.features) within(30 seconds) { alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD) bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit) alice2bob.expectMsgType[OpenChannel] alice2bob.forward(bob) bob2alice.expectMsgType[AcceptChannel] bob2alice.forward(alice) awaitCond(alice.stateName == WAIT_FOR_FUNDING_INTERNAL) withFixture(test.toNoArgTest(FixtureParam(alice, alice2bob, bob2alice, alice2blockchain))) } } test("recv Error") { f => import f._ alice ! Error(ByteVector32.Zeroes, "oops") awaitCond(alice.stateName == CLOSED) } test("recv CMD_CLOSE") { f => import f._ alice ! CMD_CLOSE(None) awaitCond(alice.stateName == CLOSED) } }
Example 54
Source File: WaitForFundingSignedStateSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.channel.states.b import akka.testkit.{TestFSMRef, TestProbe} import fr.acinq.bitcoin.{ByteVector32, ByteVector64} import fr.acinq.eclair.TestConstants.{Alice, Bob} import fr.acinq.eclair.blockchain._ import fr.acinq.eclair.channel.Channel.TickChannelOpenTimeout import fr.acinq.eclair.channel._ import fr.acinq.eclair.channel.states.StateTestsHelperMethods import fr.acinq.eclair.wire.{AcceptChannel, Error, FundingCreated, FundingSigned, Init, OpenChannel} import fr.acinq.eclair.{TestConstants, TestKitBaseClass} import org.scalatest.Outcome import org.scalatest.funsuite.FixtureAnyFunSuiteLike import scala.concurrent.duration._ class WaitForFundingSignedStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods { case class FixtureParam(alice: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, alice2blockchain: TestProbe) override def withFixture(test: OneArgTest): Outcome = { val setup = init() import setup._ val aliceInit = Init(Alice.channelParams.features) val bobInit = Init(Bob.channelParams.features) within(30 seconds) { alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD) bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit) alice2bob.expectMsgType[OpenChannel] alice2bob.forward(bob) bob2alice.expectMsgType[AcceptChannel] bob2alice.forward(alice) alice2bob.expectMsgType[FundingCreated] alice2bob.forward(bob) awaitCond(alice.stateName == WAIT_FOR_FUNDING_SIGNED) withFixture(test.toNoArgTest(FixtureParam(alice, alice2bob, bob2alice, alice2blockchain))) } } test("recv FundingSigned with valid signature") { f => import f._ bob2alice.expectMsgType[FundingSigned] bob2alice.forward(alice) awaitCond(alice.stateName == WAIT_FOR_FUNDING_CONFIRMED) alice2blockchain.expectMsgType[WatchSpent] alice2blockchain.expectMsgType[WatchConfirmed] } test("recv FundingSigned with invalid signature") { f => import f._ // sending an invalid sig alice ! FundingSigned(ByteVector32.Zeroes, ByteVector64.Zeroes) awaitCond(alice.stateName == CLOSED) alice2bob.expectMsgType[Error] } test("recv CMD_CLOSE") { f => import f._ alice ! CMD_CLOSE(None) awaitCond(alice.stateName == CLOSED) } test("recv CMD_FORCECLOSE") { f => import f._ alice ! CMD_FORCECLOSE awaitCond(alice.stateName == CLOSED) } test("recv INPUT_DISCONNECTED") { f => import f._ val fundingTx = alice.stateData.asInstanceOf[DATA_WAIT_FOR_FUNDING_SIGNED].fundingTx assert(alice.underlyingActor.wallet.asInstanceOf[TestWallet].rolledback.isEmpty) alice ! INPUT_DISCONNECTED awaitCond(alice.stateName == CLOSED) assert(alice.underlyingActor.wallet.asInstanceOf[TestWallet].rolledback.contains(fundingTx)) } test("recv TickChannelOpenTimeout") { f => import f._ alice ! TickChannelOpenTimeout awaitCond(alice.stateName == CLOSED) } }
Example 55
Source File: WaitForFundingCreatedStateSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.channel.states.b import akka.testkit.{TestFSMRef, TestProbe} import fr.acinq.bitcoin.{ByteVector32, Satoshi} import fr.acinq.eclair.TestConstants.{Alice, Bob} import fr.acinq.eclair.blockchain._ import fr.acinq.eclair.channel._ import fr.acinq.eclair.channel.states.StateTestsHelperMethods import fr.acinq.eclair.transactions.Transactions import fr.acinq.eclair.wire._ import fr.acinq.eclair.{LongToBtcAmount, TestConstants, TestKitBaseClass, ToMilliSatoshiConversion} import org.scalatest.funsuite.FixtureAnyFunSuiteLike import org.scalatest.{Outcome, Tag} import scala.concurrent.duration._ class WaitForFundingCreatedStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods { case class FixtureParam(bob: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, bob2blockchain: TestProbe) override def withFixture(test: OneArgTest): Outcome = { val setup = init() import setup._ val (fundingSatoshis, pushMsat) = if (test.tags.contains("funder_below_reserve")) { (1000100 sat, (1000000 sat).toMilliSatoshi) // toLocal = 100 satoshis } else { (TestConstants.fundingSatoshis, TestConstants.pushMsat) } val aliceInit = Init(Alice.channelParams.features) val bobInit = Init(Bob.channelParams.features) within(30 seconds) { alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, fundingSatoshis, pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD) bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit) alice2bob.expectMsgType[OpenChannel] alice2bob.forward(bob) bob2alice.expectMsgType[AcceptChannel] bob2alice.forward(alice) awaitCond(bob.stateName == WAIT_FOR_FUNDING_CREATED) withFixture(test.toNoArgTest(FixtureParam(bob, alice2bob, bob2alice, bob2blockchain))) } } test("recv FundingCreated") { f => import f._ alice2bob.expectMsgType[FundingCreated] alice2bob.forward(bob) awaitCond(bob.stateName == WAIT_FOR_FUNDING_CONFIRMED) bob2alice.expectMsgType[FundingSigned] bob2blockchain.expectMsgType[WatchSpent] bob2blockchain.expectMsgType[WatchConfirmed] } test("recv FundingCreated (funder can't pay fees)", Tag("funder_below_reserve")) { f => import f._ val fees = Satoshi(Transactions.commitWeight * TestConstants.feeratePerKw / 1000) val reserve = Bob.channelParams.channelReserve val missing = 100.sat - fees - reserve val fundingCreated = alice2bob.expectMsgType[FundingCreated] alice2bob.forward(bob) val error = bob2alice.expectMsgType[Error] assert(error === Error(fundingCreated.temporaryChannelId, s"can't pay the fee: missing=${-missing} reserve=$reserve fees=$fees")) awaitCond(bob.stateName == CLOSED) } test("recv Error") { f => import f._ bob ! Error(ByteVector32.Zeroes, "oops") awaitCond(bob.stateName == CLOSED) } test("recv CMD_CLOSE") { f => import f._ bob ! CMD_CLOSE(None) awaitCond(bob.stateName == CLOSED) } }
Example 56
Source File: ThroughputSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.channel import java.util.UUID import java.util.concurrent.CountDownLatch import java.util.concurrent.atomic.AtomicLong import akka.actor.{Actor, ActorRef, ActorSystem, Props} import akka.testkit.TestProbe import fr.acinq.bitcoin.{ByteVector32, Crypto} import fr.acinq.eclair.TestConstants.{Alice, Bob} import fr.acinq.eclair._ import fr.acinq.eclair.blockchain._ import fr.acinq.eclair.blockchain.bitcoind.ZmqWatcher import fr.acinq.eclair.payment.relay.{CommandBuffer, Relayer} import fr.acinq.eclair.wire.{Init, UpdateAddHtlc} import org.scalatest.funsuite.AnyFunSuite import scala.concurrent.duration._ import scala.util.Random class ThroughputSpec extends AnyFunSuite { ignore("throughput") { implicit val system = ActorSystem("test") val pipe = system.actorOf(Props[Pipe], "pipe") val blockCount = new AtomicLong() val blockchain = system.actorOf(ZmqWatcher.props(blockCount, new TestBitcoinClient()), "blockchain") val paymentHandler = system.actorOf(Props(new Actor() { val random = new Random() context.become(run(Map())) override def receive: Receive = ??? def run(h2r: Map[ByteVector32, ByteVector32]): Receive = { case ('add, tgt: ActorRef) => val r = randomBytes32 val h = Crypto.sha256(r) tgt ! CMD_ADD_HTLC(1 msat, h, CltvExpiry(1), TestConstants.emptyOnionPacket, Upstream.Local(UUID.randomUUID())) context.become(run(h2r + (h -> r))) case ('sig, tgt: ActorRef) => tgt ! CMD_SIGN case htlc: UpdateAddHtlc if h2r.contains(htlc.paymentHash) => val r = h2r(htlc.paymentHash) sender ! CMD_FULFILL_HTLC(htlc.id, r) context.become(run(h2r - htlc.paymentHash)) } }), "payment-handler") val registerA = TestProbe() val registerB = TestProbe() val commandBufferA = system.actorOf(Props(new CommandBuffer(Alice.nodeParams, registerA.ref))) val commandBufferB = system.actorOf(Props(new CommandBuffer(Bob.nodeParams, registerB.ref))) val relayerA = system.actorOf(Relayer.props(Alice.nodeParams, TestProbe().ref, registerA.ref, commandBufferA, paymentHandler)) val relayerB = system.actorOf(Relayer.props(Bob.nodeParams, TestProbe().ref, registerB.ref, commandBufferB, paymentHandler)) val wallet = new TestWallet val alice = system.actorOf(Channel.props(Alice.nodeParams, wallet, Bob.nodeParams.nodeId, blockchain, relayerA, None), "a") val bob = system.actorOf(Channel.props(Bob.nodeParams, wallet, Alice.nodeParams.nodeId, blockchain, relayerB, None), "b") val aliceInit = Init(Alice.channelParams.features) val bobInit = Init(Bob.channelParams.features) alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, pipe, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD) bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, pipe, aliceInit) val latch = new CountDownLatch(2) val listener = system.actorOf(Props(new Actor { override def receive: Receive = { case ChannelStateChanged(_, _, _, _, NORMAL, _) => latch.countDown() } }), "listener") system.eventStream.subscribe(listener, classOf[ChannelEvent]) pipe ! (alice, bob) latch.await() var i = new AtomicLong(0) val random = new Random() def msg = random.nextInt(100) % 5 match { case 0 | 1 | 2 | 3 => 'add case 4 => 'sig } import scala.concurrent.ExecutionContext.Implicits.global system.scheduler.schedule(0 seconds, 50 milliseconds, new Runnable() { override def run(): Unit = paymentHandler ! (msg, alice) }) system.scheduler.schedule(5 seconds, 70 milliseconds, new Runnable() { override def run(): Unit = paymentHandler ! (msg, bob) }) Thread.sleep(Long.MaxValue) } }
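Note: several collaborators above are supplied as TestProbe().ref without keeping a handle on the probe — the actor under test needs some ActorRef, but the test never inspects what is sent to it. A minimal sketch of that throwaway-dependency idiom follows; the Worker actor and its messages are hypothetical.

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object ThrowawayRefSketch extends App {
  implicit val system: ActorSystem = ActorSystem("throwaway-ref-sketch")

  // Hypothetical worker that needs an audit-log ActorRef the test does not care about.
  class Worker(auditLog: ActorRef) extends Actor {
    override def receive: Receive = {
      case job: String =>
        auditLog ! s"started $job" // goes to the anonymous, ignored probe
        sender() ! s"done $job"
    }
  }

  val caller = TestProbe()
  val worker = system.actorOf(Props(new Worker(TestProbe().ref)), "worker")

  caller.send(worker, "job-1")
  caller.expectMsg("done job-1")

  TestKit.shutdownActorSystem(system)
}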
Example 57
Source File: PaymentHandlerSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.payment import akka.actor.Actor.Receive import akka.actor.{ActorContext, ActorSystem} import akka.event.DiagnosticLoggingAdapter import akka.testkit.{TestKit, TestProbe} import fr.acinq.eclair.TestConstants.Alice import fr.acinq.eclair.TestKitBaseClass import fr.acinq.eclair.payment.receive.{PaymentHandler, ReceiveHandler} import org.scalatest.funsuite.AnyFunSuiteLike import scala.concurrent.duration._ class PaymentHandlerSpec extends TestKitBaseClass with AnyFunSuiteLike { test("compose payment handlers") { val handler = system.actorOf(PaymentHandler.props(Alice.nodeParams, TestProbe().ref)) val intHandler = new ReceiveHandler { override def handle(implicit ctx: ActorContext, log: DiagnosticLoggingAdapter): Receive = { case i: Int => ctx.sender ! -i } } val stringHandler = new ReceiveHandler { override def handle(implicit ctx: ActorContext, log: DiagnosticLoggingAdapter): Receive = { case s: String => ctx.sender ! s.reverse } } val probe = TestProbe() probe.send(handler, 42) probe.expectNoMsg(300 millis) probe.send(handler, intHandler) probe.send(handler, 42) probe.expectMsg(-42) probe.send(handler, "abcdef") probe.expectNoMsg(300 millis) probe.send(handler, stringHandler) probe.send(handler, 123) probe.expectMsg(-123) probe.send(handler, "abcdef") probe.expectMsg("fedcba") } }
Example 58
Source File: TestUtils.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair import java.io.File import java.net.ServerSocket import akka.actor.ActorRef import akka.event.DiagnosticLoggingAdapter import akka.testkit import akka.testkit.{TestActor, TestProbe} import fr.acinq.eclair.channel.Channel import fr.acinq.eclair.wire.LightningMessage object TestUtils { def forwardOutgoingToPipe(peer: TestProbe, pipe: ActorRef): Unit = { peer.setAutoPilot(new testkit.TestActor.AutoPilot { override def run(sender: ActorRef, msg: Any): TestActor.AutoPilot = msg match { case Channel.OutgoingMessage(msg: LightningMessage, _: ActorRef) => pipe tell (msg, sender) TestActor.KeepRunning case _ => TestActor.KeepRunning } }) } }
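Note: this helper installs an auto-pilot on a probe so the probe reacts to traffic instead of merely recording it (the real helper forwards Channel.OutgoingMessage payloads to a pipe). Below is a minimal, self-contained sketch of setAutoPilot using hypothetical string messages.

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestActor, TestKit, TestProbe}

object AutoPilotSketch extends App {
  implicit val system: ActorSystem = ActorSystem("autopilot-sketch")

  val stub   = TestProbe() // stands in for a real collaborator
  val caller = TestProbe()

  stub.setAutoPilot(new TestActor.AutoPilot {
    override def run(sender: ActorRef, msg: Any): TestActor.AutoPilot = msg match {
      case "ping" =>
        sender ! "pong"       // reply on behalf of the stubbed collaborator
        TestActor.KeepRunning // keep the auto-pilot installed for further messages
      case _ =>
        TestActor.KeepRunning
    }
  })

  caller.send(stub.ref, "ping")
  caller.expectMsg("pong")

  TestKit.shutdownActorSystem(system)
}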
Example 59
Source File: BackupHandlerSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.db import java.io.File import java.sql.DriverManager import java.util.UUID import akka.actor.ActorSystem import akka.testkit.{TestKit, TestProbe} import fr.acinq.eclair.channel.ChannelPersisted import fr.acinq.eclair.db.sqlite.SqliteChannelsDb import fr.acinq.eclair.wire.ChannelCodecsSpec import fr.acinq.eclair.{TestConstants, TestKitBaseClass, TestUtils, randomBytes32} import org.scalatest.funsuite.AnyFunSuiteLike class BackupHandlerSpec extends TestKitBaseClass with AnyFunSuiteLike { test("process backups") { val db = TestConstants.inMemoryDb() val wip = new File(TestUtils.BUILD_DIRECTORY, s"wip-${UUID.randomUUID()}") val dest = new File(TestUtils.BUILD_DIRECTORY, s"backup-${UUID.randomUUID()}") wip.deleteOnExit() dest.deleteOnExit() val channel = ChannelCodecsSpec.normal db.channels.addOrUpdateChannel(channel) assert(db.channels.listLocalChannels() == Seq(channel)) val handler = system.actorOf(BackupHandler.props(db, dest, None)) val probe = TestProbe() system.eventStream.subscribe(probe.ref, classOf[BackupEvent]) handler ! ChannelPersisted(null, TestConstants.Alice.nodeParams.nodeId, randomBytes32, null) handler ! ChannelPersisted(null, TestConstants.Alice.nodeParams.nodeId, randomBytes32, null) handler ! ChannelPersisted(null, TestConstants.Alice.nodeParams.nodeId, randomBytes32, null) probe.expectMsg(BackupCompleted) val db1 = new SqliteChannelsDb(DriverManager.getConnection(s"jdbc:sqlite:$dest")) val check = db1.listLocalChannels() assert(check == Seq(channel)) } }
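Note: the backup test subscribes a probe to the system event stream and waits for a BackupCompleted event. A minimal sketch of that event-stream assertion pattern, using a hypothetical JobDone event in place of the real one:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}

object EventStreamSketch extends App {
  implicit val system: ActorSystem = ActorSystem("event-stream-sketch")

  final case class JobDone(id: Int) // hypothetical event, plays the role of BackupCompleted

  val probe = TestProbe()
  system.eventStream.subscribe(probe.ref, classOf[JobDone]) // probe now sees published JobDone events

  system.eventStream.publish(JobDone(42)) // the code under test would publish this
  probe.expectMsg(JobDone(42))

  TestKit.shutdownActorSystem(system)
}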
Example 60
Source File: SwitchboardSpec.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.io import akka.actor.ActorRef import akka.testkit.{TestActorRef, TestProbe} import fr.acinq.bitcoin.ByteVector64 import fr.acinq.bitcoin.Crypto.PublicKey import fr.acinq.eclair.TestConstants._ import fr.acinq.eclair.blockchain.TestWallet import fr.acinq.eclair.wire._ import fr.acinq.eclair.{Features, NodeParams, TestKitBaseClass} import org.scalatest.funsuite.AnyFunSuiteLike import scodec.bits._ class SwitchboardSpec extends TestKitBaseClass with AnyFunSuiteLike { class TestSwitchboard(nodeParams: NodeParams, remoteNodeId: PublicKey, remotePeer: TestProbe) extends Switchboard(nodeParams, TestProbe().ref, TestProbe().ref, new TestWallet()) { override def createPeer(remoteNodeId2: PublicKey): ActorRef = { assert(remoteNodeId === remoteNodeId2) remotePeer.ref } } test("on initialization create peers") { val nodeParams = Alice.nodeParams val peer = TestProbe() val remoteNodeId = ChannelCodecsSpec.normal.commitments.remoteParams.nodeId // If we have a channel with that remote peer, we will automatically reconnect. nodeParams.db.channels.addOrUpdateChannel(ChannelCodecsSpec.normal) val _ = TestActorRef(new TestSwitchboard(nodeParams, remoteNodeId, peer)) peer.expectMsg(Peer.Init(Set(ChannelCodecsSpec.normal))) } test("when connecting to a new peer forward Peer.Connect to it") { val nodeParams = Alice.nodeParams val (probe, peer) = (TestProbe(), TestProbe()) val remoteNodeId = PublicKey(hex"03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f") val remoteNodeAddress = NodeAddress.fromParts("127.0.0.1", 9735).get nodeParams.db.network.addNode(NodeAnnouncement(ByteVector64.Zeroes, Features.empty, 0, remoteNodeId, Color(0, 0, 0), "alias", remoteNodeAddress :: Nil)) val switchboard = TestActorRef(new TestSwitchboard(nodeParams, remoteNodeId, peer)) probe.send(switchboard, Peer.Connect(remoteNodeId, None)) peer.expectMsg(Peer.Init(Set.empty)) peer.expectMsg(Peer.Connect(remoteNodeId, None)) } }
Example 61
Source File: ProcessActorTest.scala From scastie with Apache License 2.0 | 5 votes |
package com.olegych.scastie.util import java.io.File import java.nio.file.{Files, StandardCopyOption} import akka.actor.{Actor, ActorRef, ActorSystem} import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import com.olegych.scastie.api.ProcessOutput import com.olegych.scastie.api.ProcessOutputType._ import com.olegych.scastie.util.ProcessActor._ import org.scalatest.BeforeAndAfterAll import org.scalatest.funsuite.AnyFunSuiteLike import scala.concurrent.duration._ class ProcessActorTest() extends TestKit(ActorSystem("ProcessActorTest")) with ImplicitSender with AnyFunSuiteLike with BeforeAndAfterAll { test("do it") { (1 to 10).foreach { i => println(s"--- Run $i ---") val command = new File("target", "echo.sh") Files.copy(getClass.getResourceAsStream("/echo.sh"), command.toPath, StandardCopyOption.REPLACE_EXISTING) command.setExecutable(true) val probe = TestProbe() val processActor = TestActorRef(new ProcessReceiver(command.getPath, probe.ref)) processActor ! Input("abcd") processActor ! Input("1234") processActor ! Input("quit") def expected(msg0: String): Unit = { probe.expectMsgPF(4000.milliseconds) { case ProcessOutput(msg1, StdOut, _) if msg0.trim == msg1.trim => true case ProcessOutput(msg1, StdOut, _) => println(s""""$msg1" != "$msg0"""") false } } expected("abcd") expected("1234") } } override def afterAll: Unit = { TestKit.shutdownActorSystem(system) } } class ProcessReceiver(command: String, probe: ActorRef) extends Actor { private val props = ProcessActor.props(command = List("bash", "-c", command.replace("\\", "/")), killOnExit = false) private val process = context.actorOf(props, name = "process-receiver") override def receive: Receive = { case output: ProcessOutput => probe ! output case input: Input => process ! input } }
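Note: the `expected` helper above uses expectMsgPF to match on the shape of ProcessOutput rather than on exact equality. A minimal sketch of expectMsgPF with an explicit deadline, using a hypothetical Output message:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object ExpectMsgPfSketch extends App {
  implicit val system: ActorSystem = ActorSystem("expect-pf-sketch")

  // Hypothetical stand-in for ProcessOutput: only the shape matters for the sketch.
  final case class Output(line: String, exitCode: Int)

  val probe = TestProbe()
  probe.ref ! Output("hello  ", 0)

  // Match on structure instead of exact equality, with an explicit 3-second deadline,
  // and extract a value from the matched message.
  val line = probe.expectMsgPF(3.seconds) { case Output(text, 0) => text.trim }
  assert(line == "hello")

  TestKit.shutdownActorSystem(system)
}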
Example 62
Source File: DummyPubSubSubscriber.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus.testkits import akka.actor.{ActorRef, ActorSystem} import akka.testkit.TestProbe import cool.graph.akkautil.SingleThreadedActorSystem import cool.graph.messagebus.Conversions.Converter import cool.graph.messagebus.{PubSubPublisher, PubSubSubscriber} import cool.graph.messagebus.pubsub.{Message, Only, Subscription, Topic} object DummyPubSubSubscriber { // Initializes a minimal actor system to use def standalone[T]: DummyPubSubSubscriber[T] = { implicit val system = SingleThreadedActorSystem("DummyPubSubSubscriber") DummyPubSubSubscriber[T]() } } case class DummyPubSubSubscriber[T]()(implicit system: ActorSystem) extends PubSubSubscriber[T] { val testProbe = TestProbe() override def subscribe(topic: Topic, onReceive: Message[T] => Unit): Subscription = Subscription(topic.topic, testProbe.ref) override def subscribe(topic: Topic, subscriber: ActorRef): Subscription = Subscription(topic.topic, testProbe.ref) override def unsubscribe(subscription: Subscription): Unit = {} override def subscribe[U](topic: Topic, subscriber: ActorRef, converter: Converter[T, U]) = Subscription(topic.topic, testProbe.ref) } case class DummyPubSubPublisher[T]() extends PubSubPublisher[T] { override def publish(topic: Only, msg: T): Unit = {} }
Example 63
Source File: PubSubRouterAltSpec.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus.pubsub.inmemory import akka.actor.Props import akka.testkit.{TestActorRef, TestKit, TestProbe} import cool.graph.akkautil.SingleThreadedActorSystem import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe} import cool.graph.messagebus.pubsub.PubSubRouterAlt import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike} import scala.concurrent.duration._ class PubSubRouterAltSpec extends TestKit(SingleThreadedActorSystem("pubsub-router-spec")) with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach { override def afterAll = shutdown(verifySystemShutdown = true) "The PubSubRouter implementation" should { "subscribe subscribers correctly and route messages" in { val routerActor = TestActorRef(Props[PubSubRouterAlt]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.router.routees.length shouldEqual 1 routerActor ! Publish(topic, "test") probe.expectMsg("test") probe.expectNoMsg(max = 1.second) routerActor ! Publish("testTopic2", "test2") probe.expectNoMsg(max = 1.second) } "unsubscribe subscribers correctly" in { val routerActor = TestActorRef(Props[PubSubRouterAlt]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.router.routees.length shouldEqual 1 routerActor ! Unsubscribe(topic, probe.ref) router.router.routees.length shouldEqual 0 routerActor ! Publish(topic, "test") probe.expectNoMsg(max = 1.second) } "handle actor terminations" in { val routerActor = TestActorRef(Props[PubSubRouterAlt]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.router.routees.length shouldEqual 1 system.stop(probe.ref) Thread.sleep(50) router.router.routees.length shouldEqual 0 } } }
Example 64
Source File: PubSubRouterSpec.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus.pubsub.inmemory import akka.actor.Props import akka.testkit.{TestActorRef, TestKit, TestProbe} import cool.graph.akkautil.SingleThreadedActorSystem import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe} import cool.graph.messagebus.pubsub.PubSubRouter import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike} import scala.concurrent.duration._ class PubSubRouterSpec extends TestKit(SingleThreadedActorSystem("pubsub-router-spec")) with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach { override def afterAll = shutdown(verifySystemShutdown = true) "The PubSubRouter implementation" should { "subscribe subscribers correctly and route messages" in { val routerActor = TestActorRef(Props[PubSubRouter]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouter] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.subscribers.values.map(_.size).sum shouldEqual 1 routerActor ! Publish(topic, "test") probe.expectMsg("test") probe.expectNoMsg(max = 1.second) routerActor ! Publish("testTopic2", "test2") probe.expectNoMsg(max = 1.second) } "unsubscribe subscribers correctly" in { val routerActor = TestActorRef(Props[PubSubRouter]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouter] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.subscribers.values.map(_.size).sum shouldEqual 1 routerActor ! Unsubscribe(topic, probe.ref) router.subscribers.values.map(_.size).sum shouldEqual 0 routerActor ! Publish(topic, "test") probe.expectNoMsg(max = 1.second) } "handle actor terminations" in { val routerActor = TestActorRef(Props[PubSubRouter]) val router = routerActor.underlyingActor.asInstanceOf[PubSubRouter] val probe = TestProbe() val topic = "testTopic" routerActor ! Subscribe(topic, probe.ref) router.subscribers.values.map(_.size).sum shouldEqual 1 system.stop(probe.ref) Thread.sleep(50) router.subscribers.values.map(_.size).sum shouldEqual 0 } } }
Example 65
Source File: InMemoryAkkaQueueSpec.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus.queue.inmemory import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.testkit.{TestKit, TestProbe} import cool.graph.messagebus.QueuePublisher import cool.graph.messagebus.queue.{BackoffStrategy, ConstantBackoff} import org.scalatest.concurrent.ScalaFutures import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike} import scala.concurrent.Future import scala.concurrent.duration._ class InMemoryAkkaQueueSpec extends TestKit(ActorSystem("queueing-spec")) with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with ScalaFutures { implicit val materializer = ActorMaterializer() def withInMemoryQueue[T](backoff: BackoffStrategy = ConstantBackoff(100.millis))(testFn: (InMemoryAkkaQueue[T], TestProbe) => Unit) = { val inMemoryQueue = InMemoryAkkaQueue[T](backoff) val testProbe = TestProbe() try { testFn(inMemoryQueue, testProbe) } finally { inMemoryQueue.shutdown } } override def afterAll = shutdown(verifySystemShutdown = true) "Queue" should { "call the onMsg function if a valid message arrives" in { withInMemoryQueue[String]() { (queue, probe) => queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) }) queue.publish("test") probe.expectMsg("test") } } "increment the message tries correctly on failure" in { withInMemoryQueue[String]() { (queue, probe) => queue.withConsumer((str: String) => { probe.ref ! str; Future.failed(new Exception("Kabooom")) }) queue.publish("test") // 5 tries, 5 times the same message (can't check for the tries explicitly here) probe.expectMsgAllOf(2.seconds, Vector.fill(5) { "test" }: _*) probe.expectNoMsg(1.second) } } "map a type correctly with a MappingQueueConsumer" in { withInMemoryQueue[String]() { (queue, probe) => val mapped = queue.map[Int]((str: String) => str.toInt) mapped.withConsumer((int: Int) => { probe.ref ! int; Future.successful(()) }) queue.publish("123") probe.expectMsg(123) } } "map a type correctly with a MappingQueuePublisher" in { withInMemoryQueue[String]() { (queue: InMemoryAkkaQueue[String], probe) => val mapped: QueuePublisher[Int] = queue.map[Int]((int: Int) => int.toString) queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) }) mapped.publish(123) probe.expectMsg("123") } } } }
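Note: the retry test above uses expectMsgAllOf to assert that a whole batch of redeliveries arrives within one deadline. A minimal sketch of that call, faking the redeliveries by sending to the probe directly:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object ExpectAllSketch extends App {
  implicit val system: ActorSystem = ActorSystem("expect-all-sketch")

  val probe = TestProbe()
  // The consumer under test would redeliver the same message several times; here we fake it.
  (1 to 5).foreach(_ => probe.ref ! "test")

  probe.expectMsgAllOf(2.seconds, Vector.fill(5)("test"): _*)

  TestKit.shutdownActorSystem(system)
}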
Example 66
Source File: WebsocketSessionSpec.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.subscriptions.websockets import akka.actor.{ActorSystem, Props} import akka.testkit.TestProbe import cool.graph.messagebus.testkits.spechelpers.InMemoryMessageBusTestKits import cool.graph.websockets.WebsocketSession import cool.graph.websockets.protocol.Request import org.scalatest.concurrent.ScalaFutures import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} class WebsocketSessionSpec extends InMemoryMessageBusTestKits(ActorSystem("websocket-session-spec")) with WordSpecLike with Matchers with BeforeAndAfterAll with ScalaFutures { override def afterAll = shutdown() "The WebsocketSession" should { "send a message with the body STOP to the requests queue AND a Poison Pill to the outActor when it is stopped" in { withQueueTestKit[Request] { testKit => val projectId = "projectId" val sessionId = "sessionId" val outgoing = TestProbe().ref val probe = TestProbe() probe.watch(outgoing) val session = system.actorOf(Props(WebsocketSession(projectId, sessionId, outgoing, testKit, bugsnag = null))) system.stop(session) probe.expectTerminated(outgoing) testKit.expectPublishedMsg(Request(sessionId, projectId, "STOP")) } } } }
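Note: the websocket test watches the outgoing actor and asserts its termination with expectTerminated. A minimal sketch of that death-watch pattern, with a hypothetical idle actor:

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object DeathWatchSketch extends App {
  implicit val system: ActorSystem = ActorSystem("death-watch-sketch")

  class Idle extends Actor { override def receive: Receive = Actor.emptyBehavior } // hypothetical

  val probe = TestProbe()
  val idle  = system.actorOf(Props(new Idle), "idle")

  probe.watch(idle)            // death-watch from the probe
  system.stop(idle)
  probe.expectTerminated(idle) // fails if the actor does not stop in time

  TestKit.shutdownActorSystem(system)
}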
Example 67
Source File: CassandraJournalSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.journal import akka.actor.Actor import akka.persistence.{ AtomicWrite, PersistentRepr } import akka.persistence.JournalProtocol.{ ReplayMessages, WriteMessageFailure, WriteMessages, WriteMessagesFailed } import scala.concurrent.duration._ import akka.persistence.journal._ import akka.persistence.cassandra.CassandraLifecycle import akka.stream.alpakka.cassandra.CassandraMetricsRegistry import akka.testkit.TestProbe import com.typesafe.config.ConfigFactory object CassandraJournalConfiguration { val config = ConfigFactory.parseString(s""" akka.persistence.cassandra.journal.keyspace=CassandraJournalSpec akka.persistence.cassandra.snapshot.keyspace=CassandraJournalSpecSnapshot datastax-java-driver { basic.session-name = CassandraJournalSpec advanced.metrics { session.enabled = [ "bytes-sent", "cql-requests"] } } """).withFallback(CassandraLifecycle.config) lazy val perfConfig = ConfigFactory.parseString(""" akka.actor.serialize-messages=off akka.persistence.cassandra.journal.keyspace=CassandraJournalPerfSpec akka.persistence.cassandra.snapshot.keyspace=CassandraJournalPerfSpecSnapshot """).withFallback(config) } // Can't use CassandraSpec so needs to do its own clean up class CassandraJournalSpec extends JournalSpec(CassandraJournalConfiguration.config) with CassandraLifecycle { override def systemName: String = "CassandraJournalSpec" override def supportsRejectingNonSerializableObjects = false "A Cassandra Journal" must { "insert Cassandra metrics to Cassandra Metrics Registry" in { val registry = CassandraMetricsRegistry(system).getRegistry val metricsNames = registry.getNames.toArray.toSet // metrics category is the configPath of the plugin + the session-name metricsNames should contain("akka.persistence.cassandra.CassandraJournalSpec.bytes-sent") metricsNames should contain("akka.persistence.cassandra.CassandraJournalSpec.cql-requests") } "be able to replay messages after serialization failure" in { // there is no chance that a journal could create a data representation for type of event val notSerializableEvent = new Object { override def toString = "not serializable" } val msg = PersistentRepr( payload = notSerializableEvent, sequenceNr = 6, persistenceId = pid, sender = Actor.noSender, writerUuid = writerUuid) val probe = TestProbe() journal ! WriteMessages(List(AtomicWrite(msg)), probe.ref, actorInstanceId) val err = probe.expectMsgPF() { case fail: WriteMessagesFailed => fail.cause } probe.expectMsg(WriteMessageFailure(msg, err, actorInstanceId)) journal ! ReplayMessages(5, 5, 1, pid, probe.ref) probe.expectMsg(replayedMessage(5)) } } } class CassandraJournalPerfSpec extends JournalPerfSpec(CassandraJournalConfiguration.perfConfig) with CassandraLifecycle { override def systemName: String = "CassandraJournalPerfSpec" override def awaitDurationMillis: Long = 20.seconds.toMillis override def supportsRejectingNonSerializableObjects = false }
Example 68
Source File: CassandraSerializationSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.journal import akka.actor.{ ExtendedActorSystem, Props } import akka.persistence.RecoveryCompleted import akka.persistence.cassandra.EventWithMetaData.UnknownMetaData import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, EventWithMetaData, Persister } import akka.serialization.BaseSerializer import akka.testkit.TestProbe import com.typesafe.config.ConfigFactory object CassandraSerializationSpec { val config = ConfigFactory.parseString(s""" |akka.actor.serialize-messages=false |akka.actor.serializers.crap="akka.persistence.cassandra.journal.BrokenDeSerialization" |akka.actor.serialization-identifiers."akka.persistence.cassandra.journal.BrokenDeSerialization" = 666 |akka.actor.serialization-bindings { | "akka.persistence.cassandra.Persister$$CrapEvent" = crap |} |akka.persistence.journal.max-deletion-batch-size = 3 |akka.persistence.publish-confirmations = on |akka.persistence.publish-plugin-commands = on |akka.persistence.cassandra.journal.target-partition-size = 5 |akka.persistence.cassandra.max-result-size = 3 |akka.persistence.cassandra.journal.keyspace=CassandraIntegrationSpec |akka.persistence.cassandra.snapshot.keyspace=CassandraIntegrationSpecSnapshot | """.stripMargin).withFallback(CassandraLifecycle.config) } class BrokenDeSerialization(override val system: ExtendedActorSystem) extends BaseSerializer { override def includeManifest: Boolean = false override def toBinary(o: AnyRef): Array[Byte] = // I was serious with the class name Array.emptyByteArray override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = throw new RuntimeException("I can't deserialize a single thing") } class CassandraSerializationSpec extends CassandraSpec(CassandraSerializationSpec.config) { import akka.persistence.cassandra.Persister._ "A Cassandra journal" must { "Fail recovery when deserialization fails" in { val probe = TestProbe() val incarnation1 = system.actorOf(Props(new Persister("id1", probe.ref))) probe.expectMsgType[RecoveryCompleted] incarnation1 ! CrapEvent(1) probe.expectMsg(CrapEvent(1)) probe.watch(incarnation1) system.stop(incarnation1) probe.expectTerminated(incarnation1) val incarnation2 = system.actorOf(Props(new Persister("id1", probe.ref))) probe.expectMsgType[RuntimeException].getMessage shouldBe "I can't deserialize a single thing" incarnation2 } "be able to store meta data" in { val probe = TestProbe() val incarnation1 = system.actorOf(Props(new Persister("id2", probe.ref))) probe.expectMsgType[RecoveryCompleted] val eventWithMeta = EventWithMetaData("TheActualEvent", "TheAdditionalMetaData") incarnation1 ! eventWithMeta probe.expectMsg(eventWithMeta) probe.watch(incarnation1) system.stop(incarnation1) probe.expectTerminated(incarnation1) system.actorOf(Props(new Persister("id2", probe.ref))) probe.expectMsg(eventWithMeta) // from replay } "not fail replay due to deserialization problem of meta data" in { val probe = TestProbe() val incarnation1 = system.actorOf(Props(new Persister("id3", probe.ref))) probe.expectMsgType[RecoveryCompleted] val eventWithMeta = EventWithMetaData("TheActualEvent", CrapEvent(13)) incarnation1 ! eventWithMeta probe.expectMsg(eventWithMeta) probe.watch(incarnation1) system.stop(incarnation1) probe.expectTerminated(incarnation1) system.actorOf(Props(new Persister("id3", probe.ref))) probe.expectMsg(EventWithMetaData("TheActualEvent", UnknownMetaData(666, ""))) // from replay, no meta } } }
Example 69
Source File: PubSubThrottlerSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.journal import scala.concurrent.duration.DurationInt import org.scalatest.wordspec.AnyWordSpecLike import org.scalatest.matchers.should.Matchers import akka.actor.{ ActorSystem, Props } import akka.testkit.{ TestKit, TestProbe } class PubSubThrottlerSpec extends TestKit(ActorSystem("CassandraConfigCheckerSpec")) with AnyWordSpecLike with Matchers { "PubSubThrottler" should { "eat up duplicate messages that arrive within the same [interval] window" in { val delegate = TestProbe() val throttler = system.actorOf(Props(new PubSubThrottler(delegate.ref, 5.seconds))) throttler ! "hello" throttler ! "hello" throttler ! "hello" delegate.within(2.seconds) { delegate.expectMsg("hello") } // Only first "hello" makes it through during the first interval. delegate.expectNoMessage(2.seconds) // Eventually, the interval will roll over and forward ONE further hello. delegate.expectMsg(10.seconds, "hello") delegate.expectNoMessage(2.seconds) throttler ! "hello" delegate.within(2.seconds) { delegate.expectMsg("hello") } } "allow differing messages to pass through within the same [interval] window" in { val delegate = TestProbe() val throttler = system.actorOf(Props(new PubSubThrottler(delegate.ref, 5.seconds))) throttler ! "hello" throttler ! "world" delegate.within(2.seconds) { delegate.expectMsg("hello") delegate.expectMsg("world") } } } }
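Note: the throttler spec wraps its expectations in delegate.within(...) so they must complete inside a time window, and uses expectNoMessage to assert silence. A minimal sketch of both calls on a bare probe:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object TimingWindowSketch extends App {
  implicit val system: ActorSystem = ActorSystem("timing-window-sketch")

  val probe = TestProbe()
  probe.ref ! "hello"

  probe.within(2.seconds) {         // everything in this block must finish within 2 seconds
    probe.expectMsg("hello")
  }
  probe.expectNoMessage(200.millis) // and nothing else may arrive in this window

  TestKit.shutdownActorSystem(system)
}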
Example 70
Source File: ClusterShardingQuickTerminationSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.sharding import akka.actor.{ ActorLogging, ActorRef, Props, ReceiveTimeout } import akka.cluster.{ Cluster, MemberStatus } import akka.cluster.sharding.{ ClusterSharding, ClusterShardingSettings, ShardRegion } import akka.persistence.PersistentActor import akka.persistence.cassandra.CassandraSpec import akka.testkit.TestProbe import scala.concurrent.duration._ object ClusterShardingQuickTerminationSpec { case object Increment case object Decrement final case class Get(counterId: Long) final case class EntityEnvelope(id: Long, payload: Any) case object Ack case object Stop final case class CounterChanged(delta: Int) class Counter extends PersistentActor with ActorLogging { import ShardRegion.Passivate context.setReceiveTimeout(5.seconds) // self.path.name is the entity identifier (utf-8 URL-encoded) override def persistenceId: String = "Counter-" + self.path.name var count = 0 def updateState(event: CounterChanged): Unit = count += event.delta override def receiveRecover: Receive = { case evt: CounterChanged => updateState(evt) case other => log.debug("Other: {}", other) } override def receiveCommand: Receive = { case Increment => persist(CounterChanged(+1))(updateState) case Decrement => persist(CounterChanged(-1))(updateState) case Get(_) => sender() ! count case ReceiveTimeout => context.parent ! Passivate(stopMessage = Stop) case Stop => sender() ! Ack context.stop(self) } } val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) case msg @ Get(id) => (id.toString, msg) } val numberOfShards = 100 val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(id, _) => (id % numberOfShards).toString case Get(id) => (id % numberOfShards).toString } } class ClusterShardingQuickTerminationSpec extends CassandraSpec(""" akka.actor.provider = cluster """.stripMargin) { import ClusterShardingQuickTerminationSpec._ "Cassandra Plugin with Cluster Sharding" must { "clear state if persistent actor shuts down" in { Cluster(system).join(Cluster(system).selfMember.address) awaitAssert { Cluster(system).selfMember.status shouldEqual MemberStatus.Up } ClusterSharding(system).start( typeName = "tagging", entityProps = Props[Counter], settings = ClusterShardingSettings(system), extractEntityId = extractEntityId, extractShardId = extractShardId) (0 to 100).foreach { i => val counterRegion: ActorRef = ClusterSharding(system).shardRegion("tagging") awaitAssert { val sender = TestProbe() counterRegion.tell(Get(123), sender.ref) sender.expectMsg(500.millis, i) } counterRegion ! EntityEnvelope(123, Increment) counterRegion ! Get(123) expectMsg(i + 1) counterRegion ! EntityEnvelope(123, Stop) expectMsg(Ack) } } } }
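Note: the sharding test calls counterRegion.tell(Get(123), sender.ref) so the reply is routed to a probe, and then expectMsg with a short timeout. A minimal sketch of that explicit-sender pattern, with a hypothetical Counter actor:

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object ExplicitSenderSketch extends App {
  implicit val system: ActorSystem = ActorSystem("explicit-sender-sketch")

  // Hypothetical counter, only here to have something that replies.
  class Counter extends Actor {
    private var count = 0
    override def receive: Receive = {
      case "inc" => count += 1
      case "get" => sender() ! count
    }
  }

  val counter = system.actorOf(Props(new Counter), "counter")
  val probe   = TestProbe()

  counter ! "inc"                // fire-and-forget, no sender needed
  counter.tell("get", probe.ref) // route the reply to the probe
  probe.expectMsg(500.millis, 1) // assert the reply within a tight deadline

  TestKit.shutdownActorSystem(system)
}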
Example 71
Source File: DeleteTagViewForPersistenceIdSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.reconciler import akka.persistence.cassandra.CassandraSpec import akka.persistence.cassandra.TestTaggingActor import akka.testkit.TestProbe import akka.persistence.RecoveryCompleted class DeleteTagViewForPersistenceIdSpec extends CassandraSpec { "Deleting " should { val tag = "tag1" val pid1 = "p1" val pid2 = "p2" "only delete for the provided persistence id" in { writeEventsFor(tag, pid1, 3) writeEventsFor(tag, pid2, 3) eventsByTag(tag) .request(10) .expectNextN(List("p1 event-1", "p1 event-2", "p1 event-3", "p2 event-1", "p2 event-2", "p2 event-3")) .expectNoMessage() .cancel() val reconciliation = new Reconciliation(system) reconciliation.deleteTagViewForPersistenceIds(Set(pid2), tag).futureValue eventsByTag(tag).request(5).expectNextN(List("p1 event-1", "p1 event-2", "p1 event-3")).expectNoMessage().cancel() } "recover the tagged events if persistence id is started again" in { val probe = TestProbe() system.actorOf(TestTaggingActor.props(pid2, Set(tag), Some(probe.ref))) probe.expectMsg(RecoveryCompleted) eventsByTag(tag) .request(10) .expectNextN(List("p1 event-1", "p1 event-2", "p1 event-3", "p2 event-1", "p2 event-2", "p2 event-3")) .expectNoMessage() .cancel() } } }
Example 72
Source File: HelloWorldActorSpec.scala From Scala-Reactive-Programming with MIT License | 5 votes |
package com.packt.publishing.reactive.hello.actor.v2 import akka.actor.{ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import com.packt.publishing.reactive.hello.model.HelloWorld import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.concurrent.duration._ class HelloWorldActorSpec(actorSystem: ActorSystem) extends TestKit(actorSystem) with Matchers with WordSpecLike with BeforeAndAfterAll { def this() = this(ActorSystem("AkkaHelloWorld")) "HelloWorld Actor" should { "pass on a HelloWorld message" in { val testProbe = TestProbe() val helloWorldActor = system.actorOf(Props(new HelloWorldActor(testProbe.ref)), "HelloWorldActor") helloWorldActor ! HelloWorld testProbe.expectMsg(500 millis, HelloWorld) } } override def afterAll: Unit = { shutdown(system) } }
Example 73
Source File: HelloWorldActorSpec.scala From Scala-Reactive-Programming with MIT License | 5 votes |
package com.packt.publishing.reactive.hello.actor import akka.actor.{ActorSystem, Props} import akka.testkit.{EventFilter, TestKit, TestProbe} import com.packt.publishing.reactive.hello.actor.HelloWorldActorSpec._ import com.packt.publishing.reactive.hello.model.HelloWorld import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} class HelloWorldActorSpec extends TestKit(system) with Matchers with WordSpecLike with BeforeAndAfterAll { "HelloWorld Actor" should { "pass on a HelloWorld message" in { val testProbe = TestProbe() val helloWorldActor = system.actorOf(Props[HelloWorldActor], "HelloWorldActor") EventFilter.info(message = "Hello World", occurrences = 1) .intercept(helloWorldActor ! HelloWorld) } } override def afterAll: Unit = { shutdown(system) } } object HelloWorldActorSpec { val system = { val loggerConfig = ConfigFactory.parseString("akka.loggers = [akka.testkit.TestEventListener]") ActorSystem("AkkaHelloWorld", loggerConfig) } }
Example 74
Source File: ProcessManagerSupport.scala From akka-cqrs with Apache License 2.0 | 5 votes |
package com.productfoundry.akka.cqrs.process import java.util.UUID import akka.actor._ import akka.testkit.TestProbe import com.productfoundry.akka.cqrs._ import com.productfoundry.akka.cqrs.publish.EventPublication import scala.concurrent.Await import scala.reflect.ClassTag trait ProcessManagerFixture { val commandReceiver = TestProbe() val aggregateRegistry = new AggregateRegistry { override def apply[A <: Aggregate : ClassTag]: ActorRef = commandReceiver.ref } private var aggregateRevisionByName = Map.empty[String, AggregateRevision] private def aggregateRevision(name: String): AggregateRevision = { val revision = aggregateRevisionByName.getOrElse(name, AggregateRevision.Initial) aggregateRevisionByName = aggregateRevisionByName.updated(name, revision.next) revision } def createEventRecord(event: AggregateEvent, nameOption: Option[String] = None, headersOption: Option[CommitHeaders] = None): AggregateEventRecord = { val aggregateName = nameOption.getOrElse(event.getClass.getSimpleName) AggregateEventRecord( AggregateTag(aggregateName, event.id.entityId, aggregateRevision(aggregateName)), headersOption, event ) } def register[P <: ProcessManager : ProcessManagerCompanion : ClassTag](factory: ProcessManagerFactory[P]): Unit = { Await.result(processManagerRegistry.register(factory), executionTimeout.duration) } def publishEvents(events: AggregateEvent*): Unit = { publishEventRecords(events.map(event => createEventRecord(event)): _*) } def publishEventRecords(eventRecords: AggregateEventRecord*): Unit = { eventRecords.foreach { eventRecord => val publication = EventPublication(eventRecord) processManagerRegistry.actor ! publication } } } }
Example 75
Source File: LocalEntityContextSpec.scala From akka-cqrs with Apache License 2.0 | 5 votes |
package com.productfoundry.akka.cqrs import akka.actor._ import akka.testkit.TestProbe import com.productfoundry.akka.{Passivate, PassivationConfig} import com.productfoundry.akka.cqrs.DummyAggregate._ import com.productfoundry.support.AggregateTestSupport class LocalEntityContextSpec extends AggregateTestSupport { implicit object DummyAggregateFactory extends AggregateFactory[DummyAggregate] { override def props(config: PassivationConfig): Props = { Props(classOf[DummyAggregate], config) } } implicit val supervisorFactory = entityContext.entitySupervisorFactory[DummyAggregate] val supervisor: ActorRef = EntitySupervisor.forType[DummyAggregate] "Aggregate passivation" must { "succeed" in new AggregateFixture { val probe = TestProbe() supervisor ! Create(testId) expectMsgType[AggregateStatus.Success] probe.watch(lastSender) lastSender ! Passivate probe.expectMsgType[Terminated] Thread.sleep(1000) supervisor ! Count(testId) expectMsgType[AggregateStatus.Success] } } trait AggregateFixture { val testId = DummyId.generate() } }
Example 76
Source File: LocalEventPublisherSpec.scala From akka-cqrs with Apache License 2.0 | 5 votes |
package com.productfoundry.akka.cqrs.publish import akka.actor.{ActorRef, Props} import akka.testkit.TestProbe import com.productfoundry.akka.PassivationConfig import com.productfoundry.akka.cqrs.DummyAggregate._ import com.productfoundry.akka.cqrs._ import com.productfoundry.support.AggregateTestSupport class LocalEventPublisherSpec extends AggregateTestSupport { implicit object TestAggregateFactory extends AggregateFactory[DummyAggregate] { override def props(config: PassivationConfig): Props = { Props(new DummyAggregate(config) with LocalEventPublisher) } } implicit val supervisorFactory = entityContext.entitySupervisorFactory[DummyAggregate] val supervisor: ActorRef = EntitySupervisor.forType[DummyAggregate] "Local event publisher" must { "publish events" in new fixture { eventRecord.event should be(Created(testId)) eventRecord.tag.revision should be(AggregateRevision(1L)) } "not have confirmation" in new fixture { eventPublication.confirmationOption should be('empty) } "not request confirmation" in new fixture { eventPublication.confirmIfRequested() expectNoMsg() } trait fixture extends { val publishedEventProbe = TestProbe() system.eventStream.subscribe(publishedEventProbe.ref, classOf[Any]) val testId = DummyId.generate() supervisor ! Create(testId) expectMsgType[AggregateStatus.Success] val eventPublication = publishedEventProbe.expectMsgType[EventPublication] val eventRecord = eventPublication.eventRecord } } }
Example 77
Source File: StaticDomainWatcherSpec.scala From shield with MIT License | 5 votes |
package shield.actors.config.domain import akka.actor.{ActorSystem, Props} import akka.testkit.{TestActorRef, TestKit, TestProbe} import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpecLike} import shield.actors.ShieldActorMsgs import shield.config.Settings class StaticDomainWatcherSpec extends TestKit(ActorSystem("testSystem")) with WordSpecLike with MustMatchers with BeforeAndAfterAll { val settings = Settings(system) "StaticDomainWatcher" should { "notify shield about domains found" in { val parent = TestProbe() TestActorRef(Props(new StaticDomainWatcher()), parent.ref, "static-domain-watcher") val msg: ShieldActorMsgs.DomainsUpdated = parent.expectMsgClass(classOf[ShieldActorMsgs.DomainsUpdated]) msg.domains.size must equal (settings.config.getConfigList("shield.domains").size) } } }
Example 78
Source File: RareBooksSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.actor.ActorDSL._ import akka.testkit.{ EventFilter, TestProbe } class RareBooksSpec extends BaseAkkaSpec("as-3001-rare-books-spec") { import RareBooksProtocol._ "Creating RareBooks" should { val rareBooks = system.actorOf(RareBooks.props, "rare-books") "create child actor librarian" in { TestProbe().expectActor("/user/rare-books/librarian") } "when closed with no requests processed, log `0 requests processed.` at info" in { EventFilter.info(pattern = ".*0 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } "when opened, log `Time to open up!` at info" in { EventFilter.info(pattern = ".*Time to open up!.*", occurrences = 1) intercept { rareBooks ! RareBooks.Open } } } "Sending FindBookByTag" should { val librarian = TestProbe() val rareBooks = actor(new RareBooks() { override def createLibrarian() = librarian.ref }) "forward to librarian" in { val msg = FindBookByTopic(Set(Greece)) rareBooks ! msg librarian.expectMsg(msg) } "when closed with one request processed, log `1 requests processed.` at info" in { EventFilter.info(pattern = ".*1 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } } }
Example 79
Source File: RareBooksAppSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.TestProbe class RareBooksAppSpec extends BaseAkkaSpec("as-3001-rare-books-app-spec") { "Creating RareBooksApp" should { new RareBooksApp(system) { createCustomer(3, 100, 5) } "create a top-level actor named 'rare-books'" in { TestProbe().expectActor("/user/rare-books") } "create n customers as top-level actors when calling createCustomer" in { TestProbe().expectActor("/user/$a") TestProbe().expectActor("/user/$b") TestProbe().expectActor("/user/$c") } } }
Example 80
Source File: LibrarianSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.{ EventFilter, TestProbe } import scala.concurrent.duration.{ Duration, MILLISECONDS => Millis } class LibrarianSpec extends BaseAkkaSpec("as-3001-librarian-spec") { import RareBooksProtocol._ private val findBookDuration = Duration(system.settings.config.getDuration("rare-books.librarian.find-book-duration", Millis), Millis) "Receiving FindBookByTitle" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("The Epic of Gilgamesh") sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Swiss Family Robinson.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("Swiss Family Robinson") } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("Swiss Family Robinson") sender.expectMsgType[BookNotFound] } } "Receiving FindBookByTopic" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Tradition)) sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Set\\(Unknown\\).*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Unknown)) } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Unknown)) sender.expectMsgType[BookNotFound] } } "Receiving Complain" should { "log Credit issued at info" in { EventFilter.info(pattern = ".*Credit issued to customer.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! Complain() } } "send Credit" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! Complain() sender.expectMsgType[Credit] } } }
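Note: the librarian specs put a probe's ref into implicit scope (`implicit val _ = sender.ref`) so that plain `!` sends carry the probe as sender and replies can be asserted on it. A minimal sketch of that implicit-sender idiom, with a hypothetical Echo actor:

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object ImplicitSenderSketch extends App {
  implicit val system: ActorSystem = ActorSystem("implicit-sender-sketch")

  class Echo extends Actor { override def receive: Receive = { case m => sender() ! m } } // hypothetical

  val probe = TestProbe()
  implicit val senderRef: ActorRef = probe.ref // plays the role of `implicit val _ = sender.ref`

  val echo = system.actorOf(Props(new Echo), "echo")
  echo ! "hello"           // sent with probe.ref as the implicit sender
  probe.expectMsg("hello") // the echo's reply lands in the probe

  TestKit.shutdownActorSystem(system)
}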
Example 81
Source File: RareBooksSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.actor.ActorDSL._ import akka.routing.{RoundRobinRoutingLogic, ActorRefRoutee, Router} import akka.testkit.{ EventFilter, TestProbe } class RareBooksSpec extends BaseAkkaSpec("as-3004-rare-books-spec") { import RareBooksProtocol._ val nbrOfLibrarians = system.settings.config getInt "rare-books.nbr-of-librarians" "Creating RareBooks" should { val rareBooks = system.actorOf(RareBooks.props, "rare-books") "create nbrOfLibrarians" in { for(i <- 0 to nbrOfLibrarians - 1) { TestProbe().expectActor(s"/user/rare-books/librarian-$i") } } "when closed with no requests processed, log `0 requests processed.` at info" in { EventFilter.info(pattern = ".*0 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } "when opened, log `Time to open up!` at info" in { EventFilter.info(pattern = ".*Time to open up!.*", occurrences = 1) intercept { rareBooks ! RareBooks.Open } } } "Sending FindBookByTag" should { val librarian = TestProbe() val rareBooks = actor(new RareBooks() { override def createLibrarian(): Router = { val routees: Vector[ActorRefRoutee] = Vector.fill(1) { val r = librarian.ref ActorRefRoutee(r) } Router(RoundRobinRoutingLogic(), routees) } }) "forward to librarian" in { val msg = FindBookByTopic(Set(Greece)) rareBooks ! msg librarian.expectMsg(msg) } "when closed with one request processed, log `1 requests processed.` at info" in { EventFilter.info(pattern = ".*1 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } } "On failure of Librarian" should { system.actorOf(RareBooks.props, "rare-books-faulty-librarian") val customer = TestProbe() val librarian = TestProbe().expectActor("/user/rare-books-faulty-librarian/librarian-1") implicit val _ = customer.ref "send Credit to customer" in { librarian ! Complain() customer.expectMsgType[Credit] // from librarian librarian ! Complain() customer.expectMsgType[Credit] // from librarian librarian ! Complain() customer.expectMsgType[Credit] // from rareBooks } "should restart librarian" in { librarian ! Complain() customer.expectMsgType[Credit] // from librarian } "should log `RareBooks sent customer...` credit at info" in { EventFilter.info(pattern = ".*RareBooks sent customer.*", occurrences = 1) intercept { librarian ! Complain() librarian ! Complain() } } } }
Example 82
Source File: RareBooksAppSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.TestProbe class RareBooksAppSpec extends BaseAkkaSpec("as-3004-rare-books-app-spec") { "Creating RareBooksApp" should { new RareBooksApp(system) { createCustomer(3, 100, 5) } "create a top-level actor named 'rare-books'" in { TestProbe().expectActor("/user/rare-books") } "create n customers as top-level actors when calling createCustomer" in { TestProbe().expectActor("/user/$a") TestProbe().expectActor("/user/$b") TestProbe().expectActor("/user/$c") } } }
Example 83
Source File: LibrarianSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.{ EventFilter, TestProbe } import scala.concurrent.duration.{ Duration, MILLISECONDS => Millis } class LibrarianSpec extends BaseAkkaSpec("as-3004-librarian-spec") { import RareBooksProtocol._ private val findBookDuration = Duration(system.settings.config.getDuration("rare-books.librarian.find-book-duration", Millis), Millis) private val maxComplainCount: Int = system.settings.config getInt "rare-books.librarian.max-complain-count" "Receiving FindBookByTitle" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("The Epic of Gilgamesh") sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Swiss Family Robinson.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") sender.expectMsgType[BookNotFound] } } "Receiving FindBookByTopic" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Tradition)) sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Set\\(Unknown\\).*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) sender.expectMsgType[BookNotFound] } } "Receiving Complain" should { "log Credit issued at info" in { EventFilter.info(pattern = ".*Credit issued to customer.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() } } "send Credit" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() sender.expectMsgType[Credit] } "result in a ComplainException if maxComplainCount reached" in { val librarian = system.actorOf(Librarian.props(findBookDuration, 0)) EventFilter[Librarian.ComplainException](occurrences = 1) intercept { librarian ! Complain() } } } }
Example 84
Source File: RareBooksSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.actor.ActorDSL._ import akka.routing.{RoundRobinRoutingLogic, ActorRefRoutee, Router} import akka.testkit.{ EventFilter, TestProbe } class RareBooksSpec extends BaseAkkaSpec("as-5001-rare-books-spec") { import LibraryProtocol._ val nbrOfLibrarians = system.settings.config getInt "rare-books.nbr-of-librarians" "Creating RareBooks" should { val rareBooks = system.actorOf(RareBooks.props, "rare-books") "create nbrOfLibrarians" in { for(i <- 0 until nbrOfLibrarians) { TestProbe().expectActor(s"/user/rare-books/librarian-$i") } } "when closed with no requests processed, log `0 requests processed.` at info" in { EventFilter.info(pattern = ".*0 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } "when opened, log `Time to open up!` at info" in { EventFilter.info(pattern = ".*Time to open up!.*", occurrences = 1) intercept { rareBooks ! RareBooks.Open } } } "Sending FindBookByTag" should { val librarian = TestProbe() val rareBooks = actor(new RareBooks() { override def createLibrarian(): Router = { val routees: Vector[ActorRefRoutee] = Vector.fill(1) { val r = librarian.ref ActorRefRoutee(r) } Router(RoundRobinRoutingLogic(), routees) } }) "forward to librarian" in { val msg = FindBookByTopic(Set(Greece)) rareBooks ! msg librarian.expectMsg(msg) } "when closed with one request processed, log `1 requests processed.` at info" in { EventFilter.info(pattern = ".*1 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } } "On failure of Librarian" should { system.actorOf(RareBooks.props, "rare-books-faulty-librarian") val customer = TestProbe() val librarian = TestProbe().expectActor("/user/rare-books-faulty-librarian/librarian-1") implicit val _ = customer.ref "send Credit to customer" in { librarian ! Complain() customer.expectMsgType[Credit] // from librarian librarian ! Complain() customer.expectMsgType[Credit] // from librarian librarian ! Complain() customer.expectMsgType[Credit] // from rareBooks } "should restart librarian" in { librarian ! Complain() customer.expectMsgType[Credit] // from librarian } "should log `RareBooks sent customer...` credit at info" in { EventFilter.info(pattern = ".*RareBooks sent customer.*", occurrences = 1) intercept { librarian ! Complain() librarian ! Complain() } } } }
Example 85
Source File: LibrarianSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.{ EventFilter, TestProbe } import scala.concurrent.duration.{ Duration, MILLISECONDS => Millis } class LibrarianSpec extends BaseAkkaSpec("as-5001-librarian-spec") { import LibraryProtocol._ private val findBookDuration = Duration(system.settings.config.getDuration("rare-books.librarian.find-book-duration", Millis), Millis) private val maxComplainCount: Int = system.settings.config getInt "rare-books.librarian.max-complain-count" "Receiving FindBookByTitle" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("The Epic of Gilgamesh") sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Swiss Family Robinson.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") sender.expectMsgType[BookNotFound] } } "Receiving FindBookByTopic" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Tradition)) sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Set\\(Unknown\\).*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) sender.expectMsgType[BookNotFound] } } "Receiving Complain" should { "log Credit issued at info" in { EventFilter.info(pattern = ".*Credit issued to customer.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() } } "send Credit" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() sender.expectMsgType[Credit] } "result in a ComplainException if maxComplainCount reached" in { val librarian = system.actorOf(Librarian.props(findBookDuration, 0)) EventFilter[Librarian.ComplainException](occurrences = 1) intercept { librarian ! Complain() } } } }
Example 86
Source File: RareBooksSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.actor.ActorDSL._ import akka.routing.{RoundRobinRoutingLogic, ActorRefRoutee, Router} import akka.testkit.{ EventFilter, TestProbe } class RareBooksSpec extends BaseAkkaSpec("as-3002-rare-books-spec") { import RareBooksProtocol._ val nbrOfLibrarians = system.settings.config getInt "rare-books.nbr-of-librarians" "Creating RareBooks" should { val rareBooks = system.actorOf(RareBooks.props, "rare-books") "create nbrOfLibrarians" in { for(i <- 0 to nbrOfLibrarians - 1) { TestProbe().expectActor(s"/user/rare-books/librarian-$i") } } "when closed with no requests processed, log `0 requests processed.` at info" in { EventFilter.info(pattern = ".*0 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } "when opened, log `Time to open up!` at info" in { EventFilter.info(pattern = ".*Time to open up!.*", occurrences = 1) intercept { rareBooks ! RareBooks.Open } } } "Sending FindBookByTag" should { val librarian = TestProbe() val rareBooks = actor(new RareBooks() { override def createLibrarian(): Router = { val routees: Vector[ActorRefRoutee] = Vector.fill(1) { val r = librarian.ref ActorRefRoutee(r) } Router(RoundRobinRoutingLogic(), routees) } }) "forward to librarian" in { val msg = FindBookByTopic(Set(Greece)) rareBooks ! msg librarian.expectMsg(msg) } "when closed with one request processed, log `1 requests processed.` at info" in { EventFilter.info(pattern = ".*1 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } } }
Example 87
Source File: RareBooksAppSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.TestProbe class RareBooksAppSpec extends BaseAkkaSpec("as-3002-rare-books-app-spec") { "Creating RareBooksApp" should { new RareBooksApp(system) { createCustomer(3, 100, 5) } "create a top-level actor named 'rare-books'" in { TestProbe().expectActor("/user/rare-books") } "create n customers as top-level actors when calling createCustomer" in { TestProbe().expectActor("/user/$a") TestProbe().expectActor("/user/$b") TestProbe().expectActor("/user/$c") } } }
Example 88
Source File: LibrarianSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.{ EventFilter, TestProbe } import scala.concurrent.duration.{ Duration, MILLISECONDS => Millis } class LibrarianSpec extends BaseAkkaSpec("as-3002-librarian-spec") { import RareBooksProtocol._ private val findBookDuration = Duration(system.settings.config.getDuration("rare-books.librarian.find-book-duration", Millis), Millis) "Receiving FindBookByTitle" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("The Epic of Gilgamesh") sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Swiss Family Robinson.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("Swiss Family Robinson") } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTitle("Swiss Family Robinson") sender.expectMsgType[BookNotFound] } } "Receiving FindBookByTopic" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Tradition)) sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Set\\(Unknown\\).*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Unknown)) } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! FindBookByTopic(Set(Unknown)) sender.expectMsgType[BookNotFound] } } "Receiving Complain" should { "log Credit issued at info" in { EventFilter.info(pattern = ".*Credit issued to customer.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! Complain() } } "send Credit" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration)) librarian ! Complain() sender.expectMsgType[Credit] } } }
Example 89
Source File: RareBooksSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.actor.ActorDSL._ import akka.routing.{RoundRobinRoutingLogic, ActorRefRoutee, Router} import akka.testkit.{ EventFilter, TestProbe } class RareBooksSpec extends BaseAkkaSpec("as-3003-rare-books-spec") { import RareBooksProtocol._ val nbrOfLibrarians = system.settings.config getInt "rare-books.nbr-of-librarians" "Creating RareBooks" should { val rareBooks = system.actorOf(RareBooks.props, "rare-books") "create nbrOfLibrarians" in { for(i <- 0 to nbrOfLibrarians - 1) { TestProbe().expectActor(s"/user/rare-books/librarian-$i") } } "when closed with no requests processed, log `0 requests processed.` at info" in { EventFilter.info(pattern = ".*0 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } "when opened, log `Time to open up!` at info" in { EventFilter.info(pattern = ".*Time to open up!.*", occurrences = 1) intercept { rareBooks ! RareBooks.Open } } } "Sending FindBookByTag" should { val librarian = TestProbe() val rareBooks = actor(new RareBooks() { override def createLibrarian(): Router = { val routees: Vector[ActorRefRoutee] = Vector.fill(1) { val r = librarian.ref ActorRefRoutee(r) } Router(RoundRobinRoutingLogic(), routees) } }) "forward to librarian" in { val msg = FindBookByTopic(Set(Greece)) rareBooks ! msg librarian.expectMsg(msg) } "when closed with one request processed, log `1 requests processed.` at info" in { EventFilter.info(pattern = ".*1 requests processed.*", occurrences = 1) intercept { rareBooks ! RareBooks.Close } } } }
Example 90
Source File: RareBooksAppSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.TestProbe class RareBooksAppSpec extends BaseAkkaSpec("as-3003-rare-books-app-spec") { "Creating RareBooksApp" should { new RareBooksApp(system) { createCustomer(3, 100, 5) } "create a top-level actor named 'rare-books'" in { TestProbe().expectActor("/user/rare-books") } "create n customers as top-level actors when calling createCustomer" in { TestProbe().expectActor("/user/$a") TestProbe().expectActor("/user/$b") TestProbe().expectActor("/user/$c") } } }
Example 91
Source File: LibrarianSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import akka.testkit.{ EventFilter, TestProbe } import scala.concurrent.duration.{ Duration, MILLISECONDS => Millis } class LibrarianSpec extends BaseAkkaSpec("as-3003-librarian-spec") { import RareBooksProtocol._ private val findBookDuration = Duration(system.settings.config.getDuration("rare-books.librarian.find-book-duration", Millis), Millis) private val maxComplainCount: Int = system.settings.config getInt "rare-books.librarian.max-complain-count" "Receiving FindBookByTitle" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("The Epic of Gilgamesh") sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Swiss Family Robinson.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTitle("Swiss Family Robinson") sender.expectMsgType[BookNotFound] } } "Receiving FindBookByTopic" should { "When book exists, result in BookFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Tradition)) sender.expectMsgType[BookFound] } "When book does not exist, log BookNotFound at info" in { EventFilter.info(pattern = ".*BookNotFound\\(Book\\(s\\) not found based on Set\\(Unknown\\).*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) } } "When book does not exist, return BookNotFound" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! FindBookByTopic(Set(Unknown)) sender.expectMsgType[BookNotFound] } } "Receiving Complain" should { "log Credit issued at info" in { EventFilter.info(pattern = ".*Credit issued to customer.*", occurrences = 1) intercept { val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() } } "send Credit" in { val sender = TestProbe() implicit val _ = sender.ref val librarian = system.actorOf(Librarian.props(findBookDuration, maxComplainCount)) librarian ! Complain() sender.expectMsgType[Credit] } "result in a ComplainException if maxComplainCount reached" in { val librarian = system.actorOf(Librarian.props(findBookDuration, 0)) EventFilter[Librarian.ComplainException](occurrences = 1) intercept { librarian ! Complain() } } } }
Example 92
Source File: BaseAkkaSpec.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library

import akka.actor.{ ActorIdentity, ActorRef, ActorSystem, Identify }
import akka.testkit.{ EventFilter, TestEvent, TestProbe }
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.Await
import scala.concurrent.duration.{ DurationInt, FiniteDuration }

abstract class BaseAkkaSpec(actorSystemName: String) extends BaseSpec with BeforeAndAfterAll {

  implicit class TestProbeOps(probe: TestProbe) {

    def expectActor(path: String, max: FiniteDuration = 3.seconds): ActorRef = {
      probe.within(max) {
        var actor = null: ActorRef
        probe.awaitAssert {
          (probe.system actorSelection path).tell(Identify(path), probe.ref)
          probe.expectMsgPF(100 milliseconds) {
            case ActorIdentity(`path`, Some(ref)) => actor = ref
          }
        }
        actor
      }
    }
  }

  implicit val system = ActorSystem(actorSystemName)
  system.eventStream.publish(TestEvent.Mute(EventFilter.debug()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.info()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.warning()))
  system.eventStream.publish(TestEvent.Mute(EventFilter.error()))

  override protected def afterAll(): Unit = {
    Await.ready(system.terminate(), 20.seconds)
  }
}
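The expectActor helper defined above is what the RareBooks, Librarian and RareBooksApp specs in the earlier examples rely on to resolve actors by path. As a minimal sketch of how a new spec built on this BaseAkkaSpec might use it, assuming BaseSpec mixes in WordSpec-style should/in syntax as in the original project (the Greeter actor and its name are hypothetical, introduced only for illustration):

import akka.actor.{ Actor, Props }
import akka.testkit.TestProbe

// Hypothetical actor used only to demonstrate the expectActor helper.
class Greeter extends Actor {
  def receive = { case msg => sender() ! msg }
}

class GreeterSpec extends BaseAkkaSpec("greeter-spec") {

  "Creating Greeter" should {
    "result in an actor that can be resolved at /user/greeter" in {
      system.actorOf(Props(new Greeter), "greeter")
      TestProbe().expectActor("/user/greeter")
    }
  }
}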
Example 93
Source File: ContextsMasterSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master.execution

import akka.testkit.{TestActorRef, TestProbe}
import io.hydrosphere.mist.core.CommonData.{CancelJobRequest, RunJobRequest}
import io.hydrosphere.mist.master.execution.ContextEvent.{CancelJobCommand, RunJobCommand}
import io.hydrosphere.mist.master.{ActorSpec, TestData}
import io.hydrosphere.mist.utils.akka.ActorF

class ContextsMasterSpec extends ActorSpec("contexts-master") with TestData {

  it("should spawn/ proxy to contexts") {
    val ctx = TestProbe()
    val master = TestActorRef[ContextsMaster](ContextsMaster.props(
      contextF = ActorF.static(ctx.ref)
    ))

    master ! RunJobCommand(FooContext, mkRunReq("id"))
    ctx.expectMsgType[RunJobRequest]

    master ! CancelJobCommand(FooContext.name, CancelJobRequest("id"))
    ctx.expectMsgType[CancelJobRequest]
  }
}
Example 94
Source File: ExecutionServiceSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master.execution import akka.actor.ActorSystem import akka.testkit.{TestKit, TestProbe} import io.hydrosphere.mist.core.{FunctionInfoData, MockitoSugar} import io.hydrosphere.mist.master.Messages.StatusMessages.UpdateStatusEvent import io.hydrosphere.mist.master.execution.status.StatusReporter import io.hydrosphere.mist.master.execution.workers.WorkerHub import io.hydrosphere.mist.master.models.JobStartRequest import io.hydrosphere.mist.master.store.JobRepository import io.hydrosphere.mist.master.{JobDetails, TestData, TestUtils} import org.scalatest._ import org.mockito.Mockito.verify import mist.api.data._ import mist.api.encoding.defaultEncoders._ import mist.api.encoding.JsSyntax._ import scala.concurrent.Await import scala.concurrent.duration.Duration class ExecutionServiceSpec extends TestKit(ActorSystem("testMasterService")) with FunSpecLike with Matchers with MockitoSugar with TestData { describe("jobs starting") { it("should start job") { val execution = TestProbe() val repo = mock[JobRepository] val hub = mock[WorkerHub] val reporter = mock[StatusReporter] when(repo.update(any[JobDetails])).thenSuccess(()) val service = new ExecutionService(execution.ref, hub, reporter, repo) val future = service.startJob( JobStartRequest( id = "id", function = FunctionInfoData("name", path="path", className="className", defaultContext="context"), context = TestUtils.contextSettings.default, parameters = JsMap("1" -> 2.js), source = JobDetails.Source.Http, externalId = None )) execution.expectMsgType[ContextEvent.RunJobCommand] execution.reply(ExecutionInfo(req = mkRunReq("id"))) val executionInfo = Await.result(future, Duration.Inf) executionInfo.request.id shouldBe "id" verify(reporter).reportPlain(any[UpdateStatusEvent]) } } describe("jobs stopping") { it("should stop job") { //TODO val contextsMaster = TestProbe() val repo = mock[JobRepository] val hub = mock[WorkerHub] val reporter = mock[StatusReporter] when(repo.get(any[String])) .thenSuccess(Some(mkDetails(JobDetails.Status.Started))) .thenSuccess(Some(mkDetails(JobDetails.Status.Canceled))) val service = new ExecutionService(contextsMaster.ref, hub, reporter, repo) val future = service.stopJob("id") contextsMaster.expectMsgType[ContextEvent.CancelJobCommand] contextsMaster.reply(ContextEvent.JobCancelledResponse("id", mkDetails(JobDetails.Status.Canceled))) val details = Await.result(future, Duration.Inf) details.get.status shouldBe JobDetails.Status.Canceled } } }
Example 95
Source File: WorkerConnectionSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master.execution.workers

import akka.testkit.TestProbe
import io.hydrosphere.mist.master.{ActorSpec, TestData}

import scala.concurrent.Promise

class WorkerConnectionSpec extends ActorSpec("worker_conn") with TestData {

  it("should send shutdown command") {
    val connRef = TestProbe()
    val termination = Promise[Unit]

    val connection = WorkerConnection(
      id = "id",
      ref = connRef.ref,
      data = workerLinkData,
      whenTerminated = termination.future
    )

    connection.shutdown(true)
    connRef.expectMsgType[WorkerBridge.Event.ForceShutdown.type]

    connection.shutdown(false)
    connRef.expectMsgType[WorkerBridge.Event.CompleteAndShutdown.type]
  }
}
Example 96
Source File: ExclusiveConnectorSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master.execution.workers import akka.actor.ActorRef import akka.testkit.{TestActorRef, TestProbe} import io.hydrosphere.mist.core.CommonData.RunJobRequest import io.hydrosphere.mist.master.execution.workers.WorkerBridge.Event.CompleteAndShutdown import io.hydrosphere.mist.master.execution.workers.WorkerConnector.Event.Released import io.hydrosphere.mist.master.{ActorSpec, FilteredException, TestData} import scala.concurrent.{Await, Future, Promise} import scala.concurrent.duration._ class ExclusiveConnectorSpec extends ActorSpec("excl-conn") with TestData { it("shouldn't ignore errors") { val connector = TestActorRef[ExclusiveConnector](ExclusiveConnector.props( id = "id", ctx = FooContext, startWorker = (_, _) => Future.failed(FilteredException()) )) val probe = TestProbe() val resolve = Promise[PerJobConnection] probe.send(connector, WorkerConnector.Event.AskConnection(resolve)) intercept[Throwable] { Await.result(resolve.future, Duration.Inf) } } it("should return wrapped connections") { val originalRef = TestProbe() val original = WorkerConnection("id", originalRef.ref, workerLinkData, Promise[Unit].future) val connector = TestActorRef[ExclusiveConnector](ExclusiveConnector.props( id = "id", ctx = FooContext, startWorker = (_, _) => Future.successful(original) )) val probe = TestProbe() val resolve = Promise[PerJobConnection] probe.send(connector, WorkerConnector.Event.AskConnection(resolve)) val connection = Await.result(resolve.future, Duration.Inf) connection.run(mkRunReq("id"), probe.ref) originalRef.expectMsgType[RunJobRequest] originalRef.expectMsgType[WorkerBridge.Event.CompleteAndShutdown.type] } describe("Exclusive conn wrapper") { it("should release connection") { val connRef = TestProbe() val termination = Promise[Unit] val connection = WorkerConnection( id = "id", ref = connRef.ref, data = workerLinkData, whenTerminated = termination.future ) val connector = TestProbe() val wrapped = ExclusiveConnector.wrappedConnection(connector.ref, connection) wrapped.release() connector.expectMsgType[WorkerConnector.Event.Released] wrapped.run(mkRunReq("id"), ActorRef.noSender) connRef.expectMsgType[RunJobRequest] connRef.expectMsgType[CompleteAndShutdown.type] } } }
Example 97
Source File: FutureSubscribeSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master.execution

import akka.actor.{Actor, ActorRef, Props}
import akka.testkit.{TestActorRef, TestProbe}
import io.hydrosphere.mist.master.ActorSpec

import scala.concurrent.{Future, Promise}

class FutureSubscribeSpec extends ActorSpec("future-subsribe-spec") {

  import FutureSubscribeSpec._

  it("should handle success") {
    val actor = TestActorRef[TestActor](Props(classOf[TestActor]))
    val probe = TestProbe()

    val p = Promise[Unit]
    probe.send(actor, TestMessage(p.future))
    p.success(())
    probe.expectMsgType[Ok.type]
  }

  it("should handle failure") {
    val actor = TestActorRef[TestActor](Props(classOf[TestActor]))
    val probe = TestProbe()

    val p = Promise[Unit]
    probe.send(actor, TestMessage(p.future))
    p.failure(new RuntimeException())
    probe.expectMsgType[Err.type]
  }
}

object FutureSubscribeSpec {

  sealed trait Rsp
  case object Ok extends Rsp
  case object Err extends Rsp

  case class TestMessage(future: Future[Unit])

  class TestActor extends Actor with FutureSubscribe {

    import context._

    override def receive: Receive = {
      case TestMessage(future) =>
        subscribe0(future)(_ => Ok, _ => Err)
        context become respond(sender())
    }

    private def respond(respond: ActorRef): Receive = {
      case x: Rsp => respond ! x
    }
  }
}
Example 98
Source File: ActorSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, FunSpecLike, Matchers}

import scala.concurrent.duration.FiniteDuration

abstract class ActorSpec(name: String) extends TestKit(ActorSystem(name))
  with FunSpecLike
  with Matchers
  with BeforeAndAfterAll {

  override def afterAll: Unit = {
    system.terminate()
  }

  def shouldTerminate(f: FiniteDuration)(ref: ActorRef): Unit = {
    val probe = TestProbe()
    probe.watch(ref)
    probe.expectTerminated(ref, f)
  }
}
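The shouldTerminate helper above simply watches the given actor with a probe and waits for its termination. A minimal sketch of how a spec extending this ActorSpec might use it; the inline echo actor and the PoisonPill shutdown are illustrative assumptions rather than part of the mist project:

import akka.actor.{ Actor, PoisonPill, Props }

import scala.concurrent.duration._

class TerminationSpec extends ActorSpec("termination-spec") {

  it("should observe termination after a PoisonPill") {
    // Throwaway actor defined inline purely for this illustration.
    val ref = system.actorOf(Props(new Actor {
      def receive = { case msg => sender() ! msg }
    }))

    ref ! PoisonPill
    shouldTerminate(3.seconds)(ref)
  }
}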
Example 99
Source File: MasterBridgeSpec.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.worker import akka.actor.ActorSystem import akka.testkit.{TestActorRef, TestKit, TestProbe} import io.hydrosphere.mist.core.CommonData._ import io.hydrosphere.mist.core.MockitoSugar import io.hydrosphere.mist.utils.akka.{ActorF, ActorRegHub} import mist.api.data.JsMap import org.apache.spark.SparkConf import org.scalatest.{BeforeAndAfterAll, FunSpec, FunSpecLike, Matchers} import scala.concurrent.duration._ class MasterBridgeSpec extends TestKit(ActorSystem("WorkerBridgeSpec")) with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfterAll { def mkInitInfo(sparkConf: Map[String, String]) = WorkerInitInfo(sparkConf, 1, 20 seconds, 20 seconds, "localhost:2005", "localhost:2003", "localhost:2004", 202020, "") it("should create named context with spark.streaming.stopSparkContextByDefault=false") { val sparkConf = Map( "spark.streaming.stopSparkContextByDefault" -> "true", "spark.master" -> "local[*]", "spark.driver.allowMultipleContexts" -> "true" ) val namedContext = MistScContext("test", 1 second, new SparkConf().setAll(sparkConf)) val propertyValue = namedContext.sc.getConf.getBoolean("spark.streaming.stopSparkContextByDefault", true) propertyValue shouldBe false namedContext.sc.stop() } it("should shutdown correctly") { val namedMock = mock[MistScContext] when(namedMock.getUIAddress()).thenReturn(Some("addr")) val regHub = TestProbe() val worker = TestProbe() val props = MasterBridge.props("id", regHub.ref, _ => namedMock, ActorF.static[(WorkerInitInfo, MistScContext)](worker.ref)) val bridge = TestActorRef(props) regHub.expectMsgType[ActorRegHub.Register] val remote = TestProbe() remote.send(bridge, mkInitInfo(Map.empty)) remote.expectMsgType[WorkerReady] remote.send(bridge, RunJobRequest("id", JobParams("path", "MyClass", JsMap.empty, action = Action.Execute))) worker.expectMsgType[RunJobRequest] remote.send(bridge, ShutdownWorker) remote.expectMsgType[RequestTermination.type] remote.send(bridge, ShutdownWorkerApp) remote.expectMsgType[Goodbye.type] } }
Example 100
Source File: FutureRetryUtilitySpec.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.util

import akka.actor.{ActorSystem, Scheduler, Status}
import akka.event.{Logging, LoggingAdapter}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{Matchers, WordSpecLike}

import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global

class FutureRetryUtilitySpec
    extends TestKit(ActorSystem("MySpec"))
    with WordSpecLike
    with Matchers
    with FutureRetryUtility {

  implicit val schedule: Scheduler    = system.scheduler
  implicit val logger: LoggingAdapter = Logging.getLogger(system, this)

  private final val delay: FiniteDuration = 2.seconds
  private final val retries: Int          = 3

  private def future(flag: Boolean) =
    if (flag) Future.successful(3) else Future.failed(new RuntimeException("Failure"))

  "retry function in FutureRetryUtility" must {

    "successfully returns whether, after retries, the future is eventually successful" in {
      Await.result(future(true).retry(delay, retries)(_ > 2), Duration.Inf) shouldBe 3
    }

    "thrown an Exception whether, after retries, the future eventually returns an Exception" in {
      an[RuntimeException] shouldBe thrownBy(
        Await.result(future(false).retry(delay, retries)(_ => true), Duration.Inf))
    }

    "consider the number of retries" in {
      val q = mutable.Queue(0)
      def future = {
        val nRetries = q.dequeue()
        if (nRetries < 2) { q.enqueue(nRetries + 1); Future.failed(new RuntimeException) }
        else { q.enqueue(nRetries + 1); Future.successful(nRetries) }
      }
      Await.result(future.retry(delay, retries)(_ > 2), Duration.Inf) shouldBe 3
    }
  }

  "pipeTo function in FutureRetryUtility" must {

    "returns a successful future and send the content of it through pipe" in {
      val testProbe = TestProbe("actor-test")
      future(true).pipeTo(delay, retries, testProbe.testActor)()
      testProbe.expectMsg(3)
    }

    "return a failed future and send a status failure through pipe" in {
      val testProbe = TestProbe("actor-test")
      future(false).pipeTo(delay, retries, testProbe.testActor)()
      testProbe.expectMsgAllClassOf(classOf[Status.Failure])
    }
  }
}
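Based only on the call shapes exercised in this spec (retry(delay, retries)(predicate) and pipeTo(delay, retries, recipient)(), with an implicit Scheduler, LoggingAdapter and ExecutionContext in scope), a sketch of how the same utility might be used from inside an actor; the CountHandler actor and the fetchRecordCount lookup are hypothetical, not part of NSDb:

import akka.actor.{ Actor, Scheduler }
import akka.event.{ Logging, LoggingAdapter }

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

class CountHandler extends Actor with FutureRetryUtility {

  implicit val scheduler: Scheduler = context.system.scheduler
  implicit val logger: LoggingAdapter = Logging.getLogger(context.system, this)

  // Hypothetical flaky lookup, stubbed out for illustration.
  private def fetchRecordCount(): Future[Long] = Future(42L)

  def receive = {
    case "count" =>
      // Retry the lookup (2 seconds apart, up to 3 times) and pipe the eventual
      // value, or a Status.Failure, back to the sender, mirroring the spec above.
      fetchRecordCount().pipeTo(2.seconds, 3, sender())()
  }
}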
Example 101
Source File: MetricsReportingManagerSpec.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.metrics.reporting

import akka.actor.ActorSystem
import akka.testkit.{TestActorRef, TestProbe}
import com.github.vonnagy.service.container.AkkaTestkitSpecs2Support
import com.github.vonnagy.service.container.health.{GetHealth, HealthInfo, HealthState}
import com.typesafe.config.ConfigFactory
import org.specs2.mutable.SpecificationLike

class MetricsReportingManagerSpec extends AkkaTestkitSpecs2Support(ActorSystem("default",
  ConfigFactory.parseString("container.metrics.reporters.Slf4j.enabled=on")))
  with SpecificationLike {

  // Run in order
  sequential

  "The MetricsReportingManager" should {

    val probe = TestProbe()
    val act = TestActorRef[MetricsReportingManager](MetricsReportingManager.props())

    "be able to load the defined reporters" in {
      act.underlyingActor.reporters.size must be equalTo (1)
    }

    "be able to report it's health" in {
      probe.send(act, GetHealth)
      probe.expectMsgClass(classOf[HealthInfo]) must beEqualTo(
        HealthInfo("metrics-reporting", HealthState.OK,
          "The system is currently managing 1 metrics reporters",
          Some(List("com.github.vonnagy.service.container.metrics.reporting.Slf4jReporter")), List()))
    }

    "be able to stop the running reporters" in {
      act.underlyingActor.stopReporters
      act.underlyingActor.reporters.size must be equalTo (0)
    }
  }
}
Example 102
Source File: RoutedServiceSpec.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.http.routing import akka.actor._ import akka.http.scaladsl.model.{HttpEntity, MediaTypes, StatusCodes} import akka.http.scaladsl.server.{Directives, Route} import akka.testkit.{TestActorRef, TestProbe} import com.github.vonnagy.service.container.Specs2RouteTest import com.github.vonnagy.service.container.http.{DefaultMarshallers, RejectionResponse} import org.specs2.mutable.Specification class RoutedServiceSpec extends Specification with Directives with Specs2RouteTest { case class TestEntity(id: Int, name: String) val probe = new TestProbe(system) val httpAct = TestActorRef(Props(new Actor with RoutedService with DefaultMarshallers { def receive = routeReceive }), "http") val svc = httpAct.underlyingActor.asInstanceOf[RoutedService] def echoComplete[T]: T => Route = { x ⇒ complete(x.toString) } "The RoutedService" should { "allow for routes to be added after the system is already loaded" in { // This should create the actor and register the endpoints val r = new RoutedEndpoints { def route = { path("test2") { complete("test2") } } } probe.send(httpAct, AddRoute(r)) probe.expectMsg(RouteAdded) Get("/test2") ~> svc.buildRoute(svc.routes) ~> check { responseAs[String] must be equalTo "test2" } } "respond with UnprocessableEntity for requests resulting in a MalformedFormFieldRejection" in { implicit val unmarsh = svc.jsonUnmarshaller[TestEntity] implicit val rejMarsh = svc.jsonUnmarshaller[RejectionResponse] val postRoute = new RoutedEndpoints { def route = { post { path("test4") { entity(as[TestEntity]) { echoComplete } } } } } probe.send(httpAct, AddRoute(postRoute)) probe.expectMsg(RouteAdded) import svc.defaultJsonFormats val ent = TestEntity(100, "product") Post("/test4", HttpEntity(MediaTypes.`application/json`, svc.serialization.write(ent))) ~> handleRejections(svc.rejectionHandler)(svc.buildRoute(svc.routes)) ~> check { status === StatusCodes.UnprocessableEntity mediaType === MediaTypes.`application/json` responseAs[RejectionResponse] must not beNull } } "respond with RejectionResponse for requests that error out" in { implicit val rejMarsh = svc.jsonUnmarshaller[RejectionResponse] val postRoute = new RoutedEndpoints { def route = { get { path("test5") { ctx => throw new Exception("test") } } } } probe.send(httpAct, AddRoute(postRoute)) probe.expectMsg(RouteAdded) Get("/test5") ~> Route.seal(svc.buildRoute(svc.routes))(svc.routeSettings, exceptionHandler = svc.exceptionHandler, rejectionHandler = svc.rejectionHandler) ~> check { mediaType === MediaTypes.`application/json` responseAs[RejectionResponse] must not beNull } } } }
Example 103
Source File: RoutedEndpointsActorSpec.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.http.routing

import akka.actor._
import akka.testkit.{TestActorRef, TestProbe}
import com.github.vonnagy.service.container.AkkaTestkitSpecs2Support
import org.specs2.mutable.SpecificationLike

class RoutedEndpointsActorSpec extends AkkaTestkitSpecs2Support with SpecificationLike {

  import system.dispatcher

  "The RoutedEndpointsActor" should {

    "allow actor to add routes" in {
      val probe = TestProbe()

      val svc = TestActorRef(new Actor {
        def receive = {
          case _ =>
        }
      }, "service")

      svc.underlyingActor.context
        .actorOf(Props(new Actor with RoutedService {
          def receive = routeReceive
        }), "http")

      TestActorRef(new RoutedEndpointsActor {
        def receive = {
          case RouteAdded => probe.ref ! RouteAdded
        }
        override def route = {
          path("test") {
            complete("complete")
          }
        }
      })

      probe.expectMsg(RouteAdded) must beEqualTo(RouteAdded)
    }
  }
}
Example 104
Source File: HttpServiceSpec.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.http import akka.actor.{ActorSystem, Props} import akka.testkit.{TestActorRef, TestProbe} import com.github.vonnagy.service.container.{AkkaTestkitSpecs2Support, TestUtils} import com.github.vonnagy.service.container.health.HealthState import com.typesafe.config.ConfigFactory import org.specs2.mutable.SpecificationLike class HttpServiceSpec extends AkkaTestkitSpecs2Support(ActorSystem("test", { val http = TestUtils.temporaryServerHostnameAndPort() val https = TestUtils.temporaryServerHostnameAndPort() ConfigFactory.parseString( s""" container.http.interface="${http._2}" container.http.port=${http._3} container.https.interface="${https._2}" container.https.port=${https._3} """)})) with SpecificationLike { sequential val probe = TestProbe() val act = TestActorRef[HttpService](Props(new HttpService(Nil)), probe.testActor, "service") "The HttpService" should { "be able to check the services health before it is started" in { act.underlyingActor.getHttpHealth must not be null act.underlyingActor.getHttpHealth.state must be equalTo HealthState.CRITICAL } "be able to start and Http service on a specified port" in { act.underlyingActor.httpSettings.isEmpty must beFalse act.underlyingActor.httpServer.isEmpty must beTrue probe.send(act, HttpStart) val msg = probe.expectMsg(HttpStarted) msg must be equalTo HttpStarted act.underlyingActor.httpServer.size must be equalTo(2) } "be able to check the services health after it is started" in { act.underlyingActor.getHttpHealth must not be null act.underlyingActor.getHttpHealth.state must be equalTo HealthState.OK } "be able to stop the Http service" in { act.underlyingActor.stopHttpServer val msg = probe.expectMsg(HttpStopped) msg must be equalTo HttpStopped act.underlyingActor.httpServer.isEmpty must beTrue } } }
Example 105
Source File: InMemoryPersistenceActorSpec.scala From vamp with Apache License 2.0 | 5 votes |
package io.vamp.persistence import java.util.concurrent.TimeUnit import akka.actor.{ ActorSystem, Props } import akka.testkit.{ ImplicitSender, TestKit, TestProbe } import akka.util.Timeout import com.typesafe.scalalogging.LazyLogging import io.vamp.common.akka.IoC import io.vamp.common.vitals.InfoRequest import io.vamp.common.{ Artifact, Namespace, NamespaceProvider } import io.vamp.persistence.notification.UnsupportedPersistenceRequest import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike } import scala.concurrent.Await import scala.concurrent.duration._ object TestArtifact { val kind: String = "TestArtifact" } class TestArtifact extends Artifact { override def name = "TestArtifact" override def kind = "TestArtifact" override def metadata = Map("name" → "testArtifact") } class TestInMemoryPersistenceActor extends InMemoryPersistenceActor { override protected def type2string(`type`: Class[_]): String = `type` match { // test artifact case t if classOf[TestArtifact].isAssignableFrom(t) ⇒ TestArtifact.kind case _ ⇒ throwException(UnsupportedPersistenceRequest(`type`)) } } class InMemoryPersistenceActorSpec extends TestKit(ActorSystem("InMemoryPersistenceActorSpec")) with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with NamespaceProvider with LazyLogging { implicit val namespace: Namespace = Namespace("default") implicit val timeout: Timeout = Timeout(5L, TimeUnit.SECONDS) override def afterAll { TestKit.shutdownActorSystem(system) } "InMemoryPersistenceActor" must { "reply to InfoRequest" in { val testProbe = TestProbe("test") val actors = Await.result(IoC.createActor(Props(classOf[InMemoryPersistenceActor])).map(_ :: Nil)(system.dispatcher), 5.seconds) val actor = actors.head val expectedResponse = Map("database" → Map( "status" → "valid", "artifacts" → Map(), "type" → "in-memory [no persistence]" ), "archiving" → true) testProbe.send(actor, InfoRequest) testProbe.expectMsgPF(30.seconds) { case response: Map[_, _] ⇒ logger.info(response.toString) assert(response == expectedResponse) case _ ⇒ fail("Unexpected message") } } "reply to Create" in { val testProbe = TestProbe("test") val actors = Await.result(IoC.createActor(Props(classOf[TestInMemoryPersistenceActor])).map(_ :: Nil)(system.dispatcher), 5.seconds) val actor = actors.head val artifact = new TestArtifact() val expectedResponse = List[TestArtifact](artifact) val source = "testSource" testProbe.send(actor, PersistenceActor.Create(artifact, Option(source))) testProbe.expectMsgPF(30.seconds) { case response: List[_] ⇒ logger.info(response.toString) assert(response === expectedResponse) case _ ⇒ fail("Unexpected message") } } } }
Example 106
Source File: IoCSpec.scala From vamp with Apache License 2.0 | 5 votes |
package io.vamp.common.akka

import java.util.concurrent.TimeUnit

import akka.actor.{ ActorSystem, Props }
import akka.testkit.{ ImplicitSender, TestKit, TestProbe }
import akka.util.Timeout
import com.typesafe.scalalogging.LazyLogging
import io.vamp.common.notification.Notification
import io.vamp.common.{ ClassMapper, Namespace, NamespaceProvider }
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }

import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

class IoCSpec extends TestKit(ActorSystem("IoCSpec"))
    with ImplicitSender
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with NamespaceProvider
    with LazyLogging {

  implicit val namespace: Namespace = Namespace("default")
  implicit val timeout: Timeout = Timeout(5L, TimeUnit.SECONDS)

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Echo actor" must {

    "echo message" in {

      val testProbe = TestProbe("test")

      val actors = Await.result(IoC.createActor(Props(classOf[EchoActor])).map(_ :: Nil)(system.dispatcher), 5.seconds)
      val actor = actors.head
      val testMessage = "Example Message"

      testProbe.send(actor, testMessage)
      testProbe.expectMsgPF(30.seconds) {
        case response: String ⇒
          logger.info(response.toString)
          assert(response == testMessage)
        case _ ⇒
          fail("Unexpected message")
      }
    }
  }
}

class EchoActorMapper extends ClassMapper {
  val name = "echo"
  val clazz: Class[_] = classOf[EchoActor]
}

class EchoActor extends CommonSupportForActors {

  override def receive: Receive = {
    case text: String ⇒ reply(echo(text))
  }

  private def echo(text: String): Future[String] = Future {
    text
  }

  override def message(notification: Notification): String = "echo actor message"

  override def info(notification: Notification): Unit = log.info(s"echo actor info")

  override def reportException(notification: Notification): Exception = new Exception("Echo actor notification report")
}
Example 107
Source File: ShellClientSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client.socket import java.util.UUID import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{TestProbe, ImplicitSender, TestKit} import org.apache.toree.communication.ZMQMessage import org.apache.toree.communication.security.SecurityActorType import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.client.ActorLoader import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest import org.scalatest.mock.MockitoSugar import org.scalatest.{Matchers, FunSpecLike} import org.mockito.Mockito._ import org.mockito.Matchers._ import play.api.libs.json.Json class ShellClientSpec extends TestKit(ActorSystem("ShellActorSpec")) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { private val SignatureEnabled = true describe("ShellClientActor") { val socketFactory = mock[SocketFactory] val mockActorLoader = mock[ActorLoader] val probe : TestProbe = TestProbe() when(socketFactory.ShellClient( any(classOf[ActorSystem]), any(classOf[ActorRef]) )).thenReturn(probe.ref) val signatureManagerProbe = TestProbe() doReturn(system.actorSelection(signatureManagerProbe.ref.path.toString)) .when(mockActorLoader).load(SecurityActorType.SignatureManager) val shellClient = system.actorOf(Props( classOf[ShellClient], socketFactory, mockActorLoader, SignatureEnabled )) describe("send execute request") { it("should send execute request") { val request = ExecuteRequest( "foo", false, true, UserExpressions(), true ) val header = Header( UUID.randomUUID().toString, "spark", UUID.randomUUID().toString, MessageType.Incoming.ExecuteRequest.toString, "5.0" ) val kernelMessage = KernelMessage( Seq[Array[Byte]](), "", header, HeaderBuilder.empty, Metadata(), Json.toJson(request).toString ) shellClient ! kernelMessage // Echo back the kernel message sent to have a signature injected signatureManagerProbe.expectMsgClass(classOf[KernelMessage]) signatureManagerProbe.reply(kernelMessage) probe.expectMsgClass(classOf[ZMQMessage]) } } } }
Example 108
Source File: HeartbeatClientSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Matchers._
import org.mockito.Mockito._

class HeartbeatClientSpec extends TestKit(ActorSystem("HeartbeatActorSpec"))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("HeartbeatClientActor") {
    val socketFactory = mock[SocketFactory]
    val mockActorLoader = mock[ActorLoader]
    val probe : TestProbe = TestProbe()
    when(socketFactory.HeartbeatClient(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeatClient = system.actorOf(Props(
      classOf[HeartbeatClient], socketFactory, mockActorLoader, true
    ))

    describe("send heartbeat") {
      it("should send ping ZMQMessage") {
        heartbeatClient ! HeartbeatMessage
        probe.expectMsgClass(classOf[ZMQMessage])
      }
    }
  }
}
Example 109
Source File: SparkKernelClientSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client import akka.actor.ActorSystem import akka.testkit.{TestKit, TestProbe} import org.apache.toree.comm.{CommCallbacks, CommStorage, CommRegistrar} import org.apache.toree.kernel.protocol.v5 import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.client.execution.ExecuteRequestTuple import scala.concurrent.duration._ import org.mockito.Mockito._ import org.mockito.Matchers.{eq => mockEq, _} import org.scalatest.mock.MockitoSugar import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers} class SparkKernelClientSpec extends TestKit(ActorSystem("SparkKernelClientActorSystem")) with Matchers with MockitoSugar with FunSpecLike with BeforeAndAfter { private val TestTargetName = "some target" private var mockActorLoader: ActorLoader = _ private var mockCommRegistrar: CommRegistrar = _ private var sparkKernelClient: SparkKernelClient = _ private var executeRequestProbe: TestProbe = _ private var shellClientProbe: TestProbe = _ before { mockActorLoader = mock[ActorLoader] mockCommRegistrar = mock[CommRegistrar] executeRequestProbe = TestProbe() when(mockActorLoader.load(MessageType.Incoming.ExecuteRequest)) .thenReturn(system.actorSelection(executeRequestProbe.ref.path.toString)) shellClientProbe = TestProbe() when(mockActorLoader.load(SocketType.ShellClient)) .thenReturn(system.actorSelection(shellClientProbe.ref.path.toString)) sparkKernelClient = new SparkKernelClient( mockActorLoader, system, mockCommRegistrar) } describe("SparkKernelClient") { describe("#execute") { it("should send an ExecuteRequest message") { val func = (x: Any) => println(x) sparkKernelClient.execute("val foo = 2") executeRequestProbe.expectMsgClass(classOf[ExecuteRequestTuple]) } } } }
Example 110
Source File: CodeCompleteHandlerSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.handler import akka.actor._ import akka.testkit.{TestProbe, ImplicitSender, TestKit} import org.apache.toree.Main import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.content.CompleteRequest import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.apache.toree.kernel.protocol.v5Test._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers} import org.mockito.Mockito._ import test.utils.MaxAkkaTestTimeout class CodeCompleteHandlerSpec extends TestKit( ActorSystem("CodeCompleteHandlerSpec", None, Some(Main.getClass.getClassLoader)) ) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter { var actorLoader: ActorLoader = _ var handlerActor: ActorRef = _ var kernelMessageRelayProbe: TestProbe = _ var interpreterProbe: TestProbe = _ var statusDispatchProbe: TestProbe = _ before { actorLoader = mock[ActorLoader] handlerActor = system.actorOf(Props(classOf[CodeCompleteHandler], actorLoader)) kernelMessageRelayProbe = TestProbe() when(actorLoader.load(SystemActorType.KernelMessageRelay)) .thenReturn(system.actorSelection(kernelMessageRelayProbe.ref.path.toString)) interpreterProbe = new TestProbe(system) when(actorLoader.load(SystemActorType.Interpreter)) .thenReturn(system.actorSelection(interpreterProbe.ref.path.toString)) statusDispatchProbe = new TestProbe(system) when(actorLoader.load(SystemActorType.StatusDispatch)) .thenReturn(system.actorSelection(statusDispatchProbe.ref.path.toString)) } def replyToHandlerWithOkAndResult() = { val expectedClass = classOf[CompleteRequest] interpreterProbe.expectMsgClass(expectedClass) interpreterProbe.reply((0, List[String]())) } def replyToHandlerWithOkAndBadResult() = { val expectedClass = classOf[CompleteRequest] interpreterProbe.expectMsgClass(expectedClass) interpreterProbe.reply("hello") } describe("CodeCompleteHandler (ActorLoader)") { it("should send a CompleteRequest") { handlerActor ! MockCompleteRequestKernelMessage replyToHandlerWithOkAndResult() kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) { case KernelMessage(_, _, header, _, _, _) => header.msg_type == MessageType.Outgoing.CompleteReply.toString } } it("should throw an error for bad JSON") { handlerActor ! MockKernelMessageWithBadJSON var result = false try { replyToHandlerWithOkAndResult() } catch { case t: Throwable => result = true } result should be (true) } it("should throw an error for bad code completion") { handlerActor ! MockCompleteRequestKernelMessage try { replyToHandlerWithOkAndBadResult() } catch { case error: Exception => error.getMessage should be ("Parse error in CodeCompleteHandler") } } it("should send an idle message") { handlerActor ! MockCompleteRequestKernelMessage replyToHandlerWithOkAndResult() statusDispatchProbe.fishForMessage(MaxAkkaTestTimeout) { case Tuple2(status, _) => status == KernelStatusType.Idle } } } }
Example 111
Source File: GenericSocketMessageHandlerSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.handler

import akka.actor.{ActorSystem, Props, ActorRef, ActorSelection}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import test.utils.MaxAkkaTestTimeout

class GenericSocketMessageHandlerSpec extends TestKit(
  ActorSystem(
    "GenericSocketMessageHandlerSystem",
    None,
    Some(org.apache.toree.Main.getClass.getClassLoader)
  ))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("GenericSocketMessageHandler( ActorLoader, SocketType )") {
    //  Create a mock ActorLoader for the Relay we are going to test
    val actorLoader: ActorLoader = mock[ActorLoader]

    //  Create a probe for the ActorSelection that the ActorLoader will return
    val selectionProbe: TestProbe = TestProbe()
    val selection: ActorSelection = system.actorSelection(selectionProbe.ref.path.toString)
    when(actorLoader.load(SocketType.Control)).thenReturn(selection)

    //  The Relay we are going to be testing against
    val genericHandler: ActorRef = system.actorOf(
      Props(classOf[GenericSocketMessageHandler], actorLoader, SocketType.Control)
    )

    describe("#receive( KernelMessage )") {
      genericHandler ! MockKernelMessage

      it("should send the message to the selected actor"){
        selectionProbe.expectMsg(MaxAkkaTestTimeout, MockKernelMessage)
      }
    }
  }
}
Example 112
Source File: KernelInfoRequestHandlerSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.handler import akka.actor.{ActorSelection, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.apache.toree.Main import org.apache.toree.kernel.protocol.v5.content.KernelInfoReply import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.apache.toree.kernel.protocol.v5._ import org.mockito.AdditionalMatchers.{not => mockNot} import org.mockito.Matchers.{eq => mockEq} import com.typesafe.config.ConfigFactory import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import play.api.libs.json.Json import test.utils.MaxAkkaTestTimeout object KernelInfoRequestHandlerSpec { val config = """ akka { loglevel = "WARNING" }""" } class KernelInfoRequestHandlerSpec extends TestKit( ActorSystem("KernelInfoRequestHandlerSpec", ConfigFactory.parseString(KernelInfoRequestHandlerSpec.config), Main.getClass.getClassLoader) ) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { val actorLoader: ActorLoader = mock[ActorLoader] val actor = system.actorOf(Props(classOf[KernelInfoRequestHandler], actorLoader, LanguageInfo("test", "1.0.0", Some(".test")))) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)) .thenReturn(relaySelection) when(actorLoader.load(mockNot(mockEq(SystemActorType.KernelMessageRelay)))) .thenReturn(system.actorSelection("")) val header = Header("","","","","") val kernelMessage = new KernelMessage( Seq[Array[Byte]](), "test message", header, header, Metadata(), "{}" ) describe("Kernel Info Request Handler") { it("should return a KernelMessage containing kernel info response") { actor ! kernelMessage val reply = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage] val kernelInfo = Json.parse(reply.contentString).as[KernelInfoReply] kernelInfo.implementation should be ("spark") } } }
Example 113
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities} import org.apache.toree.kernel.protocol.v5Test._ import Utilities._ import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object ShellSpec { val config =""" akka { loglevel = "WARNING" }""" } class ShellSpec extends TestKit( ActorSystem( "ShellActorSpec", ConfigFactory.parseString(ShellSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Shell") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 114
Source File: IOPubSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object IOPubSpec {
  val config ="""
    akka {
      loglevel = "WARNING"
    }"""
}

class IOPubSpec extends TestKit(
  ActorSystem("IOPubActorSpec",
    ConfigFactory.parseString(IOPubSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("IOPubActor") {
    val socketFactory = mock[SocketFactory]
    val probe : TestProbe = TestProbe()
    when(socketFactory.IOPub(any(classOf[ActorSystem]))).thenReturn(probe.ref)

    val socket = system.actorOf(Props(classOf[IOPub], socketFactory))

    // TODO test that the response type changed
    describe("#receive") {
      it("should reply with a ZMQMessage") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        socket ! MockKernelMessage

        probe.expectMsg(MaxAkkaTestTimeout, MockZMQMessage)
      }
    }
  }
}
Example 115
Source File: HeartbeatSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object HeartbeatSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class HeartbeatSpec extends TestKit(
  ActorSystem(
    "HeartbeatActorSpec",
    ConfigFactory.parseString(HeartbeatSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  val SomeMessage: String = "some message"
  val SomeZMQMessage: ZMQMessage = ZMQMessage(ByteString(SomeMessage.getBytes))

  describe("HeartbeatActor") {
    val socketFactory = mock[SocketFactory]
    val probe : TestProbe = TestProbe()
    when(socketFactory.Heartbeat(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeat = system.actorOf(Props(classOf[Heartbeat], socketFactory))

    describe("send heartbeat") {
      it("should receive and send same ZMQMessage") {
        heartbeat ! SomeZMQMessage
        probe.expectMsg(MaxAkkaTestTimeout, SomeZMQMessage)
      }
    }
  }
}
Example 116
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem} import akka.testkit.{TestProbe, ImplicitSender, TestKit} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities._ import org.apache.toree.kernel.protocol.v5Test._ import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType} import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import com.typesafe.config.ConfigFactory import org.scalatest.mock.MockitoSugar import org.scalatest.{Matchers, FunSpecLike} import org.mockito.Mockito._ import org.mockito.Matchers._ import test.utils.MaxAkkaTestTimeout object StdinSpec { val config =""" akka { loglevel = "WARNING" }""" } class StdinSpec extends TestKit(ActorSystem( "StdinActorSpec", ConfigFactory.parseString(StdinSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Stdin") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 117
Source File: ActorLoaderSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel import akka.actor.{ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5.{MessageType, SocketType} import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.TestProbeProxyActor import test.utils.MaxAkkaTestTimeout class ActorLoaderSpec extends TestKit( ActorSystem( "ActorLoaderSpecSystem", None, Some(org.apache.toree.Main.getClass.getClassLoader) )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("ActorLoader"){ describe("#load( MessageType )"){ it("should load an ActorSelection that has been loaded into the system"){ val testProbe: TestProbe = TestProbe() system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), MessageType.Outgoing.ClearOutput.toString) val actorLoader: ActorLoader = SimpleActorLoader(system) actorLoader.load(MessageType.Outgoing.ClearOutput) ! "<Test Message>" testProbe.expectMsg("<Test Message>") } it("should expect no message when there is no actor"){ val testProbe: TestProbe = TestProbe() val actorLoader: ActorLoader = SimpleActorLoader(system) actorLoader.load(MessageType.Outgoing.CompleteReply) ! "<Test Message>" testProbe.expectNoMessage(MaxAkkaTestTimeout) // This is to test to see if there the messages go to the actor inbox or the dead mail inbox system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), MessageType.Outgoing.CompleteReply.toString) testProbe.expectNoMessage(MaxAkkaTestTimeout) } } describe("#load( SocketType )"){ it("should load an ActorSelection that has been loaded into the system"){ val testProbe: TestProbe = TestProbe() system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.Shell.toString) val actorLoader: ActorLoader = SimpleActorLoader(system) actorLoader.load(SocketType.Shell) ! "<Test Message>" testProbe.expectMsg("<Test Message>") } it("should expect no message when there is no actor"){ val testProbe: TestProbe = TestProbe() val actorLoader: ActorLoader = SimpleActorLoader(system) actorLoader.load(SocketType.IOPub) ! "<Test Message>" testProbe.expectNoMessage(MaxAkkaTestTimeout) // This is to test to see if there the messages go to the actor inbox or the dead mail inbox system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.IOPub.toString) testProbe.expectNoMessage(MaxAkkaTestTimeout) } } } }
Example 118
Source File: SimpleActorLoaderSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel import akka.actor.{ActorSelection, ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5.MessageType import org.scalatest.{FunSpecLike, Matchers} import test.utils.TestProbeProxyActor import test.utils.MaxAkkaTestTimeout class SimpleActorLoaderSpec extends TestKit( ActorSystem( "SimpleActorLoaderSpecSystem", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with FunSpecLike with Matchers { describe("SimpleActorLoader") { //val system = ActorSystem("SimpleActorLoaderSystem") val testMessage: String = "Hello Message" describe("#load( MessageType )") { it("should load a MessageType Actor"){ // Create a new test probe to verify our selection works val messageTypeProbe: TestProbe = new TestProbe(system) // Add an actor to the system to send a message to system.actorOf( Props(classOf[TestProbeProxyActor], messageTypeProbe), name = MessageType.Outgoing.ExecuteInput.toString ) // Create the ActorLoader with our test system val actorLoader: SimpleActorLoader = SimpleActorLoader(system) // Get the actor and send it a message val loadedMessageActor: ActorSelection = actorLoader.load(MessageType.Outgoing.ExecuteInput) loadedMessageActor ! testMessage // Assert the probe received the message messageTypeProbe.expectMsg(MaxAkkaTestTimeout, testMessage) } } } }
Example 119
Source File: StatusDispatchSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.dispatch import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.content.KernelStatus import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers} import play.api.libs.json.Json import test.utils.MaxAkkaTestTimeout class StatusDispatchSpec extends TestKit( ActorSystem( "StatusDispatchSystem", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter{ var statusDispatchRef: ActorRef = _ var relayProbe: TestProbe = _ before { // Mock the relay with a probe relayProbe = TestProbe() // Mock the ActorLoader val mockActorLoader: ActorLoader = mock[ActorLoader] when(mockActorLoader.load(SystemActorType.KernelMessageRelay)) .thenReturn(system.actorSelection(relayProbe.ref.path.toString)) statusDispatchRef = system.actorOf(Props(classOf[StatusDispatch],mockActorLoader)) } describe("StatusDispatch") { describe("#receive( KernelStatusType )") { it("should send a status message to the relay") { statusDispatchRef ! KernelStatusType.Busy // Check the kernel message is the correct type val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage] statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString) // Check the status is what we sent val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus] status.execution_state should be (KernelStatusType.Busy.toString) } } describe("#receive( KernelStatusType, Header )") { it("should send a status message to the relay") { val tuple = Tuple2(KernelStatusType.Busy, mock[Header]) statusDispatchRef ! tuple // Check the kernel message is the correct type val statusMessage: KernelMessage = relayProbe.receiveOne(MaxAkkaTestTimeout).asInstanceOf[KernelMessage] statusMessage.header.msg_type should be (MessageType.Outgoing.Status.toString) // Check the status is what we sent val status: KernelStatus = Json.parse(statusMessage.contentString).as[KernelStatus] status.execution_state should be (KernelStatusType.Busy.toString) } } } }
Example 120
Source File: StreamMethodsSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.api import akka.actor.ActorSystem import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.apache.toree.kernel.protocol.v5 import org.apache.toree.kernel.protocol.v5.KernelMessage import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, BeforeAndAfter, Matchers} import play.api.libs.json.Json import test.utils.MaxAkkaTestTimeout import org.mockito.Mockito._ class StreamMethodsSpec extends TestKit( ActorSystem( "StreamMethodsSpec", None, Some(org.apache.toree.Main.getClass.getClassLoader) ) ) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar with BeforeAndAfter { private var kernelMessageRelayProbe: TestProbe = _ private var mockParentHeader: v5.ParentHeader = _ private var mockActorLoader: v5.kernel.ActorLoader = _ private var mockKernelMessage: v5.KernelMessage = _ private var streamMethods: StreamMethods = _ before { kernelMessageRelayProbe = TestProbe() mockParentHeader = mock[v5.ParentHeader] mockActorLoader = mock[v5.kernel.ActorLoader] doReturn(system.actorSelection(kernelMessageRelayProbe.ref.path)) .when(mockActorLoader).load(v5.SystemActorType.KernelMessageRelay) mockKernelMessage = mock[v5.KernelMessage] doReturn(mockParentHeader).when(mockKernelMessage).header streamMethods = new StreamMethods(mockActorLoader, mockKernelMessage) } describe("StreamMethods") { describe("#()") { it("should put the header of the given message as the parent header") { val expected = mockKernelMessage.header val actual = streamMethods.kmBuilder.build.parentHeader actual should be (expected) } } describe("#sendAll") { it("should send a message containing all of the given text") { val expected = "some text" streamMethods.sendAll(expected) val outgoingMessage = kernelMessageRelayProbe.receiveOne(MaxAkkaTestTimeout) val kernelMessage = outgoingMessage.asInstanceOf[KernelMessage] val actual = Json.parse(kernelMessage.contentString) .as[v5.content.StreamContent].text actual should be (expected) } } } }
Example 121
Source File: UserRepositorySpec.scala From gabbler with Apache License 2.0 | 5 votes |
package de.heikoseeberger.gabbler.user import akka.actor.ActorSystem import akka.persistence.inmemory.query.scaladsl.InMemoryReadJournal import akka.persistence.query.PersistenceQuery import akka.stream.ActorMaterializer import akka.testkit.TestProbe import org.scalatest.{ AsyncWordSpec, BeforeAndAfterAll, Matchers } import scala.concurrent.Await import scala.concurrent.duration.DurationInt class UserRepositorySpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll { import UserRepository._ private implicit val system = ActorSystem() private implicit val mat = ActorMaterializer() private val readJournal = PersistenceQuery(system) .readJournalFor[InMemoryReadJournal](InMemoryReadJournal.Identifier) private val user = User(0, "jsnow", "Jon Snow", "[email protected]") "UserRepository" should { "correctly handle getting, adding and removing users" in { import user._ val userRepository = system.actorOf(UserRepository(readJournal)) val sender = TestProbe() implicit val senderRef = sender.ref userRepository ! GetUsers sender.expectMsg(Users(Set.empty)) userRepository ! AddUser(username, nickname, email) val userAdded = sender.expectMsg(UserAdded(user)) userRepository ! GetUsers sender.expectMsg(Users(Set(user))) userRepository ! AddUser(username, "Jon Targaryen", "[email protected]") sender.expectMsg(UsernameTaken(username)) userRepository ! RemoveUser(id) val userRemoved = sender.expectMsg(UserRemoved(user)) userRepository ! GetUsers sender.expectMsg(Users(Set.empty)) userRepository ! RemoveUser(id) sender.expectMsg(IdUnknown(id)) userRepository ! GetUserEvents(0) val userEvents = sender.expectMsgPF(hint = "source of user events") { case UserEvents(e) => e } userEvents .take(2) .runFold(Vector.empty[(Long, UserEvent)])(_ :+ _) .map( _ should contain inOrder ( (1, userAdded), // The first event has seqNo 1! (2, userRemoved) ) ) } } override protected def afterAll() = { Await.ready(system.terminate(), 42.seconds) super.afterAll() } }
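The spec above makes a TestProbe the implicit sender (implicit val senderRef = sender.ref), so plain `!` replies from the repository arrive at the probe instead of dead letters. The same idea in isolation, as a minimal sketch assuming an implicit ActorSystem named system is in scope and no ImplicitSender mixin is competing for the implicit; EchoActor is a made-up stand-in:

import akka.actor.{Actor, ActorRef, Props}
import akka.testkit.TestProbe

class EchoActor extends Actor {
  override def receive: Receive = { case msg => sender() ! msg } // reply to whoever sent the message
}

val echo = system.actorOf(Props(new EchoActor))
val probe = TestProbe()
implicit val senderRef: ActorRef = probe.ref // every plain ! below now carries probe.ref as the sender

echo ! "hello"
probe.expectMsg("hello") // the reply arrives at the probe rather than at dead letters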
Example 122
Source File: EtcdCoordinationSpec.scala From constructr with Apache License 2.0 | 5 votes |
package de.heikoseeberger.constructr.coordination.etcd import akka.Done import akka.actor.{ ActorSystem, AddressFromURIString } import akka.testkit.{ TestDuration, TestProbe } import com.typesafe.config.ConfigFactory import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec } import scala.concurrent.duration.{ Duration, DurationInt, FiniteDuration } import scala.concurrent.{ Await, Awaitable } import scala.util.Random object EtcdCoordinationSpec { private val coordinationHost = { val dockerHostPattern = """tcp://(\S+):\d{1,5}""".r sys.env .get("DOCKER_HOST") .collect { case dockerHostPattern(address) => address } .getOrElse("127.0.0.1") } } class EtcdCoordinationSpec extends WordSpec with Matchers with BeforeAndAfterAll { import EtcdCoordinationSpec._ private implicit val system = { val config = ConfigFactory .parseString(s"constructr.coordination.host = $coordinationHost") .withFallback(ConfigFactory.load()) ActorSystem("default", config) } private val address = AddressFromURIString("akka.tcp://default@a:2552") private val address2 = AddressFromURIString("akka.tcp://default@b:2552") "EtcdCoordination" should { "correctly interact with etcd" in { val coordination = new EtcdCoordination(randomString(), system) resultOf(coordination.getNodes()) shouldBe 'empty resultOf(coordination.lock(address, 10.seconds.dilated)) shouldBe true resultOf(coordination.lock(address, 10.seconds.dilated)) shouldBe true resultOf(coordination.lock(address2, 10.seconds.dilated)) shouldBe false resultOf(coordination.addSelf(address, 10.seconds.dilated)) shouldBe Done resultOf(coordination.getNodes()) shouldBe Set(address) resultOf(coordination.refresh(address, 1.second.dilated)) shouldBe Done resultOf(coordination.getNodes()) shouldBe Set(address) val probe = TestProbe() probe.within(5.seconds.dilated) { // 2 seconds should be enough, but who knows hows ... probe.awaitAssert { resultOf(coordination.getNodes()) shouldBe 'empty } } } } override protected def afterAll() = { Await.ready(system.terminate(), Duration.Inf) super.afterAll() } private def resultOf[A](awaitable: Awaitable[A], max: FiniteDuration = 3.seconds.dilated) = Await.result(awaitable, max) private def randomString() = math.abs(Random.nextInt).toString }
Example 123
Source File: SupervisionTest.scala From reactive-programming with Apache License 2.0 | 5 votes |
package com.test.week6 import akka.actor._ import akka.event.LoggingReceive import akka.pattern.ask import akka.testkit.TestProbe import com.test.TestSpec import scala.concurrent.duration._ class SupervisionTest extends TestSpec { case class Command(f: () ⇒ Unit) case object Count case object GetState case class CounterState(counter: Long) class Supervisor(tp: TestProbe, svs: SupervisorStrategy) extends Actor { val worker: ActorRef = context.actorOf(Props(new Actor with ActorLogging { var counter = 0L override def receive: Receive = LoggingReceive { case Command(f) ⇒ f() case Count ⇒ counter += 1 case GetState ⇒ sender() ! CounterState(counter) } override def preStart(): Unit = log.debug("Started") override def postStop(): Unit = log.debug("Stopped") }), "worker") tp watch worker override def receive = LoggingReceive { case msg ⇒ worker forward msg } override def supervisorStrategy: SupervisorStrategy = svs } def createSupervisor(tp: TestProbe)(svs: SupervisorStrategy) = system.actorOf(Props(new Supervisor(tp, svs)), s"sup-${randomId.take(3)}") "SupervisorStrategy" should "resume the worker, state should not change, so should be 1" in { val tp = probe val sup = createSupervisor(tp) { OneForOneStrategy() { case t: RuntimeException ⇒ SupervisorStrategy.Resume } } sup ! Count (sup ? GetState).futureValue shouldBe CounterState(1L) sup ! Command(() ⇒ throw new RuntimeException("resume")) (sup ? GetState).futureValue shouldBe CounterState(1L) tp.expectNoMsg(100.millis) // no Terminated message cleanup(sup) } it should "restart the worker, so the worker instance has been replaced, and state should be 0 again" in { val tp = probe val sup = createSupervisor(tp) { OneForOneStrategy() { case t: RuntimeException ⇒ SupervisorStrategy.Restart } } sup ! Count (sup ? GetState).futureValue shouldBe CounterState(1L) sup ! Command(() ⇒ throw new RuntimeException("restart")) (sup ? GetState).futureValue shouldBe CounterState(0L) tp.expectNoMsg(100.millis) // no Terminated message cleanup(sup) } it should "stop the worker, so worker in not there anymore and should not answer" in { val tp = probe val sup = createSupervisor(tp) { OneForOneStrategy() { case t: RuntimeException ⇒ SupervisorStrategy.Stop } } sup ! Command(() ⇒ throw new RuntimeException("stop")) tp.expectMsgPF[Unit](100.millis) { case Terminated(_) ⇒ } cleanup(sup) } }
Example 124
Source File: TestSpec.scala From reactive-programming with Apache License 2.0 | 5 votes |
package com.test import java.io.IOException import java.util.UUID import akka.actor.{ ActorRef, ActorSystem, PoisonPill } import akka.event.{ Logging, LoggingAdapter } import akka.testkit.TestProbe import akka.util.Timeout import org.scalatest.concurrent.{ Eventually, ScalaFutures } import org.scalatest.exceptions.TestFailedException import org.scalatest._ import rx.lang.scala._ import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContextExecutor, Future } import scala.util.{ Random ⇒ Rnd, Try } object Random { def apply(): Rnd = new Rnd() } trait TestSpec extends FlatSpec with Matchers with ScalaFutures with TryValues with OptionValues with Eventually with BeforeAndAfterAll { implicit val system: ActorSystem = ActorSystem("test") implicit val ec: ExecutionContextExecutor = system.dispatcher val log: LoggingAdapter = Logging(system, this.getClass) implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds) implicit val timeout = Timeout(50.seconds) override protected def afterAll(): Unit = { system.terminate() } def cleanup(actors: ActorRef*): Unit = { actors.foreach { (actor: ActorRef) ⇒ actor ! PoisonPill probe watch actor } } implicit class PimpedByteArray(self: Array[Byte]) { def getString: String = new String(self) } implicit class PimpedFuture[T](self: Future[T]) { def toTry: Try[T] = Try(self.futureValue) } implicit class PimpedObservable[T](self: Observable[T]) { def waitFor: Unit = { self.toBlocking.toIterable.last } } implicit class MustBeWord[T](self: T) { def mustBe(pf: PartialFunction[T, Unit]): Unit = if (!pf.isDefinedAt(self)) throw new TestFailedException("Unexpected: " + self, 0) } object Socket { def apply() = new Socket } class Socket { def readFromMemory: Future[Array[Byte]] = Future { Thread.sleep(100) // sleep 100 millis "fromMemory".getBytes } def send(payload: Array[Byte], from: String, failed: Boolean): Future[Array[Byte]] = if (failed) Future.failed(new IOException(s"Network error: $from")) else { Future { Thread.sleep(250) // sleep 250 millis, not real life time, but hey s"${payload.getString}->$from".getBytes } } def sendToEurope(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] = send(payload, "fromEurope", failed) def sendToUsa(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] = send(payload, "fromUsa", failed) } }
Example 125
Source File: CurrentEventsByPersistenceIdQueryTest.scala From apache-spark-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.spark.sstreaming import akka.actor.{ ActorRef, Props } import akka.testkit.TestProbe import com.github.dnvriend.TestSpec import com.github.dnvriend.spark.datasources.SparkImplicits._ import com.github.dnvriend.spark.mapper.PersonEventMapper import org.apache.spark.sql.streaming.{ OutputMode, ProcessingTime } import org.apache.spark.sql.functions._ import org.scalatest.Ignore import scala.concurrent.duration._ @Ignore class CurrentEventsByPersistenceIdQueryTest extends TestSpec { def withPersistentActor(pid: String = randomId, schedule: Boolean = false)(f: ActorRef => TestProbe => Unit): Unit = { val tp = TestProbe() val ref = system.actorOf(Props(new PersonActor(pid, schedule))) try f(ref)(tp) finally killActors(ref) } it should "read events for pid" in withSparkSession { spark => import spark.implicits._ withPersistentActor("person", schedule = true) { ref => tp => tp.send(ref, "persist") tp.expectMsg("ack") val jdbcReadJournal = spark.readStream .schema(PersonEventMapper.schema) .option("pid", "person") .option("event-mapper", "com.github.dnvriend.spark.mapper.PersonEventMapper") .eventsByPersistenceId("jdbc-read-journal") jdbcReadJournal.printSchema() // val numOfEvents = jdbcReadJournal // .groupBy('persistence_id) // .agg(count('sequence_number).as("number_of_events")) val query = jdbcReadJournal .writeStream .format("console") .trigger(ProcessingTime(1.seconds)) .queryName("consoleStream") // .outputMode(OutputMode.Complete()) .outputMode(OutputMode.Append()) .start() query.awaitTermination(20.seconds) } } }
Example 126
Source File: DropRepeatedSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.server.api

import akka.actor.ActorSystem
import akka.pattern.pipe
import akka.stream.Materializer
import akka.stream.scaladsl.{Sink, Source}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.immutable
import scala.concurrent.ExecutionContext

final class DropRepeatedSpec
    extends TestKit(ActorSystem(classOf[DropRepeatedSpec].getSimpleName))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll {

  private[this] implicit val materializer: Materializer = Materializer(system)
  private[this] implicit val executionContext: ExecutionContext = materializer.executionContext

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "DropRepeated" should {
    "drop repeated elements" in {
      val probe = TestProbe()
      val input = immutable.Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5)

      val _ = Source(input)
        .via(DropRepeated())
        .runWith(Sink.seq)
        .pipeTo(probe.ref)
        .failed
        .foreach(fail(_))

      probe.expectMsg(Vector(1, 2, 3, 4, 5))
    }

    "not drop duplicate elements that are not repeated" in {
      val probe = TestProbe()
      val input = immutable.Seq(1, 1, 2, 2, 1, 1, 2, 2)

      val _ = Source(input)
        .via(DropRepeated())
        .runWith(Sink.seq)
        .pipeTo(probe.ref)
        .failed
        .foreach(fail(_))

      probe.expectMsg(Vector(1, 2, 1, 2))
    }
  }
}
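DropRepeatedSpec's trick of piping the materialized Future to probe.ref turns an asynchronous result into an ordinary expectMsg assertion. A minimal sketch of the same pattern with a plain Future, assuming an implicit ActorSystem named system is in scope:

import akka.pattern.pipe
import akka.testkit.TestProbe

import scala.concurrent.{ExecutionContext, Future}

implicit val ec: ExecutionContext = system.dispatcher
val probe = TestProbe()

// The Future's value is delivered to the probe when it completes;
// a failed Future would show up as an akka.actor.Status.Failure message instead.
Future(21 * 2).pipeTo(probe.ref)
probe.expectMsg(42)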
Example 127
Source File: ActorMetricsSpec.scala From prometheus-akka with Apache License 2.0 | 5 votes |
package com.workday.prometheus.akka

import scala.concurrent.{Await, Future}
import scala.concurrent.duration.DurationInt

import akka.actor._
import akka.monitor.instrumentation.CellInfo
import akka.testkit.TestProbe

class ActorMetricsSpec extends TestKitBaseSpec("ActorMetricsSpec") {

  import ActorMetricsTestActor._

  "the actor metrics" should {
    "respect the configured include and exclude filters" in {
      val trackedActor = createTestActor("tracked-actor")
      val nonTrackedActor = createTestActor("non-tracked-actor")
      val excludedTrackedActor = createTestActor("tracked-explicitly-excluded-actor")

      actorMetricsRecorderOf(trackedActor) should not be empty
      actorMetricsRecorderOf(nonTrackedActor) shouldBe empty
      actorMetricsRecorderOf(excludedTrackedActor) shouldBe empty

      val metrics = actorMetricsRecorderOf(trackedActor).get
      metrics.actorName shouldEqual "actormetricsspec_user_tracked_actor"
      metrics.messages.get shouldEqual 1.0
    }

    "handle concurrent metric getOrElseUpdate calls" in {
      implicit val ec = system.dispatcher
      val e = Entity("fake-actor-name", MetricsConfig.Actor)
      val futures = (1 to 100).map{ _ => Future(ActorMetrics.metricsFor(e)) }
      val future = Future.sequence(futures)
      val metrics = Await.result(future, 10.seconds)
      metrics.fold(metrics.head) { (compare, metric) =>
        metric shouldEqual compare
        compare
      }
    }
  }

  def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetrics] = {
    val name = CellInfo.cellName(system, ref)
    val entity = Entity(name, MetricsConfig.Actor)
    if (ActorMetrics.hasMetricsFor(entity)) {
      ActorMetrics.metricsFor(entity)
    } else {
      None
    }
  }

  def createTestActor(name: String): ActorRef = {
    val actor = system.actorOf(Props[ActorMetricsTestActor], name)
    val initialiseListener = TestProbe()

    // Ensure that the actor has been created before returning.
    actor.tell(Ping, initialiseListener.ref)
    initialiseListener.expectMsg(Pong)

    actor
  }
}
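createTestActor above uses a throwaway TestProbe purely as a startup handshake: the actor is sent Ping with the probe as the sender, and the test only proceeds once Pong comes back. The same idea in isolation, as a sketch assuming an implicit ActorSystem named system is in scope; Ping, Pong and ReadyActor are illustrative names, not part of any library:

import akka.actor.{Actor, Props}
import akka.testkit.TestProbe

case object Ping
case object Pong

class ReadyActor extends Actor {
  override def receive: Receive = { case Ping => sender() ! Pong }
}

val actor = system.actorOf(Props(new ReadyActor), "ready-actor")
val handshake = TestProbe()
actor.tell(Ping, handshake.ref) // the reply is routed to the probe
handshake.expectMsg(Pong)       // once this returns, the actor is fully constructed and processing messages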
Example 128
Source File: DispatcherMetricsSpec.scala From prometheus-akka with Apache License 2.0 | 5 votes |
package com.workday.prometheus.akka import scala.concurrent.Future import scala.concurrent.duration._ import scala.collection.JavaConverters._ import akka.actor._ import akka.dispatch.MessageDispatcher import akka.testkit.TestProbe class DispatcherMetricsSpec extends TestKitBaseSpec("DispatcherMetricsSpec") { sealed trait PoolType case object ForkJoinPoolType extends PoolType case object ThreadPoolType extends PoolType "the akka dispatcher metrics" should { "respect the configured include and exclude filters" in { val defaultDispatcher = forceInit(system.dispatchers.lookup("akka.actor.default-dispatcher")) val fjpDispatcher = forceInit(system.dispatchers.lookup("tracked-fjp")) val tpeDispatcher = forceInit(system.dispatchers.lookup("tracked-tpe")) val excludedDispatcher = forceInit(system.dispatchers.lookup("explicitly-excluded")) findDispatcherRecorder(defaultDispatcher.id, ForkJoinPoolType) shouldNot be(empty) findDispatcherRecorder(fjpDispatcher.id, ForkJoinPoolType) shouldNot be(empty) findDispatcherRecorder(tpeDispatcher.id, ThreadPoolType) shouldNot be(empty) findDispatcherRecorder(excludedDispatcher.id, ForkJoinPoolType) should be(empty) } } def findDispatcherRecorder(dispatcherName: String, poolType: PoolType): Map[String, Double] = { val metrics = poolType match { case ForkJoinPoolType => ForkJoinPoolMetrics.collect().asScala.toList case ThreadPoolType => ThreadPoolMetrics.collect().asScala.toList } val values = for(samples <- metrics; sample <- samples.samples.asScala if findUsingSuffix(sample.labelValues.asScala, dispatcherName)) yield (sample.name, sample.value) values.toMap } def findUsingSuffix(list: Seq[String], suffix: String): Boolean = { list.find(v => v.endsWith(suffix)).isDefined } def forceInit(dispatcher: MessageDispatcher): MessageDispatcher = { val listener = TestProbe() Future { listener.ref ! "init done" }(dispatcher) listener.expectMsg("init done") dispatcher } }
Example 129
Source File: RouterMetricsSpec.scala From prometheus-akka with Apache License 2.0 | 5 votes |
package com.workday.prometheus.akka import scala.concurrent.{Await, Future} import scala.concurrent.duration.DurationInt import akka.actor._ import akka.monitor.instrumentation.CellInfo import akka.routing._ import akka.testkit.TestProbe class RouterMetricsSpec extends TestKitBaseSpec("RouterMetricsSpec") { import RouterMetricsTestActor._ "the router metrics" should { "respect the configured include and exclude filters" in { val trackedRouter = createTestPoolRouter("tracked-pool-router") val nonTrackedRouter = createTestPoolRouter("non-tracked-pool-router") val excludedTrackedRouter = createTestPoolRouter("tracked-explicitly-excluded-pool-router") routerMetricsRecorderOf(trackedRouter) should not be empty routerMetricsRecorderOf(nonTrackedRouter) shouldBe empty routerMetricsRecorderOf(excludedTrackedRouter) shouldBe empty val metrics = routerMetricsRecorderOf(trackedRouter).get metrics.actorName shouldEqual "routermetricsspec_user_tracked_pool_router" metrics.messages.get shouldEqual 1.0 } "handle concurrent metric getOrElseUpdate calls" in { implicit val ec = system.dispatcher val e = Entity("fake-actor-name", MetricsConfig.Actor) val futures = (1 to 100).map{ _ => Future(ActorMetrics.metricsFor(e)) } val future = Future.sequence(futures) val metrics = Await.result(future, 10.seconds) metrics.fold(metrics.head) { (compare, metric) => metric shouldEqual compare compare } } } def routerMetricsRecorderOf(ref: ActorRef): Option[RouterMetrics] = { val name = CellInfo.cellName(system, ref) val entity = Entity(name, MetricsConfig.Router) if (RouterMetrics.hasMetricsFor(entity)) { RouterMetrics.metricsFor(entity) } else { None } } def createTestPoolRouter(routerName: String): ActorRef = { val router = system.actorOf(RoundRobinPool(5).props(Props[RouterMetricsTestActor]), routerName) val initialiseListener = TestProbe() // Ensure that the router has been created before returning. router.tell(Ping, initialiseListener.ref) initialiseListener.expectMsg(Pong) router } }
Example 130
Source File: OmmersPoolSpec.scala From mantis with Apache License 2.0 | 5 votes |
package io.iohk.ethereum.ommers

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import akka.util.ByteString
import io.iohk.ethereum.Fixtures.Blocks.Block3125369
import io.iohk.ethereum.Timeouts
import io.iohk.ethereum.domain.{Address, BlockchainImpl}
import io.iohk.ethereum.ommers.OmmersPool.{AddOmmers, GetOmmers, RemoveOmmers}
import io.iohk.ethereum.utils.MiningConfig
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._

class OmmersPoolSpec extends FlatSpec with Matchers with MockFactory {

  "OmmersPool" should "accept ommers" in new TestSetup {
    //just return header
    (blockchain.getBlockHeaderByHash _).expects(*).returns(Some(Block3125369.header))

    ommersPool ! AddOmmers(Block3125369.header)
    ommersPool.!(GetOmmers(Block3125369.header.number + 1))(testProbe.ref)

    testProbe.expectMsg(Timeouts.normalTimeout, OmmersPool.Ommers(Seq(Block3125369.header)))
  }

  "OmmersPool" should "remove ommers" in new TestSetup {
    //just return header
    (blockchain.getBlockHeaderByHash _).expects(*).returns(Some(Block3125369.header))

    ommersPool ! AddOmmers(Block3125369.header)
    ommersPool ! AddOmmers(Block3125369.header.copy(number = 2))
    ommersPool ! RemoveOmmers(Block3125369.header)

    ommersPool.!(GetOmmers(3))(testProbe.ref)

    testProbe.expectMsg(Timeouts.normalTimeout, OmmersPool.Ommers(Seq(Block3125369.header.copy(number = 2))))
  }

  "OmmersPool" should "return ommers when the pool size is exceeded" in new TestSetup {
    //just return header
    (blockchain.getBlockHeaderByHash _).expects(*).returns(Some(Block3125369.header))

    ommersPool ! AddOmmers(Block3125369.header.copy(number = 4))
    ommersPool ! AddOmmers(Block3125369.header.copy(number = 20))
    ommersPool ! AddOmmers(Block3125369.header.copy(number = 30))
    ommersPool ! AddOmmers(Block3125369.header.copy(number = 40))
    ommersPool ! AddOmmers(Block3125369.header.copy(number = 5))

    ommersPool.!(GetOmmers(6))(testProbe.ref)

    testProbe.expectMsg(Timeouts.normalTimeout, OmmersPool.Ommers(Seq(Block3125369.header.copy(number = 5))))
  }

  trait TestSetup extends MockFactory {
    implicit val system = ActorSystem("OmmersPoolSpec_System")

    val miningConfig = new MiningConfig {
      override val ommersPoolSize: Int = 3
      override val coinbase: Address = Address(2)
      override val ommerPoolQueryTimeout: FiniteDuration = Timeouts.normalTimeout
      override val blockCacheSize: Int = 4
      override val activeTimeout: FiniteDuration = Timeouts.normalTimeout
      override val headerExtraData: ByteString = ByteString.empty
      override val miningEnabled: Boolean = false
      override val ethashDir: String = "~/.ethash"
      override val mineRounds: Int = 100000
    }

    val testProbe = TestProbe()

    val blockchain = mock[BlockchainImpl]
    val ommersPool = system.actorOf(OmmersPool.props(blockchain, miningConfig))
  }
}
Example 131
Source File: NetServiceSpec.scala From mantis with Apache License 2.0 | 5 votes |
package io.iohk.ethereum.jsonrpc import java.net.InetSocketAddress import akka.actor.ActorSystem import akka.agent.Agent import akka.testkit.TestProbe import io.iohk.ethereum.{NormalPatience, crypto} import io.iohk.ethereum.jsonrpc.NetService._ import io.iohk.ethereum.network.{Peer, PeerActor, PeerManagerActor} import io.iohk.ethereum.nodebuilder.SecureRandomBuilder import io.iohk.ethereum.utils.{NodeStatus, ServerStatus} import org.scalatest.concurrent.ScalaFutures import org.scalatest.{FlatSpec, Matchers} import scala.concurrent.duration._ import scala.concurrent.ExecutionContext.Implicits.global class NetServiceSpec extends FlatSpec with Matchers with ScalaFutures with NormalPatience with SecureRandomBuilder { "NetService" should "return handshaked peer count" in new TestSetup { val resF = netService.peerCount(PeerCountRequest()) peerManager.expectMsg(PeerManagerActor.GetPeers) peerManager.reply(PeerManagerActor.Peers(Map( Peer(new InetSocketAddress(1), testRef, false) -> PeerActor.Status.Handshaked, Peer(new InetSocketAddress(2), testRef, false) -> PeerActor.Status.Handshaked, Peer(new InetSocketAddress(3), testRef, false) -> PeerActor.Status.Connecting))) resF.futureValue shouldBe Right(PeerCountResponse(2)) } it should "return listening response" in new TestSetup { netService.listening(ListeningRequest()).futureValue shouldBe Right(ListeningResponse(true)) } it should "return version response" in new TestSetup { netService.version(VersionRequest()).futureValue shouldBe Right(VersionResponse("1")) } trait TestSetup { implicit val system = ActorSystem("Testsystem") val testRef = TestProbe().ref val peerManager = TestProbe() val nodeStatus = NodeStatus(crypto.generateKeyPair(secureRandom), ServerStatus.Listening(new InetSocketAddress(9000)), discoveryStatus = ServerStatus.NotListening) val netService = new NetService(Agent(nodeStatus), peerManager.ref, NetServiceConfig(5.seconds)) } }
Example 132
Source File: TestSpec.scala From akka-serialization-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend import akka.actor.{ ActorRef, ActorSystem, PoisonPill } import akka.event.{ Logging, LoggingAdapter } import akka.serialization.SerializationExtension import akka.stream.{ ActorMaterializer, Materializer } import akka.testkit.TestProbe import akka.util.Timeout import org.scalatest.concurrent.{ Eventually, ScalaFutures } import org.scalatest.prop.PropertyChecks import org.scalatest.{ BeforeAndAfterAll, FlatSpec, GivenWhenThen, Matchers } import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } import scala.util.Try trait TestSpec extends FlatSpec with Matchers with GivenWhenThen with ScalaFutures with BeforeAndAfterAll with Eventually with PropertyChecks with AkkaPersistenceQueries with AkkaStreamUtils with InMemoryCleanup { implicit val timeout: Timeout = Timeout(10.seconds) implicit val system: ActorSystem = ActorSystem() implicit val ec: ExecutionContext = system.dispatcher implicit val mat: Materializer = ActorMaterializer() implicit val log: LoggingAdapter = Logging(system, this.getClass) implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds) val serialization = SerializationExtension(system) implicit class FutureToTry[T](f: Future[T]) { def toTry: Try[T] = Try(f.futureValue) } def killActors(actors: ActorRef*): Unit = { val probe = TestProbe() actors.foreach { actor ⇒ probe watch actor actor ! PoisonPill probe expectTerminated actor } } override protected def afterAll(): Unit = { system.terminate() system.whenTerminated.toTry should be a 'success } }
Example 133
Source File: PersonTest.scala From akka-serialization-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.domain import akka.actor.{ ActorRef, Props } import akka.pattern.ask import akka.persistence.query.EventEnvelope import akka.stream.scaladsl.{ Sink, Source } import akka.testkit.TestProbe import com.github.dnvriend.TestSpec import com.github.dnvriend.domain.Person._ import com.github.dnvriend.persistence.ProtobufReader import proto.person.Command._ class PersonTest extends TestSpec { import com.github.dnvriend.persistence.ProtobufFormats._ def withPerson(id: String)(f: ActorRef ⇒ TestProbe ⇒ Unit): Unit = { val tp = TestProbe() val ref = system.actorOf(Props(new Person(id))) try f(ref)(tp) finally killActors(ref) } "Person" should "register a name" in { withPerson("p1") { ref ⇒ tp ⇒ Source(List(RegisterNameCommand("dennis", "vriend"))) .mapAsync(1)(ref ? _).runWith(Sink.ignore).futureValue } withPerson("p1") { ref ⇒ tp ⇒ Source(List(RegisterNameCommand("dennis", "vriend"))) .mapAsync(1)(ref ? _).runWith(Sink.ignore).futureValue } // note that the persistence-query does not use the deserializer // so the protobuf must be deserialized inline eventsForPersistenceIdSource("p1").collect { case EventEnvelope(_, _, _, proto: NameRegisteredMessage) ⇒ implicitly[ProtobufReader[NameRegisteredEvent]].read(proto) }.testProbe { tp ⇒ tp.request(Int.MaxValue) tp.expectNext(NameRegisteredEvent("dennis", "vriend")) tp.expectNext(NameRegisteredEvent("dennis", "vriend")) tp.expectComplete() } } it should "update its name and surname" in { withPerson("p2") { ref ⇒ tp ⇒ Source(List(RegisterNameCommand("dennis", "vriend"), ChangeNameCommand("jimi"), ChangeSurnameCommand("hendrix"))) .mapAsync(1)(ref ? _).runWith(Sink.ignore).futureValue } eventsForPersistenceIdSource("p2").collect { case EventEnvelope(_, _, _, proto: NameRegisteredMessage) ⇒ implicitly[ProtobufReader[NameRegisteredEvent]].read(proto) case EventEnvelope(_, _, _, proto: NameChangedMessage) ⇒ implicitly[ProtobufReader[NameChangedEvent]].read(proto) case EventEnvelope(_, _, _, proto: SurnameChangedMessage) ⇒ implicitly[ProtobufReader[SurnameChangedEvent]].read(proto) }.testProbe { tp ⇒ tp.request(Int.MaxValue) tp.expectNext(NameRegisteredEvent("dennis", "vriend")) tp.expectNext(NameChangedEvent("jimi")) tp.expectNext(SurnameChangedEvent("hendrix")) tp.expectComplete() } } }
Example 134
Source File: AskActorSpec.scala From matcher with MIT License | 5 votes |
package com.wavesplatform.dex.actors import akka.actor.ActorRef import akka.testkit.TestProbe import com.wavesplatform.dex.test.matchers.DiffMatcherWithImplicits import com.wavesplatform.dex.time.SystemTime import org.scalatest.freespec.AnyFreeSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.DurationInt import scala.concurrent.{Await, Future, TimeoutException} class AskActorSpec extends AnyFreeSpec with Matchers with SystemTime with MatcherSpecLike with DiffMatcherWithImplicits { private val defaultTimeout = 5.seconds private val defaultResponse = "foo" "AskActor" - { "happy path" in test { (ref, future) => ref ! defaultResponse val actual = Await.result(future, defaultTimeout) actual should matchTo(defaultResponse) } "timeout" in test { (_, future) => Await.result(future.failed, defaultTimeout) shouldBe a[TimeoutException] } "unexpected response type" in test { (ref, future) => ref ! 100500 Await.result(future.failed, defaultTimeout) shouldBe a[IllegalArgumentException] } } private def test(f: (ActorRef, Future[String]) => Unit): Unit = { val (ref, future) = AskActor.mk[String](100.millis) val p = TestProbe() p.watch(ref) f(ref, future) p.expectTerminated(ref, defaultTimeout) } override protected def actorSystemName: String = "AskActorSpec" }
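The test helper above verifies the AskActor's lifecycle by having a probe watch it and then asserting a Terminated notification. That watch/expectTerminated pairing is handy whenever an actor is supposed to stop itself; here is a minimal sketch, assuming an implicit ActorSystem named system is in scope (the PoisonPill just forces a stop for illustration, and ShortLived is a made-up actor):

import akka.actor.{Actor, PoisonPill, Props}
import akka.testkit.TestProbe

import scala.concurrent.duration._

class ShortLived extends Actor {
  override def receive: Receive = { case _ => () }
}

val ref = system.actorOf(Props(new ShortLived))
val probe = TestProbe()
probe.watch(ref)                       // the probe now receives Terminated(ref) when the actor stops
ref ! PoisonPill
probe.expectTerminated(ref, 3.seconds) // fails the test if the actor outlives the timeout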
Example 135
Source File: ClusterSingletonHelperTest.scala From akka-tools with MIT License | 5 votes |
package no.nextgentel.oss.akkatools.cluster import akka.actor.{Actor, ActorRef, ActorSystem, Props} import akka.testkit.{TestKit, TestProbe} import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuiteLike, Matchers} import org.slf4j.LoggerFactory import scala.util.Random object ClusterSingletonHelperTest { val port = 20000 + Random.nextInt(20000) } class ClusterSingletonHelperTest (_system:ActorSystem) extends TestKit(_system) with FunSuiteLike with Matchers with BeforeAndAfterAll with BeforeAndAfter { def this() = this(ActorSystem("test-actor-system", ConfigFactory.parseString( s"""akka.actor.provider = "akka.cluster.ClusterActorRefProvider" |akka.remote.enabled-transports = ["akka.remote.netty.tcp"] |akka.remote.netty.tcp.hostname="localhost" |akka.remote.netty.tcp.port=${ClusterSingletonHelperTest.port} |akka.cluster.seed-nodes = ["akka.tcp://test-actor-system@localhost:${ClusterSingletonHelperTest.port}"] """.stripMargin ).withFallback(ConfigFactory.load("application-test.conf")))) override def afterAll { TestKit.shutdownActorSystem(system) } val log = LoggerFactory.getLogger(getClass) test("start and communicate with cluster-singleton") { val started = TestProbe() val proxy = ClusterSingletonHelper.startClusterSingleton(system, Props(new OurClusterSingleton(started.ref)), "ocl") started.expectMsg("started") val sender = TestProbe() sender.send(proxy, "ping") sender.expectMsg("pong") } } class OurClusterSingleton(started:ActorRef) extends Actor { started ! "started" def receive = { case "ping" => sender ! "pong" } }
Example 136
Source File: GeneralAggregateWithShardingTest.scala From akka-tools with MIT License | 5 votes |
package no.nextgentel.oss.akkatools.aggregate import java.util.{Arrays, UUID} import akka.actor.ActorSystem import akka.actor.Status.Failure import akka.testkit.{TestKit, TestProbe} import com.typesafe.config.ConfigFactory import no.nextgentel.oss.akkatools.aggregate.testAggregate.StateName._ import no.nextgentel.oss.akkatools.aggregate.testAggregate.{StateName, _} import no.nextgentel.oss.akkatools.testing.AggregateTesting import org.scalatest._ import org.slf4j.LoggerFactory import scala.util.Random object GeneralAggregateWithShardingTest { val port = 20000 + Random.nextInt(20000) } class GeneralAggregateWithShardingTest(_system:ActorSystem) extends TestKit(_system) with FunSuiteLike with Matchers with BeforeAndAfterAll with BeforeAndAfter { def this() = this(ActorSystem("test-actor-system", ConfigFactory.parseString( s"""akka.actor.provider = "akka.cluster.ClusterActorRefProvider" |akka.remote.enabled-transports = ["akka.remote.netty.tcp"] |akka.remote.netty.tcp.hostname="localhost" |akka.remote.netty.tcp.port=${GeneralAggregateWithShardingTest.port} |akka.cluster.seed-nodes = ["akka.tcp://test-actor-system@localhost:${GeneralAggregateWithShardingTest.port}"] """.stripMargin ).withFallback(ConfigFactory.load("application-test.conf")))) override def afterAll { TestKit.shutdownActorSystem(system) } val log = LoggerFactory.getLogger(getClass) private def generateId() = UUID.randomUUID().toString val seatIds = List("s1","id-used-in-Failed-in-onAfterValidationSuccess", "s2", "s3-This-id-is-going-to-be-discarded", "s4") trait TestEnv extends AggregateTesting[BookingState] { val id = generateId() val printShop = TestProbe() val cinema = TestProbe() val onSuccessDmForwardReceiver = TestProbe() val starter = new AggregateStarterSimple("booking", system).withAggregatePropsCreator { dmSelf => BookingAggregate.props(dmSelf, dmForwardAndConfirm(printShop.ref).path, dmForwardAndConfirm(cinema.ref).path, seatIds, dmForwardAndConfirm(onSuccessDmForwardReceiver.ref).path) } val main = starter.dispatcher starter.start() def assertState(correctState:BookingState): Unit = { assert(getState(id) == correctState) } } test("normal flow") { new TestEnv { // Make sure we start with empty state assertState(BookingState.empty()) val maxSeats = 2 val sender = TestProbe() // Open the booking println("1") sendDMBlocking(main, OpenBookingCmd(id, maxSeats), sender.ref) println("2") assertState(BookingState(OPEN, maxSeats, Set())) } } }
Example 137
Source File: GeneralAggregateBaseTest_handleSnapshotMessages.scala From akka-tools with MIT License | 5 votes |
package no.nextgentel.oss.akkatools.aggregate.aggregateTest_usingAggregateStateBase import java.util.UUID import akka.actor.{ActorPath, ActorSystem, Props} import akka.persistence.{DeleteMessagesFailure, DeleteMessagesSuccess, SaveSnapshotFailure, SaveSnapshotSuccess, SnapshotMetadata, SnapshotOffer} import akka.testkit.{TestKit, TestProbe} import com.typesafe.config.ConfigFactory import no.nextgentel.oss.akkatools.aggregate._ import no.nextgentel.oss.akkatools.testing.AggregateTesting import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuiteLike, Matchers} import org.slf4j.LoggerFactory override def onSnapshotOffer(offer: SnapshotOffer): Unit = { state = offer.snapshot.asInstanceOf[StringState] } override def acceptSnapshotRequest(req: SaveSnapshotOfCurrentState): Boolean = { if (state == StringState("WAT")) { state = StringState("SAVED") true } else { state = StringState("WAT") //So it works second time false } } override def onSnapshotSuccess(success: SaveSnapshotSuccess): Unit = { state = StringState("SUCCESS_SNAP") } override def onSnapshotFailure(failure: SaveSnapshotFailure): Unit = { state = StringState("FAIL_SNAP") } override def onDeleteMessagesSuccess(success: DeleteMessagesSuccess): Unit = { state = StringState("SUCCESS_MSG") } override def onDeleteMessagesFailure(failure: DeleteMessagesFailure): Unit = { state = StringState("FAIL_MSG") } // Used as prefix/base when constructing the persistenceId to use - the unique ID is extracted runtime from actorPath which is construced by Sharding-coordinator override def persistenceIdBase(): String = "/x/" } case class StringEv(data: String) case class StringState(data:String) extends AggregateStateBase[StringEv, StringState] { override def transitionState(event: StringEv): StateTransition[StringEv, StringState] = StateTransition(StringState(event.data)) }
Example 138
Source File: ActorWithDMSupportTest.scala From akka-tools with MIT License | 5 votes |
package no.nextgentel.oss.akkatools.persistence

import java.util.concurrent.TimeUnit

import akka.actor.{Props, ActorSystem}
import akka.testkit.{TestProbe, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Matchers, FunSuiteLike}

import scala.concurrent.duration.FiniteDuration

class ActorWithDMSupportTest(_system:ActorSystem) extends TestKit(_system) with FunSuiteLike with Matchers with BeforeAndAfterAll with BeforeAndAfter {
  def this() = this(ActorSystem("ActorWithDMSupportTest", ConfigFactory.load("application-test.conf")))

  test("success with dm") {
    val a = system.actorOf(Props(new TestActorWithDMSupport()))
    val s = TestProbe()

    // send raw
    s.send(a, "sendok")
    s.expectMsg("ok")

    // send via dm and withNewPayload
    val dm = DurableMessage(1L, "sendok", s.ref.path)
    s.send(a, dm)
    s.expectMsg(dm.withNewPayload("ok"))

    // send raw - do nothing
    s.send(a, "silent")

    // send silent - wait for confirm
    s.send(a, DurableMessage(1L, "silent", s.ref.path))
    s.expectMsg( DurableMessageReceived(1,None) )

    // send noconfirm - with dm
    s.send(a, DurableMessage(1L, "no-confirm", s.ref.path))
    s.expectNoMessage(FiniteDuration(500, TimeUnit.MILLISECONDS))

    // send noconfirm - with dm
    s.send(a, DurableMessage(1L, "no-confirm-custom", s.ref.path))
    s.expectNoMessage(FiniteDuration(500, TimeUnit.MILLISECONDS))

    // send noconfirm - without dm
    s.send(a, "no-confirm")
    s.expectNoMessage(FiniteDuration(500, TimeUnit.MILLISECONDS))

    // send noconfirm - without dm
    s.send(a, "no-confirm-custom")
    s.expectNoMessage(FiniteDuration(500, TimeUnit.MILLISECONDS))
  }
}

class TestActorWithDMSupport extends ActorWithDMSupport {
  // All raw messages or payloads in DMs are passed to this function.
  override def receivePayload = {
    case "sendok" =>
      send(sender.path, "ok")
    case "silent" =>
      Unit
    case "no-confirm" =>
      throw new LogWarningAndSkipDMConfirmException("something went wrong")
    case "no-confirm-custom" =>
      throw new CustomLogWarningAndSkipDMConfirm()
  }
}

class CustomLogWarningAndSkipDMConfirm extends Exception("") with LogWarningAndSkipDMConfirm
Example 139
Source File: AkkaQuickstartSpec.scala From didactic-computing-machine with GNU Affero General Public License v3.0 | 5 votes |
//#full-example package com.lightbend.akka.sample import org.scalatest.{ BeforeAndAfterAll, FlatSpecLike, Matchers } import akka.actor.{ Actor, Props, ActorSystem } import akka.testkit.{ ImplicitSender, TestKit, TestActorRef, TestProbe } import scala.concurrent.duration._ import Greeter._ import Printer._ //#test-classes class AkkaQuickstartSpec(_system: ActorSystem) extends TestKit(_system) with Matchers with FlatSpecLike with BeforeAndAfterAll { //#test-classes def this() = this(ActorSystem("AkkaQuickstartSpec")) override def afterAll: Unit = { shutdown(system) } //#first-test //#specification-example "A Greeter Actor" should "pass on a greeting message when instructed to" in { //#specification-example val testProbe = TestProbe() val helloGreetingMessage = "hello" val helloGreeter = system.actorOf(Greeter.props(helloGreetingMessage, testProbe.ref)) val greetPerson = "Akka" helloGreeter ! WhoToGreet(greetPerson) helloGreeter ! Greet testProbe.expectMsg(500 millis, Greeting(s"$helloGreetingMessage, $greetPerson")) } //#first-test } //#full-example
Example 140
Source File: TestSpec.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams import akka.NotUsed import akka.actor.{ ActorRef, ActorSystem, PoisonPill } import akka.event.{ Logging, LoggingAdapter } import akka.stream.Materializer import akka.stream.scaladsl.Source import akka.stream.testkit.TestSubscriber import akka.stream.testkit.scaladsl.TestSink import akka.testkit.TestProbe import akka.util.Timeout import com.github.dnvriend.streams.util.ClasspathResources import org.scalatest._ import org.scalatest.concurrent.{ Eventually, ScalaFutures } import org.scalatestplus.play.guice.GuiceOneServerPerSuite import play.api.inject.BindingKey import play.api.libs.json.{ Format, Json } import play.api.test.WsTestClient import scala.collection.immutable._ import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } import scala.reflect.ClassTag import scala.util.Try object Person { implicit val format: Format[Person] = Json.format[Person] } final case class Person(firstName: String, age: Int) class TestSpec extends FlatSpec with Matchers with GivenWhenThen with OptionValues with TryValues with ScalaFutures with WsTestClient with BeforeAndAfterAll with BeforeAndAfterEach with Eventually with ClasspathResources with GuiceOneServerPerSuite { def getComponent[A: ClassTag] = app.injector.instanceOf[A] def getNamedComponent[A](name: String)(implicit ct: ClassTag[A]): A = app.injector.instanceOf[A](BindingKey(ct.runtimeClass.asInstanceOf[Class[A]]).qualifiedWith(name)) // set the port number of the HTTP server override lazy val port: Int = 8081 implicit val timeout: Timeout = 1.second implicit val pc: PatienceConfig = PatienceConfig(timeout = 30.seconds, interval = 300.millis) implicit val system: ActorSystem = getComponent[ActorSystem] implicit val ec: ExecutionContext = getComponent[ExecutionContext] implicit val mat: Materializer = getComponent[Materializer] val log: LoggingAdapter = Logging(system, this.getClass) // ================================== Supporting Operations ==================================== def id: String = java.util.UUID.randomUUID().toString implicit class FutureToTry[T](f: Future[T]) { def toTry: Try[T] = Try(f.futureValue) } implicit class SourceOps[A](src: Source[A, NotUsed]) { def testProbe(f: TestSubscriber.Probe[A] ⇒ Unit): Unit = f(src.runWith(TestSink.probe(system))) } def withIterator[T](start: Int = 0)(f: Source[Int, NotUsed] ⇒ T): T = f(Source.fromIterator(() ⇒ Iterator from start)) def fromCollection[A](xs: Iterable[A])(f: TestSubscriber.Probe[A] ⇒ Unit): Unit = f(Source(xs).runWith(TestSink.probe(system))) def killActors(refs: ActorRef*): Unit = { val tp = TestProbe() refs.foreach { ref ⇒ tp watch ref tp.send(ref, PoisonPill) tp.expectTerminated(ref) } } }
Example 141
Source File: ActorRefWithAckTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.sink import akka.actor.{ Actor, ActorRef, Props } import akka.stream.scaladsl.{ Sink, Source } import akka.stream.testkit.TestPublisher import akka.stream.testkit.scaladsl.TestSource import akka.testkit.TestProbe import com.github.dnvriend.streams.TestSpec import scala.concurrent.duration._ import scala.reflect.ClassTag // see: https://github.com/akka/akka/blob/4acc1cca6a27be0ff80f801de3640f91343dce94/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefBackpressureSinkSpec.scala object ActorRefWithAckTest { final val InitMessage = "start" final val CompleteMessage = "done" final val AckMessage = "ack" class Forwarder(ref: ActorRef) extends Actor { def receive = { case msg @ `InitMessage` ⇒ sender() ! AckMessage ref forward msg case msg @ `CompleteMessage` ⇒ ref forward msg case msg ⇒ sender() ! AckMessage ref forward msg } } } class ActorRefWithAckTest extends TestSpec { import ActorRefWithAckTest._ def createActor[A: ClassTag](testProbeRef: ActorRef): ActorRef = system.actorOf(Props(implicitly[ClassTag[A]].runtimeClass, testProbeRef)) def withForwarder(xs: Int*)(f: TestProbe ⇒ Unit): Unit = { val tp = TestProbe() val ref = createActor[Forwarder](tp.ref) Source(xs.toList).runWith(Sink.actorRefWithAck(ref, InitMessage, AckMessage, CompleteMessage)) try f(tp) finally killActors(ref) } def withTestPublisher[A](f: (TestPublisher.Probe[A], TestProbe, ActorRef) ⇒ Unit): Unit = { val tp = TestProbe() val ref = createActor[Forwarder](tp.ref) val pub: TestPublisher.Probe[A] = TestSource.probe[A].to(Sink.actorRefWithAck(ref, InitMessage, AckMessage, CompleteMessage)).run() try f(pub, tp, ref) finally killActors(ref) } it should "send the elements to the ActorRef" in { // which means that the forwarder actor that acts as a sink // will initially receive an InitMessage // next it will receive each `payload` element, here 1, 2 and 3, // finally the forwarder will receive the CompletedMessage, stating that // the producer completes the stream because there are no more elements (a finite stream) withForwarder(1, 2, 3) { tp ⇒ tp.expectMsg(InitMessage) tp.expectMsg(1) tp.expectMsg(2) tp.expectMsg(3) tp.expectMsg(CompleteMessage) tp.expectNoMsg(100.millis) } } it should "send the elements to the ActorRef manually 1, 2 and 3" in { withTestPublisher[Int] { (pub, tp, _) ⇒ pub.sendNext(1) tp.expectMsg(InitMessage) tp.expectMsg(1) pub.sendNext(2) tp.expectMsg(2) pub.sendNext(3) tp.expectMsg(3) pub.sendComplete() tp.expectMsg(CompleteMessage) tp.expectNoMsg(100.millis) } } it should "cancel stream when actor terminates" in { withTestPublisher[Int] { (pub, tp, ref) ⇒ pub.sendNext(1) tp.expectMsg(InitMessage) tp.expectMsg(1) killActors(ref) pub.expectCancellation() } } }
Example 142
Source File: ActorSubscriberTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.sink import akka.Done import akka.actor.Actor.Receive import akka.actor.{ ActorRef, Props } import akka.event.LoggingReceive import akka.stream.actor.ActorSubscriberMessage.{ OnComplete, OnError, OnNext } import akka.stream.actor.{ ActorSubscriber, OneByOneRequestStrategy, RequestStrategy } import akka.stream.scaladsl.{ Sink, Source } import akka.stream.testkit.TestPublisher import akka.stream.testkit.scaladsl.TestSource import akka.testkit.TestProbe import com.github.dnvriend.streams.TestSpec import com.github.dnvriend.streams.sink.ActorSubscriberTest.TestActorSubscriber import scala.concurrent.Future import scala.reflect.ClassTag object ActorSubscriberTest { final val OnNextMessage = "onNext" final val OnCompleteMessage = "onComplete" final val OnErrorMessage = "onError" class TestActorSubscriber(ref: ActorRef) extends ActorSubscriber { override protected val requestStrategy: RequestStrategy = OneByOneRequestStrategy override def receive: Receive = LoggingReceive { case OnNext(msg) ⇒ ref ! OnNextMessage case OnComplete ⇒ ref ! OnCompleteMessage case OnError(cause) ⇒ ref ! OnErrorMessage } } } //class ActorSubscriberTest extends TestSpec { // def withForwarder(xs: Int*)(f: TestProbe ⇒ Unit): Unit = { // val tp = TestProbe() // val ref = new TestActorSubscriber(tp.ref) // Source(xs.toList).to(Sink.actorSubscriber(Props())).mapMaterializedValue(_ ⇒ Future.successful[Done]).run() // try f(tp) finally killActors(ref) // } // //}
Example 143
Source File: AmqpSubscriberPerfSpec.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.amqp import akka.Done import akka.actor.{Actor, ActorSystem, Props} import akka.http.scaladsl.Http import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import akka.pattern._ import akka.stream.ActorMaterializer import akka.testkit.{TestKit, TestProbe} import dispatch.url import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Ignore} import rhttpc.transport.{Deserializer, InboundQueueData, OutboundQueueData, Serializer} import scala.concurrent.duration._ import scala.concurrent.{Await, Future} import scala.util.{Random, Try} @Ignore class AmqpSubscriberPerfSpec extends TestKit(ActorSystem("AmqpSubscriberPerfSpec")) with FlatSpecLike with BeforeAndAfterAll { import system.dispatcher implicit val materializer = ActorMaterializer() implicit def serializer[Msg] = new Serializer[Msg] { override def serialize(obj: Msg): String = obj.toString } implicit def deserializer[Msg] = new Deserializer[Msg] { override def deserialize(value: String): Try[Msg] = Try(value.asInstanceOf[Msg]) } val queueName = "request" val outboundQueueData = OutboundQueueData(queueName, autoDelete = true, durability = false) val inboundQueueData = InboundQueueData(queueName, batchSize = 10, parallelConsumers = 10, autoDelete = true, durability = false) val count = 100 private val interface = "localhost" private val port = 8081 def handle(request: HttpRequest) = { val delay = 5 + Random.nextInt(10) after(delay.seconds, system.scheduler)(Future.successful(HttpResponse())) } it should "have a good throughput" in { val bound = Await.result( Http().bindAndHandleAsync( handle, interface, port ), 5.seconds ) val http = dispatch.Http() // .configure(_.setMaxConnections(count) // .setExecutorService(Executors.newFixedThreadPool(count))) val connection = Await.result(AmqpConnectionFactory.connect(system), 5 seconds) val transport = AmqpTransport( connection = connection ) val publisher = transport.publisher[String](outboundQueueData) val probe = TestProbe() val actor = system.actorOf(Props(new Actor { override def receive: Receive = { case str: String => http(url(s"http://$interface:$port") OK identity).map(_ => Done).pipeTo(self)(sender()) case Done => probe.ref ! Done sender() ! Done } })) val subscriber = transport.subscriber[String](inboundQueueData, actor) subscriber.start() try { measureMeanThroughput(count) { (1 to count).foreach { _ => publisher.publish("x") } probe.receiveWhile(10 minutes, messages = count) { case a => a } } } finally { Await.result(subscriber.stop(), 5.seconds) connection.close(5 * 1000) Await.result(bound.unbind(), 5.seconds) } } def measureMeanThroughput(count: Int)(consume: => Unit) = { val before = System.currentTimeMillis() consume val msgsPerSecond = count / ((System.currentTimeMillis() - before).toDouble / 1000) println(s"Throughput was: $msgsPerSecond msgs/sec") } override protected def afterAll(): Unit = { shutdown() } }
Example 144
Source File: InMemTransportSpec.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.inmem

import akka.actor.{ActorSystem, Status}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest._
import rhttpc.transport.PubSubTransport

import scala.concurrent.Await
import scala.concurrent.duration._

class InMemTransportSpec extends TestKit(ActorSystem("InMemTransportSpec"))
  with fixture.FlatSpecLike
  with BeforeAndAfterAll {

  import rhttpc.transport.dumb._

  val someQueueName = "fooQueue"
  val someMessage = "fooMessage"
  val someMessage2 = "fooMessage2"

  it should "deliver message to consumer subscribed before publishing" in { transport =>
    val probe = TestProbe()
    val subscriber = transport.subscriber[String](someQueueName, probe.testActor)
    subscriber.start()
    val publisher = transport.publisher[String](someQueueName)
    publisher.publish(someMessage)
    probe.expectMsg(someMessage)
    probe.reply(Unit)
  }

  it should "deliver message to consumer subscribed after publishing" in { transport =>
    val probe = TestProbe()
    val publisher = transport.publisher[String](someQueueName)
    val subscriber = transport.subscriber[String](someQueueName, probe.testActor)
    subscriber.start()
    publisher.publish(someMessage)
    probe.expectMsg(someMessage)
    probe.reply(Unit)
  }

  it should "deliver message to consumer started after publishing" in { transport =>
    val probe = TestProbe()
    val publisher = transport.publisher[String](someQueueName)
    val subscriber = transport.subscriber[String](someQueueName, probe.testActor)
    publisher.publish(someMessage)
    subscriber.start()
    probe.expectMsg(someMessage)
    probe.reply(Unit)
  }

  it should "deliver message to multiple consumers" in { transport =>
    val probe1 = TestProbe()
    val subscriber = transport.subscriber[String](someQueueName, probe1.testActor)
    subscriber.start()
    val probe2 = TestProbe()
    val subscriber2 = transport.subscriber[String](someQueueName, probe2.testActor)
    subscriber2.start()
    val publisher = transport.publisher[String](someQueueName)
    publisher.publish(someMessage)
    publisher.publish(someMessage2)
    probe1.expectMsg(someMessage)
    probe1.reply(Unit)
    probe2.expectMsg(someMessage2)
    probe2.reply(Unit)
  }

  it should "retry message if failure" in { transport =>
    val probe = TestProbe()
    val subscriber = transport.subscriber[String](someQueueName, probe.testActor)
    subscriber.start()
    val publisher = transport.publisher[String](someQueueName)
    publisher.publish(someMessage)
    probe.expectMsg(someMessage)
    probe.reply(Status.Failure(new Exception("failure")))
    probe.expectMsg(someMessage)
    probe.reply(Unit)
  }

  override type FixtureParam = PubSubTransport

  override protected def withFixture(test: OneArgTest): Outcome = {
    val transport = InMemTransport(retryDelay = 0.seconds)
    try {
      test(transport)
    } finally {
      Await.result(transport.stop(), InMemDefaults.stopTimeout)
    }
  }

  override protected def afterAll(): Unit = {
    shutdown()
  }
}
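The consumer probe above acknowledges every delivery with probe.reply, which answers the sender of the last received message (or rejects it with a Status.Failure to trigger a retry). The request/acknowledge handshake on its own, as a minimal sketch assuming an implicit ActorSystem is in scope; the "ack" payload is illustrative:

import akka.testkit.TestProbe

val consumer = TestProbe() // plays the subscriber-side actor
val producer = TestProbe() // plays whatever delivers the message

producer.send(consumer.ref, "fooMessage") // delivered with producer.ref as the sender
consumer.expectMsg("fooMessage")
consumer.reply("ack")                     // goes back to the sender of the last message the probe received
producer.expectMsg("ack")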
Example 145
Source File: MessageDispatcherActorSpec.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.client.subscription import java.util.UUID import akka.actor.{ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import org.scalatest._ import rhttpc.client.protocol.{Correlated, SuccessExchange} class MessageDispatcherActorSpec extends TestKit(ActorSystem("MessageDispatcherActorSpec")) with ImplicitSender with FlatSpecLike with Matchers { it should "ack after promise -> confirm -> reply -> consumed" in { val actor = system.actorOf(Props[MessageDispatcherActor]) val sub = SubscriptionOnResponse(UUID.randomUUID().toString) actor ! RegisterSubscriptionPromise(sub) val replyMock = TestProbe() actor ! ConfirmOrRegisterSubscription(sub, replyMock.ref) val ackProbe = TestProbe() ackProbe.send(actor, Correlated(SuccessExchange("fooReq", "foo"), sub.correlationId)) replyMock.expectMsg(MessageFromSubscription("foo", sub)) ackProbe.expectNoMsg() replyMock.reply("ok") ackProbe.expectMsg("ok") () } it should "ack after promise -> reply -> confirm -> consumed" in { val actor = system.actorOf(Props[MessageDispatcherActor]) val sub = SubscriptionOnResponse(UUID.randomUUID().toString) actor ! RegisterSubscriptionPromise(sub) val ackProbe = TestProbe() ackProbe.send(actor, Correlated(SuccessExchange("fooReq", "foo"), sub.correlationId)) val replyMock = TestProbe() actor ! ConfirmOrRegisterSubscription(sub, replyMock.ref) replyMock.expectMsg(MessageFromSubscription("foo", sub)) ackProbe.expectNoMsg() replyMock.reply("ok") ackProbe.expectMsg("ok") () } }
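In the spec above, one probe registers as the reply target while a second probe sends the correlated message, waits with expectNoMsg, and only receives the acknowledgement after the first probe replies. A minimal sketch of that send/expectNoMsg/deferred-ack choreography (the dispatcher actor and its messages are invented for illustration) might be:

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object DeferredAckSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  // Remembers who sent "work" and acknowledges only once "consumed" arrives.
  class Dispatcher extends Actor {
    private var client: Option[ActorRef] = None
    def receive = {
      case "work"     => client = Some(sender())
      case "consumed" => client.foreach(_ ! "ok")
    }
  }

  val dispatcher = system.actorOf(Props(new Dispatcher), "dispatcher")
  val ackProbe = TestProbe()
  val consumerProbe = TestProbe()

  ackProbe.send(dispatcher, "work")  // the probe, not deadLetters, becomes the sender
  ackProbe.expectNoMsg(300.millis)   // no ack until the downstream consumer confirms
  consumerProbe.send(dispatcher, "consumed")
  ackProbe.expectMsg("ok")           // the deferred acknowledgement finally arrives

  TestKit.shutdownActorSystem(system)
}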
Example 146
Source File: Step4_SecondaryPersistenceSpec.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.testkit.TestKit import akka.testkit.ImplicitSender import org.scalatest.BeforeAndAfterAll import org.scalatest.Matchers import org.scalatest.FunSuiteLike import akka.actor.ActorSystem import akka.testkit.TestProbe import scala.concurrent.duration._ import Arbiter._ import Persistence._ import org.scalactic.ConversionCheckedTripleEquals class Step4_SecondaryPersistenceSpec extends TestKit(ActorSystem("Step4SecondaryPersistenceSpec")) with FunSuiteLike with BeforeAndAfterAll with Matchers with ConversionCheckedTripleEquals with ImplicitSender with Tools { override def afterAll(): Unit = { system.shutdown() } test("case1: Secondary should not acknowledge snapshots until persisted") { import Replicator._ val arbiter = TestProbe() val persistence = TestProbe() val replicator = TestProbe() val secondary = system.actorOf(Replica.props(arbiter.ref, probeProps(persistence)), "case1-secondary") val client = session(secondary) arbiter.expectMsg(Join) arbiter.send(secondary, JoinedSecondary) client.get("k1") should ===(None) replicator.send(secondary, Snapshot("k1", Some("v1"), 0L)) val persistId = persistence.expectMsgPF() { case Persist("k1", Some("v1"), id) => id } withClue("secondary replica should already serve the received update while waiting for persistence: ") { client.get("k1") should ===(Some("v1")) } replicator.expectNoMsg(500.milliseconds) persistence.reply(Persisted("k1", persistId)) replicator.expectMsg(SnapshotAck("k1", 0L)) client.get("k1") should ===(Some("v1")) } test("case2: Secondary should retry persistence in every 100 milliseconds") { import Replicator._ val arbiter = TestProbe() val persistence = TestProbe() val replicator = TestProbe() val secondary = system.actorOf(Replica.props(arbiter.ref, probeProps(persistence)), "case2-secondary") val client = session(secondary) arbiter.expectMsg(Join) arbiter.send(secondary, JoinedSecondary) client.get("k1") should ===(None) replicator.send(secondary, Snapshot("k1", Some("v1"), 0L)) val persistId = persistence.expectMsgPF() { case Persist("k1", Some("v1"), id) => id } withClue("secondary replica should already serve the received update while waiting for persistence: ") { client.get("k1") should ===(Some("v1")) } // Persistence should be retried persistence.expectMsg(200.milliseconds, Persist("k1", Some("v1"), persistId)) persistence.expectMsg(200.milliseconds, Persist("k1", Some("v1"), persistId)) replicator.expectNoMsg(500.milliseconds) persistence.reply(Persisted("k1", persistId)) replicator.expectMsg(SnapshotAck("k1", 0L)) client.get("k1") should ===(Some("v1")) } }
Example 147
Source File: Tools.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.actor.ActorSystem import scala.concurrent.duration.FiniteDuration import akka.testkit.TestProbe import akka.actor.{ ActorRef, Actor } import org.scalatest.Matchers import org.scalatest.FunSuiteLike import akka.actor.Props import akka.testkit.TestKit import akka.testkit.ImplicitSender import scala.concurrent.duration._ object Tools { class TestRefWrappingActor(val probe: TestProbe) extends Actor { def receive = { case msg => probe.ref forward msg } } } trait Tools { this: TestKit with FunSuiteLike with Matchers with ImplicitSender => import Arbiter._ import Tools._ def probeProps(probe: TestProbe): Props = Props(classOf[TestRefWrappingActor], probe) class Session(val probe: TestProbe, val replica: ActorRef) { import Replica._ @volatile private var seq = 0L private def nextSeq: Long = { val next = seq seq += 1 next } @volatile private var referenceMap = Map.empty[String, String] def waitAck(s: Long): Unit = probe.expectMsg(OperationAck(s)) def waitFailed(s: Long): Unit = probe.expectMsg(OperationFailed(s)) def set(key: String, value: String): Long = { referenceMap += key -> value val s = nextSeq probe.send(replica, Insert(key, value, s)) s } def setAcked(key: String, value: String): Unit = waitAck(set(key, value)) def remove(key: String): Long = { referenceMap -= key val s = nextSeq probe.send(replica, Remove(key, s)) s } def removeAcked(key: String): Unit = waitAck(remove(key)) def getAndVerify(key: String): Unit = { val s = nextSeq probe.send(replica, Get(key, s)) probe.expectMsg(GetResult(key, referenceMap.get(key), s)) } def get(key: String): Option[String] = { val s = nextSeq probe.send(replica, Get(key, s)) probe.expectMsgType[GetResult].valueOption } def nothingHappens(duration: FiniteDuration): Unit = probe.expectNoMsg(duration) } def session(replica: ActorRef)(implicit system: ActorSystem) = new Session(TestProbe(), replica) }
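Tools.scala wraps a TestProbe in a real actor (TestRefWrappingActor) so the probe can stand in wherever Props are required, and forward keeps the original sender intact. A self-contained sketch of that wrapping trick (class and message names below are illustrative) could be:

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object ProbeWrapperSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  // Forwards everything to the wrapped probe, preserving the original sender.
  class ProbeWrapper(probe: TestProbe) extends Actor {
    def receive = { case msg => probe.ref forward msg }
  }

  def probeProps(probe: TestProbe): Props = Props(new ProbeWrapper(probe))

  val probe = TestProbe()
  val wrapped = system.actorOf(probeProps(probe), "wrapped")

  val client = TestProbe()
  client.send(wrapped, "hello")
  probe.expectMsg("hello")
  probe.reply("hi")       // answers `client` directly, because forward kept it as the sender
  client.expectMsg("hi")

  TestKit.shutdownActorSystem(system)
}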
Example 148
Source File: IntegrationSpec.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.actor.{ Actor, Props, ActorRef, ActorSystem } import akka.testkit.{ TestProbe, ImplicitSender, TestKit } import org.scalatest.{ BeforeAndAfterAll, FlatSpec, Matchers } import scala.concurrent.duration._ import org.scalatest.FunSuiteLike import org.scalactic.ConversionCheckedTripleEquals class IntegrationSpec(_system: ActorSystem) extends TestKit(_system) with FunSuiteLike with Matchers with BeforeAndAfterAll with ConversionCheckedTripleEquals with ImplicitSender with Tools { import Replica._ import Replicator._ import Arbiter._ def this() = this(ActorSystem("ReplicatorSpec")) override def afterAll: Unit = system.shutdown() }
Example 149
Source File: Step6_NewSecondarySpec.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.testkit.TestKit import akka.testkit.ImplicitSender import org.scalatest.BeforeAndAfterAll import org.scalatest.Matchers import org.scalatest.FunSuiteLike import akka.actor.ActorSystem import akka.testkit.TestProbe import Arbiter._ import Replicator._ import org.scalactic.ConversionCheckedTripleEquals class Step6_NewSecondarySpec extends TestKit(ActorSystem("Step6NewSecondarySpec")) with FunSuiteLike with BeforeAndAfterAll with Matchers with ConversionCheckedTripleEquals with ImplicitSender with Tools { override def afterAll(): Unit = { system.shutdown() } test("case1: Primary must start replication to new replicas") { val arbiter = TestProbe() val primary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case1-primary") val user = session(primary) val secondary = TestProbe() arbiter.expectMsg(Join) arbiter.send(primary, JoinedPrimary) user.setAcked("k1", "v1") arbiter.send(primary, Replicas(Set(primary, secondary.ref))) secondary.expectMsg(Snapshot("k1", Some("v1"), 0L)) secondary.reply(SnapshotAck("k1", 0L)) val ack1 = user.set("k1", "v2") secondary.expectMsg(Snapshot("k1", Some("v2"), 1L)) secondary.reply(SnapshotAck("k1", 1L)) user.waitAck(ack1) val ack2 = user.remove("k1") secondary.expectMsg(Snapshot("k1", None, 2L)) secondary.reply(SnapshotAck("k1", 2L)) user.waitAck(ack2) } test("case2: Primary must stop replication to removed replicas and stop Replicator") { val arbiter = TestProbe() val primary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case2-primary") val user = session(primary) val secondary = TestProbe() arbiter.expectMsg(Join) arbiter.send(primary, JoinedPrimary) arbiter.send(primary, Replicas(Set(primary, secondary.ref))) val ack1 = user.set("k1", "v1") secondary.expectMsg(Snapshot("k1", Some("v1"), 0L)) val replicator = secondary.lastSender secondary.reply(SnapshotAck("k1", 0L)) user.waitAck(ack1) watch(replicator) arbiter.send(primary, Replicas(Set(primary))) expectTerminated(replicator) } test("case3: Primary must stop replication to removed replicas and waive their outstanding acknowledgements") { val arbiter = TestProbe() val primary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case3-primary") val user = session(primary) val secondary = TestProbe() arbiter.expectMsg(Join) arbiter.send(primary, JoinedPrimary) arbiter.send(primary, Replicas(Set(primary, secondary.ref))) val ack1 = user.set("k1", "v1") secondary.expectMsg(Snapshot("k1", Some("v1"), 0L)) secondary.reply(SnapshotAck("k1", 0L)) user.waitAck(ack1) val ack2 = user.set("k1", "v2") secondary.expectMsg(Snapshot("k1", Some("v2"), 1L)) arbiter.send(primary, Replicas(Set(primary))) user.waitAck(ack2) } }
Example 150
Source File: Step1_PrimarySpec.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.testkit.TestKit import akka.actor.ActorSystem import org.scalatest.FunSuiteLike import org.scalatest.BeforeAndAfterAll import org.scalatest.Matchers import akka.testkit.ImplicitSender import akka.testkit.TestProbe import scala.concurrent.duration._ import kvstore.Persistence.{ Persisted, Persist } import kvstore.Replica.OperationFailed import kvstore.Replicator.{ Snapshot } import scala.util.Random import scala.util.control.NonFatal import org.scalactic.ConversionCheckedTripleEquals class Step1_PrimarySpec extends TestKit(ActorSystem("Step1PrimarySpec")) with FunSuiteLike with BeforeAndAfterAll with Matchers with ConversionCheckedTripleEquals with ImplicitSender with Tools { override def afterAll(): Unit = { system.shutdown() } import Arbiter._ test("case1: Primary (in isolation) should properly register itself to the provided Arbiter") { val arbiter = TestProbe() system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case1-primary") arbiter.expectMsg(Join) } test("case2: Primary (in isolation) should react properly to Insert, Remove, Get") { val arbiter = TestProbe() val primary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case2-primary") val client = session(primary) arbiter.expectMsg(Join) arbiter.send(primary, JoinedPrimary) client.getAndVerify("k1") client.setAcked("k1", "v1") client.getAndVerify("k1") client.getAndVerify("k2") client.setAcked("k2", "v2") client.getAndVerify("k2") client.removeAcked("k1") client.getAndVerify("k1") } }
Example 151
Source File: Step3_ReplicatorSpec.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.testkit.{ TestProbe, TestKit, ImplicitSender } import org.scalatest.BeforeAndAfterAll import org.scalatest.Matchers import org.scalatest.FunSuiteLike import akka.actor.ActorSystem import scala.concurrent.duration._ import kvstore.Arbiter.{ JoinedSecondary, Join } import kvstore.Persistence.{ Persisted, Persist } import kvstore.Replicator.{ SnapshotAck, Snapshot, Replicate } import org.scalactic.ConversionCheckedTripleEquals class Step3_ReplicatorSpec extends TestKit(ActorSystem("Step3ReplicatorSpec")) with FunSuiteLike with BeforeAndAfterAll with Matchers with ConversionCheckedTripleEquals with ImplicitSender with Tools { override def afterAll(): Unit = { system.shutdown() } test("case1: Replicator should send snapshots when asked to replicate") { val secondary = TestProbe() val replicator = system.actorOf(Replicator.props(secondary.ref), "case1-replicator") replicator ! Replicate("k1", Some("v1"), 0L) secondary.expectMsg(Snapshot("k1", Some("v1"), 0L)) secondary.ignoreMsg({ case Snapshot(_, _, 0L) => true }) secondary.reply(SnapshotAck("k1", 0L)) replicator ! Replicate("k1", Some("v2"), 1L) secondary.expectMsg(Snapshot("k1", Some("v2"), 1L)) secondary.ignoreMsg({ case Snapshot(_, _, 1L) => true }) secondary.reply(SnapshotAck("k1", 1L)) replicator ! Replicate("k2", Some("v1"), 2L) secondary.expectMsg(Snapshot("k2", Some("v1"), 2L)) secondary.ignoreMsg({ case Snapshot(_, _, 2L) => true }) secondary.reply(SnapshotAck("k2", 2L)) replicator ! Replicate("k1", None, 3L) secondary.expectMsg(Snapshot("k1", None, 3L)) secondary.reply(SnapshotAck("k1", 3L)) } test("case2: Replicator should retry until acknowledged by secondary") { val secondary = TestProbe() val replicator = system.actorOf(Replicator.props(secondary.ref), "case2-replicator") replicator ! Replicate("k1", Some("v1"), 0L) secondary.expectMsg(Snapshot("k1", Some("v1"), 0L)) secondary.expectMsg(300.milliseconds, Snapshot("k1", Some("v1"), 0L)) secondary.expectMsg(300.milliseconds, Snapshot("k1", Some("v1"), 0L)) secondary.reply(SnapshotAck("k1", 0L)) } }
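The replicator spec above leans on two more TestProbe features: timed expectMsg calls to prove that unacknowledged snapshots are retried, and ignoreMsg to drop the retries it no longer cares about. A small, self-contained sketch of ignoreMsg on its own (the messages are invented) might be:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object IgnoreMsgSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")
  val probe = TestProbe()

  // Silently drop every "heartbeat" so the assertions only see interesting messages.
  probe.ignoreMsg { case "heartbeat" => true }

  probe.ref ! "heartbeat"
  probe.ref ! "payload"
  probe.ref ! "heartbeat"

  probe.expectMsg(1.second, "payload") // heartbeats never reach the probe's queue
  probe.ignoreNoMsg()                  // switch filtering off again for later assertions

  TestKit.shutdownActorSystem(system)
}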
Example 152
Source File: BlackListTests.scala From EncryCore with GNU General Public License v3.0 | 5 votes |
package encry.network import java.net.{InetAddress, InetSocketAddress} import akka.actor.ActorSystem import akka.testkit.{TestActorRef, TestProbe} import encry.modifiers.InstanceFactory import encry.network.BlackList.BanReason._ import encry.network.PeerConnectionHandler.{ConnectedPeer, Outgoing} import encry.network.PeerConnectionHandler.ReceivableMessages.CloseConnection import encry.network.PeersKeeper.BanPeer import encry.settings.TestNetSettings import org.encryfoundation.common.network.BasicMessagesRepo.Handshake import org.scalatest.{BeforeAndAfterAll, Matchers, OneInstancePerTest, WordSpecLike} import scala.concurrent.duration._ class BlackListTests extends WordSpecLike with Matchers with BeforeAndAfterAll with InstanceFactory with OneInstancePerTest with TestNetSettings { implicit val system: ActorSystem = ActorSystem() override def afterAll(): Unit = system.terminate() val knowPeersSettings = testNetSettings.copy( network = settings.network.copy( knownPeers = List(new InetSocketAddress("172.16.11.11", 9001)), connectOnlyWithKnownPeers = Some(true) ), blackList = settings.blackList.copy( banTime = 2 seconds, cleanupTime = 3 seconds )) "Peers keeper" should { "handle ban peer message correctly" in { val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref)) val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000) val peerHandler: TestProbe = TestProbe() val connectedPeer: ConnectedPeer = ConnectedPeer( address, peerHandler.ref, Outgoing, Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis()) ) peersKeeper ! BanPeer(connectedPeer, SpamSender) peerHandler.expectMsg(CloseConnection) peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe true } "cleanup black list by scheduler correctly" in { val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref)) val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000) val peerHandler: TestProbe = TestProbe() val connectedPeer: ConnectedPeer = ConnectedPeer( address, peerHandler.ref, Outgoing, Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis()) ) peersKeeper ! BanPeer(connectedPeer, SentPeersMessageWithoutRequest) Thread.sleep(6000) peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe false } "don't remove peer from black list before ban time expired" in { val peersKeeper: TestActorRef[PeersKeeper] = TestActorRef[PeersKeeper](PeersKeeper.props(knowPeersSettings, TestProbe().ref, TestProbe().ref)) val address: InetSocketAddress = new InetSocketAddress("0.0.0.0", 9000) val peerHandler: TestProbe = TestProbe() val connectedPeer: ConnectedPeer = ConnectedPeer( address, peerHandler.ref, Outgoing, Handshake(protocolToBytes(knowPeersSettings.network.appVersion), "test node", Some(address), System.currentTimeMillis()) ) Thread.sleep(4000) peersKeeper ! BanPeer(connectedPeer, CorruptedSerializedBytes) Thread.sleep(2000) peersKeeper.underlyingActor.blackList.contains(address.getAddress) shouldBe true } } }
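BlackListTests passes TestProbe().ref wherever PeersKeeper expects a collaborating actor it does not assert on, and keeps a dedicated probe for the one collaborator it does check (the peer's connection handler). A reduced sketch of that "probes as collaborators" setup, with invented actors and messages, could be:

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object CollaboratorProbesSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  // Actor under test: on a ban it tells the peer's handler to close and notifies an auditor.
  class Keeper(handler: ActorRef, auditor: ActorRef) extends Actor {
    def receive = {
      case "ban" =>
        handler ! "close-connection"
        auditor ! "peer-banned"
    }
  }

  val handler = TestProbe() // the collaborator whose messages we assert
  val auditor = TestProbe() // a second collaborator, observable the same way
  val keeper  = system.actorOf(Props(new Keeper(handler.ref, auditor.ref)), "keeper")

  keeper ! "ban"
  handler.expectMsg("close-connection")
  auditor.expectMsg("peer-banned")

  TestKit.shutdownActorSystem(system)
}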
Example 153
Source File: DMUtils.scala From EncryCore with GNU General Public License v3.0 | 5 votes |
package encry.network.DeliveryManagerTests import java.net.InetSocketAddress import akka.actor.ActorSystem import akka.testkit.{TestActorRef, TestProbe} import encry.local.miner.Miner.{DisableMining, StartMining} import encry.modifiers.InstanceFactory import encry.network.DeliveryManager import encry.network.DeliveryManager.FullBlockChainIsSynced import encry.network.NodeViewSynchronizer.ReceivableMessages.UpdatedHistory import encry.network.PeerConnectionHandler.{ConnectedPeer, Incoming} import encry.settings.EncryAppSettings import encry.view.history.History import org.encryfoundation.common.modifiers.history.Block import org.encryfoundation.common.network.BasicMessagesRepo.Handshake import org.encryfoundation.common.utils.TaggedTypes.ModifierId import scala.collection.mutable import scala.collection.mutable.WrappedArray object DMUtils extends InstanceFactory { def initialiseDeliveryManager(isBlockChainSynced: Boolean, isMining: Boolean, settings: EncryAppSettings) (implicit actorSystem: ActorSystem): (TestActorRef[DeliveryManager], History) = { val history: History = generateDummyHistory(settings) val deliveryManager: TestActorRef[DeliveryManager] = TestActorRef[DeliveryManager](DeliveryManager .props(None, TestProbe().ref, TestProbe().ref, TestProbe().ref, TestProbe().ref, TestProbe().ref, settings)) deliveryManager ! UpdatedHistory(history) if (isMining) deliveryManager ! StartMining else deliveryManager ! DisableMining if (isBlockChainSynced) deliveryManager ! FullBlockChainIsSynced (deliveryManager, history) } def generateBlocks(qty: Int, history: History): (History, List[Block]) = (0 until qty).foldLeft(history, List.empty[Block]) { case ((prevHistory, blocks), _) => val block: Block = generateNextBlock(prevHistory) prevHistory.append(block.header) prevHistory.append(block.payload) val a = prevHistory.reportModifierIsValid(block) (a, blocks :+ block) } def toKey(id: ModifierId): WrappedArray.ofByte = new mutable.WrappedArray.ofByte(id) def createPeer(port: Int, host: String, settings: EncryAppSettings)(implicit system: ActorSystem): (InetSocketAddress, ConnectedPeer) = { val address = new InetSocketAddress(host, port) val peer: ConnectedPeer = ConnectedPeer(address, TestProbe().ref, Incoming, Handshake(protocolToBytes(settings.network.appVersion), host, Some(address), System.currentTimeMillis())) (address, peer) } }
Example 154
Source File: MemoryPoolTests.scala From EncryCore with GNU General Public License v3.0 | 5 votes |
package encry.view.mempool import akka.actor.ActorSystem import akka.testkit.{ TestActorRef, TestProbe } import com.typesafe.scalalogging.StrictLogging import encry.modifiers.InstanceFactory import encry.settings.{ EncryAppSettings, TestNetSettings } import encry.utils.NetworkTimeProvider import encry.view.mempool.MemoryPool.{ NewTransaction, TransactionsForMiner } import org.scalatest.{ BeforeAndAfterAll, Matchers, OneInstancePerTest, WordSpecLike } import scala.concurrent.duration._ class MemoryPoolTests extends WordSpecLike with Matchers with InstanceFactory with BeforeAndAfterAll with OneInstancePerTest with TestNetSettings with StrictLogging { implicit val system: ActorSystem = ActorSystem() override def afterAll(): Unit = system.terminate() val timeProvider: NetworkTimeProvider = new NetworkTimeProvider(testNetSettings.ntp) "MemoryPool" should { "add new unique transactions" in { val mempool = MemoryPoolStorage.empty(testNetSettings, timeProvider) val transactions = genValidPaymentTxs(10) val (newMempool, validTxs) = mempool.validateTransactions(transactions) newMempool.size shouldBe 10 validTxs.map(_.encodedId).forall(transactions.map(_.encodedId).contains) shouldBe true } "reject not unique transactions" in { val mempool = MemoryPoolStorage.empty(testNetSettings, timeProvider) val transactions = genValidPaymentTxs(10) val (newMempool, validTxs) = mempool.validateTransactions(transactions) val (newMempoolAgain, validTxsAgain) = newMempool.validateTransactions(validTxs) newMempoolAgain.size shouldBe 10 validTxsAgain.size shouldBe 0 } "mempoolMaxCapacity works correct" in { val mempool = MemoryPoolStorage.empty(testNetSettings, timeProvider) val transactions = genValidPaymentTxs(11) val (newMempool, validTxs) = mempool.validateTransactions(transactions) newMempool.size shouldBe 10 validTxs.size shouldBe 10 } "getTransactionsForMiner works fine" in { val mempool = MemoryPoolStorage.empty(testNetSettings, timeProvider) val transactions = (0 until 10).map(k => coinbaseAt(k)) val (newMempool, _) = mempool.validateTransactions(transactions) val (uPool, txs) = newMempool.getTransactionsForMiner uPool.size shouldBe 0 txs.map(_.encodedId).forall(transactions.map(_.encodedId).contains) shouldBe true transactions.map(_.encodedId).forall(txs.map(_.encodedId).contains) shouldBe true } } "Mempool actor" should { "send transactions to miner" in { val miner1 = TestProbe() val mempool1: TestActorRef[MemoryPool] = TestActorRef[MemoryPool](MemoryPool.props(testNetSettings, timeProvider, miner1.ref, Some(TestProbe().ref))) val transactions1 = (0 until 4).map { k => val a = coinbaseAt(k) a } transactions1.foreach(mempool1 ! NewTransaction(_)) mempool1.underlyingActor.memoryPool.size shouldBe 4 logger.info(s"generated: ${transactions1.map(_.encodedId)}") miner1.expectMsg(20.seconds, TransactionsForMiner(transactions1)) } } }
Example 155
Source File: ProcessTest.scala From process with Apache License 2.0 | 5 votes |
package processframework import java.lang import akka.actor.{ ActorContext, ActorRef, ActorSystem, Props } import akka.testkit.{ ImplicitSender, TestKit, TestProbe } import org.scalatest._ import org.scalatest.concurrent.Eventually import scala.concurrent.duration._ object ProcessTest { case object Start case object Response case class Command(i: Int) case object Completed extends Process.Event class MockStep(service: ActorRef, retryInt: Duration)(implicit val context: ActorContext) extends ProcessStep[Int] { override val retryInterval = retryInt def execute()(implicit process: akka.actor.ActorRef) = { state ⇒ service ! Command(state) } def receiveCommand = { case Response ⇒ Completed } def updateState = { case Completed ⇒ state ⇒ markDone(state + 1) } } class Process1(service: ActorRef, retryInterval: Duration) extends Process[Int] { import context.dispatcher var state = 0 val process = new MockStep(service, retryInterval) def receive = { case Start ⇒ process.run() } } } class ProcessTest extends BaseSpec { import ProcessTest._ "Process" should { "have a happy flow" in { val service = TestProbe() val process = system.actorOf(Props(new Process1(service.ref, Duration.Inf)), "Process1") process ! processframework.Process.GetState expectMsg(0) process ! Start service.expectMsg(Command(0)) service.reply(Response) eventually { process ! processframework.Process.GetState expectMsg(1) } process ! Start expectNoMsg(250 millis) process ! processframework.Process.GetState expectMsg(1) } "does not retry by default" in { val service = TestProbe() val process = system.actorOf(Props(new Process1(service.ref, Duration.Inf)), "Process2") process ! processframework.Process.GetState expectMsg(0) process ! Start service.expectMsg(Command(0)) expectNoMsg() } "retries execution until succeeded" in { val service = TestProbe() val process = system.actorOf(Props(new Process1(service.ref, 150 millis)), "Process3") process ! processframework.Process.GetState expectMsg(0) process ! Start service.expectMsg(Command(0)) service.expectMsg(1000.millis, Command(0)) service.expectMsg(1000.millis, Command(0)) service.reply(Response) expectNoMsg() } } }
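ProcessTest verifies retry behaviour purely through the service probe: each expectMsg with a timeout asserts that the command was re-sent, and service.reply(Response) is what finally stops the retries. A compact, self-contained sketch of asserting periodic retries against a probe (the retrying actor below is invented, not the framework's ProcessStep) might be:

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object RetryAssertionSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  // Re-sends "cmd" to the target every 300 ms until the target answers "done".
  class Retrier(target: ActorRef) extends Actor {
    import context.dispatcher
    private val tick = context.system.scheduler.schedule(Duration.Zero, 300.millis, target, "cmd")
    def receive = { case "done" => tick.cancel(); context.stop(self) }
  }

  val service = TestProbe()
  system.actorOf(Props(new Retrier(service.ref)), "retrier")

  service.expectMsg("cmd")           // first attempt
  service.expectMsg(1.second, "cmd") // retried, because the probe has not replied yet
  service.expectMsg(1.second, "cmd") // and retried again
  service.reply("done")              // acknowledging stops the retries

  TestKit.shutdownActorSystem(system)
}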
Example 156
Source File: EmptyStepSpec.scala From process with Apache License 2.0 | 5 votes |
package processframework import akka.actor.ActorContext import akka.testkit.TestProbe import scala.reflect.ClassTag case class NoState() class EmptyStepSpec extends BaseSpec with ProcessStepTestSupport[NoState, EmptyStep[NoState]] { def createTestProbe(): TestProbe = TestProbe() def createProcessStep(executeProbe: TestProbe)(implicit context: ActorContext): EmptyStep[NoState] = EmptyStep[NoState]() "EmptyStep" should { "be completed directly" in { val emptyStep = processStep() emptyStep.isCompleted shouldBe true } "be executable" in { val emptyStep = processStep() emptyStep.execute() } "give a MatchError when receiving a command" in { val emptyStep = processStep() intercept[MatchError] { emptyStep.receiveCommand(ProcessStepTestSupport.ACommand) } intercept[MatchError] { emptyStep.handleReceiveCommand(ProcessStepTestSupport.ACommand) } } "give a MatchError when updating state for an event" in { val emptyStep = processStep() intercept[MatchError] { emptyStep.updateState(ProcessStepTestSupport.AnEvent) } intercept[MatchError] { emptyStep.handleUpdateState(ProcessStepTestSupport.AnEvent) } } } }
Example 157
Source File: ProcessStepTestSupport.scala From process with Apache License 2.0 | 5 votes |
package processframework import akka.pattern.ask import akka.actor.{ ActorRef, ActorContext, Actor, Props } import akka.util.Timeout import scala.concurrent.duration._ import scala.concurrent.Await import scala.reflect.ClassTag import akka.testkit.{ TestProbe, TestKit } import org.scalatest.BeforeAndAfterEach object ProcessStepTestSupport { case object GetStep case object ACommand case object AnEvent extends Process.Event } trait ProcessStepTestSupport[S, PS <: ProcessStep[S]] { this: TestKit with BeforeAndAfterEach ⇒ implicit val timeout: Timeout = 1 second var testProbe: TestProbe = null var processActor: ActorRef = null override protected def beforeEach(): Unit = { testProbe = createTestProbe() processActor = createProcessActor() } def createTestProbe(): TestProbe def createProcessStep(executeProbe: TestProbe)(implicit context: ActorContext): PS def createProcessActor() = system.actorOf(Props(new Actor { val step = createProcessStep(testProbe) def receive = { case msg if sender() == step ⇒ testActor forward msg case ProcessStepTestSupport.GetStep ⇒ sender() ! step case e: Process.Event ⇒ testActor ! e } })) def processStep()(implicit classTag: ClassTag[PS]): PS = Await.result[PS]((processActor ? ProcessStepTestSupport.GetStep).mapTo[PS], 2 seconds) }
Example 158
Source File: BasicServiceTest.scala From wookiee with Apache License 2.0 | 5 votes |
package com.oracle.infy.qa import akka.testkit.TestProbe import com.typesafe.config.ConfigFactory import com.webtrends.harness.service.messages.GetMetaDetails import com.webtrends.harness.service.meta.ServiceMetaDetails import com.webtrends.harness.service.test.{BaseWookieeScalaTest, TestHarness} class BasicServiceTest extends BaseWookieeScalaTest { override def config = ConfigFactory.empty() override def servicesMap = Some(Map("base" -> classOf[BasicService])) "BasicService" should { "start itself up" in { val probe = TestProbe() val testService = TestHarness.harness.get.getService("base") assert(testService.isDefined, "Basic Service was not registered") probe.send(testService.get, GetMetaDetails) ServiceMetaDetails(false) mustEqual probe.expectMsg(ServiceMetaDetails(false)) } } }
Example 159
Source File: LoggerSpec.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness.logging

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import ch.qos.logback.classic.Level
import com.webtrends.harness.TestKitSpecificationWithJUnit
import org.slf4j.LoggerFactory

class LoggerSpec extends TestKitSpecificationWithJUnit(ActorSystem("harness")) with LoggingAdapter {

  val probe = new TestProbe(system)
  val appender = setupAppender()
  sequential

  "logging" should {
    "allow for logging that is received by a mediator actor using Scala string interpolation" in {
      Logger.registerMediator(probe.ref)
      val logger = Logger("test")
      val x = 0
      logger.trace(s"testing ${x}123...")
      val msg = Trace(LoggerFactory getLogger "test", "testing 0123...", None, None, Nil, None)
      Logger.unregisterMediator(probe.ref)
      probe.expectMsgClass(classOf[Trace]) must be equalTo msg
    }

    "allow for logging that is received by a mediator actor using Java string interpolation" in {
      Logger.registerMediator(probe.ref)
      val logger = Logger("test")
      logger.debug("testing {}123...", 0)
      val msg = Debug(LoggerFactory getLogger "test", "testing {}123...", None, None, Seq(0), None)
      Logger.unregisterMediator(probe.ref)
      probe.expectMsgClass(classOf[Debug]) must be equalTo msg
    }

    "allow for logging that is handled directly by the underlying logging framework using Scala string interpolation" in {
      val logger = Logger("test")
      val x = 0
      logger.info(s"testing ${x}123...")
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "allow for logging that is handled directly by the underlying logging framework using Java string interpolation" in {
      val logger = Logger("test")
      logger.warn("testing {}123...", 0)
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "allow for logging that is handled directly by the underlying logging framework using Scala string interpolation and handles a Throwable" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "don't log if try succeeds" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      tryAndLogError({ true })
      appender.lastMessage.get must be equalTo "testing 0123..."
    }

    "do log if try fails" in {
      val logger = Logger("test")
      logger.error("testing {}123...", 0)
      tryAndLogError({ 5 / 0 })
      appender.lastMessage.get must be equalTo "/ by zero"
    }
  }

  step {
    TestKit.shutdownActorSystem(system)
  }

  private def setupAppender(): TestingAppender = {
    val root = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
    root.setLevel(Level.ALL)
    val appender = new TestingAppender()
    appender.start()
    root.addAppender(appender)
    appender
  }
}
Example 160
Source File: LoggingActorSpec.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness.logging import akka.actor.{ActorSystem, Props} import akka.event.Logging.{InitializeLogger, LoggerInitialized} import akka.testkit.{TestKit, TestProbe} import com.typesafe.config.ConfigFactory import com.webtrends.harness.TestKitSpecificationWithJUnit class LoggingActorSpec extends TestKitSpecificationWithJUnit(ActorSystem("test", ConfigFactory.parseString( """logging.use-actor=off"""))) { val logger = system.actorOf(Props[LoggingActor]) "Logging" should { "test logging initialization" in { val probe = TestProbe() probe.send(logger, InitializeLogger(null)) LoggerInitialized must beEqualTo(probe.expectMsg(LoggerInitialized)) } } step { TestKit.shutdownActorSystem(system) } }
Example 161
Source File: ServiceSpec.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness import akka.actor.ActorSystem import akka.testkit.{TestActorRef, TestKit, TestProbe} import com.webtrends.harness.health.{ComponentState, HealthComponent} import com.webtrends.harness.service.messages._ import com.webtrends.harness.service.meta.ServiceMetaDetails import org.specs2.mutable.SpecificationLike case class TestClass(val name: String, val value: Int) class ServiceSpec extends TestKit(ActorSystem("harness")) with SpecificationLike { val act = TestActorRef(new TestService) //val httpAct = TestActorRef(new TestHttpService) "services " should { " be able to be loaded and pinged" in { val probe = TestProbe() probe.send(act, Ping) Pong must beEqualTo(probe.expectMsg(Pong)) } " be able to be loaded and sent a ready message" in { val probe = TestProbe() probe.send(act, Ready) Ready must beEqualTo(probe.expectMsg(Ready)) } " be able to be loaded and checked" in { val probe = TestProbe() probe.send(act, CheckHealth) val comp = HealthComponent("testservice", ComponentState.NORMAL, "test") comp.addComponent(HealthComponent("childcomponent", ComponentState.DEGRADED, "test")) comp must beEqualTo(probe.expectMsg(comp)) } //todo only HttpService should be able to do this } step { TestKit.shutdownActorSystem(system) } }
Example 162
Source File: ConfigSpec.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness import java.io.{BufferedWriter, File, FileWriter} import java.util.concurrent.TimeUnit import akka.actor.{Actor, ActorSystem, Props} import akka.testkit.TestProbe import com.typesafe.config.ConfigFactory import com.webtrends.harness.app.HarnessActor.ConfigChange import com.webtrends.harness.config.ConfigWatcherActor import com.webtrends.harness.health.{ComponentState, HealthComponent} import com.webtrends.harness.service.messages.CheckHealth import org.specs2.mutable.SpecificationWithJUnit import scala.concurrent.ExecutionContextExecutor import scala.concurrent.duration.FiniteDuration import scala.reflect.io.{Directory, Path} class ConfigSpec extends SpecificationWithJUnit { implicit val dur = FiniteDuration(2, TimeUnit.SECONDS) new File("services/test/conf").mkdirs() implicit val sys = ActorSystem("system", ConfigFactory.parseString( """ akka.actor.provider = "akka.actor.LocalActorRefProvider" services { path = "services" } """).withFallback(ConfigFactory.load)) implicit val ec: ExecutionContextExecutor = sys.dispatcher val probe = TestProbe() val parent = sys.actorOf(Props(new Actor { val child = context.actorOf(ConfigWatcherActor.props, "child") def receive = { case x if sender == child => probe.ref forward x case x => child forward x } })) sequential "config " should { "be in good health" in { probe.send(parent, CheckHealth) val msg = probe.expectMsgClass(classOf[HealthComponent]) msg.state equals ComponentState.NORMAL } "detect changes in config" in { val file = new File("services/test/conf/test.conf") val bw = new BufferedWriter(new FileWriter(file)) bw.write("test = \"value\"") bw.close() val msg = probe.expectMsgClass(classOf[ConfigChange]) msg.isInstanceOf[ConfigChange] } } step { sys.terminate().onComplete { _ => Directory(Path(new File("services"))).deleteRecursively() } } }
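ConfigSpec builds a throwaway parent that creates the actor under test as its child and reroutes everything the child sends back to a TestProbe, so parent-directed replies become assertable. A minimal, generic sketch of that fabricated-parent pattern (the Child actor and its messages are made up) could be:

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}

object FabricatedParentSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  // Child under test: reports to its parent when it gets work.
  class Child extends Actor {
    def receive = { case "ping" => context.parent ! "pong" }
  }

  // Fabricated parent: forwards outside traffic to the child and
  // diverts everything coming back from the child to the probe.
  class FabricatedParent(probe: ActorRef) extends Actor {
    private val child = context.actorOf(Props(new Child), "child")
    def receive = {
      case x if sender() == child => probe forward x
      case x                      => child forward x
    }
  }

  val probe = TestProbe()
  val parent = system.actorOf(Props(new FabricatedParent(probe.ref)), "parent")

  parent ! "ping"
  probe.expectMsg("pong")

  TestKit.shutdownActorSystem(system)
}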
Example 163
Source File: IngestorsEndpointSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.ingest.http import akka.actor.Actor import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.testkit.{TestActorRef, TestKit, TestProbe} import hydra.common.util.ActorUtils import hydra.ingest.IngestorInfo import hydra.ingest.services.IngestorRegistry.{FindAll, FindByName, LookupResult} import org.joda.time.DateTime import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import scala.concurrent.duration._ class IngestorsEndpointSpec extends Matchers with AnyWordSpecLike with ScalatestRouteTest with HydraIngestJsonSupport { val ingestorsRoute = new IngestorRegistryEndpoint().route override def afterAll = { super.afterAll() TestKit.shutdownActorSystem( system, verifySystemShutdown = true, duration = 10 seconds ) } val probe = TestProbe() val ingestorInfo = IngestorInfo( ActorUtils.actorName(probe.ref), "test", probe.ref.path, DateTime.now ) val registry = TestActorRef( new Actor { override def receive = { case FindByName("tester") => sender ! LookupResult(Seq(ingestorInfo)) case FindAll => sender ! LookupResult(Seq(ingestorInfo)) } }, "ingestor_registry" ).underlyingActor "The ingestors endpoint" should { "returns all ingestors" in { Get("/ingestors") ~> ingestorsRoute ~> check { val r = responseAs[Seq[IngestorInfo]] r.size shouldBe 1 r(0).path shouldBe ingestorInfo.path r(0).group shouldBe ingestorInfo.group r(0).path shouldBe ingestorInfo.path r(0).registeredAt shouldBe ingestorInfo.registeredAt.withMillisOfSecond( 0 ) } } } }
Example 164
Source File: IngestionSocketActorSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.ingest.services import org.scalatest.matchers.should.Matchers import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.BeforeAndAfterAll import akka.actor.ActorSystem import akka.actor.Props import akka.testkit.TestProbe class IngestionSocketActorSpec extends AnyFlatSpecLike with Matchers with BeforeAndAfterAll { private implicit val system: ActorSystem = ActorSystem() override def afterAll(): Unit = { system.terminate() } private def getIngestActorRef = system.actorOf(Props[IngestionSocketActor]) it should "ack the init message in waiting state" in { val ingestActor = getIngestActorRef val probe = TestProbe() ingestActor.tell(SocketInit, probe.ref) probe.expectMsg(SocketAck) } it should "ack the init message in initialized state" in { val ingestActor = getIngestActorRef val probe = TestProbe() ingestActor ! SocketStarted(probe.ref) ingestActor.tell(SocketInit, probe.ref) probe.expectMsg(SocketAck) } private def testIngestionMessageAck(ingestionMessages: IncomingMessage*) = { it should s"ack the incoming messages of form: $ingestionMessages" in { val ingestActor = getIngestActorRef val probe = TestProbe() ingestActor ! SocketStarted(probe.ref) ingestActor.tell(SocketInit, probe.ref) probe.expectMsg(SocketAck) ingestionMessages.foreach { ingestionMessage => ingestActor.tell(ingestionMessage, probe.ref) probe.expectMsgClass(classOf[SimpleOutgoingMessage]) probe.expectMsg(SocketAck) } } } testIngestionMessageAck(IncomingMessage("-c HELP")) testIngestionMessageAck(IncomingMessage("-c SET hydra-ack = replicated")) testIngestionMessageAck(IncomingMessage("-c WHAT")) }
Example 165
Source File: RabbitIngestorSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.rabbit import akka.actor.{ActorSystem, Props} import akka.testkit.TestActors.ForwardActor import akka.testkit.{ImplicitSender, TestKit, TestProbe} import hydra.core.ingest.HydraRequest import hydra.core.protocol._ import hydra.core.transport.{AckStrategy, HydraRecord} import org.scalatest.matchers.should.Matchers import org.scalatest.funspec.AnyFunSpecLike import org.scalatest.BeforeAndAfterAll import scala.concurrent.duration._ class RabbitIngestorSpec extends TestKit(ActorSystem("rabbit-ingestor-spec")) with Matchers with AnyFunSpecLike with ImplicitSender with BeforeAndAfterAll { val ingestor = system.actorOf(Props[RabbitIngestor]) val probe = TestProbe() val rabbitTransport = system.actorOf(Props(new ForwardActor(probe.ref)), "rabbit_transport") override def afterAll = TestKit.shutdownActorSystem(system, verifySystemShutdown = true) describe("When using the rabbit ingestor") { it("Joins if exchange provided") { val request = HydraRequest( "123", "{'name': 'test'}", None, Map(RabbitRecord.HYDRA_RABBIT_EXCHANGE -> "test.exchange") ) ingestor ! Publish(request) expectMsg(10.seconds, Join) } it("Joins if queue provided") { val request = HydraRequest( "123", "{'name': 'test'}", None, Map(RabbitRecord.HYDRA_RABBIT_QUEUE -> "test.queue") ) ingestor ! Publish(request) expectMsg(10.seconds, Join) } it("Ignores") { val request = HydraRequest("123", "test string") ingestor ! Publish(request) expectMsg(10.seconds, Ignore) } it("transports") { ingestor ! Ingest( TestRecord("test", "test", "", AckStrategy.NoAck), AckStrategy.NoAck ) probe.expectMsg( Produce( TestRecord("test", "test", "", AckStrategy.NoAck), self, AckStrategy.NoAck ) ) } } } case class TestRecord( destination: String, payload: String, key: String, ackStrategy: AckStrategy ) extends HydraRecord[String, String]
Example 166
Source File: LoggingAdapterSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.common.logging import akka.actor.{Actor, ActorSystem} import akka.testkit.{TestActorRef, TestKit, TestProbe} import org.scalatest.matchers.should.Matchers import org.scalatest.funspec.AnyFunSpecLike import org.scalatest.BeforeAndAfterAll class LoggingAdapterSpec extends TestKit(ActorSystem("test")) with Matchers with AnyFunSpecLike with BeforeAndAfterAll { override def afterAll = TestKit.shutdownActorSystem(system) describe("The logging adapter") { it("allows an actor to use the logger") { val act = TestActorRef(new Actor with ActorLoggingAdapter { override def receive = { case _ => log.info("got it"); sender ! "got it" } }, "logger-test") act.underlyingActor.log.getName shouldBe "akka.testkit.TestActorRef" // Send a message and make sure we get a response back val probe = TestProbe() probe.send(act, "test") probe.expectMsgType[String] shouldBe "got it" } } }
Example 167
Source File: TransportOpsSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.core.ingest import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.TestActors.ForwardActor import akka.testkit.{ImplicitSender, TestKit, TestProbe} import com.pluralsight.hydra.reflect.DoNotScan import hydra.core.akka.ActorInitializationException import hydra.core.protocol.{IngestorError, Produce} import hydra.core.test.TestRecordFactory import hydra.core.transport.AckStrategy.NoAck import org.scalatest.concurrent.ScalaFutures import org.scalatest.matchers.should.Matchers import org.scalatest.funspec.AnyFunSpecLike import org.scalatest.BeforeAndAfterAll import scala.concurrent.Await import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ class TransportOpsSpec extends TestKit(ActorSystem("test")) with Matchers with AnyFunSpecLike with BeforeAndAfterAll with ImplicitSender with ScalaFutures { override def afterAll() = TestKit.shutdownActorSystem(system) val supervisor = TestProbe() val tm = TestProbe() val transport = system.actorOf(Props(new ForwardActor(tm.ref)), "test-transport") describe("TransportOps") { it("looks up a transport") { val t = system.actorOf(Props(classOf[TestTransportIngestor], supervisor.ref)) t ! "hello" expectMsg("hi!") } it("won't initialize if transport can't be found") { val t = system.actorOf(Props[TestTransportIngestorError]) t ! "hello" expectNoMessage() } it("transports a record") { val req = HydraRequest("123", "test-produce") val t = system.actorOf(Props(classOf[TestTransportIngestor], supervisor.ref)) t ! req whenReady(TestRecordFactory.build(req))(r => tm.expectMsg(Produce(r, self, NoAck)) ) } } } @DoNotScan class TestTransportIngestor(supervisor: ActorRef) extends Ingestor with TransportOps { override val recordFactory = TestRecordFactory override def initTimeout = 500 millis ingest { case "hello" => sender ! "hi!" case req: HydraRequest => val record = Await.result(TestRecordFactory.build(req), 3.seconds) transport(record, NoAck) } override def transportName = "test-transport" } class TestTransportIngestorError extends Ingestor with TransportOps { override val recordFactory = TestRecordFactory override def transportName = "test-transport-unknown" }
Example 168
Source File: TransportCallbackSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.core.transport import akka.actor.ActorSystem import akka.testkit.{ImplicitSender, TestKit, TestProbe} import hydra.core.protocol.{RecordNotProduced, RecordProduced} import hydra.core.test.{TestRecord, TestRecordMetadata} import hydra.core.transport.Transport.{Confirm, TransportError} import org.scalatest.matchers.should.Matchers import org.scalatest.funspec.AnyFunSpecLike import org.scalatest.BeforeAndAfterAll import scala.concurrent.duration._ class TransportCallbackSpec extends TestKit(ActorSystem("test")) with Matchers with AnyFunSpecLike with BeforeAndAfterAll with ImplicitSender { private val ingestor = TestProbe() private val supervisor = TestProbe() override def afterAll() { super.afterAll() TestKit.shutdownActorSystem(system, verifySystemShutdown = true) } describe("Transports Acks") { it("handles empty callbacks") { NoCallback.onCompletion( -1, None, Some(new IllegalArgumentException("test")) ) ingestor.expectNoMessage(3 seconds) supervisor.expectNoMessage(3 seconds) } it("handles simple/transport only callbacks") { val probe = TestProbe() new TransportSupervisorCallback(probe.ref) .onCompletion(-11, None, Some(new IllegalArgumentException("test"))) ingestor.expectNoMessage(3 seconds) supervisor.expectNoMessage(3 seconds) probe.expectMsg(TransportError(-11)) new TransportSupervisorCallback(probe.ref).onCompletion( -11, Some(TestRecordMetadata(1, 0, "", AckStrategy.NoAck)), None ) ingestor.expectNoMessage(3 seconds) supervisor.expectNoMessage(3 seconds) probe.expectMsg(Confirm(-11)) } it("handles ingestor callbacks") { val rec = TestRecord("OK", "1", "test", AckStrategy.NoAck) val transport = TestProbe() val cb = new IngestorCallback[String, String]( rec, ingestor.ref, supervisor.ref, transport.ref ) cb.onCompletion( 1, Some(TestRecordMetadata(1, 0, "", AckStrategy.NoAck)), None ) ingestor.expectMsgPF() { case RecordProduced(md, sup) => sup shouldBe supervisor.ref md shouldBe a[TestRecordMetadata] } transport.expectMsg(Confirm(1)) cb.onCompletion(1, None, Some(new IllegalArgumentException("test"))) ingestor.expectMsgPF() { case RecordNotProduced(r, e, s) => r shouldBe rec e.getMessage shouldBe "test" s shouldBe supervisor.ref } transport.expectMsg(TransportError(1)) } } }
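TransportCallbackSpec uses expectMsgPF to match only the interesting shape of a message while still asserting on its fields. A tiny, self-contained sketch of that partial-function expectation (the Produced case class below is invented) might be:

import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object ExpectMsgPFSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  final case class Produced(offset: Long, topic: String)

  val probe = TestProbe()
  probe.ref ! Produced(42L, "events")

  // Match on the shape we care about and return a value out of the block.
  val offset = probe.expectMsgPF(1.second) {
    case Produced(o, "events") => o
  }
  assert(offset == 42L)

  TestKit.shutdownActorSystem(system)
}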
Example 169
Source File: JsonReceiverSpec.scala From incubator-retired-iota with Apache License 2.0 | 5 votes |
package org.apache.iota.fey import java.nio.file.{Files, Paths} import akka.actor.ActorRef import akka.testkit.{EventFilter, TestProbe} import ch.qos.logback.classic.Level import scala.concurrent.duration.{DurationInt, FiniteDuration} class JsonReceiverSpec extends BaseAkkaSpec with LoggingTest{ class ReceiverTest(verifyActor: ActorRef) extends JsonReceiver{ override def execute(): Unit = { verifyActor ! "EXECUTED" Thread.sleep(500) } override def exceptionOnRun(e: Exception): Unit = { verifyActor ! "INTERRUPTED" } } val verifyTB = TestProbe("RECEIVER-TEST") val receiver = new ReceiverTest(verifyTB.ref) "Executing validJson in JsonReceiver" should { "return false when json schema is not right" in { receiver.validJson(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid)) should be(false) } "log message to Error" in { ("Incorrect JSON schema \n/ensembles/0 \n\tErrors: Property command missing") should beLoggedAt(Level.ERROR) } "return true when Json schema is valid" in { receiver.validJson(getJSValueFromString(Utils_JSONTest.create_json_test)) should be(true) } } "Executing checkForLocation in JsonReceiver" should { "log message at Debug level" in { receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.test_json_schema_invalid)) "Location not defined in JSON" should beLoggedAt(Level.DEBUG) } "download jar dynamically from URL" in { receiver.checkForLocation(getJSValueFromString(Utils_JSONTest.location_test)) Files.exists(Paths.get(s"${CONFIG.DYNAMIC_JAR_REPO}/fey-stream.jar")) should be(true) } } var watchThread: Thread = _ "Start a Thread with the JSON receiver" should { "Start Thread" in { watchThread = new Thread(receiver, "TESTING-RECEIVERS-IN-THREAD") watchThread.setDaemon(true) watchThread.start() TestProbe().isThreadRunning("TESTING-RECEIVERS-IN-THREAD") should be(true) } "execute execute() method inside run" in { verifyTB.expectMsgAllOf(600.milliseconds,"EXECUTED","EXECUTED") } } "Interrupting the receiver Thread" should { "Throw Interrupted exception" in { EventFilter[InterruptedException]() intercept { watchThread.interrupt() watchThread.join() } } "execute exceptionOnRun method" in { verifyTB.receiveWhile(1200.milliseconds) { case "EXECUTED" => } verifyTB.expectMsg("INTERRUPTED") } } }
Example 170
Source File: WatchServiceReceiverSpec.scala From incubator-retired-iota with Apache License 2.0 | 5 votes |
package org.apache.iota.fey import java.nio.file.{Files, Paths} import java.nio.charset.StandardCharsets import akka.testkit.{EventFilter, TestProbe} import scala.concurrent.duration.{DurationInt, FiniteDuration} import java.io.File import ch.qos.logback.classic.Level class WatchServiceReceiverSpec extends BaseAkkaSpec{ val watcherTB = TestProbe("WATCH-SERVICE") var watchFileTask:WatchServiceReceiver = _ val watchTestDir = s"${CONFIG.JSON_REPOSITORY}/watchtest" "Creating WatchServiceReceiver" should { "process initial files in the JSON repository" in { CONFIG.JSON_EXTENSION = "json.not" watchFileTask = new WatchServiceReceiver(watcherTB.ref) watcherTB.expectMsgAllClassOf(classOf[JsonReceiverActor.JSON_RECEIVED]) CONFIG.JSON_EXTENSION = "json.test" } } var watchThread: Thread = _ "Start a Thread with WatchServiceReceiver" should { "Start Thread" in { watchThread = new Thread(watchFileTask, "TESTING-WATCHER-IN-THREAD") watchThread.setDaemon(true) watchThread.start() TestProbe().isThreadRunning("TESTING-WATCHER-IN-THREAD") should be(true) } } "Start watching directory" should { "Starting receiving CREATED event" taggedAs(SlowTest) in { watchFileTask.watch(Paths.get(watchTestDir)) Files.write(Paths.get(s"$watchTestDir/watched.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8)) watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED]) } "Starting receiving UPDATE event" taggedAs(SlowTest) in { Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.delete_json_test.getBytes(StandardCharsets.UTF_8)) Thread.sleep(200) Files.write(Paths.get(s"$watchTestDir/watched-update.json.test"), Utils_JSONTest.create_json_test.getBytes(StandardCharsets.UTF_8)) watcherTB.expectMsgAllClassOf(20.seconds, classOf[JsonReceiverActor.JSON_RECEIVED]) } } "processJson" should { "log to warn level when json has invalid schema" in { Files.write(Paths.get(s"$watchTestDir/watched-invalid.json.test"), Utils_JSONTest.test_json_schema_invalid.getBytes(StandardCharsets.UTF_8)) watchFileTask.processJson(s"$watchTestDir/watched-invalid.json.test",new File(s"$watchTestDir/watched-invalid.json.test")) s"File $watchTestDir/watched-invalid.json.test not processed. Incorrect JSON schema" should beLoggedAt(Level.WARN) } } "interrupt watchservice" should{ "interrupt thread" in { watchThread.interrupt() } } }
Example 171
Source File: BaseAkkaSpec.scala From incubator-retired-iota with Apache License 2.0 | 5 votes |
package org.apache.iota.fey import java.nio.file.Paths import akka.actor.{ActorIdentity, ActorRef, ActorSystem, Identify, Props} import akka.testkit.{EventFilter, TestEvent, TestProbe} import com.typesafe.config.ConfigFactory import org.scalatest.BeforeAndAfterAll import play.api.libs.json._ import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.concurrent.Await class BaseAkkaSpec extends BaseSpec with BeforeAndAfterAll with LoggingTest{ //Load default configuration for Fey when running tests resetCapturedLogs() CONFIG.loadUserConfiguration(Paths.get(TestSetup.configTest.toURI()).toFile().getAbsolutePath) TestSetup.setup() val systemName = "FEY-TEST" implicit val system = ActorSystem(systemName, ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]""")) system.eventStream.publish(TestEvent.Mute(EventFilter.debug())) system.eventStream.publish(TestEvent.Mute(EventFilter.info())) system.eventStream.publish(TestEvent.Mute(EventFilter.warning())) system.eventStream.publish(TestEvent.Mute(EventFilter.error())) val globalIdentifierName = "GLOBAL-IDENTIFIER" val globalIdentifierRef = system.actorOf(Props[IdentifyFeyActors],globalIdentifierName) override protected def afterAll(): Unit = { //Force reload of GenericActor's jar Utils.loadedJars.remove("fey-test-actor.jar") Monitor.events.removeAllNodes() Await.ready(system.terminate(), 20.seconds) } implicit class TestProbeOps(probe: TestProbe) { def expectActor(path: String, max: FiniteDuration = 3.seconds): ActorRef = { probe.within(max) { var actor = null: ActorRef probe.awaitAssert { (probe.system actorSelection path).tell(Identify(path), probe.ref) probe.expectMsgPF(100 milliseconds) { case ActorIdentity(`path`, Some(ref)) => actor = ref } } actor } } def expectActorInSystem(path: String, lookInSystem: ActorSystem, max: FiniteDuration = 3.seconds): ActorRef = { probe.within(max) { var actor = null: ActorRef probe.awaitAssert { (lookInSystem actorSelection path).tell(Identify(path), probe.ref) probe.expectMsgPF(100 milliseconds) { case ActorIdentity(`path`, Some(ref)) => actor = ref } } actor } } def verifyActorTermination(actor: ActorRef)(implicit system: ActorSystem): Unit = { val watcher = TestProbe() watcher.watch(actor) watcher.expectTerminated(actor) } def notExpectActor(path: String, max: FiniteDuration = 3.seconds): Unit = { probe.within(max) { probe.awaitAssert { (probe.system actorSelection path).tell(Identify(path), probe.ref) probe.expectMsgPF(100 milliseconds) { case ActorIdentity(`path`, None) => } } } } def isThreadRunning(threadName: String): Boolean = { Thread.getAllStackTraces.keySet().toArray .map(_.asInstanceOf[Thread]) .find(_.getName == threadName) match { case Some(thread) => if(thread.isAlive) true else false case None => false } } } //Utils Functions def getJSValueFromString(json: String): JsValue = { Json.parse(json) } }
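BaseAkkaSpec layers helper methods on top of TestProbe, including verifyActorTermination, which death-watches an actor and waits for its Terminated signal. The watch/expectTerminated core of that helper, reduced to a self-contained sketch with an invented worker actor, could be:

import akka.actor.{Actor, ActorSystem, PoisonPill, Props}
import akka.testkit.{TestKit, TestProbe}
import scala.concurrent.duration._

object WatchTerminationSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")

  class Worker extends Actor {
    def receive = { case _ => }
  }
  val worker = system.actorOf(Props(new Worker), "worker")

  // Any probe can death-watch another actor and assert on its termination.
  val watcher = TestProbe()
  watcher.watch(worker)

  worker ! PoisonPill
  watcher.expectTerminated(worker, 3.seconds)

  TestKit.shutdownActorSystem(system)
}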
Example 172
Source File: ReplicatedAWSetSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.crdt.pure import akka.actor._ import akka.remote.testkit._ import akka.remote.transport.ThrottlerTransportAdapter.Direction import akka.testkit.TestProbe import com.rbmhtechnology.eventuate._ import com.rbmhtechnology.eventuate.crdt.pure.AWSetService.AWSet import com.rbmhtechnology.eventuate.crdt.pure.CRDTTypes.Operation import com.typesafe.config.ConfigFactory class ReplicatedAWSetSpecLeveldb extends ReplicatedAWSetSpec with MultiNodeSupportLeveldb class ReplicatedAWSetSpecLeveldbMultiJvmNode1 extends ReplicatedAWSetSpecLeveldb class ReplicatedAWSetSpecLeveldbMultiJvmNode2 extends ReplicatedAWSetSpecLeveldb object ReplicatedORSetConfig extends MultiNodeReplicationConfig { val nodeA = role("nodeA") val nodeB = role("nodeB") val customConfig = ConfigFactory.parseString( """ |eventuate.log.write-batch-size = 200 |eventuate.log.replication.remote-read-timeout = 2s """.stripMargin) testTransport(on = true) setConfig(customConfig.withFallback(MultiNodeConfigLeveldb.providerConfig)) } abstract class ReplicatedAWSetSpec extends MultiNodeSpec(ReplicatedORSetConfig) with MultiNodeWordSpec with MultiNodeReplicationEndpoint { import ReplicatedORSetConfig._ import CRDTTestDSL.AWSetCRDT def initialParticipants: Int = roles.size muteDeadLetters(classOf[AnyRef])(system) "A replicated AWSet" must { "converge" in { val probe = TestProbe() runOn(nodeA) { val endpoint = createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection)) val service = new AWSetService[Int]("A", endpoint.log) { override private[crdt] def onChange(crdt: AWSet[Int], operation: Option[Operation]): Unit = probe.ref ! ops.value(crdt) } service.add("x", 1) probe.expectMsg(Set(1)) probe.expectMsg(Set(1, 2)) // network partition testConductor.blackhole(nodeA, nodeB, Direction.Both).await enterBarrier("broken") // this is concurrent to service.remove("x", 1) on node B service.add("x", 1) probe.expectMsg(Set(1, 2)) enterBarrier("repair") testConductor.passThrough(nodeA, nodeB, Direction.Both).await probe.expectMsg(Set(1, 2)) service.remove("x", 2) probe.expectMsg(Set(1)) } runOn(nodeB) { val endpoint = createEndpoint(nodeB.name, Set(node(nodeA).address.toReplicationConnection)) val service = new AWSetService[Int]("B", endpoint.log) { override private[crdt] def onChange(crdt: AWSet[Int], operation: Option[Operation]): Unit = probe.ref ! ops.value(crdt) } service.value("x") probe.expectMsg(Set(1)) service.add("x", 2) probe.expectMsg(Set(1, 2)) enterBarrier("broken") // this is concurrent to service.add("x", 1) on node A service.remove("x", 1) probe.expectMsg(Set(2)) enterBarrier("repair") // add has precedence over (concurrent) remove probe.expectMsg(Set(1, 2)) probe.expectMsg(Set(1)) } enterBarrier("finish") } } }
Example 173
Source File: ReplicatedORSetSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.crdt import akka.actor._ import akka.remote.testkit._ import akka.remote.transport.ThrottlerTransportAdapter.Direction import akka.testkit.TestProbe import com.rbmhtechnology.eventuate._ import com.typesafe.config.ConfigFactory class ReplicatedORSetSpecLeveldb extends ReplicatedORSetSpec with MultiNodeSupportLeveldb class ReplicatedORSetSpecLeveldbMultiJvmNode1 extends ReplicatedORSetSpecLeveldb class ReplicatedORSetSpecLeveldbMultiJvmNode2 extends ReplicatedORSetSpecLeveldb object ReplicatedORSetConfig extends MultiNodeReplicationConfig { val nodeA = role("nodeA") val nodeB = role("nodeB") val customConfig = ConfigFactory.parseString( """ |eventuate.log.write-batch-size = 200 |eventuate.log.replication.remote-read-timeout = 2s """.stripMargin) testTransport(on = true) setConfig(customConfig.withFallback(MultiNodeConfigLeveldb.providerConfig)) } abstract class ReplicatedORSetSpec extends MultiNodeSpec(ReplicatedORSetConfig) with MultiNodeWordSpec with MultiNodeReplicationEndpoint { import ReplicatedORSetConfig._ def initialParticipants: Int = roles.size muteDeadLetters(classOf[AnyRef])(system) "A replicated ORSet" must { "converge" in { val probe = TestProbe() runOn(nodeA) { val endpoint = createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection)) val service = new ORSetService[Int]("A", endpoint.log) { override private[crdt] def onChange(crdt: ORSet[Int], operation: Any): Unit = probe.ref ! crdt.value } service.add("x", 1) probe.expectMsg(Set(1)) probe.expectMsg(Set(1, 2)) // network partition testConductor.blackhole(nodeA, nodeB, Direction.Both).await enterBarrier("broken") // this is concurrent to service.remove("x", 1) on node B service.add("x", 1) probe.expectMsg(Set(1, 2)) enterBarrier("repair") testConductor.passThrough(nodeA, nodeB, Direction.Both).await probe.expectMsg(Set(1, 2)) service.remove("x", 2) probe.expectMsg(Set(1)) } runOn(nodeB) { val endpoint = createEndpoint(nodeB.name, Set(node(nodeA).address.toReplicationConnection)) val service = new ORSetService[Int]("B", endpoint.log) { override private[crdt] def onChange(crdt: ORSet[Int], operation: Any): Unit = probe.ref ! crdt.value } service.value("x") probe.expectMsg(Set(1)) service.add("x", 2) probe.expectMsg(Set(1, 2)) enterBarrier("broken") // this is concurrent to service.add("x", 1) on node A service.remove("x", 1) probe.expectMsg(Set(2)) enterBarrier("repair") // add has precedence over (concurrent) remove probe.expectMsg(Set(1, 2)) probe.expectMsg(Set(1)) } enterBarrier("finish") } } }
Example 174
Source File: BasicReplicationSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import akka.actor._ import akka.remote.testkit._ import akka.testkit.TestProbe import com.typesafe.config.Config import scala.collection.immutable.Seq import scala.util._ class BasicReplicationConfig(providerConfig: Config) extends MultiNodeReplicationConfig { val nodeA = role("nodeA") val nodeB = role("nodeB") val nodeC = role("nodeC") setConfig(providerConfig) } object BasicReplicationSpec { class ReplicatedActor(val id: String, val eventLog: ActorRef, probe: ActorRef) extends EventsourcedActor { def onCommand = { case s: String => persist(s) { case Success(e) => case Failure(e) => throw e } } def onEvent = { case s: String => probe ! s } } } abstract class BasicReplicationSpec(config: BasicReplicationConfig) extends MultiNodeSpec(config) with MultiNodeWordSpec with MultiNodeReplicationEndpoint { import BasicReplicationSpec._ import config._ def initialParticipants: Int = roles.size def assertPartialOrder[A](events: Seq[A], sample: A*): Unit = { val indices = sample.map(events.indexOf) assert(indices == indices.sorted) } muteDeadLetters(classOf[AnyRef])(system) "Event log replication" must { "replicate all events by default" in { val probe = TestProbe() runOn(nodeA) { val endpoint = createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection)) val actor = system.actorOf(Props(new ReplicatedActor("pa", endpoint.log, probe.ref))) actor ! "A1" actor ! "A2" } runOn(nodeB) { val endpoint = createEndpoint(nodeB.name, Set( node(nodeA).address.toReplicationConnection, node(nodeC).address.toReplicationConnection)) val actor = system.actorOf(Props(new ReplicatedActor("pb", endpoint.log, probe.ref))) actor ! "B1" actor ! "B2" } runOn(nodeC) { val endpoint = createEndpoint(nodeC.name, Set(node(nodeB).address.toReplicationConnection)) val actor = system.actorOf(Props(new ReplicatedActor("pc", endpoint.log, probe.ref))) actor ! "C1" actor ! "C2" } val actual = probe.expectMsgAllOf("A1", "A2", "B1", "B2", "C1", "C2") assertPartialOrder(actual, "A1", "A2") assertPartialOrder(actual, "B1", "B2") assertPartialOrder(actual, "C1", "C2") enterBarrier("finish") } } }
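BasicReplicationSpec collects events from all nodes with expectMsgAllOf, which waits for every listed message regardless of arrival order and returns them in the order received, so a partial-order check can run afterwards. A minimal sketch of that idiom (the message values are made up):

import akka.actor.ActorSystem
import akka.testkit.TestProbe

object PartialOrderSketch extends App {
  implicit val system: ActorSystem = ActorSystem("partial-order-sketch")
  val probe = TestProbe()

  // Simulate interleaved delivery from two producers
  Seq("B1", "A1", "B2", "A2").foreach(probe.ref ! _)

  // Waits until all four messages have arrived, in any order, and returns them as received
  val received = probe.expectMsgAllOf("A1", "A2", "B1", "B2")

  // Per-producer order must still be preserved
  def assertPartialOrder[A](events: Seq[A], sample: A*): Unit = {
    val indices = sample.map(events.indexOf)
    assert(indices == indices.sorted)
  }
  assertPartialOrder(received, "A1", "A2")
  assertPartialOrder(received, "B1", "B2")
  system.terminate()
}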
Example 175
Source File: BasicPersistOnEventSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import akka.actor._ import akka.remote.testkit._ import akka.remote.transport.ThrottlerTransportAdapter.Direction import akka.testkit.TestProbe import com.rbmhtechnology.eventuate.EventsourcedView.Handler import com.typesafe.config._ class BasicPersistOnEventConfig(providerConfig: Config) extends MultiNodeReplicationConfig { val nodeA = role("nodeA") val nodeB = role("nodeB") testTransport(on = true) setConfig(ConfigFactory.parseString("eventuate.log.replication.remote-read-timeout = 2s").withFallback(providerConfig)) } object BasicPersistOnEventSpec { case class Ping(num: Int) case class Pong(num: Int) class PingActor(val id: String, val eventLog: ActorRef, probe: ActorRef) extends EventsourcedActor with PersistOnEvent { override def onCommand = { case Ping(i) => persist(Ping(i))(Handler.empty) } override def onEvent = { case p @ Pong(10) => probe ! p case p @ Pong(5) => probe ! p case p @ Ping(6) => probe ! p case Pong(i) => persistOnEvent(Ping(i + 1)) } } class PongActor(val id: String, val eventLog: ActorRef) extends EventsourcedActor with PersistOnEvent { override def onCommand = { case _ => } override def onEvent = { case Ping(i) => persistOnEvent(Pong(i)) } } } abstract class BasicPersistOnEventSpec(config: BasicPersistOnEventConfig) extends MultiNodeSpec(config) with MultiNodeWordSpec with MultiNodeReplicationEndpoint { import BasicPersistOnEventSpec._ import config._ muteDeadLetters(classOf[AnyRef])(system) def initialParticipants: Int = roles.size "Event-sourced actors" when { "located at different locations" can { "play partition-tolerant event-driven ping-pong" in { val probe = TestProbe() runOn(nodeA) { val endpoint = createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection)) val pingActor = system.actorOf(Props(new PingActor("ping", endpoint.log, probe.ref))) pingActor ! Ping(1) probe.expectMsg(Pong(5)) testConductor.blackhole(nodeA, nodeB, Direction.Both).await // partitioned from PongActor pingActor ! Ping(6) probe.expectMsg(Ping(6)) testConductor.passThrough(nodeA, nodeB, Direction.Both).await probe.expectMsg(Pong(10)) } runOn(nodeB) { val endpoint = createEndpoint(nodeB.name, Set(node(nodeA).address.toReplicationConnection)) system.actorOf(Props(new PongActor("pong", endpoint.log))) } enterBarrier("finish") } } } }
Example 176
Source File: FailureDetectionSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import akka.remote.testkit.MultiNodeSpec import akka.remote.transport.ThrottlerTransportAdapter.Direction import akka.testkit.TestProbe import com.rbmhtechnology.eventuate.ReplicationProtocol.ReplicationReadTimeoutException import com.typesafe.config._ class FailureDetectionConfig(providerConfig: Config) extends MultiNodeReplicationConfig { val nodeA = role("nodeA") val nodeB = role("nodeB") testTransport(on = true) val customConfig = ConfigFactory.parseString(""" |eventuate.log.replication.remote-read-timeout = 1s |eventuate.log.replication.failure-detection-limit = 10s """.stripMargin) setConfig(customConfig.withFallback(providerConfig)) } abstract class FailureDetectionSpec(config: FailureDetectionConfig) extends MultiNodeSpec(config) with MultiNodeWordSpec with MultiNodeReplicationEndpoint { import ReplicationEndpoint._ import config._ def initialParticipants: Int = roles.size muteDeadLetters(classOf[AnyRef])(system) "Event log replication" must { "detect replication server availability" in { val probeAvailable1 = new TestProbe(system) val probeAvailable2 = new TestProbe(system) val probeUnavailable = new TestProbe(system) system.eventStream.subscribe(probeAvailable1.ref, classOf[Available]) system.eventStream.subscribe(probeUnavailable.ref, classOf[Unavailable]) enterBarrier("subscribe") runOn(nodeA) { createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection)) probeAvailable1.expectMsg(Available(nodeB.name, logName)) enterBarrier("connected") testConductor.blackhole(nodeA, nodeB, Direction.Both).await probeUnavailable.expectMsgPF() { case Unavailable(nodeB.name, logName, causes) if causes.nonEmpty => causes.head shouldBe a[ReplicationReadTimeoutException] } system.eventStream.subscribe(probeAvailable2.ref, classOf[Available]) enterBarrier("repair") testConductor.passThrough(nodeA, nodeB, Direction.Both).await probeAvailable2.expectMsg(Available(nodeB.name, logName)) } runOn(nodeB) { createEndpoint(nodeB.name, Set(node(nodeA).address.toReplicationConnection)) probeAvailable1.expectMsg(Available(nodeA.name, logName)) enterBarrier("connected") probeUnavailable.expectMsgPF() { case Unavailable(nodeA.name, logName, causes) if causes.nonEmpty => causes.head shouldBe a[ReplicationReadTimeoutException] } system.eventStream.subscribe(probeAvailable2.ref, classOf[Available]) enterBarrier("repair") probeAvailable2.expectMsg(Available(nodeA.name, logName)) } enterBarrier("finish") } } }
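FailureDetectionSpec subscribes plain TestProbes to the actor system's event stream and asserts on the Available/Unavailable events published there. The same pattern works for any event type; a minimal sketch with a made-up ServiceUp event:

import akka.actor.ActorSystem
import akka.testkit.TestProbe

final case class ServiceUp(name: String) // hypothetical application event

object EventStreamSketch extends App {
  implicit val system: ActorSystem = ActorSystem("event-stream-sketch")
  val probe = TestProbe()

  // The probe's ref is a normal ActorRef, so it can subscribe like any other actor
  system.eventStream.subscribe(probe.ref, classOf[ServiceUp])

  system.eventStream.publish(ServiceUp("replicator"))
  probe.expectMsg(ServiceUp("replicator"))
  system.terminate()
}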
Example 177
Source File: EventsourcedActorCausalitySpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import akka.actor._ import akka.testkit.TestProbe import org.scalatest._ import scala.collection.immutable.Seq import scala.util._ object EventsourcedActorCausalitySpec { class Collaborator(val id: String, val eventLog: ActorRef, handles: Set[String], probe: ActorRef) extends EventsourcedActor { def onCommand = { case s: String => persist(s) { case Success(e) => case Failure(e) => throw e } } def onEvent = { case s: String if handles.contains(s) => probe ! ((s, lastVectorTimestamp, currentVersion)) } } } trait EventsourcedActorCausalitySpec extends WordSpec with Matchers with MultiLocationSpec { import ReplicationIntegrationSpec.replicationConnection import EventsourcedActorCausalitySpec._ def assertPartialOrder[A](events: Seq[A], sample: A*): Unit = { val indices = sample.map(events.indexOf) assert(indices == indices.sorted) } "Event-sourced actors" when { "located at different locations" can { "track causality" in { val logName = "L1" val locationA = location("A") val locationB = location("B") val endpointA = locationA.endpoint(Set(logName), Set(replicationConnection(locationB.port))) val endpointB = locationB.endpoint(Set(logName), Set(replicationConnection(locationA.port))) val logA = endpointA.logs(logName) val logB = endpointB.logs(logName) val logIdA = endpointA.logId(logName) val logIdB = endpointB.logId(logName) val probeA1 = new TestProbe(locationA.system) val probeA2 = new TestProbe(locationA.system) val probeA3 = new TestProbe(locationA.system) val probeB = new TestProbe(locationB.system) val actorA1 = locationA.system.actorOf(Props(new Collaborator("pa1", logA, Set("e1", "e2", "e5"), probeA1.ref))) val actorA2 = locationA.system.actorOf(Props(new Collaborator("pa2", logA, Set("e3", "e5", "e6"), probeA2.ref))) val actorA3 = locationA.system.actorOf(Props(new Collaborator("pa3", logA, Set("e4"), probeA3.ref))) val actorB = locationB.system.actorOf(Props(new Collaborator("pb", logB, Set("e1", "e6"), probeB.ref))) def vectorTime(a: Long, b: Long) = (a, b) match { case (0L, 0L) => VectorTime() case (a, 0L) => VectorTime(logIdA -> a) case (0L, b) => VectorTime(logIdB -> b) case (a, b) => VectorTime(logIdA -> a, logIdB -> b) } actorB ! "e1" probeA1.expectMsg(("e1", vectorTime(0, 1), vectorTime(0, 1))) probeB.expectMsg(("e1", vectorTime(0, 1), vectorTime(0, 1))) actorA1 ! "e2" probeA1.expectMsg(("e2", vectorTime(2, 1), vectorTime(2, 1))) actorA2 ! "e3" probeA2.expectMsg(("e3", vectorTime(3, 0), vectorTime(3, 0))) actorA3 ! "e4" probeA3.expectMsg(("e4", vectorTime(4, 0), vectorTime(4, 0))) actorA1 ! "e5" probeA1.expectMsg(("e5", vectorTime(5, 1), vectorTime(5, 1))) probeA2.expectMsg(("e5", vectorTime(5, 1), vectorTime(5, 1))) actorA2 ! "e6" probeA2.expectMsg(("e6", vectorTime(6, 1), vectorTime(6, 1))) probeB.expectMsg(("e6", vectorTime(6, 1), vectorTime(6, 1))) // ----------------------------------------------------------- // Please note: // - e2 <-> e3 (because e1 -> e2 and e1 <-> e3) // - e3 <-> e4 (but plausible clocks reports e3 -> e4) // ----------------------------------------------------------- } } } }
Example 178
Source File: package.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import akka.pattern.ask import akka.testkit.TestProbe import akka.util.Timeout import com.rbmhtechnology.eventuate.EventsourcingProtocol._ import com.rbmhtechnology.eventuate.ReplicationFilter.NoFilter import com.rbmhtechnology.eventuate.ReplicationProtocol._ import scala.collection.immutable.Seq import scala.concurrent._ import scala.concurrent.duration._ package object utilities { val timeoutDuration = 20.seconds implicit class AwaitHelper[T](awaitable: Awaitable[T]) { def await: T = Await.result(awaitable, timeoutDuration) } def write(target: ReplicationTarget, events: Seq[String], aggregateId: Option[String] = None): Unit = { val system = target.endpoint.system val probe = TestProbe()(system) target.log ! Write(events.map(DurableEvent(_, target.logId, emitterAggregateId = aggregateId)), system.deadLetters, probe.ref, 0, 0) probe.expectMsgClass(classOf[WriteSuccess]) } def read(target: ReplicationTarget): Seq[String] = { import target.endpoint.system.dispatcher implicit val timeout = Timeout(3.seconds) def readEvents: Future[ReplicationReadSuccess] = target.log.ask(ReplicationRead(1L, Int.MaxValue, Int.MaxValue, NoFilter, DurableEvent.UndefinedLogId, target.endpoint.system.deadLetters, VectorTime())).mapTo[ReplicationReadSuccess] val reading = for { res <- readEvents } yield res.events.map(_.payload.asInstanceOf[String]) reading.await } def replicate(from: ReplicationTarget, to: ReplicationTarget, num: Int = Int.MaxValue): Int = { import to.endpoint.system.dispatcher implicit val timeout = Timeout(3.seconds) def readProgress: Future[GetReplicationProgressSuccess] = to.log.ask(GetReplicationProgress(from.logId)).mapTo[GetReplicationProgressSuccess] def readEvents(reply: GetReplicationProgressSuccess): Future[ReplicationReadSuccess] = from.log.ask(ReplicationRead(reply.storedReplicationProgress + 1, num, Int.MaxValue, NoFilter, to.logId, to.endpoint.system.deadLetters, reply.currentTargetVersionVector)).mapTo[ReplicationReadSuccess] def writeEvents(reply: ReplicationReadSuccess): Future[ReplicationWriteSuccess] = to.log.ask(ReplicationWrite(reply.events, Map(from.logId -> ReplicationMetadata(reply.replicationProgress, VectorTime.Zero)))).mapTo[ReplicationWriteSuccess] val replication = for { rps <- readProgress res <- readEvents(rps) wes <- writeEvents(res) } yield wes.events.size replication.await } }
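The write helper above puts probe.ref into the protocol message as the reply-to address and then blocks on expectMsgClass, turning an asynchronous write into a synchronous test step. A stripped-down sketch of that request/acknowledge handshake (LogStub, Write and WriteSuccess are hypothetical stand-ins for the eventuate protocol):

import akka.actor.{ Actor, ActorRef, ActorSystem, Props }
import akka.testkit.TestProbe

final case class Write(payload: String, replyTo: ActorRef)
final case class WriteSuccess(payload: String)

// Hypothetical event log that acknowledges every write to the given reply-to ref
class LogStub extends Actor {
  def receive: Receive = { case Write(p, replyTo) => replyTo ! WriteSuccess(p) }
}

object ReplyToSketch extends App {
  implicit val system: ActorSystem = ActorSystem("reply-to-sketch")
  val log   = system.actorOf(Props(new LogStub))
  val probe = TestProbe()

  log ! Write("event-1", probe.ref)
  probe.expectMsgClass(classOf[WriteSuccess]) // blocks until the ack arrives (or times out)
  system.terminate()
}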
Example 179
Source File: EventLogPartitioningSpecCassandra.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.log import akka.actor.ActorSystem import akka.testkit.{ TestKit, TestProbe } import com.rbmhtechnology.eventuate.EventsourcingProtocol._ import com.rbmhtechnology.eventuate.SingleLocationSpecCassandra import com.typesafe.config._ import scala.collection.immutable.Seq object EventLogPartitioningSpecCassandra { val config: Config = ConfigFactory.parseString( """ |akka.loglevel = "ERROR" |akka.test.single-expect-default = 20s | |eventuate.snapshot.filesystem.dir = target/test-snapshot | |eventuate.log.write-batch-size = 3 |eventuate.log.cassandra.partition-size = 5 |eventuate.log.cassandra.default-port = 9142 """.stripMargin) } class EventLogPartitioningSpecCassandra extends TestKit(ActorSystem("test", EventLogPartitioningSpecCassandra.config)) with EventLogSpecSupport with SingleLocationSpecCassandra { import EventLogSpec._ def replay(fromSequenceNr: Long): Seq[(Any, Long)] = { val probe = TestProbe() log.tell(Replay(fromSequenceNr, None, 0), replyToProbe.ref) replyToProbe.expectMsgClass(classOf[ReplaySuccess]).events.map { event => (event.payload, event.localSequenceNr) } } "A Cassandra event log" must { "fill a partition with a single batch" in { writeEmittedEvents(List(event("a"), event("b"), event("c"), event("d"), event("e"))) replay(1L) should be(List(("a", 1L), ("b", 2L), ("c", 3L), ("d", 4L), ("e", 5L))) replay(4L) should be(List(("d", 4L), ("e", 5L))) replay(5L) should be(List(("e", 5L))) replay(6L) should be(List()) } "fill a partition with more than one batch" in { writeEmittedEvents(List(event("a"), event("b"), event("c"))) writeEmittedEvents(List(event("d"), event("e"))) replay(1L) should be(List(("a", 1L), ("b", 2L), ("c", 3L), ("d", 4L), ("e", 5L))) replay(5L) should be(List(("e", 5L))) replay(6L) should be(List()) } "switch to the next partition if the current partition is full" in { writeEmittedEvents(List(event("a"), event("b"), event("c"), event("d"), event("e"))) writeEmittedEvents(List(event("f"), event("g"))) replay(1L) should be(List(("a", 1L), ("b", 2L), ("c", 3L), ("d", 4L), ("e", 5L), ("f", 6L), ("g", 7L))) replay(5L) should be(List(("e", 5L), ("f", 6L), ("g", 7L))) replay(6L) should be(List(("f", 6L), ("g", 7L))) } "switch to the next partition if the current partition isn't full but doesn't provide enough remaining space for a batch" in { val eventsA = List(event("a"), event("b"), event("c"), event("d")) val eventsB = List(event("f"), event("g")) log ! Write(eventsA, system.deadLetters, replyToProbe.ref, 0, 0) log ! Write(eventsB, system.deadLetters, replyToProbe.ref, 0, 0) val expectedA = eventsA.zipWithIndex.map { case (event, idx) => event.copy(vectorTimestamp = timestamp(1L + idx), processId = logId, localLogId = logId, localSequenceNr = 1L + idx) } val expectedB = eventsB.zipWithIndex.map { case (event, idx) => event.copy(vectorTimestamp = timestamp(6L + idx), processId = logId, localLogId = logId, localSequenceNr = 6L + idx) } replyToProbe.expectMsg(WriteSuccess(expectedA, 0, 0)) replyToProbe.expectMsg(WriteSuccess(expectedB, 0, 0)) replay(1L) should be(List(("a", 1L), ("b", 2L), ("c", 3L), ("d", 4L), ("f", 6L), ("g", 7L))) replay(5L) should be(List(("f", 6L), ("g", 7L))) replay(6L) should be(List(("f", 6L), ("g", 7L))) } "reject batches larger than the maximum partition size" in { val events = Vector(event("a"), event("b"), event("c"), event("d"), event("e"), event("f")) log ! Write(events, system.deadLetters, replyToProbe.ref, 0, 0) replyToProbe.expectMsgClass(classOf[WriteFailure]) } } }
Example 180
Source File: ActorStorageProvider.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.vertx import akka.actor.{ ActorSystem, Status } import akka.pattern.ask import akka.testkit.TestProbe import akka.util.Timeout import com.rbmhtechnology.eventuate.adapter.vertx.api.StorageProvider import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } class ActorStorageProvider(defaultId: String)(implicit system: ActorSystem) extends StorageProvider { implicit val timeout = Timeout(20.seconds) val probe = TestProbe() override def readProgress(id: String)(implicit executionContext: ExecutionContext): Future[Long] = probe.ref.ask(read(id)).mapTo[Long] override def writeProgress(id: String, sequenceNr: Long)(implicit executionContext: ExecutionContext): Future[Long] = probe.ref.ask(write(id, sequenceNr)).mapTo[Long] def expectRead(replySequenceNr: Long, id: String = defaultId): Unit = { probe.expectMsg(read(id)) probe.reply(replySequenceNr) } def expectWrite(sequenceNr: Long, id: String = defaultId): Unit = { probe.expectMsg(write(id, sequenceNr)) probe.reply(sequenceNr) } def expectWriteAndFail(sequenceNr: Long, failure: Throwable, id: String = defaultId): Unit = { probe.expectMsg(write(id, sequenceNr)) probe.reply(Status.Failure(failure)) } def expectWriteAnyOf(sequenceNrs: Seq[Long], id: String = defaultId): Unit = { probe.expectMsgAnyOf(sequenceNrs.map(write(id, _)): _*) probe.reply(sequenceNrs.max) } def expectNoMsg(duration: FiniteDuration): Unit = { probe.expectNoMsg(duration) } private def read(id: String): String = s"read[$id]" private def write(id: String, sequenceNr: Long): String = s"write[$id]-$sequenceNr" }
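ActorStorageProvider implements a Future-returning interface by asking its internal TestProbe, and tests then script the interaction with expectMsg/reply. A minimal sketch of driving an ask from the outside and answering it through the probe (the read[...] message format is taken from the example; the rest is hypothetical):

import akka.actor.ActorSystem
import akka.pattern.ask
import akka.testkit.TestProbe
import akka.util.Timeout

import scala.concurrent.Await
import scala.concurrent.duration._

object AskReplySketch extends App {
  implicit val system: ActorSystem = ActorSystem("ask-reply-sketch")
  implicit val timeout: Timeout    = Timeout(3.seconds)

  val probe = TestProbe()

  // Production code would call this Future-based API...
  val progress = probe.ref.ask("read[journal]").mapTo[Long]

  // ...and the test scripts the probe's side of the conversation
  probe.expectMsg("read[journal]")
  probe.reply(42L) // goes back to the ask's temporary sender

  assert(Await.result(progress, 3.seconds) == 42L)
  system.terminate()
}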
Example 181
Source File: VertxEventBusProbes.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.vertx import akka.actor.ActorSystem import akka.testkit.{ TestKit, TestProbe } import com.rbmhtechnology.eventuate.adapter.vertx.utilities.EventBusMessage import io.vertx.core.eventbus.Message import org.scalatest.{ BeforeAndAfterEach, Suite } trait VertxEventBusProbes extends BeforeAndAfterEach { this: TestKit with Suite with VertxEnvironment => import VertxHandlerConverters._ var endpoint1: EventBusEndpoint = _ var endpoint2: EventBusEndpoint = _ override def beforeEach(): Unit = { super.beforeEach() endpoint1 = EventBusEndpoint.withId("1") endpoint2 = EventBusEndpoint.withId("2") } def eventBusProbe(endpoint: String): TestProbe = { val probe = TestProbe() val handler = (m: Message[String]) => probe.ref ! EventBusMessage(m.body(), m, endpoint) vertx.eventBus().consumer[String](endpoint, handler.asVertxHandler) probe } object EventBusEndpoint { def apply(address: String): EventBusEndpoint = new EventBusEndpoint(address, eventBusProbe(address)) def withId(id: String): EventBusEndpoint = apply(endpointAddress(id)) } case class EventBusEndpoint(address: String, probe: TestProbe) }
Example 182
Source File: package.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.vertx import akka.testkit.TestProbe import scala.concurrent.duration.Duration import scala.reflect.ClassTag import scala.util.Failure package object utilities { implicit class VertxTestProbeExtension(probe: TestProbe) { def expectVertxMsg[T](body: T, max: Duration = Duration.Undefined)(implicit t: ClassTag[T]): EventBusMessage[T] = { probe.expectMsgPF[EventBusMessage[T]](max, hint = s"EventBusMessage($body, _, _)") { case m: EventBusMessage[T] if m.body == body => m } } def receiveNVertxMsg[T](n: Int): Seq[EventBusMessage[T]] = probe.receiveN(n).asInstanceOf[Seq[EventBusMessage[T]]] def expectFailure[T](max: Duration = Duration.Undefined)(implicit t: ClassTag[T]): T = { probe.expectMsgPF[T](max, hint = s"Failure($t)") { case f @ Failure(err: T) => err } } } }
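The package object above shows a reusable trick: wrap TestProbe in an implicit class and build domain-specific expectations on top of expectMsgPF. A small generic sketch of the same idea (the matcher name is made up):

import akka.actor.ActorSystem
import akka.testkit.TestProbe

object ProbeExtensionSketch extends App {
  implicit val system: ActorSystem = ActorSystem("probe-extension-sketch")

  implicit class RichProbe(probe: TestProbe) {
    // Custom expectation built on expectMsgPF; the hint shows up in timeout failures
    def expectStringContaining(fragment: String): String =
      probe.expectMsgPF(hint = s"a String containing '$fragment'") {
        case s: String if s.contains(fragment) => s
      }
  }

  val probe = TestProbe()
  probe.ref ! "hello world"
  probe.expectStringContaining("world")
  system.terminate()
}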
Example 183
Source File: PersistOnEventWithRecoverySpecLeveldb.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate import java.util.UUID import akka.actor.Actor import akka.actor.ActorRef import akka.actor.Props import akka.testkit.TestProbe import com.rbmhtechnology.eventuate.ReplicationIntegrationSpec.replicationConnection import com.rbmhtechnology.eventuate.utilities._ import org.apache.commons.io.FileUtils import org.scalatest.Matchers import org.scalatest.WordSpec import scala.concurrent.duration.DurationInt object PersistOnEventWithRecoverySpecLeveldb { class OnBEmitRandomActor(val eventLog: ActorRef, probe: TestProbe) extends EventsourcedActor with PersistOnEvent { override def id = getClass.getName override def onCommand = Actor.emptyBehavior override def onEvent = { case "A" => case "B" => persistOnEvent(UUID.randomUUID().toString) case uuid: String => probe.ref ! uuid } } def persistOnEventProbe(locationA1: Location, log: ActorRef) = { val probe = locationA1.probe locationA1.system.actorOf(Props(new OnBEmitRandomActor(log, probe))) probe } val noMsgTimeout = 100.millis } class PersistOnEventWithRecoverySpecLeveldb extends WordSpec with Matchers with MultiLocationSpecLeveldb { import RecoverySpecLeveldb._ import PersistOnEventWithRecoverySpecLeveldb._ override val logFactory: String => Props = id => SingleLocationSpecLeveldb.TestEventLog.props(id, batching = true) "An EventsourcedActor with PersistOnEvent" must { "not re-attempt persistence on successful write after reordering of events through disaster recovery" in { val locationB = location("B", customConfig = RecoverySpecLeveldb.config) def newLocationA = location("A", customConfig = RecoverySpecLeveldb.config) val locationA1 = newLocationA val endpointB = locationB.endpoint(Set("L1"), Set(replicationConnection(locationA1.port))) def newEndpointA(l: Location, activate: Boolean) = l.endpoint(Set("L1"), Set(replicationConnection(locationB.port)), activate = activate) val endpointA1 = newEndpointA(locationA1, activate = true) val targetA = endpointA1.target("L1") val logDirA = logDirectory(targetA) val targetB = endpointB.target("L1") val a1Probe = persistOnEventProbe(locationA1, targetA.log) write(targetA, List("A")) write(targetB, List("B")) val event = a1Probe.expectMsgClass(classOf[String]) assertConvergence(Set("A", "B", event), endpointA1, endpointB) locationA1.terminate().await FileUtils.deleteDirectory(logDirA) val locationA2 = newLocationA val endpointA2 = newEndpointA(locationA2, activate = false) endpointA2.recover().await val a2Probe = persistOnEventProbe(locationA2, endpointA2.logs("L1")) a2Probe.expectMsg(event) a2Probe.expectNoMsg(noMsgTimeout) assertConvergence(Set("A", "B", event), endpointA2, endpointB) } } }
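The recovery spec asserts both presence and absence: expectMsg checks the event is re-delivered exactly once, and expectNoMsg checks nothing else follows within a window. A minimal sketch of asserting absence (expectNoMsg is the classic-testkit name; newer Akka versions call it expectNoMessage):

import akka.actor.ActorSystem
import akka.testkit.TestProbe

import scala.concurrent.duration._

object NoDuplicateSketch extends App {
  implicit val system: ActorSystem = ActorSystem("no-duplicate-sketch")
  val probe = TestProbe()

  probe.ref ! "delivered-once"
  probe.expectMsg("delivered-once")

  // Fails the test if anything else arrives within the window
  probe.expectNoMsg(100.millis)
  system.terminate()
}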
Example 184
Source File: ConsulCoordinationSpec.scala From constructr-consul with Apache License 2.0 | 5 votes |
package com.tecsisa.constructr.coordination.consul import akka.Done import akka.actor.{ ActorSystem, AddressFromURIString } import akka.testkit.{ TestDuration, TestProbe } import com.typesafe.config.ConfigFactory import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec } import scala.concurrent.duration.{ Duration, DurationInt, FiniteDuration } import scala.concurrent.{ Await, Awaitable } import scala.util.Random object ConsulCoordinationSpec { private val coordinationHost = { val dockerHostPattern = """tcp://(\S+):\d{1,5}""".r sys.env .get("DOCKER_HOST") .collect { case dockerHostPattern(address) => address } .getOrElse("127.0.0.1") } } class ConsulCoordinationSpec extends WordSpec with Matchers with BeforeAndAfterAll { import ConsulCoordinationSpec._ private implicit val system = { val config = ConfigFactory .parseString(s"constructr.coordination.host = $coordinationHost") .withFallback(ConfigFactory.load()) ActorSystem("default", config) } private val address1 = AddressFromURIString("akka.tcp://default@a:2552") private val address2 = AddressFromURIString("akka.tcp://default@b:2552") "ConsulCoordination" should { "correctly interact with consul" in { val coordination = new ConsulCoordination(randomString(), system) // Getting nodes resultOf(coordination.getNodes()) shouldBe 'empty // Lock (ttl >= 10s) resultOf(coordination.lock(address1, 10.seconds)) shouldBe true resultOf(coordination.lock(address1, 10.seconds)) shouldBe true resultOf(coordination.lock(address2, 10.seconds)) shouldBe false // Add self resultOf(coordination.addSelf(address1, 10.seconds)) shouldBe Done resultOf(coordination.getNodes()) shouldBe Set(address1) // Refresh resultOf(coordination.refresh(address1, 10.seconds)) shouldBe Done resultOf(coordination.getNodes()) shouldBe Set(address1) val probe = TestProbe() import probe._ awaitAssert( resultOf(coordination.getNodes()) shouldBe 'empty, 25.seconds // Wait until open sessions expire ) } } override protected def afterAll() = { Await.ready(system.terminate(), Duration.Inf) super.afterAll() } private def resultOf[A](awaitable: Awaitable[A], max: FiniteDuration = 3.seconds.dilated) = Await.result(awaitable, max) private def randomString() = math.abs(Random.nextInt).toString }
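ConsulCoordinationSpec never sends its probe a message; it only does import probe._ to reuse the TestKit's awaitAssert for polling an external system until a condition holds. A minimal sketch of that polling trick on ordinary, non-actor state:

import akka.actor.ActorSystem
import akka.testkit.TestProbe

import scala.concurrent.duration._

object AwaitAssertSketch extends App {
  implicit val system: ActorSystem = ActorSystem("await-assert-sketch")
  val probe = TestProbe()
  import probe._ // brings awaitAssert, within, etc. into scope

  @volatile var ready = false
  new Thread(new Runnable {
    def run(): Unit = { Thread.sleep(200); ready = true }
  }).start()

  // Re-evaluates the block until it stops throwing, or fails after `max`
  awaitAssert(assert(ready), max = 3.seconds, interval = 50.millis)
  system.terminate()
}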
Example 185
Source File: LoginHandlerSpec.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.protocol.command import akka.actor.ActorSystem import akka.testkit.TestProbe import com.linagora.gatling.imap.Fixture.bart import com.linagora.gatling.imap.protocol.{Command, Response, UserId} import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer} import com.sun.mail.imap.protocol.IMAPResponse import org.scalatest.matchers.{MatchResult, Matcher} import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec} import org.slf4j import org.slf4j.LoggerFactory import scala.concurrent.duration._ import scala.concurrent.ExecutionContext.Implicits.global class LoginHandlerSpec extends WordSpec with ImapTestUtils with BeforeAndAfterEach with Matchers { val logger: slf4j.Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName) private val server: RunningServer = CyrusServer.start() override def beforeEach(): Unit = { server.addUser(bart) } override protected def afterEach(): Unit = { system.terminate() server.stop() } implicit lazy val system: ActorSystem = ActorSystem("LoginHandlerSpec") "Login handler" should { "send the response back when logged in" in { val probe = TestProbe() val sessionFuture = connect(server.mappedImapPort()) sessionFuture.onComplete(session => { val handler = system.actorOf(LoginHandler.props(session.get)) probe.send(handler, Command.Login(UserId(1), bart)) }) probe.expectMsgPF(1.minute) { case Response.LoggedIn(responses) => responses.isOk shouldBe true } } } object IMAPResponseMatchers { class HasTagMatcher(tag: String) extends Matcher[IMAPResponse] { def apply(left: IMAPResponse): MatchResult = { val name = left.getTag MatchResult( name == tag, s"""ImapResponse doesn't have tag "$tag"""", s"""ImapResponse has tag "$tag"""" ) } } class IsOkMatcher() extends Matcher[IMAPResponse] { def apply(left: IMAPResponse): MatchResult = { MatchResult( left.isOK, s"""ImapResponse isn't OK """, s"""ImapResponse is OK """ ) } } def isOk = new IsOkMatcher() def hasTag(tag: String) = new HasTagMatcher(tag) } }
Example 186
Source File: ImapSessionsSpec.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.protocol.command import java.util.Properties import akka.actor.ActorSystem import akka.testkit.TestProbe import com.linagora.gatling.imap.Fixture.bart import com.linagora.gatling.imap.protocol.{Command, ImapProtocol, ImapResponses, ImapSessions, Response, UserId} import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer} import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec} import org.slf4j.{Logger, LoggerFactory} import scala.concurrent.duration._ class ImapSessionsSpec extends WordSpec with Matchers with ImapTestUtils with BeforeAndAfterEach { val logger: Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName) private val server: RunningServer = CyrusServer.start() override def beforeEach(): Unit = { server.addUser(bart) } override protected def afterEach(): Unit = { system.terminate() server.stop() } implicit lazy val system: ActorSystem = ActorSystem("LoginHandlerSpec") "the imap sessions actor" should { "log a user in" in { val config = new Properties() val protocol = ImapProtocol("localhost", server.mappedImapPort(), config) val sessions = system.actorOf(ImapSessions.props(protocol)) val probe = TestProbe() val userId = UserId(1) probe.send(sessions, Command.Connect(userId)) probe.expectMsg(10.second, Response.Connected(ImapResponses.empty)) probe.send(sessions, Command.Login(userId, bart)) probe.expectMsgPF(10.second) { case Response.LoggedIn(responses: ImapResponses) => responses.isOk shouldBe true } } } }
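Both gatling-imap specs use probe.send(target, msg) rather than target ! msg: sending through the probe makes probe.ref the sender, so the target's replies come back to the probe and can be asserted. A minimal sketch (Greeter is a made-up actor):

import akka.actor.{ Actor, ActorSystem, Props }
import akka.testkit.TestProbe

// Hypothetical actor that replies to whoever sent the request
class Greeter extends Actor {
  def receive: Receive = { case name: String => sender() ! s"hello, $name" }
}

object ProbeSendSketch extends App {
  implicit val system: ActorSystem = ActorSystem("probe-send-sketch")
  val greeter = system.actorOf(Props(new Greeter))
  val probe   = TestProbe()

  // probe.send makes probe.ref the sender, so the reply lands in the probe's queue
  probe.send(greeter, "bart")
  probe.expectMsg("hello, bart")
  system.terminate()
}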
Example 187
Source File: DatadogRegistrySpec.scala From akka-http-metrics with Apache License 2.0 | 5 votes |
package fr.davit.akka.http.metrics.datadog import java.net.InetSocketAddress import akka.actor.ActorSystem import akka.http.scaladsl.model.StatusCodes import akka.io.{IO, Udp} import akka.testkit.{TestKit, TestProbe} import com.timgroup.statsd.NonBlockingStatsDClient import fr.davit.akka.http.metrics.core.HttpMetricsRegistry.{PathDimension, StatusGroupDimension} import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ class DatadogRegistrySpec extends TestKit(ActorSystem("DatadogRegistrySpec")) with AnyFlatSpecLike with Matchers with BeforeAndAfterAll { val dimensions = Seq(StatusGroupDimension(StatusCodes.OK), PathDimension("/api")) def withFixture(test: (TestProbe, DatadogRegistry) => Any) = { val statsd = TestProbe() statsd.send(IO(Udp), Udp.Bind(statsd.ref, new InetSocketAddress(0))) val port = statsd.expectMsgType[Udp.Bound].localAddress.getPort val socket = statsd.sender() val client = new NonBlockingStatsDClient("", "localhost", port) val registry = DatadogRegistry(client) try { test(statsd, registry) } finally { client.close() socket ! Udp.Unbind } } override def afterAll(): Unit = { shutdown() super.afterAll() } "DatadogRegistry" should "send active datagrams to the statsd server" in withFixture { (statsd, registry) => registry.active.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_active:1|c" } it should "send requests datagrams to the statsd server" in withFixture { (statsd, registry) => registry.requests.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_count:1|c" } it should "send receivedBytes datagrams to the statsd server" in withFixture { (statsd, registry) => registry.receivedBytes.update(3) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_bytes:3|d" registry.receivedBytes.update(3, dimensions) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.requests_bytes:3|d|#path:/api,status:2xx" } it should "send responses datagrams to the statsd server" in withFixture { (statsd, registry) => registry.responses.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_count:1|c" registry.responses.inc(dimensions) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_count:1|c|#path:/api,status:2xx" } it should "send errors datagrams to the statsd server" in withFixture { (statsd, registry) => registry.errors.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_errors_count:1|c" registry.errors.inc(dimensions) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_errors_count:1|c|#path:/api,status:2xx" } it should "send duration datagrams to the statsd server" in withFixture { (statsd, registry) => registry.duration.observe(3.seconds) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_duration:3000|d" registry.duration.observe(3.seconds, dimensions) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_duration:3000|d|#path:/api,status:2xx" } it should "send sentBytes datagrams to the statsd server" in withFixture { (statsd, registry) => registry.sentBytes.update(3) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_bytes:3|d" registry.sentBytes.update(3, dimensions) statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.responses_bytes:3|d|#path:/api,status:2xx" } it should "send connected datagrams to the statsd server" in withFixture { (statsd, registry) => registry.connected.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.connections_active:1|c" } it should "send connections datagrams to the statsd server" in withFixture { (statsd, registry) => registry.connections.inc() statsd.expectMsgType[Udp.Received].data.utf8String shouldBe "akka.http.connections_count:1|c" } }
Example 188
Source File: HTTPInterfaceSpec.scala From reactive-kafka-microservice-template with Apache License 2.0 | 5 votes |
package akka import akka.event.Logging import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.stream.QueueOfferResult import akka.stream.QueueOfferResult.Enqueued import akka.stream.scaladsl.SourceQueueWithComplete import akka.testkit.{TestActorRef, TestProbe} import com.omearac.consumers.{DataConsumer, EventConsumer} import com.omearac.http.routes.{ConsumerCommands, ProducerCommands} import com.omearac.producers.DataProducer import org.scalatest.{Matchers, WordSpec} import scala.concurrent.Future class HTTPInterfaceSpec extends WordSpec with Matchers with ScalatestRouteTest with ConsumerCommands with ProducerCommands { val log = Logging(system, this.getClass.getName) //Mocks for DataConsumer Tests val dataConsumer = TestActorRef(new DataConsumer) val manager = TestProbe() dataConsumer.underlyingActor.consumerStreamManager = manager.ref //Mocks for EventConsumer Tests val eventConsumer = TestActorRef(new EventConsumer) eventConsumer.underlyingActor.consumerStreamManager = manager.ref //Mocks for DataProducer Tests val dataProducer = TestActorRef(new DataProducer) val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] { override def complete(): Unit = println("complete") override def fail(ex: Throwable): Unit = println("fail") override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued} override def watchCompletion(): Future[Done] = Future{Done} } "The HTTP interface to control the DataConsumerStream" should { "return a Already Stopped message for GET requests to /data_consumer/stop" in { Get("/data_consumer/stop") ~> dataConsumerHttpCommands ~> check { responseAs[String] shouldEqual "Data Consumer Stream Already Stopped" } } "return a Stream Started response for GET requests to /data_consumer/start" in { Get("/data_consumer/start") ~> dataConsumerHttpCommands ~> check { responseAs[String] shouldEqual "Data Consumer Stream Started" } } } "The HTTP interface to control the EventConsumerStream" should { "return a Already Stopped message for GET requests to /event_consumer/stop" in { Get("/event_consumer/stop") ~> eventConsumerHttpCommands ~> check { responseAs[String] shouldEqual "Event Consumer Stream Already Stopped" } } "return a Stream Started response for GET requests to /data_consumer/start" in { Get("/event_consumer/start") ~> eventConsumerHttpCommands ~> check { responseAs[String] shouldEqual "Event Consumer Stream Started" } } } "The HTTP interface to tell the DataProducer Actor to publish messages to Kafka" should { "return a Messages Produced message for GET requests to /data_producer/produce/10" in { dataProducer.underlyingActor.producerStream = mockProducerStream val producing = dataProducer.underlyingActor.publishData dataProducer.underlyingActor.context.become(producing) Get("/data_producer/produce/10") ~> producerHttpCommands ~> check { responseAs[String] shouldEqual "10 messages Produced as Ordered, Boss!" } } } }
Example 189
Source File: ProducerStreamSpec.scala From reactive-kafka-microservice-template with Apache License 2.0 | 5 votes |
package akka.kafka import akka.actor.ActorSystem import akka.stream.scaladsl.{Sink, Source} import akka.testkit.{DefaultTimeout, ImplicitSender, TestKit, TestProbe} import com.omearac.consumers.ConsumerStream import com.omearac.producers.ProducerStream import com.omearac.settings.Settings import com.omearac.shared.JsonMessageConversion.Conversion import com.omearac.shared.KafkaMessages.{ExampleAppEvent, KafkaMessage} import org.apache.kafka.clients.producer.ProducerRecord import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} class ProducerStreamSpec extends TestKit(ActorSystem("ProducerStreamSpec")) with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with ConsumerStream with ProducerStream { val settings = Settings(system).KafkaProducers val probe = TestProbe() override def afterAll: Unit = { shutdown() } "Sending KafkaMessages to the KafkaMessage producerStream" should { "be converted to JSON and obtained by the Stream Sink " in { //Creating Producer Stream Components for publishing KafkaMessages val producerProps = settings.KafkaProducerInfo("KafkaMessage") val numOfMessages = 50 val kafkaMsgs = for { i <- 0 to numOfMessages} yield KafkaMessage("sometime", "somestuff", i) val producerSource= Source(kafkaMsgs) val producerFlow = createStreamFlow[KafkaMessage](producerProps) val producerSink = Sink.actorRef(probe.ref, "complete") val jsonKafkaMsgs = for { msg <- kafkaMsgs} yield Conversion[KafkaMessage].convertToJson(msg) producerSource.via(producerFlow).runWith(producerSink) for (i <- 0 to jsonKafkaMsgs.length) { probe.expectMsgPF(){ case m: ProducerRecord[_,_] => if (jsonKafkaMsgs.contains(m.value())) () else fail() case "complete" => () } } } } "Sending ExampleAppEvent messages to the EventMessage producerStream" should { "be converted to JSON and obtained by the Stream Sink " in { //Creating Producer Stream Components for publishing ExampleAppEvent messages val producerProps = settings.KafkaProducerInfo("ExampleAppEvent") val numOfMessages = 50 val eventMsgs = for { i <- 0 to 50} yield ExampleAppEvent("sometime", "senderID", s"Event number $i occured") val producerSource= Source(eventMsgs) val producerFlow = createStreamFlow[ExampleAppEvent](producerProps) val producerSink = Sink.actorRef(probe.ref, "complete") val jsonAppEventMsgs = for{ msg <- eventMsgs} yield Conversion[ExampleAppEvent].convertToJson(msg) producerSource.via(producerFlow).runWith(producerSink) for (i <- 0 to jsonAppEventMsgs.length){ probe.expectMsgPF(){ case m: ProducerRecord[_,_] => if (jsonAppEventMsgs.contains(m.value())) () else fail() case "complete" => () } } } } }
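ProducerStreamSpec checks an Akka Streams flow by terminating it with Sink.actorRef(probe.ref, "complete"), so every element plus the completion marker lands in the probe's queue. A minimal sketch of the same wiring, assuming an Akka 2.5-style materializer (in Akka 2.6 the implicit ActorSystem is enough and Sink.actorRef also takes an onFailureMessage):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import akka.testkit.TestProbe

object StreamSinkSketch extends App {
  implicit val system: ActorSystem             = ActorSystem("stream-sink-sketch")
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  val probe = TestProbe()

  // Every stream element is sent to probe.ref; "done" signals stream completion
  Source(1 to 3).runWith(Sink.actorRef(probe.ref, "done"))

  probe.expectMsg(1)
  probe.expectMsg(2)
  probe.expectMsg(3)
  probe.expectMsg("done")
  system.terminate()
}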
Example 190
Source File: EventProducerSpec.scala From reactive-kafka-microservice-template with Apache License 2.0 | 5 votes |
package akka.kafka import java.util.Date import akka.Done import akka.actor.ActorSystem import akka.serialization.Serialization import akka.stream.QueueOfferResult import akka.stream.QueueOfferResult.Enqueued import akka.stream.scaladsl.SourceQueueWithComplete import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe} import com.omearac.producers.EventProducer import com.omearac.shared.AkkaStreams import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished} import com.omearac.shared.KafkaMessages.ExampleAppEvent import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.concurrent.Future class EventProducerSpec extends TestKit(ActorSystem("EventProducerSpec",ConfigFactory.parseString(""" akka.loggers = ["akka.testkit.TestEventListener"] """))) with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with AkkaStreams { val testProducer = TestActorRef(new EventProducer) val producerActor = testProducer.underlyingActor val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] { override def complete(): Unit = println("complete") override def fail(ex: Throwable): Unit = println("fail") override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued} override def watchCompletion(): Future[Done] = Future{Done} } override def afterAll: Unit = { shutdown() } //Create an test event listener for the local message bus val testEventListener = TestProbe() system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent]) "Sending ActivatedProducerStream to EventProducer in receive state" should { "save the stream ref and change state to producing " in { testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic") Thread.sleep(500) producerActor.producerStream should be(mockProducerStream) EventFilter.error(message = "EventProducer got the unknown message while producing: testMessage", occurrences = 1) intercept { testProducer ! "testMessage" } } } "Sending ExampleAppEvent to system bus while EventProducer is in publishEvent state" should { "offer the ExampleAppEvent to the stream " in { val producingState = producerActor.publishEvent producerActor.context.become(producingState) producerActor.producerStream = mockProducerStream val dateFormat = new java.text.SimpleDateFormat("dd:MM:yy:HH:mm:ss.SSS") lazy val timetag = dateFormat.format(new Date(System.currentTimeMillis())) val eventMsg = MessagesPublished(5) val testMessage = ExampleAppEvent(timetag,Serialization.serializedActorPath(self),eventMsg.toString) system.eventStream.publish(testMessage) testEventListener.expectMsgPF(){ case ExampleAppEvent(_,_,m) => if (m == eventMsg.toString) () else fail() } } } }
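EventProducerSpec combines two TestKit tools: a TestProbe subscribed to the event stream for application events, and EventFilter ... intercept for asserting that a specific log entry is written. The EventFilter part only works when the TestEventListener logger is configured, as in the spec's config; a minimal sketch of that assertion (the log message is made up):

import akka.actor.ActorSystem
import akka.testkit.EventFilter
import com.typesafe.config.ConfigFactory

object EventFilterSketch extends App {
  // TestEventListener must be installed for EventFilter to see log events
  implicit val system: ActorSystem = ActorSystem(
    "event-filter-sketch",
    ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]"""))

  // Passes only if exactly one matching warning is logged inside the block
  EventFilter.warning(message = "disk almost full", occurrences = 1) intercept {
    system.log.warning("disk almost full")
  }
  system.terminate()
}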
Example 191
Source File: DataProducerSpec.scala From reactive-kafka-microservice-template with Apache License 2.0 | 5 votes |
package akka.kafka import akka.Done import akka.actor.ActorSystem import akka.stream.QueueOfferResult import akka.stream.QueueOfferResult.Enqueued import akka.stream.scaladsl.SourceQueueWithComplete import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe} import com.omearac.producers.DataProducer import com.omearac.producers.DataProducer.PublishMessages import com.omearac.shared.AkkaStreams import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished} import com.omearac.shared.KafkaMessages.ExampleAppEvent import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.concurrent.Future class DataProducerSpec extends TestKit(ActorSystem("DataProducerSpec", ConfigFactory.parseString( """ akka.loggers = ["akka.testkit.TestEventListener"] """))) with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with AkkaStreams { val testProducer = TestActorRef(new DataProducer) val producerActor = testProducer.underlyingActor val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] { override def complete(): Unit = println("complete") override def fail(ex: Throwable): Unit = println("fail") override def offer(elem: Any): Future[QueueOfferResult] = Future { Enqueued } override def watchCompletion(): Future[Done] = Future { Done } } override def afterAll: Unit = { shutdown() } //Create an test event listener for the local message bus val testEventListener = TestProbe() system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent]) "Sending ActivatedProducerStream to DataProducer in receive state" should { "save the stream ref and change state to producing " in { testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic") Thread.sleep(500) producerActor.producerStream should be(mockProducerStream) EventFilter.error(message = "DataProducer got the unknown message while producing: testMessage", occurrences = 1) intercept { testProducer ! "testMessage" } } } "Sending PublishMessages(number: Int) to DataProducer in publishData state" should { "return MessagesPublished(number: Int) and publish the local event " in { val producing = producerActor.publishData producerActor.context.become(producing) producerActor.producerStream = mockProducerStream val resultMessage = MessagesPublished(5) testProducer ! PublishMessages(5) expectMsg(resultMessage) testEventListener.expectMsgPF() { case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail() } } } }
Example 192
Source File: ProducerStreamManagerSpec.scala From reactive-kafka-microservice-template with Apache License 2.0 | 5 votes |
package akka.kafka import akka.actor.ActorSystem import akka.stream.scaladsl.SourceQueueWithComplete import akka.testkit.{DefaultTimeout, ImplicitSender, TestActorRef, TestKit, TestProbe} import com.omearac.producers.ProducerStreamManager import com.omearac.producers.ProducerStreamManager.InitializeProducerStream import com.omearac.shared.AkkaStreams import com.omearac.shared.EventMessages.ActivatedProducerStream import com.omearac.shared.KafkaMessages.{ExampleAppEvent, KafkaMessage} import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} class ProducerStreamManagerSpec extends TestKit(ActorSystem("ProducerStreamManagerSpec")) with DefaultTimeout with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll with AkkaStreams { val testProducerStreamManager = TestActorRef(new ProducerStreamManager) val producerStreamManagerActor = testProducerStreamManager.underlyingActor //Create an test event listener for the local message bus val testEventListener = TestProbe() system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent]) override def afterAll: Unit = { shutdown() } "Sending InitializeProducerStream(self, KafkaMessage) to ProducerStreamManager" should { "initialize the stream for that particular message type, return ActivatedProducerStream(streaRef, \"TempChannel1\") and produce local event " in { testProducerStreamManager ! InitializeProducerStream(self, KafkaMessage) Thread.sleep(500) var streamRef: SourceQueueWithComplete[Any] = null expectMsgPF() { case ActivatedProducerStream(sr, kt) => if (kt == "TempChannel1") { streamRef = sr; () } else fail() } Thread.sleep(500) val resultMessage = ActivatedProducerStream(streamRef, "TempChannel1") testEventListener.expectMsgPF() { case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail() } } } "Sending InitializeProducerStream(self, ExampleAppEvent) to ProducerStreamManager" should { "initialize the stream for that particular message type, return ActivatedProducerStream(streaRef, \"TempChannel2\") and produce local event " in { testProducerStreamManager ! InitializeProducerStream(self, ExampleAppEvent) Thread.sleep(500) var streamRef: SourceQueueWithComplete[Any] = null expectMsgPF() { case ActivatedProducerStream(sr, kt) => if (kt == "TempChannel2") { streamRef = sr; () } else fail() } Thread.sleep(500) val resultMessage = ActivatedProducerStream(streamRef, "TempChannel2") testEventListener.expectMsgPF() { case ExampleAppEvent(_, _, m) => if (m == resultMessage.toString) () else fail() } } } }
Example 193
Source File: SourceCodeAnalyzerActorSpec.scala From CodeAnalyzerTutorial with Apache License 2.0 | 5 votes |
package tutor import akka.actor.ActorSystem import akka.testkit.TestProbe import org.scalatest.FunSpec import tutor.CodebaseAnalyzeAggregatorActor.Complete import tutor.SourceCodeAnalyzerActor.NewFile import scala.util.Success class SourceCodeAnalyzerActorSpec extends FunSpec { describe("SourceCodeAnalyzerActor"){ it("can analyze given file path, and reply with SourceCodeInfo"){ implicit val system = ActorSystem("SourceCodeAnalyzer") val probe = TestProbe() val sourceCodeAnalyzerActor = system.actorOf(SourceCodeAnalyzerActor.props()) sourceCodeAnalyzerActor.tell(NewFile("src/test/fixture/sub/SomeCode.scala"), probe.ref) probe.expectMsg(Complete(Success(SourceCodeInfo(path = "src/test/fixture/sub/SomeCode.scala", localPath = "SomeCode.scala", 16)))) } } }
Example 194
Source File: CodebaseAnalyzeAggregatorActorSpec.scala From CodeAnalyzerTutorial with Apache License 2.0 | 5 votes |
package tutor import akka.actor.ActorSystem import akka.testkit.TestProbe import org.scalatest.{FunSpec, Matchers} import tutor.CodebaseAnalyzeAggregatorActor.{AnalyzeDirectory, Report} import scala.concurrent.duration._ class CodebaseAnalyzeAggregatorActorSpec extends FunSpec with Matchers { describe("CodebaseAnalyzeAggregatorActor") { it("can analyze given file path, aggregate results of all individual files") { implicit val system = ActorSystem("CodebaseAnalyzeAggregator") val probe = TestProbe() val codebaseAnalyzeAggregator = system.actorOf(CodebaseAnalyzeAggregatorActor.props()) codebaseAnalyzeAggregator.tell(AnalyzeDirectory("src/test/fixture"), probe.ref) val result = probe.expectMsgType[Report](3 seconds).codebaseInfo result.totalFileNums shouldBe 2 result.fileTypeNums.keySet should have size 2 result.fileTypeNums("java") shouldBe 1 result.fileTypeNums("scala") shouldBe 1 result.totalLineCount shouldBe 31 result.avgLineCount shouldBe 15.0 result.longestFileInfo.get.localPath shouldBe "SomeCode.scala" result.top10Files should have size 2 result.top10Files.map(file => (file.localPath,file.lineCount)) should contain (("SomeCode.scala", 16)) } } }
Example 195
Source File: ActiveMqTestSpec.scala From reactive-activemq with Apache License 2.0 | 5 votes |
package akka.stream.integration package activemq import akka.NotUsed import akka.actor.ActorRef import akka.stream.integration.PersonDomain.Person import akka.stream.scaladsl.{ Flow, Keep } import akka.stream.testkit.scaladsl.{ TestSink, TestSource } import akka.stream.testkit.{ TestPublisher, TestSubscriber } import akka.testkit.TestActor.AutoPilot import akka.testkit.TestProbe import JsonCamelMessageExtractor._ import JsonCamelMessageBuilder._ import scala.util.{ Failure, Success, Try } implicit def function1ToAutoPilot[S, T](f: S => T): AutoPilot = new AutoPilot { override def run(sender: ActorRef, msg: Any): AutoPilot = msg match { case s: S => val tryT: Try[T] = Try(f(s)) tryT match { case Success(t) => sender ! t function1ToAutoPilot(f) case Failure(f) => fail(s"Failed to apply supplied function to received message: $s", f) } case _ => fail(s"Received message is not of the required type: $msg") } } }
Example 196
Source File: BenchBase.scala From changestream with MIT License | 5 votes |
package changestream.helpers import akka.actor.{ActorRefFactory, ActorSystem, Props} import akka.testkit.{TestActorRef, TestProbe} import com.github.mauricio.async.db.Configuration import com.github.mauricio.async.db.mysql.MySQLConnection import com.typesafe.config.ConfigFactory import org.scalameter.api._ import org.scalameter.picklers.Implicits._ import scala.concurrent.duration._ import scala.language.postfixOps import scala.concurrent.Await class BenchBase extends Bench[Double] { lazy val executor = LocalExecutor( new Executor.Warmer.Default, Aggregator.min[Double], measurer) lazy val measurer = new Measurer.Default lazy val reporter = new LoggingReporter[Double] lazy val persistor = Persistor.None implicit val system = ActorSystem("changestream", ConfigFactory.load("test.conf")) implicit val ec = system.dispatcher val probe = TestProbe() val maker = (_: ActorRefFactory) => probe.ref val testConfig = ConfigFactory.load("test.conf") def getProbedActorOf[K](klass: Predef.Class[K], configPath: String = "changestream") = TestActorRef(Props(klass, maker, testConfig.getConfig(configPath))) protected val config = testConfig.getConfig("changestream.mysql") protected val mysqlConfig = new Configuration( config.getString("user"), config.getString("host"), config.getInt("port"), Some(config.getString("password")) ) protected val connectionTimeout = config.getLong("timeout") protected val connection = new MySQLConnection(mysqlConfig) Await.result(connection.connect, connectionTimeout milliseconds) val result = connection.sendQuery("drop database if exists changestream_test") .flatMap(_ => connection.sendQuery("create database changestream_test")) .flatMap(_ => connection.sendQuery(s""" | CREATE TABLE changestream_test.users ( | `id` int(11) NOT NULL AUTO_INCREMENT, | `username` varchar(32) DEFAULT NULL, | `password` varchar(32) DEFAULT NULL, | `login_count` int(11) NOT NULL DEFAULT '0', | `bio` text DEFAULT NULL, | PRIMARY KEY (`id`) | ) ENGINE=InnoDB """.stripMargin)) Await.result(result, (connectionTimeout * 3) milliseconds) }
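The changestream specs all rely on the same seam: the actor under test takes an ActorRefFactory => ActorRef function for its downstream collaborator, and the test passes (_: ActorRefFactory) => probe.ref so everything the actor emits can be inspected. A minimal sketch of that dependency-injection pattern with a hypothetical Parent actor:

import akka.actor.{ Actor, ActorRef, ActorRefFactory, ActorSystem, Props }
import akka.testkit.TestProbe

// Hypothetical actor whose downstream worker is created through an injected factory
class Parent(makeWorker: ActorRefFactory => ActorRef) extends Actor {
  private val worker = makeWorker(context)
  def receive: Receive = { case job => worker ! job }
}

object FactoryInjectionSketch extends App {
  implicit val system: ActorSystem = ActorSystem("factory-injection-sketch")
  val probe = TestProbe()

  // In production the factory would create a real worker; in tests it returns the probe
  val parent = system.actorOf(Props(new Parent(_ => probe.ref)))

  parent ! "job-1"
  probe.expectMsg("job-1") // everything forwarded downstream is now observable
  system.terminate()
}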
Example 197
Source File: StdoutActorSpec.scala From changestream with MIT License | 5 votes |
package changestream.actors import akka.actor.{ActorRefFactory, Props} import akka.testkit.{TestActorRef, TestProbe} import changestream.actors.PositionSaver.EmitterResult import changestream.helpers.{Config, Emitter} import scala.concurrent.duration._ import scala.language.postfixOps class StdoutActorSpec extends Emitter with Config { val probe = TestProbe() val maker = (_: ActorRefFactory) => probe.ref val actorRef = TestActorRef(Props(classOf[StdoutActor], maker, awsConfig)) "When StdoutActor receives a single valid message" should { "Print to stdout and forward result" in { actorRef ! message val result = probe.expectMsgType[EmitterResult](5000 milliseconds) result.position should be(message.nextPosition) } } }
Example 198
Source File: SqsActorSpec.scala From changestream with MIT License | 5 votes |
package changestream.actors import akka.actor.{ActorRefFactory, Props} import akka.testkit.{TestActorRef, TestProbe} import changestream.actors.PositionSaver.EmitterResult import changestream.actors.SqsActor.BatchResult import changestream.helpers.{Config, Emitter} import scala.concurrent.duration._ import scala.language.postfixOps class SqsActorSpec extends Emitter with Config { val probe = TestProbe() val maker = (_: ActorRefFactory) => probe.ref val actorRef = TestActorRef(Props(classOf[SqsActor], maker, awsConfig)) "When SqsActor receives a single valid message" should { "Add the message to the SQS queue in a batch of one" in { actorRef ! message val result = probe.expectMsgType[EmitterResult](5000 milliseconds) result.position should be(message.nextPosition) result.meta.get shouldBe a[BatchResult] result.meta.get.asInstanceOf[BatchResult].failed shouldBe empty result.meta.get.asInstanceOf[BatchResult].queued should have length 1 } } "When SqsActor receives multiple valid messages in quick succession" should { "Add the messages to the SQS queue in a batch of multiple" in { actorRef ! message actorRef ! message.copy(nextPosition = "FOOBAZ") val result = probe.expectMsgType[EmitterResult](5000 milliseconds) result.position should be("FOOBAZ") result.meta.get shouldBe a[BatchResult] result.meta.get.asInstanceOf[BatchResult].failed shouldBe empty result.meta.get.asInstanceOf[BatchResult].queued should have length 2 } } "When SqsActor receives multiple valid messages in slow succession" should { "Add the messages to the SQS queue in multiple batches of one message" in { actorRef ! message Thread.sleep(500) actorRef ! message.copy(nextPosition = "FOOBAZ") val result1 = probe.expectMsgType[EmitterResult](5000 milliseconds) val result2 = probe.expectMsgType[EmitterResult](5000 milliseconds) result1.position should be(message.nextPosition) result1.meta.get shouldBe a[BatchResult] result1.meta.get.asInstanceOf[BatchResult].failed shouldBe empty result1.meta.get.asInstanceOf[BatchResult].queued should have length 1 result2.position should be("FOOBAZ") result2.meta.get shouldBe a[BatchResult] result2.meta.get.asInstanceOf[BatchResult].failed shouldBe empty result2.meta.get.asInstanceOf[BatchResult].queued should have length 1 } } }
Example 199
Source File: SnsActorSpec.scala From changestream with MIT License | 5 votes |
package changestream.actors

import akka.actor.{ActorRefFactory, Props}
import akka.testkit.{TestActorRef, TestProbe}
import changestream.actors.PositionSaver.EmitterResult
import changestream.helpers.{Config, Emitter}
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._
import scala.language.postfixOps

class SnsActorSpec extends Emitter with Config {
  val probe = TestProbe()
  val maker = (_: ActorRefFactory) => probe.ref
  val actorRef = TestActorRef(Props(classOf[SnsActor], maker, awsConfig))

  val configWithInterpolation = ConfigFactory.
    parseString("aws.sns.topic = \"__integration_tests-{database}-{tableName}\"").
    withFallback(awsConfig)
  val snsWithInterpolation = TestActorRef(Props(classOf[SnsActor], maker, configWithInterpolation))

  "When SnsActor receives a single valid message" should {
    "Immediately publish the message to SNS" in {
      actorRef ! message

      val result = probe.expectMsgType[EmitterResult](5000 milliseconds)
      result.position should be(message.nextPosition)
    }
  }

  "When SnsActor receives a message" should {
    "Should correctly publish the message when the topic contains interpolated database and/or tableName" in {
      snsWithInterpolation ! message

      val result = probe.expectMsgType[EmitterResult](5000 milliseconds)
      result.position should be(message.nextPosition)
    }
  }
}
Example 200
Source File: S3ActorSpec.scala From changestream with MIT License | 5 votes |
package changestream.actors

import akka.actor.{ActorRefFactory, Props}
import akka.testkit.{TestActorRef, TestProbe}
import changestream.actors.PositionSaver.EmitterResult
import changestream.helpers.{Config, Emitter}
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._
import scala.language.postfixOps

class S3ActorSpec extends Emitter with Config {
  val probe = TestProbe()
  val maker = (_: ActorRefFactory) => probe.ref
  val s3Config = ConfigFactory.
    parseString("aws.s3.batch-size = 2, aws.s3.flush-timeout = 1000").
    withFallback(awsConfig)
  val actorRef = TestActorRef(Props(classOf[S3Actor], maker, s3Config))

  "When S3Actor receives a single valid message" should {
    "Add the message to S3 in a batch of one" in {
      actorRef ! message

      val result = probe.expectMsgType[EmitterResult](5000 milliseconds)
      result.position should be(message.nextPosition)
      result.meta.get.asInstanceOf[String] should endWith ("-1.json")
    }
  }

  "When S3Actor receives multiple valid messages in quick succession" should {
    "Add the messages to S3 in a batch of many" in {
      actorRef ! message
      actorRef ! message.copy(nextPosition = "FOOBAZ")

      val result = probe.expectMsgType[EmitterResult](5000 milliseconds)
      result.position should be("FOOBAZ")
      result.meta.get.asInstanceOf[String] should endWith ("-2.json")
    }
  }

  "When S3Actor receives multiple valid messages in slow succession" should {
    "Add the messages to the S3 queue in multiple batches of one message" in {
      actorRef ! message
      Thread.sleep(2000)
      actorRef ! message.copy(nextPosition = "FOOBAZ")

      val result1 = probe.expectMsgType[EmitterResult](5000 milliseconds)
      val result2 = probe.expectMsgType[EmitterResult](5000 milliseconds)

      result1.position should be(message.nextPosition)
      result1.meta.get.asInstanceOf[String] should endWith ("-1.json")

      result2.position should be("FOOBAZ")
      result2.meta.get.asInstanceOf[String] should endWith ("-1.json")
    }
  }

  "When S3Actor receives multiple valid messages that exceed the flush size" should {
    "Add the messages to the S3 queue in multiple batches" in {
      actorRef ! message
      actorRef ! message.copy(nextPosition = "FOOBAZ")
      actorRef ! message.copy(nextPosition = "BIPBOP")

      val result1 = probe.expectMsgType[EmitterResult](5000 milliseconds)
      val result2 = probe.expectMsgType[EmitterResult](5000 milliseconds)

      result1.position should be("FOOBAZ")
      result1.meta.get.asInstanceOf[String] should endWith ("-2.json")

      result2.position should be("BIPBOP")
      result2.meta.get.asInstanceOf[String] should endWith ("-1.json")
    }
  }
}
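Both SnsActorSpec and S3ActorSpec adjust the behaviour under test by parsing a small HOCON string containing only the overridden keys and falling back to the shared awsConfig; everything not mentioned in the override keeps its original value. A minimal standalone sketch of that pattern, with a hypothetical base configuration in place of awsConfig:

import com.typesafe.config.ConfigFactory

object ConfigOverrideSketch extends App {
  // Hypothetical base configuration standing in for the specs' awsConfig.
  val baseConfig = ConfigFactory.parseString(
    """
      |aws.s3.batch-size = 1000
      |aws.s3.flush-timeout = 300000
      |aws.s3.bucket = "example-bucket"
    """.stripMargin)

  // Override only the keys under test; withFallback keeps every other
  // setting from the base configuration.
  val s3Config = ConfigFactory
    .parseString("aws.s3.batch-size = 2, aws.s3.flush-timeout = 1000")
    .withFallback(baseConfig)

  println(s3Config.getInt("aws.s3.batch-size"))  // 2
  println(s3Config.getString("aws.s3.bucket"))   // example-bucket
}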